/* Search an insn for pseudo regs that must be in hard regs and are not.
   Copyright (C) 1987-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* This file contains subroutines used only from the file reload1.c.
   It knows how to scan one insn for operands and values
   that need to be copied into registers to make valid code.
   It also finds other operands and values which are valid
   but for which equivalent values in registers exist and
   ought to be used instead.

   Before processing the first insn of the function, call `init_reload'.
   init_reload actually has to be called earlier anyway.

   To scan an insn, call `find_reloads'.  This does two things:
   1. sets up tables describing which values must be reloaded
   for this insn, and what kind of hard regs they must be reloaded into;
   2. optionally record the locations where those values appear in
   the data, so they can be replaced properly later.
   This is done only if the second arg to `find_reloads' is nonzero.

   The third arg to `find_reloads' specifies the number of levels
   of indirect addressing supported by the machine.  If it is zero,
   indirect addressing is not valid.  If it is one, (MEM (REG n))
   is valid even if (REG n) did not get a hard register; if it is two,
   (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
   hard register, and similarly for higher values.

   Then you must choose the hard regs to reload those pseudo regs into,
   and generate appropriate load insns before this insn and perhaps
   also store insns after this insn.  Set up the array `reload_reg_rtx'
   to contain the REG rtx's for the registers you used.  In some
   cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
   for certain reloads.  Then that tells you which register to use,
   so you do not need to allocate one.  But you still do need to add extra
   instructions to copy the value into and out of that register.

   Finally you must call `subst_reloads' to substitute the reload reg rtx's
   into the locations already recorded.

   NOTE SIDE EFFECTS:

   find_reloads can alter the operands of the instruction it is called on.

   1. Two operands of any sort may be interchanged, if they are in a
   commutative instruction.
   This happens only if find_reloads thinks the instruction will compile
   better that way.

   2. Pseudo-registers that are equivalent to constants are replaced
   with those constants if they are not in hard registers.

   1 happens every time find_reloads is called.
   2 happens only when REPLACE is 1, which is only when
   actually doing the reloads, not when just counting them.

   Using a reload register for several reloads in one insn:

   When an insn has reloads, it is considered as having three parts:
   the input reloads, the insn itself after reloading, and the output reloads.
   Reloads of values used in memory addresses are often needed for only one part.

   When this is so, reload_when_needed records which part needs the reload.
   Two reloads for different parts of the insn can share the same reload
   register.

   When a reload is used for addresses in multiple parts, or when it is
   an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
   a register with any other reload.  */
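/* For orientation, the calling sequence described above is driven from
   reload1.c.  The following is only an illustrative sketch (arguments are
   abbreviated and the bookkeeping between the steps is omitted), not a
   verbatim copy of that driver:

       init_reload ();
       for each insn that needs reloading:
         {
           find_reloads (insn, replace, ind_levels, live_known,
                         reload_reg_p);
           ... choose a hard reg for each entry in rld[] and record it in
               rld[i].reg_rtx, emitting load insns before the insn and
               store insns after it ...
           if (replace)
             subst_reloads (insn);
         }  */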
#define REG_OK_STRICT

/* We do not enable this with CHECKING_P, since it is awfully slow.  */
#undef DEBUG_RELOAD

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "df.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "regs.h"
#include "ira.h"
#include "recog.h"
#include "rtl-error.h"
#include "reload.h"
#include "addresses.h"
#include "params.h"
/* True if X is a constant that can be forced into the constant pool.
   MODE is the mode of the operand, or VOIDmode if not known.  */
#define CONST_POOL_OK_P(MODE, X)		\
  ((MODE) != VOIDmode				\
   && CONSTANT_P (X)				\
   && GET_CODE (X) != HIGH			\
   && !targetm.cannot_force_const_mem (MODE, X))

/* True if C is a non-empty register class that has too few registers
   to be safely used as a reload target class.  */

static inline bool
small_register_class_p (reg_class_t rclass)
{
  return (reg_class_size [(int) rclass] == 1
	  || (reg_class_size [(int) rclass] >= 1
	      && targetm.class_likely_spilled_p (rclass)));
}
/* All reloads of the current insn are recorded here.  See reload.h for
   comments.  */
int n_reloads;
struct reload rld[MAX_RELOADS];

/* All the "earlyclobber" operands of the current insn
   are recorded here.  */
int n_earlyclobbers;
rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];

int reload_n_operands;

/* Replacing reloads.

   If `replace_reloads' is nonzero, then as each reload is recorded
   an entry is made for it in the table `replacements'.
   Then later `subst_reloads' can look through that table and
   perform all the replacements needed.  */

/* Nonzero means record the places to replace.  */
static int replace_reloads;

/* Each replacement is recorded with a structure like this.  */
struct replacement
{
  rtx *where;			/* Location to store in */
  int what;			/* which reload this is for */
  machine_mode mode;		/* mode it must have */
};

static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];

/* Number of replacements currently recorded.  */
static int n_replacements;
/* Used to track what is modified by an operand.  */
struct decomposition
{
  int reg_flag;		/* Nonzero if referencing a register.  */
  int safe;		/* Nonzero if this can't conflict with anything.  */
  rtx base;		/* Base address for MEM.  */
  HOST_WIDE_INT start;	/* Starting offset or register number.  */
  HOST_WIDE_INT end;	/* Ending offset or register number.  */
};
/* Save MEMs needed to copy from one class of registers to another.  One MEM
   is used per mode, but normally only one or two modes are ever used.

   We keep two versions, before and after register elimination.  The one
   after register elimination is recorded separately for each operand.  This
   is done in case the address is not valid, to be sure that we separately
   reload each.  */

static rtx secondary_memlocs[NUM_MACHINE_MODES];
static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
static int secondary_memlocs_elim_used = 0;
/* The instruction we are doing reloads for;
   so we can test whether a register dies in it.  */
static rtx_insn *this_insn;

/* Nonzero if this instruction is a user-specified asm with operands.  */
static int this_insn_is_asm;

/* If hard_regs_live_known is nonzero,
   we can tell which hard regs are currently live,
   at least enough to succeed in choosing dummy reloads.  */
static int hard_regs_live_known;

/* Indexed by hard reg number,
   element is nonnegative if hard reg has been spilled.
   This vector is passed to `find_reloads' as an argument
   and is not changed here.  */
static short *static_reload_reg_p;

/* Set to 1 in subst_reg_equivs if it changes anything.  */
static int subst_reg_equivs_changed;

/* On return from push_reload, holds the reload-number for the OUT
   operand, which can be different from that for the input operand.  */
static int output_reloadnum;
/* Compare two RTX's.  */
#define MATCHES(x, y) \
 (x == y || (x != 0 && (REG_P (x)				\
			? REG_P (y) && REGNO (x) == REGNO (y)	\
			: rtx_equal_p (x, y) && ! side_effects_p (x))))
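/* For example, under MATCHES two REGs denote the same reload value whenever
   their register numbers agree, even if their modes differ, e.g. (reg:SI 5)
   matches (reg:QI 5).  Anything else must be rtx_equal_p and free of side
   effects, so a MEM whose address auto-increments never matches a distinct
   rtx.  (This is just an illustrative restatement of the macro above.)  */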
/* Indicates if two reload purposes are for similar enough things that we
   can merge their reloads.  */
#define MERGABLE_RELOADS(when1, when2, op1, op2) \
  ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER	\
   || ((when1) == (when2) && (op1) == (op2))		\
   || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
   || ((when1) == RELOAD_FOR_OPERAND_ADDRESS		\
       && (when2) == RELOAD_FOR_OPERAND_ADDRESS)	\
   || ((when1) == RELOAD_FOR_OTHER_ADDRESS		\
       && (when2) == RELOAD_FOR_OTHER_ADDRESS))

/* Nonzero if these two reload purposes produce RELOAD_OTHER when merged.  */
#define MERGE_TO_OTHER(when1, when2, op1, op2) \
  ((when1) != (when2)					\
   || ! ((op1) == (op2)					\
	 || (when1) == RELOAD_FOR_INPUT			\
	 || (when1) == RELOAD_FOR_OPERAND_ADDRESS	\
	 || (when1) == RELOAD_FOR_OTHER_ADDRESS))

/* If we are going to reload an address, compute the reload type to
   use.  */
#define ADDR_TYPE(type)				\
  ((type) == RELOAD_FOR_INPUT_ADDRESS		\
   ? RELOAD_FOR_INPADDR_ADDRESS			\
   : ((type) == RELOAD_FOR_OUTPUT_ADDRESS	\
      ? RELOAD_FOR_OUTADDR_ADDRESS		\
      : (type)))
static int push_secondary_reload (int, rtx, int, int, enum reg_class,
				  machine_mode, enum reload_type,
				  enum insn_code *, secondary_reload_info *);
static enum reg_class find_valid_class (machine_mode, machine_mode,
					int, unsigned int);
static void push_replacement (rtx *, int, machine_mode);
static void dup_replacements (rtx *, rtx *);
static void combine_reloads (void);
static int find_reusable_reload (rtx *, rtx, enum reg_class,
				 enum reload_type, int, int);
static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
			      machine_mode, reg_class_t, int, int);
static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
static struct decomposition decompose (rtx);
static int immune_p (rtx, rtx, struct decomposition);
static bool alternative_allows_const_pool_ref (rtx, const char *, int);
static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
				rtx_insn *, int *);
static rtx make_memloc (rtx, int);
static int maybe_memory_address_addr_space_p (machine_mode, rtx,
					      addr_space_t, rtx *);
static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
				 int, enum reload_type, int, rtx_insn *);
static rtx subst_reg_equivs (rtx, rtx_insn *);
static rtx subst_indexed_address (rtx);
static void update_auto_inc_notes (rtx_insn *, int, int);
static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
				   enum rtx_code, enum rtx_code, rtx *,
				   int, enum reload_type, int, rtx_insn *);
static void find_reloads_address_part (rtx, rtx *, enum reg_class,
				       machine_mode, int,
				       enum reload_type, int);
static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
					int, rtx_insn *, int *);
static void copy_replacements_1 (rtx *, rtx *, int);
static int find_inc_amount (rtx, rtx);
static int refers_to_mem_for_reload_p (rtx);
static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
					 rtx, rtx *);
/* Add MEM to reg_equiv_alt_mem_list[REGNO] if it's not present in the
   list yet.  */

static void
push_reg_equiv_alt_mem (int regno, rtx mem)
{
  rtx it;

  for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
    if (rtx_equal_p (XEXP (it, 0), mem))
      return;

  reg_equiv_alt_mem_list (regno)
    = alloc_EXPR_LIST (REG_EQUIV, mem,
		       reg_equiv_alt_mem_list (regno));
}
/* Determine if any secondary reloads are needed for loading (if IN_P is
   nonzero) or storing (if IN_P is zero) X to or from a reload register of
   register class RELOAD_CLASS in mode RELOAD_MODE.  If secondary reloads
   are needed, push them.

   Return the reload number of the secondary reload we made, or -1 if
   we didn't need one.  *PICODE is set to the insn_code to use if we do
   need a secondary reload.  */

static int
push_secondary_reload (int in_p, rtx x, int opnum, int optional,
		       enum reg_class reload_class,
		       machine_mode reload_mode, enum reload_type type,
		       enum insn_code *picode, secondary_reload_info *prev_sri)
{
  enum reg_class rclass = NO_REGS;
  enum reg_class scratch_class;
  machine_mode mode = reload_mode;
  enum insn_code icode = CODE_FOR_nothing;
  enum insn_code t_icode = CODE_FOR_nothing;
  enum reload_type secondary_type;
  int s_reload, t_reload = -1;
  const char *scratch_constraint;
  secondary_reload_info sri;

  if (type == RELOAD_FOR_INPUT_ADDRESS
      || type == RELOAD_FOR_OUTPUT_ADDRESS
      || type == RELOAD_FOR_INPADDR_ADDRESS
      || type == RELOAD_FOR_OUTADDR_ADDRESS)
    secondary_type = type;
  else
    secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;

  *picode = CODE_FOR_nothing;

  /* If X is a paradoxical SUBREG, use the inner value to determine both the
     mode and object being reloaded.  */
  if (paradoxical_subreg_p (x))
    {
      x = SUBREG_REG (x);
      reload_mode = GET_MODE (x);
    }

  /* If X is a pseudo-register that has an equivalent MEM (actually, if it
     is still a pseudo-register by now, it *must* have an equivalent MEM
     but we don't want to assume that), use that equivalent when seeing if
     a secondary reload is needed since whether or not a reload is needed
     might be sensitive to the form of the MEM.  */

  if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
      && reg_equiv_mem (REGNO (x)))
    x = reg_equiv_mem (REGNO (x));

  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = prev_sri;
  rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
						      reload_mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If we don't need any secondary registers, done.  */
  if (rclass == NO_REGS && icode == CODE_FOR_nothing)
    return -1;

  if (rclass != NO_REGS)
    t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
				      reload_mode, type, &t_icode, &sri);

  /* If we will be using an insn, the secondary reload is for a
     scratch register.  */

  if (icode != CODE_FOR_nothing)
    {
      /* If IN_P is nonzero, the reload register will be the output in
	 operand 0.  If IN_P is zero, the reload register will be the input
	 in operand 1.  Outputs should have an initial "=", which we must
	 skip.  */

      /* ??? It would be useful to be able to handle only two, or more than
	 three, operands, but for now we can only handle the case of having
	 exactly three: output, input and one temp/scratch.  */
      gcc_assert (insn_data[(int) icode].n_operands == 3);

      /* ??? We currently have no way to represent a reload that needs
	 an icode to reload from an intermediate tertiary reload register.
	 We should probably have a new field in struct reload to tag a
	 chain of scratch operand reloads onto.  */
      gcc_assert (rclass == NO_REGS);

      scratch_constraint = insn_data[(int) icode].operand[2].constraint;
      gcc_assert (*scratch_constraint == '=');
      scratch_constraint++;
      if (*scratch_constraint == '&')
	scratch_constraint++;
      scratch_class = (reg_class_for_constraint
		       (lookup_constraint (scratch_constraint)));

      rclass = scratch_class;
      mode = insn_data[(int) icode].operand[2].mode;
    }

  /* This case isn't valid, so fail.  Reload is allowed to use the same
     register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
     in the case of a secondary register, we actually need two different
     registers for correct code.  We fail here to prevent the possibility of
     silently generating incorrect code later.

     The convention is that secondary input reloads are valid only if the
     secondary_class is different from class.  If you have such a case, you
     can not use secondary reloads, you must work around the problem some
     other way.

     Allow this when a reload_in/out pattern is being used.  I.e. assume
     that the generated code handles this case.  */

  gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
	      || t_icode != CODE_FOR_nothing);

  /* See if we can reuse an existing secondary reload.  */
  for (s_reload = 0; s_reload < n_reloads; s_reload++)
    if (rld[s_reload].secondary_p
	&& (reg_class_subset_p (rclass, rld[s_reload].rclass)
	    || reg_class_subset_p (rld[s_reload].rclass, rclass))
	&& ((in_p && rld[s_reload].inmode == mode)
	    || (! in_p && rld[s_reload].outmode == mode))
	&& ((in_p && rld[s_reload].secondary_in_reload == t_reload)
	    || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
	&& ((in_p && rld[s_reload].secondary_in_icode == t_icode)
	    || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
			     opnum, rld[s_reload].opnum))
      {
	if (in_p)
	  rld[s_reload].inmode = mode;
	if (! in_p)
	  rld[s_reload].outmode = mode;

	if (reg_class_subset_p (rclass, rld[s_reload].rclass))
	  rld[s_reload].rclass = rclass;

	rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
	rld[s_reload].optional &= optional;
	rld[s_reload].secondary_p = 1;
	if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
			    opnum, rld[s_reload].opnum))
	  rld[s_reload].when_needed = RELOAD_OTHER;

	break;
      }

  if (s_reload == n_reloads)
    {
      /* If we need a memory location to copy between the two reload regs,
	 set it up now.  Note that we do the input case before making
	 the reload and the output case after.  This is due to the
	 way reloads are output.  */

      if (in_p && icode == CODE_FOR_nothing
	  && targetm.secondary_memory_needed (mode, rclass, reload_class))
	{
	  get_secondary_mem (x, reload_mode, opnum, type);

	  /* We may have just added new reloads.  Make sure we add
	     the new reload at the end.  */
	  s_reload = n_reloads;
	}

      /* We need to make a new secondary reload for this register class.  */
      rld[s_reload].in = rld[s_reload].out = 0;
      rld[s_reload].rclass = rclass;

      rld[s_reload].inmode = in_p ? mode : VOIDmode;
      rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
      rld[s_reload].reg_rtx = 0;
      rld[s_reload].optional = optional;
      rld[s_reload].inc = 0;
      /* Maybe we could combine these, but it seems too tricky.  */
      rld[s_reload].nocombine = 1;
      rld[s_reload].in_reg = 0;
      rld[s_reload].out_reg = 0;
      rld[s_reload].opnum = opnum;
      rld[s_reload].when_needed = secondary_type;
      rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
      rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
      rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_out_icode
	= ! in_p ? t_icode : CODE_FOR_nothing;
      rld[s_reload].secondary_p = 1;

      n_reloads++;

      if (! in_p && icode == CODE_FOR_nothing
	  && targetm.secondary_memory_needed (mode, reload_class, rclass))
	get_secondary_mem (x, mode, opnum, type);
    }

  *picode = icode;
  return s_reload;
}
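/* As an illustration of the above (target details vary, so this is only a
   sketch of the mechanism, not a description of any particular port):
   suppose targetm.secondary_reload reports that loading X into
   RELOAD_CLASS needs an intermediate register of class RCLASS.  Then
   push_secondary_reload recurses once to push a reload for that
   intermediate class; if the hook instead supplies a reload_in/reload_out
   pattern in sri.icode, the secondary reload becomes a scratch-register
   reload whose class and mode are taken from operand 2 of that pattern.  */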
/* If a secondary reload is needed, return its class.  If both an intermediate
   register and a scratch register are needed, we return the class of the
   intermediate register.  */
reg_class_t
secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
			rtx x)
{
  enum insn_code icode;
  secondary_reload_info sri;

  sri.icode = CODE_FOR_nothing;
  sri.prev_sri = NULL;
  rclass
    = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
  icode = (enum insn_code) sri.icode;

  /* If there are no secondary reloads at all, we return NO_REGS.
     If an intermediate register is needed, we return its class.  */
  if (icode == CODE_FOR_nothing || rclass != NO_REGS)
    return rclass;

  /* No intermediate register is needed, but we have a special reload
     pattern, which we assume for now needs a scratch register.  */
  return scratch_reload_class (icode);
}
/* ICODE is the insn_code of a reload pattern.  Check that it has exactly
   three operands, verify that operand 2 is an output operand, and return
   its register class.
   ??? We'd like to be able to handle any pattern with at least 2 operands,
   for zero or more scratch registers, but that needs more infrastructure.  */
enum reg_class
scratch_reload_class (enum insn_code icode)
{
  const char *scratch_constraint;
  enum reg_class rclass;

  gcc_assert (insn_data[(int) icode].n_operands == 3);
  scratch_constraint = insn_data[(int) icode].operand[2].constraint;
  gcc_assert (*scratch_constraint == '=');
  scratch_constraint++;
  if (*scratch_constraint == '&')
    scratch_constraint++;
  rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
  gcc_assert (rclass != NO_REGS);
  return rclass;
}
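/* As an illustration (a hypothetical pattern, not taken from any particular
   target), an insn_code acceptable to scratch_reload_class could come from a
   reload pattern of roughly this shape:

       (define_expand "reload_insi"
	 [(set (match_operand:SI 0 "register_operand" "=r")
	       (match_operand:SI 1 "memory_operand" "m"))
	  (clobber (match_operand:SI 2 "register_operand" "=&r"))]
	 "..."
	 "...")

   Operand 2 is the scratch; its constraint starts with '=' (optionally
   followed by '&'), which is exactly what the assertions above check before
   the remaining constraint is mapped to a register class.  */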
/* Return a memory location that will be used to copy X in mode MODE.
   If we haven't already made a location for this mode in this insn,
   call find_reloads_address on the location being returned.  */

rtx
get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
		   int opnum, enum reload_type type)
{
  rtx loc;
  int mem_valid;

  /* By default, if MODE is narrower than a word, widen it to a word.
     This is required because most machines that require these memory
     locations do not support short load and stores from all registers
     (e.g., FP registers).  */

  mode = targetm.secondary_memory_needed_mode (mode);

  /* If we already have made a MEM for this operand in MODE, return it.  */
  if (secondary_memlocs_elim[(int) mode][opnum] != 0)
    return secondary_memlocs_elim[(int) mode][opnum];

  /* If this is the first time we've tried to get a MEM for this mode,
     allocate a new one.  `something_changed' in reload will get set
     by noticing that the frame size has changed.  */

  if (secondary_memlocs[(int) mode] == 0)
    {
#ifdef SECONDARY_MEMORY_NEEDED_RTX
      secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
#else
      secondary_memlocs[(int) mode]
	= assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
#endif
    }

  /* Get a version of the address doing any eliminations needed.  If that
     didn't give us a new MEM, make a new one if it isn't valid.  */

  loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
  mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
						  MEM_ADDR_SPACE (loc));

  if (! mem_valid && loc == secondary_memlocs[(int) mode])
    loc = copy_rtx (loc);

  /* The only time the call below will do anything is if the stack
     offset is too large.  In that case IND_LEVELS doesn't matter, so we
     can just pass a zero.  Adjust the type to be the address of the
     corresponding object.  If the address was valid, save the eliminated
     address.  If it wasn't valid, we need to make a reload each time, so
     don't save it.  */

  if (! mem_valid)
    {
      type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
	      : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
	      : RELOAD_OTHER);

      find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
			    opnum, type, 0, 0);
    }

  secondary_memlocs_elim[(int) mode][opnum] = loc;
  if (secondary_memlocs_elim_used <= (int) mode)
    secondary_memlocs_elim_used = (int) mode + 1;
  return loc;
}
/* Clear any secondary memory locations we've made.  */

void
clear_secondary_mem (void)
{
  memset (secondary_memlocs, 0, sizeof secondary_memlocs);
}
/* Find the largest class which has at least one register valid in
   mode INNER, and which for every such register, that register number
   plus N is also valid in OUTER (if in range) and is cheap to move
   into REGNO.  Such a class must exist.  */

static enum reg_class
find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
		  machine_mode inner ATTRIBUTE_UNUSED, int n,
		  unsigned int dest_regno ATTRIBUTE_UNUSED)
{
  int best_cost = -1;
  int rclass;
  int regno;
  enum reg_class best_class = NO_REGS;
  enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
  unsigned int best_size = 0;
  int cost;

  for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
    {
      int bad = 0;
      int good = 0;
      for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
	if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
	  {
	    if (targetm.hard_regno_mode_ok (regno, inner))
	      {
		good = 1;
		if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
		    && !targetm.hard_regno_mode_ok (regno + n, outer))
		  bad = 1;
	      }
	  }

      if (bad || !good)
	continue;
      cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);

      if ((reg_class_size[rclass] > best_size
	   && (best_cost < 0 || best_cost >= cost))
	  || best_cost > cost)
	{
	  best_class = (enum reg_class) rclass;
	  best_size = reg_class_size[rclass];
	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
					  dest_class);
	}
    }

  gcc_assert (best_size != 0);

  return best_class;
}
/* We are trying to reload a subreg of something that is not a register.
   Find the largest class which contains only registers valid in
   mode MODE.  OUTER is the mode of the subreg, DEST_CLASS the class in
   which we would eventually like to obtain the object.  */

static enum reg_class
find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
		    machine_mode mode ATTRIBUTE_UNUSED,
		    enum reg_class dest_class ATTRIBUTE_UNUSED)
{
  int best_cost = -1;
  int rclass;
  int regno;
  enum reg_class best_class = NO_REGS;
  unsigned int best_size = 0;
  int cost;

  for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
    {
      unsigned int computed_rclass_size = 0;

      for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
	{
	  if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
	      && targetm.hard_regno_mode_ok (regno, mode))
	    computed_rclass_size++;
	}

      cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);

      if ((computed_rclass_size > best_size
	   && (best_cost < 0 || best_cost >= cost))
	  || best_cost > cost)
	{
	  best_class = (enum reg_class) rclass;
	  best_size = computed_rclass_size;
	  best_cost = register_move_cost (outer, (enum reg_class) rclass,
					  dest_class);
	}
    }

  gcc_assert (best_size != 0);

#ifdef LIMIT_RELOAD_CLASS
  best_class = LIMIT_RELOAD_CLASS (mode, best_class);
#endif
  return best_class;
}
/* Return the number of a previously made reload that can be combined with
   a new one, or n_reloads if none of the existing reloads can be used.
   OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
   push_reload, they determine the kind of the new reload that we try to
   combine.  P_IN points to the corresponding value of IN, which can be
   modified by this function.
   DONT_SHARE is nonzero if we can't share any input-only reload for IN.  */

static int
find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
		      enum reload_type type, int opnum, int dont_share)
{
  rtx in = *p_in;
  int i;
  /* We can't merge two reloads if the output of either one is
     earlyclobbered.  */

  if (earlyclobber_operand_p (out))
    return n_reloads;

  /* We can use an existing reload if the class is right
     and at least one of IN and OUT is a match
     and the other is at worst neutral.
     (A zero compared against anything is neutral.)

     For targets with small register classes, don't use existing reloads
     unless they are for the same thing since that can cause us to need
     more reload registers than we otherwise would.  */

  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
	     && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
	    || (out != 0 && MATCHES (rld[i].out, out)
		&& (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
      return i;

  /* Reloading a plain reg for input can match a reload to postincrement
     that reg, since the postincrement's value is the right value.
     Likewise, it can match a preincrement reload, since we regard
     the preincrementation as happening before any ref in this insn
     to that register.  */
  for (i = 0; i < n_reloads; i++)
    if ((reg_class_subset_p (rclass, rld[i].rclass)
	 || reg_class_subset_p (rld[i].rclass, rclass))
	/* If the existing reload has a register, it must fit our
	   class.  */
	&& (rld[i].reg_rtx == 0
	    || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
				  true_regnum (rld[i].reg_rtx)))
	&& out == 0 && rld[i].out == 0 && rld[i].in != 0
	&& ((REG_P (in)
	     && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
	     && MATCHES (XEXP (rld[i].in, 0), in))
	    || (REG_P (rld[i].in)
		&& GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
		&& MATCHES (XEXP (in, 0), rld[i].in)))
	&& (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
	&& (small_register_class_p (rclass)
	    || targetm.small_register_classes_for_mode_p (VOIDmode))
	&& MERGABLE_RELOADS (type, rld[i].when_needed,
			     opnum, rld[i].opnum))
      {
	/* Make sure reload_in ultimately has the increment,
	   not the plain register.  */
	if (REG_P (in))
	  *p_in = rld[i].in;
	return i;
      }
  return n_reloads;
}
/* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
   expression.  MODE is the mode that X will be used in.  OUTPUT is true if
   the function is invoked for the output part of an enclosing reload.  */

static bool
reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
{
  rtx inner;

  /* Only SUBREGs are problematical.  */
  if (GET_CODE (x) != SUBREG)
    return false;

  inner = SUBREG_REG (x);

  /* If INNER is a constant or PLUS, then INNER will need reloading.  */
  if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
    return true;

  /* If INNER is not a hard register, then INNER will not need reloading.  */
  if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
    return false;

  /* If INNER is not ok for MODE, then INNER will need reloading.  */
  if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
    return true;

  /* If this is for an output, and the outer part is a word or smaller,
     INNER is larger than a word and the number of registers in INNER is
     not the same as the number of words in INNER, then INNER will need
     reloading (with an in-out reload).  */
  return (output
	  && GET_MODE_SIZE (mode) <= UNITS_PER_WORD
	  && GET_MODE_SIZE (GET_MODE (inner)) > UNITS_PER_WORD
	  && ((GET_MODE_SIZE (GET_MODE (inner)) / UNITS_PER_WORD)
	      != REG_NREGS (inner)));
}
/* Return nonzero if IN can be reloaded into REGNO with mode MODE without
   requiring an extra reload register.  The caller has already found that
   IN contains some reference to REGNO, so check that we can produce the
   new value in a single step.  E.g. if we have
   (set (reg r13) (plus (reg r13) (const int 1))), and there is an
   instruction that adds one to a register, this should succeed.
   However, if we have something like
   (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
   needs to be loaded into a register first, we need a separate reload
   register.
   Such PLUS reloads are generated by find_reload_address_part.
   The out-of-range PLUS expressions are usually introduced in the instruction
   patterns by register elimination and substituting pseudos without a home
   by their function-invariant equivalences.  */
static int
can_reload_into (rtx in, int regno, machine_mode mode)
{
  rtx dst;
  rtx_insn *test_insn;
  int r = 0;
  struct recog_data_d save_recog_data;

  /* For matching constraints, we often get notional input reloads where
     we want to use the original register as the reload register.  I.e.
     technically this is a non-optional input-output reload, but IN is
     already a valid register, and has been chosen as the reload register.
     Speed this up, since it trivially works.  */
  if (REG_P (in))
    return 1;

  /* To test MEMs properly, we'd have to take into account all the reloads
     that are already scheduled, which can become quite complicated.
     And since we've already handled address reloads for this MEM, it
     should always succeed anyway.  */
  if (MEM_P (in))
    return 1;

  /* If we can make a simple SET insn that does the job, everything should
     be fine.  */
  dst = gen_rtx_REG (mode, regno);
  test_insn = make_insn_raw (gen_rtx_SET (dst, in));
  save_recog_data = recog_data;
  if (recog_memoized (test_insn) >= 0)
    {
      extract_insn (test_insn);
      r = constrain_operands (1, get_enabled_alternatives (test_insn));
    }
  recog_data = save_recog_data;
  return r;
}
/* Record one reload that needs to be performed.
   IN is an rtx saying where the data are to be found before this instruction.
   OUT says where they must be stored after the instruction.
   (IN is zero for data not read, and OUT is zero for data not written.)
   INLOC and OUTLOC point to the places in the instructions where
   IN and OUT were found.
   If IN and OUT are both nonzero, it means the same register must be used
   to reload both IN and OUT.

   RCLASS is a register class required for the reloaded data.
   INMODE is the machine mode that the instruction requires
   for the reg that replaces IN and OUTMODE is likewise for OUT.

   If IN is zero, then OUT's location and mode should be passed as
   INLOC and INMODE.

   STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.

   OPTIONAL nonzero means this reload does not need to be performed:
   it can be discarded if that is more convenient.

   OPNUM and TYPE say what the purpose of this reload is.

   The return value is the reload-number for this reload.

   If both IN and OUT are nonzero, in some rare cases we might
   want to make two separate reloads.  (Actually we never do this now.)
   Therefore, the reload-number for OUT is stored in
   output_reloadnum when we return; the return value applies to IN.
   Usually (presently always), when IN and OUT are nonzero,
   the two reload-numbers are equal, but the caller should be careful to
   distinguish them.  */
int
push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
	     enum reg_class rclass, machine_mode inmode,
	     machine_mode outmode, int strict_low, int optional,
	     int opnum, enum reload_type type)
{
  int i;
  int dont_share = 0;
  int dont_remove_subreg = 0;
#ifdef LIMIT_RELOAD_CLASS
  rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
#endif
  int secondary_in_reload = -1, secondary_out_reload = -1;
  enum insn_code secondary_in_icode = CODE_FOR_nothing;
  enum insn_code secondary_out_icode = CODE_FOR_nothing;
  enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
  subreg_in_class = NO_REGS;

  /* INMODE and/or OUTMODE could be VOIDmode if no mode
     has been specified for the operand.  In that case,
     use the operand's mode as the mode to reload.  */
  if (inmode == VOIDmode && in != 0)
    inmode = GET_MODE (in);
  if (outmode == VOIDmode && out != 0)
    outmode = GET_MODE (out);

  /* If find_reloads and friends have failed up to now to replace a pseudo
     with its reg_equiv_constant, something went wrong beforehand.
     Note that it can't simply be done here if we missed it earlier
     since the constant might need to be pushed into the literal pool
     and the resulting memref would probably need further
     reloading.  */
  if (in != 0 && REG_P (in))
    {
      int regno = REGNO (in);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* reg_equiv_constant only contains constants which are obviously
     not appropriate as destination.  So if we would need to replace
     the destination pseudo with a constant we are in real
     trouble.  */
  if (out != 0 && REG_P (out))
    {
      int regno = REGNO (out);

      gcc_assert (regno < FIRST_PSEUDO_REGISTER
		  || reg_renumber[regno] >= 0
		  || reg_equiv_constant (regno) == NULL_RTX);
    }

  /* If we have a read-write operand with an address side-effect,
     change either IN or OUT so the side-effect happens only once.  */
  if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
    switch (GET_CODE (XEXP (in, 0)))
      {
      case POST_INC: case POST_DEC: case POST_MODIFY:
	in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
	break;

      case PRE_INC: case PRE_DEC: case PRE_MODIFY:
	out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
	break;

      default:
	break;
      }
  /* If we are reloading a (SUBREG constant ...), really reload just the
     inside expression in its own mode.  Similarly for (SUBREG (PLUS ...)).
     If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
     a pseudo and hence will become a MEM) with M1 wider than M2 and the
     register is a pseudo, also reload the inside expression.
     For machines that extend byte loads, do this for any SUBREG of a pseudo
     where both M1 and M2 are a word or smaller, M1 is wider than M2, and
     M2 is an integral mode that gets extended when loaded.
     Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.
     (However, if OUT is nonzero, we need to reload the reg *and*
     the subreg, so do nothing here, and let following statement handle it.)

     Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
     we can't handle it here because CONST_INT does not indicate a mode.

     Similarly, we must reload the inside expression if we have a
     STRICT_LOW_PART (presumably, in == out in this case).

     Also reload the inner expression if it does not require a secondary
     reload but the SUBREG does.

     Finally, reload the inner expression if it is a register that is in
     the class whose registers cannot be referenced in a different size
     and M1 is not the same size as M2.  If subreg_lowpart_p is false, we
     cannot reload just the inside since we might end up with the wrong
     register class.  But if it is inside a STRICT_LOW_PART, we have
     no choice, so we hope we do get the right register class there.  */

  scalar_int_mode inner_mode;
  if (in != 0 && GET_CODE (in) == SUBREG
      && (subreg_lowpart_p (in) || strict_low)
      && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
					inmode, rclass)
      && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
      && (CONSTANT_P (SUBREG_REG (in))
	  || GET_CODE (SUBREG_REG (in)) == PLUS
	  || strict_low
	  || (((REG_P (SUBREG_REG (in))
		&& REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (in)))
	      && (paradoxical_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
		  || (GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		      && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG (in)),
						 &inner_mode)
		      && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
		      && paradoxical_subreg_p (inmode, inner_mode)
		      && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
		  || (WORD_REGISTER_OPERATIONS
		      && partial_subreg_p (inmode, GET_MODE (SUBREG_REG (in)))
		      && ((GET_MODE_SIZE (inmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))) - 1)
			   / UNITS_PER_WORD)))))
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      /* The case where out is nonzero
		 is handled differently in the following statement.  */
	      && (out == 0 || subreg_lowpart_p (in))
	      && ((GET_MODE_SIZE (inmode) <= UNITS_PER_WORD
		   && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
		       > UNITS_PER_WORD)
		   && ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
			/ UNITS_PER_WORD)
		       != REG_NREGS (SUBREG_REG (in))))
		  || !targetm.hard_regno_mode_ok (subreg_regno (in), inmode)))
	  || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
	      && (secondary_reload_class (1, rclass, GET_MODE (SUBREG_REG (in)),
					  SUBREG_REG (in))
		  == NO_REGS))
	  || (REG_P (SUBREG_REG (in))
	      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
					 GET_MODE (SUBREG_REG (in)), inmode))))
    {
#ifdef LIMIT_RELOAD_CLASS
      in_subreg_loc = inloc;
#endif
      inloc = &SUBREG_REG (in);
      in = *inloc;

      if (!WORD_REGISTER_OPERATIONS
	  && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
	  && MEM_P (in))
	/* This is supposed to happen only for paradoxical subregs made by
	   combine.c.  (SUBREG (MEM)) isn't supposed to occur other ways.  */
	gcc_assert (GET_MODE_SIZE (GET_MODE (in)) <= GET_MODE_SIZE (inmode));

      inmode = GET_MODE (in);
    }
  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where M1 is not valid for R if it was not handled by the code above.

     Similar issue for (SUBREG constant ...) if it was not handled by the
     code above.  This can happen if SUBREG_BYTE != 0.

     However, we must reload the inner reg *as well as* the subreg in
     that case.  */

  if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
    {
      if (REG_P (SUBREG_REG (in)))
	subreg_in_class
	  = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
			      subreg_regno_offset (REGNO (SUBREG_REG (in)),
						   GET_MODE (SUBREG_REG (in)),
						   SUBREG_BYTE (in),
						   GET_MODE (in)),
			      REGNO (SUBREG_REG (in)));
      else if (CONSTANT_P (SUBREG_REG (in))
	       || GET_CODE (SUBREG_REG (in)) == PLUS)
	subreg_in_class = find_valid_class_1 (inmode,
					      GET_MODE (SUBREG_REG (in)),
					      rclass);

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for input reloads of type RELOAD_OTHER in the same
	 order as the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output before the outer reload.  */
      push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
		   subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
      dont_remove_subreg = 1;
    }
  /* Similarly for paradoxical and problematical SUBREGs on the output.
     Note that there is no reason we need worry about the previous value
     of SUBREG_REG (out); even if wider than out, storing in a subreg is
     entitled to clobber it all (except in the case of a word mode subreg
     or of a STRICT_LOW_PART, in that latter case the constraint should
     label it input-output.)  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && (subreg_lowpart_p (out) || strict_low)
      && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
					outmode, rclass)
      && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
      && (CONSTANT_P (SUBREG_REG (out))
	  || strict_low
	  || (((REG_P (SUBREG_REG (out))
		&& REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
	       || MEM_P (SUBREG_REG (out)))
	      && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
		  || (WORD_REGISTER_OPERATIONS
		      && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
		      && ((GET_MODE_SIZE (outmode) - 1) / UNITS_PER_WORD ==
			  ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1)
			   / UNITS_PER_WORD)))))
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      /* The case of a word mode subreg
		 is handled differently in the following statement.  */
	      && ! (GET_MODE_SIZE (outmode) <= UNITS_PER_WORD
		    && (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out)))
			> UNITS_PER_WORD))
	      && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
	  || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
	      && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
					  SUBREG_REG (out))
		  == NO_REGS))
	  || (REG_P (SUBREG_REG (out))
	      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
	      && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
					 GET_MODE (SUBREG_REG (out)),
					 outmode))))
    {
#ifdef LIMIT_RELOAD_CLASS
      out_subreg_loc = outloc;
#endif
      outloc = &SUBREG_REG (out);
      out = *outloc;
      gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
		  || GET_MODE_SIZE (GET_MODE (out))
		     <= GET_MODE_SIZE (outmode));
      outmode = GET_MODE (out);
    }
  /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
     where either M1 is not valid for R or M2 is wider than a word but we
     only need one register to store an M2-sized quantity in R.

     However, we must reload the inner reg *as well as* the subreg in
     that case and the inner reg is an in-out reload.  */

  if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
    {
      enum reg_class in_out_class
	= find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
			    subreg_regno_offset (REGNO (SUBREG_REG (out)),
						 GET_MODE (SUBREG_REG (out)),
						 SUBREG_BYTE (out),
						 GET_MODE (out)),
			    REGNO (SUBREG_REG (out)));

      /* This relies on the fact that emit_reload_insns outputs the
	 instructions for output reloads of type RELOAD_OTHER in reverse
	 order of the reloads.  Thus if the outer reload is also of type
	 RELOAD_OTHER, we are guaranteed that this inner reload will be
	 output after the outer reload.  */
      push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
		   &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
		   0, 0, opnum, RELOAD_OTHER);
      dont_remove_subreg = 1;
    }
  /* If IN appears in OUT, we can't share any input-only reload for IN.  */
  if (in != 0 && out != 0 && MEM_P (out)
      && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
      && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
    dont_share = 1;

  /* If IN is a SUBREG of a hard register, make a new REG.  This
     simplifies some of the cases below.  */

  if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
      && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));

  /* Similarly for OUT.  */
  if (out != 0 && GET_CODE (out) == SUBREG
      && REG_P (SUBREG_REG (out))
      && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
      && ! dont_remove_subreg)
    out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));

  /* Narrow down the class of register wanted if that is
     desirable on this machine for efficiency.  */
  {
    reg_class_t preferred_class = rclass;

    if (in != 0)
      preferred_class = targetm.preferred_reload_class (in, rclass);

    /* Output reloads may need analogous treatment, different in detail.  */
    if (out != 0)
      preferred_class
	= targetm.preferred_output_reload_class (out, preferred_class);

    /* Discard what the target said if we cannot do it.  */
    if (preferred_class != NO_REGS
	|| (optional && type == RELOAD_FOR_OUTPUT))
      rclass = (enum reg_class) preferred_class;
  }

  /* Make sure we use a class that can handle the actual pseudo
     inside any subreg.  For example, on the 386, QImode regs
     can appear within SImode subregs.  Although GENERAL_REGS
     can handle SImode, QImode needs a smaller class.  */
#ifdef LIMIT_RELOAD_CLASS
  if (in_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
  else if (in != 0 && GET_CODE (in) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);

  if (out_subreg_loc)
    rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
  if (out != 0 && GET_CODE (out) == SUBREG)
    rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
#endif
  /* Verify that this class is at least possible for the mode that
     is specified.  */
  if (this_insn_is_asm)
    {
      machine_mode mode;
      if (paradoxical_subreg_p (inmode, outmode))
	mode = inmode;
      else
	mode = outmode;
      if (mode == VOIDmode)
	{
	  error_for_asm (this_insn, "cannot reload integer constant "
			 "operand in %<asm%>");
	  mode = word_mode;
	  if (in != 0)
	    inmode = word_mode;
	  if (out != 0)
	    outmode = word_mode;
	}
      for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
	if (targetm.hard_regno_mode_ok (i, mode)
	    && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
	  break;
      if (i == FIRST_PSEUDO_REGISTER)
	{
	  error_for_asm (this_insn, "impossible register constraint "
			 "in %<asm%>");
	  /* Avoid further trouble with this insn.  */
	  PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
	  /* We used to continue here setting class to ALL_REGS, but it triggers
	     sanity check on i386 for:
	     void foo(long double d)
	     {
	       asm("" :: "a" (d));
	     }
	     Returning zero here ought to be safe as we take care in
	     find_reloads to not process the reloads when instruction was
	     replaced by USE.  */

	  return 0;
	}
    }
  /* Optional output reloads are always OK even if we have no register class,
     since the function of these reloads is only to have spill_reg_store etc.
     set, so that the storing insn can be deleted later.  */
  gcc_assert (rclass != NO_REGS
	      || (optional != 0 && type == RELOAD_FOR_OUTPUT));

  i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);

  if (i == n_reloads)
    {
      /* See if we need a secondary reload register to move between CLASS
	 and IN or CLASS and OUT.  Get the icode and push any required reloads
	 needed for each of them if so.  */

      if (in != 0)
	secondary_in_reload
	  = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
				   &secondary_in_icode, NULL);
      if (out != 0 && GET_CODE (out) != SCRATCH)
	secondary_out_reload
	  = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
				   type, &secondary_out_icode, NULL);

      /* We found no existing reload suitable for re-use.
	 So add an additional reload.  */

      if (subreg_in_class == NO_REGS
	  && in != 0
	  && (REG_P (in)
	      || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
	  && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
	subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
      /* If a memory location is needed for the copy, make one.  */
      if (subreg_in_class != NO_REGS
	  && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
	get_secondary_mem (in, inmode, opnum, type);

      i = n_reloads;
      rld[i].in = in;
      rld[i].out = out;
      rld[i].rclass = rclass;
      rld[i].inmode = inmode;
      rld[i].outmode = outmode;
      rld[i].reg_rtx = 0;
      rld[i].optional = optional;
      rld[i].inc = 0;
      rld[i].nocombine = 0;
      rld[i].in_reg = inloc ? *inloc : 0;
      rld[i].out_reg = outloc ? *outloc : 0;
      rld[i].opnum = opnum;
      rld[i].when_needed = type;
      rld[i].secondary_in_reload = secondary_in_reload;
      rld[i].secondary_out_reload = secondary_out_reload;
      rld[i].secondary_in_icode = secondary_in_icode;
      rld[i].secondary_out_icode = secondary_out_icode;
      rld[i].secondary_p = 0;

      n_reloads++;

      if (out != 0
	  && (REG_P (out)
	      || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
	  && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
	  && (targetm.secondary_memory_needed
	      (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
	get_secondary_mem (out, outmode, opnum, type);
    }
  else
    {
      /* We are reusing an existing reload,
	 but we may have additional information for it.
	 For example, we may now have both IN and OUT
	 while the old one may have just one of them.  */

      /* The modes can be different.  If they are, we want to reload in
	 the larger mode, so that the value is valid for both modes.  */
      if (inmode != VOIDmode
	  && partial_subreg_p (rld[i].inmode, inmode))
	rld[i].inmode = inmode;
      if (outmode != VOIDmode
	  && partial_subreg_p (rld[i].outmode, outmode))
	rld[i].outmode = outmode;
      if (in != 0)
	{
	  rtx in_reg = inloc ? *inloc : 0;
	  /* If we merge reloads for two distinct rtl expressions that
	     are identical in content, there might be duplicate address
	     reloads.  Remove the extra set now, so that if we later find
	     that we can inherit this reload, we can get rid of the
	     address reloads altogether.

	     Do not do this if both reloads are optional since the result
	     would be an optional reload which could potentially leave
	     unresolved address replacements.

	     It is not sufficient to call transfer_replacements since
	     choose_reload_regs will remove the replacements for address
	     reloads of inherited reloads which results in the same
	     problem.  */
	  if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
	      && ! (rld[i].optional && optional))
	    {
	      /* We must keep the address reload with the lower operand
		 number alive.  */
	      if (opnum > rld[i].opnum)
		{
		  remove_address_replacements (in);
		  in = rld[i].in;
		  in_reg = rld[i].in_reg;
		}
	      else
		remove_address_replacements (rld[i].in);
	    }
	  /* When emitting reloads we don't necessarily look at the in-
	     and outmode, but also directly at the operands (in and out).
	     So we can't simply overwrite them with whatever we have found
	     for this (to-be-merged) reload, we have to "merge" that too.
	     Reusing another reload already verified that we deal with the
	     same operands, just possibly in different modes.  So we
	     overwrite the operands only when the new mode is larger.
	     See also PR33613.  */
	  if (!rld[i].in
	      || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
	    rld[i].in = in;
	  if (!rld[i].in_reg
	      || (in_reg
		  && partial_subreg_p (GET_MODE (rld[i].in_reg),
				       GET_MODE (in_reg))))
	    rld[i].in_reg = in_reg;
	}
      if (out != 0)
	{
	  if (!rld[i].out
	      || (out
		  && partial_subreg_p (GET_MODE (rld[i].out),
				       GET_MODE (out))))
	    rld[i].out = out;
	  if (outloc
	      && (!rld[i].out_reg
		  || partial_subreg_p (GET_MODE (rld[i].out_reg),
				       GET_MODE (*outloc))))
	    rld[i].out_reg = *outloc;
	}
      if (reg_class_subset_p (rclass, rld[i].rclass))
	rld[i].rclass = rclass;
      rld[i].optional &= optional;
      if (MERGE_TO_OTHER (type, rld[i].when_needed,
			  opnum, rld[i].opnum))
	rld[i].when_needed = RELOAD_OTHER;
      rld[i].opnum = MIN (rld[i].opnum, opnum);
    }
  /* If the ostensible rtx being reloaded differs from the rtx found
     in the location to substitute, this reload is not safe to combine
     because we cannot reliably tell whether it appears in the insn.  */

  if (in != 0 && in != *inloc)
    rld[i].nocombine = 1;

#if 0
  /* This was replaced by changes in find_reloads_address_1 and the new
     function inc_for_reload, which go with a new meaning of reload_inc.  */

  /* If this is an IN/OUT reload in an insn that sets the CC,
     it must be for an autoincrement.  It doesn't work to store
     the incremented value after the insn because that would clobber the CC.
     So we must do the increment of the value reloaded from,
     increment it, store it back, then decrement again.  */
  if (out != 0 && sets_cc0_p (PATTERN (this_insn)))
    {
      out = 0;
      rld[i].out = 0;
      rld[i].inc = find_inc_amount (PATTERN (this_insn), in);
      /* If we did not find a nonzero amount-to-increment-by,
	 that contradicts the belief that IN is being incremented
	 in an address in this insn.  */
      gcc_assert (rld[i].inc != 0);
    }
#endif
  /* If we will replace IN and OUT with the reload-reg,
     record where they are located so that substitution need
     not do a tree walk.  */

  if (replace_reloads)
    {
      if (inloc != 0)
	{
	  struct replacement *r = &replacements[n_replacements++];
	  r->what = i;
	  r->where = inloc;
	  r->mode = inmode;
	}
      if (outloc != 0 && outloc != inloc)
	{
	  struct replacement *r = &replacements[n_replacements++];
	  r->what = i;
	  r->where = outloc;
	  r->mode = outmode;
	}
    }

  /* If this reload is just being introduced and it has both
     an incoming quantity and an outgoing quantity that are
     supposed to be made to match, see if either one of the two
     can serve as the place to reload into.

     If one of them is acceptable, set rld[i].reg_rtx
     to that one.  */

  if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
    {
      rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
					  inmode, outmode,
					  rld[i].rclass, i,
					  earlyclobber_operand_p (out));

      /* If the outgoing register already contains the same value
	 as the incoming one, we can dispense with loading it.
	 The easiest way to tell the caller that is to give a phony
	 value for the incoming operand (same as outgoing one).  */
      if (rld[i].reg_rtx == out
	  && (REG_P (in) || CONSTANT_P (in))
	  && 0 != find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
				  static_reload_reg_p, i, inmode))
	rld[i].in = out;
    }
1540 /* If this is an input reload and the operand contains a register that
1541 dies in this insn and is used nowhere else, see if it is the right class
1542 to be used for this reload. Use it if so. (This occurs most commonly
1543 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1544 this if it is also an output reload that mentions the register unless
1545 the output is a SUBREG that clobbers an entire register.
1547 Note that the operand might be one of the spill regs, if it is a
1548 pseudo reg and we are in a block where spilling has not taken place.
1549 But if there is no spilling in this block, that is OK.
1550 An explicitly used hard reg cannot be a spill reg. */
1552 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1554 rtx note;
1555 int regno;
1556 machine_mode rel_mode = inmode;
1558 if (out && partial_subreg_p (rel_mode, outmode))
1559 rel_mode = outmode;
1561 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1562 if (REG_NOTE_KIND (note) == REG_DEAD
1563 && REG_P (XEXP (note, 0))
1564 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1565 && reg_mentioned_p (XEXP (note, 0), in)
1566 /* Check that a former pseudo is valid; see find_dummy_reload. */
1567 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1568 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1569 ORIGINAL_REGNO (XEXP (note, 0)))
1570 && REG_NREGS (XEXP (note, 0)) == 1))
1571 && ! refers_to_regno_for_reload_p (regno,
1572 end_hard_regno (rel_mode,
1573 regno),
1574 PATTERN (this_insn), inloc)
1575 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1576 /* If this is also an output reload, IN cannot be used as
1577 the reload register if it is set in this insn unless IN
1578 is also OUT. */
1579 && (out == 0 || in == out
1580 || ! hard_reg_set_here_p (regno,
1581 end_hard_regno (rel_mode, regno),
1582 PATTERN (this_insn)))
1583 /* ??? Why is this code so different from the previous?
1584 Is there any simple coherent way to describe the two together?
1585 What's going on here? */
1586 && (in != out
1587 || (GET_CODE (in) == SUBREG
1588 && (((GET_MODE_SIZE (GET_MODE (in)) + (UNITS_PER_WORD - 1))
1589 / UNITS_PER_WORD)
1590 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (in)))
1591 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))))
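/* I.e. IN may equal OUT here only when IN is a SUBREG occupying
   the same number of words as its full inner register.  */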
1592 /* Make sure the operand fits in the reg that dies. */
1593 && (GET_MODE_SIZE (rel_mode)
1594 <= GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1595 && targetm.hard_regno_mode_ok (regno, inmode)
1596 && targetm.hard_regno_mode_ok (regno, outmode))
1598 unsigned int offs;
1599 unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1600 hard_regno_nregs (regno, outmode));
1602 for (offs = 0; offs < nregs; offs++)
1603 if (fixed_regs[regno + offs]
1604 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1605 regno + offs))
1606 break;
1608 if (offs == nregs
1609 && (! (refers_to_regno_for_reload_p
1610 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1611 || can_reload_into (in, regno, inmode)))
1613 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1614 break;
1619 if (out)
1620 output_reloadnum = i;
1622 return i;
1625 /* Record an additional place we must replace a value
1626 for which we have already recorded a reload.
1627 RELOADNUM is the value returned by push_reload
1628 when the reload was recorded.
1629 This is used in insn patterns that use match_dup. */
1631 static void
1632 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1634 if (replace_reloads)
1636 struct replacement *r = &replacements[n_replacements++];
1637 r->what = reloadnum;
1638 r->where = loc;
1639 r->mode = mode;
1643 /* Duplicate any replacement we have recorded to apply at
1644 location ORIG_LOC to also be performed at DUP_LOC.
1645 This is used in insn patterns that use match_dup. */
1647 static void
1648 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1650 int i, n = n_replacements;
1652 for (i = 0; i < n; i++)
1654 struct replacement *r = &replacements[i];
1655 if (r->where == orig_loc)
1656 push_replacement (dup_loc, r->what, r->mode);
1660 /* Transfer all replacements that used to be in reload FROM to be in
1661 reload TO. */
1663 void
1664 transfer_replacements (int to, int from)
1666 int i;
1668 for (i = 0; i < n_replacements; i++)
1669 if (replacements[i].what == from)
1670 replacements[i].what = to;
1673 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1674 or a subpart of it. If we have any replacements registered for IN_RTX,
1675 cancel the reloads that were supposed to load them.
1676 Return nonzero if we canceled any reloads. */
1678 remove_address_replacements (rtx in_rtx)
1680 int i, j;
1681 char reload_flags[MAX_RELOADS];
1682 int something_changed = 0;
1684 memset (reload_flags, 0, sizeof reload_flags);
1685 for (i = 0, j = 0; i < n_replacements; i++)
1687 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1688 reload_flags[replacements[i].what] |= 1;
1689 else
1691 replacements[j++] = replacements[i];
1692 reload_flags[replacements[i].what] |= 2;
1695 /* Note that the following store must be done before the recursive calls. */
1696 n_replacements = j;
1698 for (i = n_reloads - 1; i >= 0; i--)
1700 if (reload_flags[i] == 1)
1702 deallocate_reload_reg (i);
1703 remove_address_replacements (rld[i].in);
1704 rld[i].in = 0;
1705 something_changed = 1;
1708 return something_changed;
1711 /* If there is only one output reload, and it is not for an earlyclobber
1712 operand, try to combine it with a (logically unrelated) input reload
1713 to reduce the number of reload registers needed.
1715 This is safe if the input reload does not appear in
1716 the value being output-reloaded, because this implies
1717 it is not needed any more once the original insn completes.
1719 If that doesn't work, see if we can use any of the registers that
1720 die in this insn as a reload register. We can if it is of the right
1721 class and does not appear in the value being output-reloaded. */
1723 static void
1724 combine_reloads (void)
1726 int i, regno;
1727 int output_reload = -1;
1728 int secondary_out = -1;
1729 rtx note;
1731 /* Find the output reload; return unless there is exactly one
1732 and that one is mandatory. */
1734 for (i = 0; i < n_reloads; i++)
1735 if (rld[i].out != 0)
1737 if (output_reload >= 0)
1738 return;
1739 output_reload = i;
1742 if (output_reload < 0 || rld[output_reload].optional)
1743 return;
1745 /* An input-output reload isn't combinable. */
1747 if (rld[output_reload].in != 0)
1748 return;
1750 /* If this reload is for an earlyclobber operand, we can't do anything. */
1751 if (earlyclobber_operand_p (rld[output_reload].out))
1752 return;
1754 /* If there is a reload for part of the address of this operand, we would
1755 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1756 its life to the point where doing this combine would not lower the
1757 number of spill registers needed. */
1758 for (i = 0; i < n_reloads; i++)
1759 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1760 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1761 && rld[i].opnum == rld[output_reload].opnum)
1762 return;
1764 /* Check each input reload; can we combine it? */
1766 for (i = 0; i < n_reloads; i++)
1767 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1768 /* Life span of this reload must not extend past main insn. */
1769 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1770 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1771 && rld[i].when_needed != RELOAD_OTHER
1772 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1773 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1774 [(int) rld[output_reload].outmode])
1775 && rld[i].inc == 0
1776 && rld[i].reg_rtx == 0
1777 /* Don't combine two reloads with different secondary
1778 memory locations. */
1779 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1780 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1781 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1782 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1783 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1784 ? (rld[i].rclass == rld[output_reload].rclass)
1785 : (reg_class_subset_p (rld[i].rclass,
1786 rld[output_reload].rclass)
1787 || reg_class_subset_p (rld[output_reload].rclass,
1788 rld[i].rclass)))
1789 && (MATCHES (rld[i].in, rld[output_reload].out)
1790 /* Args reversed because the first arg seems to be
1791 the one that we imagine being modified
1792 while the second is the one that might be affected. */
1793 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1794 rld[i].in)
1795 /* However, if the input is a register that appears inside
1796 the output, then we also can't share.
1797 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1798 If the same reload reg is used for both reg 69 and the
1799 result to be stored in memory, then that result
1800 will clobber the address of the memory ref. */
1801 && ! (REG_P (rld[i].in)
1802 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1803 rld[output_reload].out))))
1804 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1805 rld[i].when_needed != RELOAD_FOR_INPUT)
1806 && (reg_class_size[(int) rld[i].rclass]
1807 || targetm.small_register_classes_for_mode_p (VOIDmode))
1808 /* We will allow making things slightly worse by combining an
1809 input and an output, but no worse than that. */
1810 && (rld[i].when_needed == RELOAD_FOR_INPUT
1811 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1813 int j;
1815 /* We have found a reload to combine with! */
1816 rld[i].out = rld[output_reload].out;
1817 rld[i].out_reg = rld[output_reload].out_reg;
1818 rld[i].outmode = rld[output_reload].outmode;
1819 /* Mark the old output reload as inoperative. */
1820 rld[output_reload].out = 0;
1821 /* The combined reload is needed for the entire insn. */
1822 rld[i].when_needed = RELOAD_OTHER;
1823 /* If the output reload had a secondary reload, copy it. */
1824 if (rld[output_reload].secondary_out_reload != -1)
1826 rld[i].secondary_out_reload
1827 = rld[output_reload].secondary_out_reload;
1828 rld[i].secondary_out_icode
1829 = rld[output_reload].secondary_out_icode;
1832 /* Copy any secondary MEM. */
1833 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1834 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1835 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1836 /* If required, minimize the register class. */
1837 if (reg_class_subset_p (rld[output_reload].rclass,
1838 rld[i].rclass))
1839 rld[i].rclass = rld[output_reload].rclass;
1841 /* Transfer all replacements from the old reload to the combined. */
1842 for (j = 0; j < n_replacements; j++)
1843 if (replacements[j].what == output_reload)
1844 replacements[j].what = i;
1846 return;
1849 /* If this insn has only one operand that is modified or written (assumed
1850 to be the first), it must be the one corresponding to this reload. It
1851 is safe to use anything that dies in this insn for that output provided
1852 that it does not occur in the output (we already know it isn't an
1853 earlyclobber). If this is an asm insn, give up. */
1855 if (INSN_CODE (this_insn) == -1)
1856 return;
1858 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1859 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1860 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1861 return;
1863 /* See if some hard register that dies in this insn and is not used in
1864 the output is the right class. Only works if the register we pick
1865 up can fully hold our output reload. */
1866 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1867 if (REG_NOTE_KIND (note) == REG_DEAD
1868 && REG_P (XEXP (note, 0))
1869 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1870 rld[output_reload].out)
1871 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1872 && targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1873 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1874 regno)
1875 && (hard_regno_nregs (regno, rld[output_reload].outmode)
1876 <= REG_NREGS (XEXP (note, 0)))
1877 /* Ensure that a secondary or tertiary reload for this output
1878 won't want this register. */
1879 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1880 || (!(TEST_HARD_REG_BIT
1881 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1882 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1883 || !(TEST_HARD_REG_BIT
1884 (reg_class_contents[(int) rld[secondary_out].rclass],
1885 regno)))))
1886 && !fixed_regs[regno]
1887 /* Check that a former pseudo is valid; see find_dummy_reload. */
1888 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1889 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1890 ORIGINAL_REGNO (XEXP (note, 0)))
1891 && REG_NREGS (XEXP (note, 0)) == 1)))
1893 rld[output_reload].reg_rtx
1894 = gen_rtx_REG (rld[output_reload].outmode, regno);
1895 return;
1899 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1900 See if one of IN and OUT is a register that may be used;
1901 this is desirable since a spill-register won't be needed.
1902 If so, return the register rtx that proves acceptable.
1904 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1905 RCLASS is the register class required for the reload.
1907 If FOR_REAL is >= 0, it is the number of the reload,
1908 and in some cases when it can be discovered that OUT doesn't need
1909 to be computed, clear out rld[FOR_REAL].out.
1911 If FOR_REAL is -1, this should not be done, because this call
1912 is just to see if a register can be found, not to find and install it.
1914 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1915 puts an additional constraint on being able to use IN for OUT since
1916 IN must not appear elsewhere in the insn (it is assumed that IN itself
1917 is safe from the earlyclobber). */
1919 static rtx
1920 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1921 machine_mode inmode, machine_mode outmode,
1922 reg_class_t rclass, int for_real, int earlyclobber)
1924 rtx in = real_in;
1925 rtx out = real_out;
1926 int in_offset = 0;
1927 int out_offset = 0;
1928 rtx value = 0;
1930 /* If operands exceed a word, we can't use either of them
1931 unless they have the same size. */
1932 if (GET_MODE_SIZE (outmode) != GET_MODE_SIZE (inmode)
1933 && (GET_MODE_SIZE (outmode) > UNITS_PER_WORD
1934 || GET_MODE_SIZE (inmode) > UNITS_PER_WORD))
1935 return 0;
1937 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1938 respectively refers to a hard register. */
1940 /* Find the inside of any subregs. */
1941 while (GET_CODE (out) == SUBREG)
1943 if (REG_P (SUBREG_REG (out))
1944 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1945 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1946 GET_MODE (SUBREG_REG (out)),
1947 SUBREG_BYTE (out),
1948 GET_MODE (out));
1949 out = SUBREG_REG (out);
1951 while (GET_CODE (in) == SUBREG)
1953 if (REG_P (SUBREG_REG (in))
1954 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1955 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1956 GET_MODE (SUBREG_REG (in)),
1957 SUBREG_BYTE (in),
1958 GET_MODE (in));
1959 in = SUBREG_REG (in);
1962 /* Narrow down the reg class, the same way push_reload will;
1963 otherwise we might find a dummy now, but push_reload won't. */
1965 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1966 if (preferred_class != NO_REGS)
1967 rclass = (enum reg_class) preferred_class;
1970 /* See if OUT will do. */
1971 if (REG_P (out)
1972 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1974 unsigned int regno = REGNO (out) + out_offset;
1975 unsigned int nwords = hard_regno_nregs (regno, outmode);
1976 rtx saved_rtx;
1978 /* When we consider whether the insn uses OUT,
1979 ignore references within IN. They don't prevent us
1980 from copying IN into OUT, because those refs would
1981 move into the insn that reloads IN.
1983 However, we only ignore IN in its role as this reload.
1984 If the insn uses IN elsewhere and it contains OUT,
1985 that counts. We can't be sure it's the "same" operand
1986 so it might not go through this reload.
1988 We also need to avoid using OUT if it, or part of it, is a
1989 fixed register. Modifying such registers, even transiently,
1990 may have undefined effects on the machine, such as modifying
1991 the stack pointer. */
1992 saved_rtx = *inloc;
1993 *inloc = const0_rtx;
1995 if (regno < FIRST_PSEUDO_REGISTER
1996 && targetm.hard_regno_mode_ok (regno, outmode)
1997 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
1998 PATTERN (this_insn), outloc))
2000 unsigned int i;
2002 for (i = 0; i < nwords; i++)
2003 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2004 regno + i)
2005 || fixed_regs[regno + i])
2006 break;
2008 if (i == nwords)
2010 if (REG_P (real_out))
2011 value = real_out;
2012 else
2013 value = gen_rtx_REG (outmode, regno);
2017 *inloc = saved_rtx;
2020 /* Consider using IN if OUT was not acceptable
2021 or if OUT dies in this insn (like the quotient in a divmod insn).
2022 We can't use IN unless it dies in this insn,
2023 which means we must know accurately which hard regs are live.
2024 Also, the result can't go in IN if IN is used within OUT,
2025 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2026 if (hard_regs_live_known
2027 && REG_P (in)
2028 && REGNO (in) < FIRST_PSEUDO_REGISTER
2029 && (value == 0
2030 || find_reg_note (this_insn, REG_UNUSED, real_out))
2031 && find_reg_note (this_insn, REG_DEAD, real_in)
2032 && !fixed_regs[REGNO (in)]
2033 && targetm.hard_regno_mode_ok (REGNO (in),
2034 /* The only case where out and real_out
2035 might have different modes is where
2036 real_out is a subreg, and in that
2037 case, out has a real mode. */
2038 (GET_MODE (out) != VOIDmode
2039 ? GET_MODE (out) : outmode))
2040 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2041 /* However only do this if we can be sure that this input
2042 operand doesn't correspond with an uninitialized pseudo.
2043 global can assign some hardreg to it that is the same as
2044 the one assigned to a different, also live pseudo (as it
2045 can ignore the conflict). We must never introduce writes
2046 to such hardregs, as they would clobber the other live
2047 pseudo. See PR 20973. */
2048 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2049 ORIGINAL_REGNO (in))
2050 /* Similarly, only do this if we can be sure that the death
2051 note is still valid. global can assign some hardreg to
2052 the pseudo referenced in the note and simultaneously a
2053 subword of this hardreg to a different, also live pseudo,
2054 because only another subword of the hardreg is actually
2055 used in the insn. This cannot happen if the pseudo has
2056 been assigned exactly one hardreg. See PR 33732. */
2057 && REG_NREGS (in) == 1)))
2059 unsigned int regno = REGNO (in) + in_offset;
2060 unsigned int nwords = hard_regno_nregs (regno, inmode);
2062 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2063 && ! hard_reg_set_here_p (regno, regno + nwords,
2064 PATTERN (this_insn))
2065 && (! earlyclobber
2066 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2067 PATTERN (this_insn), inloc)))
2069 unsigned int i;
2071 for (i = 0; i < nwords; i++)
2072 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2073 regno + i))
2074 break;
2076 if (i == nwords)
2078 /* If we were going to use OUT as the reload reg
2079 and changed our mind, it means OUT is a dummy that
2080 dies here. So don't bother copying value to it. */
2081 if (for_real >= 0 && value == real_out)
2082 rld[for_real].out = 0;
2083 if (REG_P (real_in))
2084 value = real_in;
2085 else
2086 value = gen_rtx_REG (inmode, regno);
2091 return value;
2094 /* This page contains subroutines used mainly for determining
2095 whether the IN or an OUT of a reload can serve as the
2096 reload register. */
2098 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2101 earlyclobber_operand_p (rtx x)
2103 int i;
2105 for (i = 0; i < n_earlyclobbers; i++)
2106 if (reload_earlyclobbers[i] == x)
2107 return 1;
2109 return 0;
2112 /* Return 1 if expression X alters a hard reg in the range
2113 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2114 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2115 X should be the body of an instruction. */
2117 static int
2118 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2120 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2122 rtx op0 = SET_DEST (x);
2124 while (GET_CODE (op0) == SUBREG)
2125 op0 = SUBREG_REG (op0);
2126 if (REG_P (op0))
2128 unsigned int r = REGNO (op0);
2130 /* See if this reg overlaps range under consideration. */
2131 if (r < end_regno
2132 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2133 return 1;
2136 else if (GET_CODE (x) == PARALLEL)
2138 int i = XVECLEN (x, 0) - 1;
2140 for (; i >= 0; i--)
2141 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2142 return 1;
2145 return 0;
2148 /* Return 1 if ADDR is a valid memory address for mode MODE
2149 in address space AS, and check that each pseudo reg has the
2150 proper kind of hard reg. */
2153 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2154 rtx addr, addr_space_t as)
2156 #ifdef GO_IF_LEGITIMATE_ADDRESS
2157 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2158 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2159 return 0;
2161 win:
2162 return 1;
2163 #else
2164 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as);
2165 #endif
2168 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2169 if they are the same hard reg, and has special hacks for
2170 autoincrement and autodecrement.
2171 This is specifically intended for find_reloads to use
2172 in determining whether two operands match.
2173 X is the operand whose number is the lower of the two.
2175 The value is 2 if Y contains a pre-increment that matches
2176 a non-incrementing address in X. */
2178 /* ??? To be completely correct, we should arrange to pass
2179 for X the output operand and for Y the input operand.
2180 For now, we assume that the output operand has the lower number
2181 because that is natural in (SET output (... input ...)). */
2184 operands_match_p (rtx x, rtx y)
2186 int i;
2187 RTX_CODE code = GET_CODE (x);
2188 const char *fmt;
2189 int success_2;
2191 if (x == y)
2192 return 1;
2193 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2194 && (REG_P (y) || (GET_CODE (y) == SUBREG
2195 && REG_P (SUBREG_REG (y)))))
2197 int j;
2199 if (code == SUBREG)
2201 i = REGNO (SUBREG_REG (x));
2202 if (i >= FIRST_PSEUDO_REGISTER)
2203 goto slow;
2204 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2205 GET_MODE (SUBREG_REG (x)),
2206 SUBREG_BYTE (x),
2207 GET_MODE (x));
2209 else
2210 i = REGNO (x);
2212 if (GET_CODE (y) == SUBREG)
2214 j = REGNO (SUBREG_REG (y));
2215 if (j >= FIRST_PSEUDO_REGISTER)
2216 goto slow;
2217 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2218 GET_MODE (SUBREG_REG (y)),
2219 SUBREG_BYTE (y),
2220 GET_MODE (y));
2222 else
2223 j = REGNO (y);
2225 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2226 multiple hard register group of scalar integer registers, so that
2227 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2228 register. */
2229 scalar_int_mode xmode;
2230 if (REG_WORDS_BIG_ENDIAN
2231 && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2232 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2233 && i < FIRST_PSEUDO_REGISTER)
2234 i += hard_regno_nregs (i, xmode) - 1;
2235 scalar_int_mode ymode;
2236 if (REG_WORDS_BIG_ENDIAN
2237 && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2238 && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2239 && j < FIRST_PSEUDO_REGISTER)
2240 j += hard_regno_nregs (j, ymode) - 1;
2242 return i == j;
2244 /* If two operands must match, because they are really a single
2245 operand of an assembler insn, then two postincrements are invalid
2246 because the assembler insn would increment only once.
2247 On the other hand, a postincrement matches ordinary indexing
2248 if the postincrement is the output operand. */
2249 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2250 return operands_match_p (XEXP (x, 0), y);
2251 /* Two preincrements are invalid
2252 because the assembler insn would increment only once.
2253 On the other hand, a preincrement matches ordinary indexing
2254 if the preincrement is the input operand.
2255 In this case, return 2, since some callers need to do special
2256 things when this happens. */
2257 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2258 || GET_CODE (y) == PRE_MODIFY)
2259 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2261 slow:
2263 /* Now we have disposed of all the cases in which different rtx codes
2264 can match. */
2265 if (code != GET_CODE (y))
2266 return 0;
2268 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2269 if (GET_MODE (x) != GET_MODE (y))
2270 return 0;
2272 /* MEMs referring to different address space are not equivalent. */
2273 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2274 return 0;
2276 switch (code)
2278 CASE_CONST_UNIQUE:
2279 return 0;
2281 case LABEL_REF:
2282 return label_ref_label (x) == label_ref_label (y);
2283 case SYMBOL_REF:
2284 return XSTR (x, 0) == XSTR (y, 0);
2286 default:
2287 break;
2290 /* Compare the elements. If any pair of corresponding elements
2291 fail to match, return 0 for the whole thing.
2293 success_2 = 0;
2294 fmt = GET_RTX_FORMAT (code);
2295 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2297 int val, j;
2298 switch (fmt[i])
2300 case 'w':
2301 if (XWINT (x, i) != XWINT (y, i))
2302 return 0;
2303 break;
2305 case 'i':
2306 if (XINT (x, i) != XINT (y, i))
2307 return 0;
2308 break;
2310 case 'e':
2311 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2312 if (val == 0)
2313 return 0;
2314 /* If any subexpression returns 2,
2315 we should return 2 if we are successful. */
2316 if (val == 2)
2317 success_2 = 1;
2318 break;
2320 case '0':
2321 break;
2323 case 'E':
2324 if (XVECLEN (x, i) != XVECLEN (y, i))
2325 return 0;
2326 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2328 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2329 if (val == 0)
2330 return 0;
2331 if (val == 2)
2332 success_2 = 1;
2334 break;
2336 /* It is believed that rtx's at this level will never
2337 contain anything but integers and other rtx's,
2338 except for within LABEL_REFs and SYMBOL_REFs. */
2339 default:
2340 gcc_unreachable ();
2343 return 1 + success_2;
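As a rough illustration of the matching rules described above, here is a
hypothetical sketch (not part of reload.c; the helper name and the rtxes
are made up): a post-increment in the lower-numbered operand matches
plain indexing, while a matching pre-increment in the other operand
yields the special return value 2.

/* Hypothetical illustration only.  */
static void
operands_match_p_example (void)
{
  rtx addr = gen_rtx_REG (Pmode, 3);  /* some made-up hard reg */
  rtx plain = gen_rtx_MEM (SImode, addr);
  rtx post = gen_rtx_MEM (SImode, gen_rtx_POST_INC (Pmode, addr));
  rtx pre = gen_rtx_MEM (SImode, gen_rtx_PRE_INC (Pmode, addr));

  /* The POST_INC in the first operand is stripped, so this is an
     ordinary match.  */
  gcc_checking_assert (operands_match_p (post, plain) == 1);
  /* A PRE_INC in the second operand that matches a non-incrementing
     address in the first returns 2.  */
  gcc_checking_assert (operands_match_p (plain, pre) == 2);
}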
2346 /* Describe the range of registers or memory referenced by X.
2347 If X is a register, set REG_FLAG and put the first register
2348 number into START and the last plus one into END.
2349 If X is a memory reference, put a base address into BASE
2350 and a range of integer offsets into START and END.
2351 If X is pushing on the stack, we can assume it causes no trouble,
2352 so we set the SAFE field. */
2354 static struct decomposition
2355 decompose (rtx x)
2357 struct decomposition val;
2358 int all_const = 0;
2360 memset (&val, 0, sizeof (val));
2362 switch (GET_CODE (x))
2364 case MEM:
2366 rtx base = NULL_RTX, offset = 0;
2367 rtx addr = XEXP (x, 0);
2369 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2370 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2372 val.base = XEXP (addr, 0);
2373 val.start = -GET_MODE_SIZE (GET_MODE (x));
2374 val.end = GET_MODE_SIZE (GET_MODE (x));
2375 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2376 return val;
2379 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2381 if (GET_CODE (XEXP (addr, 1)) == PLUS
2382 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2383 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2385 val.base = XEXP (addr, 0);
2386 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2387 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2388 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2389 return val;
2393 if (GET_CODE (addr) == CONST)
2395 addr = XEXP (addr, 0);
2396 all_const = 1;
2398 if (GET_CODE (addr) == PLUS)
2400 if (CONSTANT_P (XEXP (addr, 0)))
2402 base = XEXP (addr, 1);
2403 offset = XEXP (addr, 0);
2405 else if (CONSTANT_P (XEXP (addr, 1)))
2407 base = XEXP (addr, 0);
2408 offset = XEXP (addr, 1);
2412 if (offset == 0)
2414 base = addr;
2415 offset = const0_rtx;
2417 if (GET_CODE (offset) == CONST)
2418 offset = XEXP (offset, 0);
2419 if (GET_CODE (offset) == PLUS)
2421 if (CONST_INT_P (XEXP (offset, 0)))
2423 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2424 offset = XEXP (offset, 0);
2426 else if (CONST_INT_P (XEXP (offset, 1)))
2428 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2429 offset = XEXP (offset, 1);
2431 else
2433 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2434 offset = const0_rtx;
2437 else if (!CONST_INT_P (offset))
2439 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2440 offset = const0_rtx;
2443 if (all_const && GET_CODE (base) == PLUS)
2444 base = gen_rtx_CONST (GET_MODE (base), base);
2446 gcc_assert (CONST_INT_P (offset));
2448 val.start = INTVAL (offset);
2449 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2450 val.base = base;
2452 break;
2454 case REG:
2455 val.reg_flag = 1;
2456 val.start = true_regnum (x);
2457 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2459 /* A pseudo with no hard reg. */
2460 val.start = REGNO (x);
2461 val.end = val.start + 1;
2463 else
2464 /* A hard reg. */
2465 val.end = end_hard_regno (GET_MODE (x), val.start);
2466 break;
2468 case SUBREG:
2469 if (!REG_P (SUBREG_REG (x)))
2470 /* This could be more precise, but it's good enough. */
2471 return decompose (SUBREG_REG (x));
2472 val.reg_flag = 1;
2473 val.start = true_regnum (x);
2474 if (val.start < 0 || val.start >= FIRST_PSEUDO_REGISTER)
2475 return decompose (SUBREG_REG (x));
2476 else
2477 /* A hard reg. */
2478 val.end = val.start + subreg_nregs (x);
2479 break;
2481 case SCRATCH:
2482 /* This hasn't been assigned yet, so it can't conflict yet. */
2483 val.safe = 1;
2484 break;
2486 default:
2487 gcc_assert (CONSTANT_P (x));
2488 val.safe = 1;
2489 break;
2491 return val;
2494 /* Return 1 if altering Y will not modify the value of X.
2495 Y is also described by YDATA, which should be decompose (Y). */
2497 static int
2498 immune_p (rtx x, rtx y, struct decomposition ydata)
2500 struct decomposition xdata;
2502 if (ydata.reg_flag)
2503 return !refers_to_regno_for_reload_p (ydata.start, ydata.end, x, (rtx*) 0);
2504 if (ydata.safe)
2505 return 1;
2507 gcc_assert (MEM_P (y));
2508 /* If Y is memory and X is not, Y can't affect X. */
2509 if (!MEM_P (x))
2510 return 1;
2512 xdata = decompose (x);
2514 if (! rtx_equal_p (xdata.base, ydata.base))
2516 /* If bases are distinct symbolic constants, there is no overlap. */
2517 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2518 return 1;
2519 /* Constants and stack slots never overlap. */
2520 if (CONSTANT_P (xdata.base)
2521 && (ydata.base == frame_pointer_rtx
2522 || ydata.base == hard_frame_pointer_rtx
2523 || ydata.base == stack_pointer_rtx))
2524 return 1;
2525 if (CONSTANT_P (ydata.base)
2526 && (xdata.base == frame_pointer_rtx
2527 || xdata.base == hard_frame_pointer_rtx
2528 || xdata.base == stack_pointer_rtx))
2529 return 1;
2530 /* If either base is variable, we don't know anything. */
2531 return 0;
2534 return (xdata.start >= ydata.end || ydata.start >= xdata.end);
2537 /* Similar, but calls decompose. */
2540 safe_from_earlyclobber (rtx op, rtx clobber)
2542 struct decomposition early_data;
2544 early_data = decompose (clobber);
2545 return immune_p (op, clobber, early_data);
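A minimal sketch along the same lines (hypothetical helper, not from the
original file): two frame-pointer slots at disjoint offsets decompose to
the same base with non-overlapping byte ranges, so one is immune to an
earlyclobber of the other.

/* Hypothetical illustration only.  */
static void
safe_from_earlyclobber_example (void)
{
  rtx slot8 = gen_rtx_MEM (SImode,
			   plus_constant (Pmode, frame_pointer_rtx, 8));
  rtx slot16 = gen_rtx_MEM (SImode,
			    plus_constant (Pmode, frame_pointer_rtx, 16));

  /* Same base, byte ranges [8,12) and [16,20): no overlap, so an
     earlyclobber write to SLOT16 cannot change the value of SLOT8.  */
  gcc_checking_assert (safe_from_earlyclobber (slot8, slot16));
}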
2548 /* Main entry point of this file: search the body of INSN
2549 for values that need reloading and record them with push_reload.
2550 REPLACE nonzero means record also where the values occur
2551 so that subst_reloads can be used.
2553 IND_LEVELS says how many levels of indirection are supported by this
2554 machine; a value of zero means that a memory reference is not a valid
2555 memory address.
2557 LIVE_KNOWN says we have valid information about which hard
2558 regs are live at each point in the program; this is true when
2559 we are called from global_alloc but false when stupid register
2560 allocation has been done.
2562 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2563 which is nonnegative if the reg has been commandeered for reloading into.
2564 It is copied into STATIC_RELOAD_REG_P and referenced from there
2565 by various subroutines.
2567 Return TRUE if some operands need to be changed, because of swapping
2568 commutative operands, reg_equiv_address substitution, or whatever. */
2571 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2572 short *reload_reg_p)
2574 int insn_code_number;
2575 int i, j;
2576 int noperands;
2577 /* These start out as the constraints for the insn
2578 and they are chewed up as we consider alternatives. */
2579 const char *constraints[MAX_RECOG_OPERANDS];
2580 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2581 a register. */
2582 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2583 char pref_or_nothing[MAX_RECOG_OPERANDS];
2584 /* Nonzero for a MEM operand whose entire address needs a reload.
2585 May be -1 to indicate the entire address may or may not need a reload. */
2586 int address_reloaded[MAX_RECOG_OPERANDS];
2587 /* Nonzero for an address operand that needs to be completely reloaded.
2588 May be -1 to indicate the entire operand may or may not need a reload. */
2589 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2590 /* Value of enum reload_type to use for operand. */
2591 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2592 /* Value of enum reload_type to use within address of operand. */
2593 enum reload_type address_type[MAX_RECOG_OPERANDS];
2594 /* Save the usage of each operand. */
2595 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2596 int no_input_reloads = 0, no_output_reloads = 0;
2597 int n_alternatives;
2598 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2599 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2600 char this_alternative_win[MAX_RECOG_OPERANDS];
2601 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2602 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2603 int this_alternative_matches[MAX_RECOG_OPERANDS];
2604 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2605 int this_alternative_number;
2606 int goal_alternative_number = 0;
2607 int operand_reloadnum[MAX_RECOG_OPERANDS];
2608 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2609 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2610 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2611 char goal_alternative_win[MAX_RECOG_OPERANDS];
2612 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2613 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2614 int goal_alternative_swapped;
2615 int best;
2616 int commutative;
2617 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2618 rtx substed_operand[MAX_RECOG_OPERANDS];
2619 rtx body = PATTERN (insn);
2620 rtx set = single_set (insn);
2621 int goal_earlyclobber = 0, this_earlyclobber;
2622 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2623 int retval = 0;
2625 this_insn = insn;
2626 n_reloads = 0;
2627 n_replacements = 0;
2628 n_earlyclobbers = 0;
2629 replace_reloads = replace;
2630 hard_regs_live_known = live_known;
2631 static_reload_reg_p = reload_reg_p;
2633 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads;
2634 neither are insns that SET cc0. Insns that use CC0 are not allowed
2635 to have any input reloads. */
2636 if (JUMP_P (insn) || CALL_P (insn))
2637 no_output_reloads = 1;
2639 if (HAVE_cc0 && reg_referenced_p (cc0_rtx, PATTERN (insn)))
2640 no_input_reloads = 1;
2641 if (HAVE_cc0 && reg_set_p (cc0_rtx, PATTERN (insn)))
2642 no_output_reloads = 1;
2644 /* The eliminated forms of any secondary memory locations are per-insn, so
2645 clear them out here. */
2647 if (secondary_memlocs_elim_used)
2649 memset (secondary_memlocs_elim, 0,
2650 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2651 secondary_memlocs_elim_used = 0;
2654 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2655 is cheap to move between them. If it is not, there may not be an insn
2656 to do the copy, so we may need a reload. */
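/* (A register_move_cost of 2 is the conventional cost of a plain
   register-to-register move, which is why 2 counts as "cheap" below.)  */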
2657 if (GET_CODE (body) == SET
2658 && REG_P (SET_DEST (body))
2659 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2660 && REG_P (SET_SRC (body))
2661 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2662 && register_move_cost (GET_MODE (SET_SRC (body)),
2663 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2664 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2665 return 0;
2667 extract_insn (insn);
2669 noperands = reload_n_operands = recog_data.n_operands;
2670 n_alternatives = recog_data.n_alternatives;
2672 /* Just return "no reloads" if insn has no operands with constraints. */
2673 if (noperands == 0 || n_alternatives == 0)
2674 return 0;
2676 insn_code_number = INSN_CODE (insn);
2677 this_insn_is_asm = insn_code_number < 0;
2679 memcpy (operand_mode, recog_data.operand_mode,
2680 noperands * sizeof (machine_mode));
2681 memcpy (constraints, recog_data.constraints,
2682 noperands * sizeof (const char *));
2684 commutative = -1;
2686 /* If we will need to know, later, whether some pair of operands
2687 are the same, we must compare them now and save the result.
2688 Reloading the base and index registers will clobber them
2689 and afterward they will fail to match. */
2691 for (i = 0; i < noperands; i++)
2693 const char *p;
2694 int c;
2695 char *end;
2697 substed_operand[i] = recog_data.operand[i];
2698 p = constraints[i];
2700 modified[i] = RELOAD_READ;
2702 /* Scan this operand's constraint to see if it is an output operand,
2703 an in-out operand, is commutative, or should match another. */
2705 while ((c = *p))
2707 p += CONSTRAINT_LEN (c, p);
2708 switch (c)
2710 case '=':
2711 modified[i] = RELOAD_WRITE;
2712 break;
2713 case '+':
2714 modified[i] = RELOAD_READ_WRITE;
2715 break;
2716 case '%':
2718 /* The last operand should not be marked commutative. */
2719 gcc_assert (i != noperands - 1);
2721 /* We currently only support one commutative pair of
2722 operands. Some existing asm code uses more
2723 than one pair. Previously, that would usually work,
2724 but sometimes it would crash the compiler. We
2725 continue supporting that case as well as we can by
2726 silently ignoring all but the first pair. In the
2727 future we may handle it correctly. */
2728 if (commutative < 0)
2729 commutative = i;
2730 else
2731 gcc_assert (this_insn_is_asm);
2733 break;
2734 /* Use of ISDIGIT is tempting here, but it may get expensive because
2735 of locale support we don't want. */
2736 case '0': case '1': case '2': case '3': case '4':
2737 case '5': case '6': case '7': case '8': case '9':
2739 c = strtoul (p - 1, &end, 10);
2740 p = end;
2742 operands_match[c][i]
2743 = operands_match_p (recog_data.operand[c],
2744 recog_data.operand[i]);
2746 /* An operand may not match itself. */
2747 gcc_assert (c != i);
2749 /* If C can be commuted with C+1, and C might need to match I,
2750 then C+1 might also need to match I. */
2751 if (commutative >= 0)
2753 if (c == commutative || c == commutative + 1)
2755 int other = c + (c == commutative ? 1 : -1);
2756 operands_match[other][i]
2757 = operands_match_p (recog_data.operand[other],
2758 recog_data.operand[i]);
2760 if (i == commutative || i == commutative + 1)
2762 int other = i + (i == commutative ? 1 : -1);
2763 operands_match[c][other]
2764 = operands_match_p (recog_data.operand[c],
2765 recog_data.operand[other]);
2767 /* Note that C is supposed to be less than I.
2768 No need to consider altering both C and I because in
2769 that case we would alter one into the other. */
2776 /* Examine each operand that is a memory reference or memory address
2777 and reload parts of the addresses into index registers.
2778 Also here any references to pseudo regs that didn't get hard regs
2779 but are equivalent to constants get replaced in the insn itself
2780 with those constants. Nobody will ever see them again.
2782 Finally, set up the preferred classes of each operand. */
2784 for (i = 0; i < noperands; i++)
2786 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2788 address_reloaded[i] = 0;
2789 address_operand_reloaded[i] = 0;
2790 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2791 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2792 : RELOAD_OTHER);
2793 address_type[i]
2794 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2795 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2796 : RELOAD_OTHER);
2798 if (*constraints[i] == 0)
2799 /* Ignore things like match_operator operands. */
2801 else if (insn_extra_address_constraint
2802 (lookup_constraint (constraints[i])))
2804 address_operand_reloaded[i]
2805 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2806 recog_data.operand[i],
2807 recog_data.operand_loc[i],
2808 i, operand_type[i], ind_levels, insn);
2810 /* If we now have a simple operand where we used to have a
2811 PLUS or MULT, re-recognize and try again. */
2812 if ((OBJECT_P (*recog_data.operand_loc[i])
2813 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2814 && (GET_CODE (recog_data.operand[i]) == MULT
2815 || GET_CODE (recog_data.operand[i]) == PLUS))
2817 INSN_CODE (insn) = -1;
2818 retval = find_reloads (insn, replace, ind_levels, live_known,
2819 reload_reg_p);
2820 return retval;
2823 recog_data.operand[i] = *recog_data.operand_loc[i];
2824 substed_operand[i] = recog_data.operand[i];
2826 /* Address operands are reloaded in their existing mode,
2827 no matter what is specified in the machine description. */
2828 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2830 /* If the address is a single CONST_INT, pick the address mode
2831 instead; otherwise we will later not know in which mode
2832 the reload should be performed. */
2833 if (operand_mode[i] == VOIDmode)
2834 operand_mode[i] = Pmode;
2837 else if (code == MEM)
2839 address_reloaded[i]
2840 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2841 recog_data.operand_loc[i],
2842 XEXP (recog_data.operand[i], 0),
2843 &XEXP (recog_data.operand[i], 0),
2844 i, address_type[i], ind_levels, insn);
2845 recog_data.operand[i] = *recog_data.operand_loc[i];
2846 substed_operand[i] = recog_data.operand[i];
2848 else if (code == SUBREG)
2850 rtx reg = SUBREG_REG (recog_data.operand[i]);
2851 rtx op
2852 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2853 ind_levels,
2854 set != 0
2855 && &SET_DEST (set) == recog_data.operand_loc[i],
2856 insn,
2857 &address_reloaded[i]);
2859 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2860 that didn't get a hard register, emit a USE with a REG_EQUAL
2861 note in front so that we might inherit a previous, possibly
2862 wider reload. */
2864 if (replace
2865 && MEM_P (op)
2866 && REG_P (reg)
2867 && (GET_MODE_SIZE (GET_MODE (reg))
2868 >= GET_MODE_SIZE (GET_MODE (op)))
2869 && reg_equiv_constant (REGNO (reg)) == 0)
2870 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2871 insn),
2872 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2874 substed_operand[i] = recog_data.operand[i] = op;
2876 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2877 /* We can get a PLUS as an "operand" as a result of register
2878 elimination. See eliminate_regs and gen_reload. We handle
2879 a unary operator by reloading the operand. */
2880 substed_operand[i] = recog_data.operand[i]
2881 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2882 ind_levels, 0, insn,
2883 &address_reloaded[i]);
2884 else if (code == REG)
2886 /* This is equivalent to calling find_reloads_toplev.
2887 The code is duplicated for speed.
2888 When we find a pseudo always equivalent to a constant,
2889 we replace it by the constant. We must be sure, however,
2890 that we don't try to replace it in the insn in which it
2891 is being set. */
2892 int regno = REGNO (recog_data.operand[i]);
2893 if (reg_equiv_constant (regno) != 0
2894 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2896 /* Record the existing mode so that the check whether constants are
2897 allowed will work when operand_mode isn't specified. */
2899 if (operand_mode[i] == VOIDmode)
2900 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2902 substed_operand[i] = recog_data.operand[i]
2903 = reg_equiv_constant (regno);
2905 if (reg_equiv_memory_loc (regno) != 0
2906 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2907 /* We need not give a valid is_set_dest argument since the case
2908 of a constant equivalence was checked above. */
2909 substed_operand[i] = recog_data.operand[i]
2910 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2911 ind_levels, 0, insn,
2912 &address_reloaded[i]);
2914 /* If the operand is still a register (we didn't replace it with an
2915 equivalent), get the preferred class to reload it into. */
2916 code = GET_CODE (recog_data.operand[i]);
2917 preferred_class[i]
2918 = ((code == REG && REGNO (recog_data.operand[i])
2919 >= FIRST_PSEUDO_REGISTER)
2920 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2921 : NO_REGS);
2922 pref_or_nothing[i]
2923 = (code == REG
2924 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2925 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2928 /* If this is simply a copy from operand 1 to operand 0, merge the
2929 preferred classes for the operands. */
2930 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2931 && recog_data.operand[1] == SET_SRC (set))
2933 preferred_class[0] = preferred_class[1]
2934 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2935 pref_or_nothing[0] |= pref_or_nothing[1];
2936 pref_or_nothing[1] |= pref_or_nothing[0];
2939 /* Now see what we need for pseudo-regs that didn't get hard regs
2940 or got the wrong kind of hard reg. For this, we must consider
2941 all the operands together against the register constraints. */
2943 best = MAX_RECOG_OPERANDS * 2 + 600;
2945 goal_alternative_swapped = 0;
2947 /* The constraints are made of several alternatives.
2948 Each operand's constraint looks like foo,bar,... with commas
2949 separating the alternatives. The first alternatives for all
2950 operands go together, the second alternatives go together, etc.
2952 First loop over alternatives. */
2954 alternative_mask enabled = get_enabled_alternatives (insn);
2955 for (this_alternative_number = 0;
2956 this_alternative_number < n_alternatives;
2957 this_alternative_number++)
2959 int swapped;
2961 if (!TEST_BIT (enabled, this_alternative_number))
2963 int i;
2965 for (i = 0; i < recog_data.n_operands; i++)
2966 constraints[i] = skip_alternative (constraints[i]);
2968 continue;
2971 /* If insn is commutative (it's safe to exchange a certain pair
2972 of operands) then we need to try each alternative twice, the
2973 second time matching those two operands as if we had
2974 exchanged them. To do this, really exchange them in
2975 operands. */
2976 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
2978 /* Loop over operands for one constraint alternative. */
2979 /* LOSERS counts those that don't fit this alternative
2980 and would require loading. */
2981 int losers = 0;
2982 /* BAD is set to 1 if some operand can't fit this alternative
2983 even after reloading. */
2984 int bad = 0;
2985 /* REJECT is a count of how undesirable this alternative says it is
2986 if any reloading is required. If the alternative matches exactly
2987 then REJECT is ignored, but otherwise it gets this much
2988 counted against it in addition to the reloading needed. Each
2989 ? counts three times here since we want the disparagement caused by
2990 a bad register class to only count 1/3 as much. */
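/* (Concretely, in the constraint scan below a '?' adds 6 to REJECT
   and a '!' sets it to 600; see the cases further down.)  */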
2991 int reject = 0;
2993 if (swapped)
2995 recog_data.operand[commutative] = substed_operand[commutative + 1];
2996 recog_data.operand[commutative + 1] = substed_operand[commutative];
2997 /* Swap the duplicates too. */
2998 for (i = 0; i < recog_data.n_dups; i++)
2999 if (recog_data.dup_num[i] == commutative
3000 || recog_data.dup_num[i] == commutative + 1)
3001 *recog_data.dup_loc[i]
3002 = recog_data.operand[(int) recog_data.dup_num[i]];
3004 std::swap (preferred_class[commutative],
3005 preferred_class[commutative + 1]);
3006 std::swap (pref_or_nothing[commutative],
3007 pref_or_nothing[commutative + 1]);
3008 std::swap (address_reloaded[commutative],
3009 address_reloaded[commutative + 1]);
3012 this_earlyclobber = 0;
3014 for (i = 0; i < noperands; i++)
3016 const char *p = constraints[i];
3017 char *end;
3018 int len;
3019 int win = 0;
3020 int did_match = 0;
3021 /* 0 => this operand can be reloaded somehow for this alternative. */
3022 int badop = 1;
3023 /* 0 => this operand can be reloaded if the alternative allows regs. */
3024 int winreg = 0;
3025 int c;
3026 int m;
3027 rtx operand = recog_data.operand[i];
3028 int offset = 0;
3029 /* Nonzero means this is a MEM that must be reloaded into a reg
3030 regardless of what the constraint says. */
3031 int force_reload = 0;
3032 int offmemok = 0;
3033 /* Nonzero if a constant forced into memory would be OK for this
3034 operand. */
3035 int constmemok = 0;
3036 int earlyclobber = 0;
3037 enum constraint_num cn;
3038 enum reg_class cl;
3040 /* If the predicate accepts a unary operator, it means that
3041 we need to reload the operand, but do not do this for
3042 match_operator and friends. */
3043 if (UNARY_P (operand) && *p != 0)
3044 operand = XEXP (operand, 0);
3046 /* If the operand is a SUBREG, extract
3047 the REG or MEM (or maybe even a constant) within.
3048 (Constants can occur as a result of reg_equiv_constant.) */
3050 while (GET_CODE (operand) == SUBREG)
3052 /* Offset only matters when operand is a REG and
3053 it is a hard reg. This is because it is passed
3054 to reg_fits_class_p if it is a REG, and that function
3055 returns 0 for all pseudos. */
3056 if (REG_P (SUBREG_REG (operand))
3057 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3059 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3060 GET_MODE (SUBREG_REG (operand)),
3061 SUBREG_BYTE (operand),
3062 GET_MODE (operand)) < 0)
3063 force_reload = 1;
3064 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3065 GET_MODE (SUBREG_REG (operand)),
3066 SUBREG_BYTE (operand),
3067 GET_MODE (operand));
3069 operand = SUBREG_REG (operand);
3070 /* Force reload if this is a constant or PLUS or if there may
3071 be a problem accessing OPERAND in the outer mode. */
3072 scalar_int_mode inner_mode;
3073 if (CONSTANT_P (operand)
3074 || GET_CODE (operand) == PLUS
3075 /* We must force a reload of paradoxical SUBREGs
3076 of a MEM because the alignment of the inner value
3077 may not be enough to do the outer reference. On
3078 big-endian machines, it may also reference outside
3079 the object.
3081 On machines that extend byte operations, if we have a
3082 SUBREG where both the inner and outer modes are no wider
3083 than a word and the inner mode is narrower, is integral,
3084 and gets extended when loaded from memory, combine.c has
3085 made assumptions about how the machine behaves in such
3086 a register access. If the data is, in fact, in memory we
3087 must always load using the size assumed to be in the
3088 register and let the insn do the different-sized
3089 accesses.
3091 This is doubly true if WORD_REGISTER_OPERATIONS. In
3092 this case eliminate_regs has left non-paradoxical
3093 subregs for push_reload to see. Make sure it does
3094 by forcing the reload.
3096 ??? When is it right at this stage to have a subreg
3097 of a mem that is _not_ to be handled specially? IMO
3098 those should have been reduced to just a mem. */
3099 || ((MEM_P (operand)
3100 || (REG_P (operand)
3101 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3102 && (WORD_REGISTER_OPERATIONS
3103 || ((GET_MODE_BITSIZE (GET_MODE (operand))
3104 < BIGGEST_ALIGNMENT)
3105 && paradoxical_subreg_p (operand_mode[i],
3106 GET_MODE (operand)))
3107 || BYTES_BIG_ENDIAN
3108 || ((GET_MODE_SIZE (operand_mode[i])
3109 <= UNITS_PER_WORD)
3110 && (is_a <scalar_int_mode>
3111 (GET_MODE (operand), &inner_mode))
3112 && (GET_MODE_SIZE (inner_mode)
3113 <= UNITS_PER_WORD)
3114 && paradoxical_subreg_p (operand_mode[i],
3115 inner_mode)
3116 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3118 force_reload = 1;
3121 this_alternative[i] = NO_REGS;
3122 this_alternative_win[i] = 0;
3123 this_alternative_match_win[i] = 0;
3124 this_alternative_offmemok[i] = 0;
3125 this_alternative_earlyclobber[i] = 0;
3126 this_alternative_matches[i] = -1;
3128 /* An empty constraint or empty alternative
3129 allows anything which matched the pattern. */
3130 if (*p == 0 || *p == ',')
3131 win = 1, badop = 0;
3133 /* Scan this alternative's specs for this operand;
3134 set WIN if the operand fits any letter in this alternative.
3135 Otherwise, clear BADOP if this operand could
3136 fit some letter after reloads,
3137 or set WINREG if this operand could fit after reloads
3138 provided the constraint allows some registers. */
3141 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3143 case '\0':
3144 len = 0;
3145 break;
3146 case ',':
3147 c = '\0';
3148 break;
3150 case '?':
3151 reject += 6;
3152 break;
3154 case '!':
3155 reject = 600;
3156 break;
3158 case '#':
3159 /* Ignore rest of this alternative as far as
3160 reloading is concerned. */
3162 p++;
3163 while (*p && *p != ',');
3164 len = 0;
3165 break;
3167 case '0': case '1': case '2': case '3': case '4':
3168 case '5': case '6': case '7': case '8': case '9':
3169 m = strtoul (p, &end, 10);
3170 p = end;
3171 len = 0;
3173 this_alternative_matches[i] = m;
3174 /* We are supposed to match a previous operand.
3175 If we do, we win if that one did.
3176 If we do not, count both of the operands as losers.
3177 (This is too conservative, since most of the time
3178 only a single reload insn will be needed to make
3179 the two operands win. As a result, this alternative
3180 may be rejected when it is actually desirable.) */
3181 if ((swapped && (m != commutative || i != commutative + 1))
3182 /* If we are matching as if two operands were swapped,
3183 also pretend that operands_match had been computed
3184 with the operands swapped.
3185 But if I is the second of those and C is the first,
3186 don't exchange them, because operands_match is valid
3187 only on one side of its diagonal. */
3188 ? (operands_match
3189 [(m == commutative || m == commutative + 1)
3190 ? 2 * commutative + 1 - m : m]
3191 [(i == commutative || i == commutative + 1)
3192 ? 2 * commutative + 1 - i : i])
3193 : operands_match[m][i])
3195 /* If we are matching a non-offsettable address where an
3196 offsettable address was expected, then we must reject
3197 this combination, because we can't reload it. */
3198 if (this_alternative_offmemok[m]
3199 && MEM_P (recog_data.operand[m])
3200 && this_alternative[m] == NO_REGS
3201 && ! this_alternative_win[m])
3202 bad = 1;
3204 did_match = this_alternative_win[m];
3206 else
3208 /* Operands don't match. */
3209 rtx value;
3210 int loc1, loc2;
3211 /* Retroactively mark the operand we had to match
3212 as a loser, if it wasn't already. */
3213 if (this_alternative_win[m])
3214 losers++;
3215 this_alternative_win[m] = 0;
3216 if (this_alternative[m] == NO_REGS)
3217 bad = 1;
3218 /* But count the pair only once in the total badness of
3219 this alternative, if the pair can be a dummy reload.
3220 The pointers in operand_loc are not swapped; swap
3221 them by hand if necessary. */
3222 if (swapped && i == commutative)
3223 loc1 = commutative + 1;
3224 else if (swapped && i == commutative + 1)
3225 loc1 = commutative;
3226 else
3227 loc1 = i;
3228 if (swapped && m == commutative)
3229 loc2 = commutative + 1;
3230 else if (swapped && m == commutative + 1)
3231 loc2 = commutative;
3232 else
3233 loc2 = m;
3234 value
3235 = find_dummy_reload (recog_data.operand[i],
3236 recog_data.operand[m],
3237 recog_data.operand_loc[loc1],
3238 recog_data.operand_loc[loc2],
3239 operand_mode[i], operand_mode[m],
3240 this_alternative[m], -1,
3241 this_alternative_earlyclobber[m]);
3243 if (value != 0)
3244 losers--;
3246 /* This can be fixed with reloads if the operand
3247 we are supposed to match can be fixed with reloads. */
3248 badop = 0;
3249 this_alternative[i] = this_alternative[m];
3251 /* If we have to reload this operand and some previous
3252 operand also had to match the same thing as this
3253 operand, we don't know how to do that. So reject this
3254 alternative. */
3255 if (! did_match || force_reload)
3256 for (j = 0; j < i; j++)
3257 if (this_alternative_matches[j]
3258 == this_alternative_matches[i])
3260 badop = 1;
3261 break;
3263 break;
3265 case 'p':
3266 /* All necessary reloads for an address_operand
3267 were handled in find_reloads_address. */
3268 this_alternative[i]
3269 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3270 ADDRESS, SCRATCH);
3271 win = 1;
3272 badop = 0;
3273 break;
3275 case TARGET_MEM_CONSTRAINT:
3276 if (force_reload)
3277 break;
3278 if (MEM_P (operand)
3279 || (REG_P (operand)
3280 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3281 && reg_renumber[REGNO (operand)] < 0))
3282 win = 1;
3283 if (CONST_POOL_OK_P (operand_mode[i], operand))
3284 badop = 0;
3285 constmemok = 1;
3286 break;
3288 case '<':
3289 if (MEM_P (operand)
3290 && ! address_reloaded[i]
3291 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3292 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3293 win = 1;
3294 break;
3296 case '>':
3297 if (MEM_P (operand)
3298 && ! address_reloaded[i]
3299 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3300 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3301 win = 1;
3302 break;
3304 /* Memory operand whose address is not offsettable. */
3305 case 'V':
3306 if (force_reload)
3307 break;
3308 if (MEM_P (operand)
3309 && ! (ind_levels ? offsettable_memref_p (operand)
3310 : offsettable_nonstrict_memref_p (operand))
3311 /* Certain mem addresses will become offsettable
3312 after they themselves are reloaded. This is important;
3313 we don't want our own handling of unoffsettables
3314 to override the handling of reg_equiv_address. */
3315 && !(REG_P (XEXP (operand, 0))
3316 && (ind_levels == 0
3317 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3318 win = 1;
3319 break;
3321 /* Memory operand whose address is offsettable. */
3322 case 'o':
3323 if (force_reload)
3324 break;
3325 if ((MEM_P (operand)
3326 /* If IND_LEVELS, find_reloads_address won't reload a
3327 pseudo that didn't get a hard reg, so we have to
3328 reject that case. */
3329 && ((ind_levels ? offsettable_memref_p (operand)
3330 : offsettable_nonstrict_memref_p (operand))
3331 /* A reloaded address is offsettable because it is now
3332 just a simple register indirect. */
3333 || address_reloaded[i] == 1))
3334 || (REG_P (operand)
3335 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3336 && reg_renumber[REGNO (operand)] < 0
3337 /* If reg_equiv_address is nonzero, we will be
3338 loading it into a register; hence it will be
3339 offsettable, but we cannot say that reg_equiv_mem
3340 is offsettable without checking. */
3341 && ((reg_equiv_mem (REGNO (operand)) != 0
3342 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3343 || (reg_equiv_address (REGNO (operand)) != 0))))
3344 win = 1;
3345 if (CONST_POOL_OK_P (operand_mode[i], operand)
3346 || MEM_P (operand))
3347 badop = 0;
3348 constmemok = 1;
3349 offmemok = 1;
3350 break;
3352 case '&':
3353 /* Output operand that is stored before the need for the
3354 input operands (and their index registers) is over. */
3355 earlyclobber = 1, this_earlyclobber = 1;
3356 break;
3358 case 'X':
3359 force_reload = 0;
3360 win = 1;
3361 break;
3363 case 'g':
3364 if (! force_reload
3365 /* A PLUS is never a valid operand, but reload can make
3366 it from a register when eliminating registers. */
3367 && GET_CODE (operand) != PLUS
3368 /* A SCRATCH is not a valid operand. */
3369 && GET_CODE (operand) != SCRATCH
3370 && (! CONSTANT_P (operand)
3371 || ! flag_pic
3372 || LEGITIMATE_PIC_OPERAND_P (operand))
3373 && (GENERAL_REGS == ALL_REGS
3374 || !REG_P (operand)
3375 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3376 && reg_renumber[REGNO (operand)] < 0)))
3377 win = 1;
3378 cl = GENERAL_REGS;
3379 goto reg;
3381 default:
3382 cn = lookup_constraint (p);
3383 switch (get_constraint_type (cn))
3385 case CT_REGISTER:
3386 cl = reg_class_for_constraint (cn);
3387 if (cl != NO_REGS)
3388 goto reg;
3389 break;
3391 case CT_CONST_INT:
3392 if (CONST_INT_P (operand)
3393 && (insn_const_int_ok_for_constraint
3394 (INTVAL (operand), cn)))
3395 win = true;
3396 break;
3398 case CT_MEMORY:
3399 if (force_reload)
3400 break;
3401 if (constraint_satisfied_p (operand, cn))
3402 win = 1;
3403 /* If the address was already reloaded,
3404 we win as well. */
3405 else if (MEM_P (operand) && address_reloaded[i] == 1)
3406 win = 1;
3407 /* Likewise if the address will be reloaded because
3408 reg_equiv_address is nonzero. For reg_equiv_mem
3409 we have to check. */
3410 else if (REG_P (operand)
3411 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3412 && reg_renumber[REGNO (operand)] < 0
3413 && ((reg_equiv_mem (REGNO (operand)) != 0
3414 && (constraint_satisfied_p
3415 (reg_equiv_mem (REGNO (operand)),
3416 cn)))
3417 || (reg_equiv_address (REGNO (operand))
3418 != 0)))
3419 win = 1;
3421 /* If we didn't already win, we can reload
3422 constants via force_const_mem, and other
3423 MEMs by reloading the address like for 'o'. */
3424 if (CONST_POOL_OK_P (operand_mode[i], operand)
3425 || MEM_P (operand))
3426 badop = 0;
3427 constmemok = 1;
3428 offmemok = 1;
3429 break;
3431 case CT_SPECIAL_MEMORY:
3432 if (force_reload)
3433 break;
3434 if (constraint_satisfied_p (operand, cn))
3435 win = 1;
3436 /* Likewise if the address will be reloaded because
3437 reg_equiv_address is nonzero. For reg_equiv_mem
3438 we have to check. */
3439 else if (REG_P (operand)
3440 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3441 && reg_renumber[REGNO (operand)] < 0
3442 && reg_equiv_mem (REGNO (operand)) != 0
3443 && (constraint_satisfied_p
3444 (reg_equiv_mem (REGNO (operand)), cn)))
3445 win = 1;
3446 break;
3448 case CT_ADDRESS:
3449 if (constraint_satisfied_p (operand, cn))
3450 win = 1;
3452 /* If we didn't already win, we can reload
3453 the address into a base register. */
3454 this_alternative[i]
3455 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3456 ADDRESS, SCRATCH);
3457 badop = 0;
3458 break;
3460 case CT_FIXED_FORM:
3461 if (constraint_satisfied_p (operand, cn))
3462 win = 1;
3463 break;
3465 break;
3467 reg:
3468 this_alternative[i]
3469 = reg_class_subunion[this_alternative[i]][cl];
3470 if (GET_MODE (operand) == BLKmode)
3471 break;
3472 winreg = 1;
3473 if (REG_P (operand)
3474 && reg_fits_class_p (operand, this_alternative[i],
3475 offset, GET_MODE (recog_data.operand[i])))
3476 win = 1;
3477 break;
3479 while ((p += len), c);
3481 if (swapped == (commutative >= 0 ? 1 : 0))
3482 constraints[i] = p;
3484 /* If this operand could be handled with a reg,
3485 and some reg is allowed, then this operand can be handled. */
3486 if (winreg && this_alternative[i] != NO_REGS
3487 && (win || !class_only_fixed_regs[this_alternative[i]]))
3488 badop = 0;
3490 /* Record which operands fit this alternative. */
3491 this_alternative_earlyclobber[i] = earlyclobber;
3492 if (win && ! force_reload)
3493 this_alternative_win[i] = 1;
3494 else if (did_match && ! force_reload)
3495 this_alternative_match_win[i] = 1;
3496 else
3498 int const_to_mem = 0;
3500 this_alternative_offmemok[i] = offmemok;
3501 losers++;
3502 if (badop)
3503 bad = 1;
3504 /* Alternative loses if it has no regs for a reg operand. */
3505 if (REG_P (operand)
3506 && this_alternative[i] == NO_REGS
3507 && this_alternative_matches[i] < 0)
3508 bad = 1;
3510 /* If this is a constant that is reloaded into the desired
3511 class by copying it to memory first, count that as another
3512 reload. This is consistent with other code and is
3513 required to avoid choosing another alternative when
3514 the constant is moved into memory by this function on
3515 an early reload pass. Note that the test here is
3516 precisely the same as in the code below that calls
3517 force_const_mem. */
3518 if (CONST_POOL_OK_P (operand_mode[i], operand)
3519 && ((targetm.preferred_reload_class (operand,
3520 this_alternative[i])
3521 == NO_REGS)
3522 || no_input_reloads))
3524 const_to_mem = 1;
3525 if (this_alternative[i] != NO_REGS)
3526 losers++;
3529 /* Alternative loses if it requires a type of reload not
3530 permitted for this insn. We can always reload SCRATCH
3531 and objects with a REG_UNUSED note. */
3532 if (GET_CODE (operand) != SCRATCH
3533 && modified[i] != RELOAD_READ && no_output_reloads
3534 && ! find_reg_note (insn, REG_UNUSED, operand))
3535 bad = 1;
3536 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3537 && ! const_to_mem)
3538 bad = 1;
3540 /* If we can't reload this value at all, reject this
3541 alternative. Note that we could also lose due to
3542 LIMIT_RELOAD_CLASS, but we don't check that
3543 here. */
3545 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3547 if (targetm.preferred_reload_class (operand,
3548 this_alternative[i])
3549 == NO_REGS)
3550 reject = 600;
3552 if (operand_type[i] == RELOAD_FOR_OUTPUT
3553 && (targetm.preferred_output_reload_class (operand,
3554 this_alternative[i])
3555 == NO_REGS))
3556 reject = 600;
3559 /* We prefer to reload pseudos over reloading other things,
3560 since it may be possible to eliminate such reloads later.
3561 If we are reloading a SCRATCH, we won't be generating any
3562 insns, just using a register, so it is also preferred.
3563 So bump REJECT in other cases. Don't do this in the
3564 case where we are forcing a constant into memory and
3565 it will then win, since we don't want a different
3566 alternative to match in that case. */
3567 if (! (REG_P (operand)
3568 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3569 && GET_CODE (operand) != SCRATCH
3570 && ! (const_to_mem && constmemok))
3571 reject += 2;
3573 /* Input reloads can be inherited more often than output
3574 reloads can be removed, so penalize output reloads. */
3575 if (operand_type[i] != RELOAD_FOR_INPUT
3576 && GET_CODE (operand) != SCRATCH)
3577 reject++;
3580 /* If this operand is a pseudo register that didn't get
3581 a hard reg and this alternative accepts some
3582 register, see if the class that we want is a subset
3583 of the preferred class for this register. If not,
3584 but it intersects that class, use the preferred class
3585 instead. If it does not intersect the preferred
3586 class, show that usage of this alternative should be
3587 discouraged; it will be discouraged more still if the
3588 register is `preferred or nothing'. We do this
3589 because it increases the chance of reusing our spill
3590 register in a later insn and avoiding a pair of
3591 memory stores and loads.
3593 Don't bother with this if this alternative will
3594 accept this operand.
3596 Don't do this for a multiword operand, since it is
3597 only a small win and has the risk of requiring more
3598 spill registers, which could cause a large loss.
3600 Don't do this if the preferred class has only one
3601 register because we might otherwise exhaust the
3602 class. */
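 /* For example, if this alternative wants GENERAL_REGS but the pseudo's
    preferred class is a smaller class contained in it, THIS_ALTERNATIVE[i]
    is narrowed to the preferred class so that the spill register chosen
    later is more likely to be reusable.  */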
3604 if (! win && ! did_match
3605 && this_alternative[i] != NO_REGS
3606 && GET_MODE_SIZE (operand_mode[i]) <= UNITS_PER_WORD
3607 && reg_class_size [(int) preferred_class[i]] > 0
3608 && ! small_register_class_p (preferred_class[i]))
3610 if (! reg_class_subset_p (this_alternative[i],
3611 preferred_class[i]))
3613 /* Since we don't have a way of forming the intersection,
3614 we just do something special if the preferred class
3615 is a subset of the class we have; that's the most
3616 common case anyway. */
3617 if (reg_class_subset_p (preferred_class[i],
3618 this_alternative[i]))
3619 this_alternative[i] = preferred_class[i];
3620 else
3621 reject += (2 + 2 * pref_or_nothing[i]);
3626 /* Now see if any output operands that are marked "earlyclobber"
3627 in this alternative conflict with any input operands
3628 or any memory addresses. */
3630 for (i = 0; i < noperands; i++)
3631 if (this_alternative_earlyclobber[i]
3632 && (this_alternative_win[i] || this_alternative_match_win[i]))
3634 struct decomposition early_data;
3636 early_data = decompose (recog_data.operand[i]);
3638 gcc_assert (modified[i] != RELOAD_READ);
3640 if (this_alternative[i] == NO_REGS)
3642 this_alternative_earlyclobber[i] = 0;
3643 gcc_assert (this_insn_is_asm);
3644 error_for_asm (this_insn,
3645 "%<&%> constraint used with no register class");
3648 for (j = 0; j < noperands; j++)
3649 /* Is this an input operand or a memory ref? */
3650 if ((MEM_P (recog_data.operand[j])
3651 || modified[j] != RELOAD_WRITE)
3652 && j != i
3653 /* Ignore things like match_operator operands. */
3654 && !recog_data.is_operator[j]
3655 /* Don't count an input operand that is constrained to match
3656 the early clobber operand. */
3657 && ! (this_alternative_matches[j] == i
3658 && rtx_equal_p (recog_data.operand[i],
3659 recog_data.operand[j]))
3660 /* Is it altered by storing the earlyclobber operand? */
3661 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3662 early_data))
3664 /* If the output is in a non-empty few-regs class,
3665 it's costly to reload it, so reload the input instead. */
3666 if (small_register_class_p (this_alternative[i])
3667 && (REG_P (recog_data.operand[j])
3668 || GET_CODE (recog_data.operand[j]) == SUBREG))
3670 losers++;
3671 this_alternative_win[j] = 0;
3672 this_alternative_match_win[j] = 0;
3674 else
3675 break;
3677 /* If an earlyclobber operand conflicts with something,
3678 it must be reloaded, so request this and count the cost. */
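 /* Reaching this test with J < NOPERANDS means the scan above found a
    conflicting operand that we chose not to reload, so the earlyclobber
    operand itself is counted as a loser and reloaded.  */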
3679 if (j != noperands)
3681 losers++;
3682 this_alternative_win[i] = 0;
3683 this_alternative_match_win[j] = 0;
3684 for (j = 0; j < noperands; j++)
3685 if (this_alternative_matches[j] == i
3686 && this_alternative_match_win[j])
3688 this_alternative_win[j] = 0;
3689 this_alternative_match_win[j] = 0;
3690 losers++;
3695 /* If one alternative accepts all the operands, no reload required,
3696 choose that alternative; don't consider the remaining ones. */
3697 if (losers == 0)
3699 /* Unswap these so that they are never swapped at `finish'. */
3700 if (swapped)
3702 recog_data.operand[commutative] = substed_operand[commutative];
3703 recog_data.operand[commutative + 1]
3704 = substed_operand[commutative + 1];
3706 for (i = 0; i < noperands; i++)
3708 goal_alternative_win[i] = this_alternative_win[i];
3709 goal_alternative_match_win[i] = this_alternative_match_win[i];
3710 goal_alternative[i] = this_alternative[i];
3711 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3712 goal_alternative_matches[i] = this_alternative_matches[i];
3713 goal_alternative_earlyclobber[i]
3714 = this_alternative_earlyclobber[i];
3716 goal_alternative_number = this_alternative_number;
3717 goal_alternative_swapped = swapped;
3718 goal_earlyclobber = this_earlyclobber;
3719 goto finish;
3722 /* REJECT, set by the ! and ? constraint characters and when a register
3723 would be reloaded into a non-preferred class, discourages the use of
3724 this alternative for a reload goal. REJECT is incremented by six
3725 for each ? and two for each non-preferred class. */
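 /* For example, an alternative needing one reload plus a single '?'
    (LOSERS == 1, REJECT == 6) scores 1 * 6 + 6 == 12, the same as an
    alternative needing two reloads and no '?' at all.  */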
3726 losers = losers * 6 + reject;
3728 /* If this alternative can be made to work by reloading,
3729 and it needs less reloading than the others checked so far,
3730 record it as the chosen goal for reloading. */
3731 if (! bad)
3733 if (best > losers)
3735 for (i = 0; i < noperands; i++)
3737 goal_alternative[i] = this_alternative[i];
3738 goal_alternative_win[i] = this_alternative_win[i];
3739 goal_alternative_match_win[i]
3740 = this_alternative_match_win[i];
3741 goal_alternative_offmemok[i]
3742 = this_alternative_offmemok[i];
3743 goal_alternative_matches[i] = this_alternative_matches[i];
3744 goal_alternative_earlyclobber[i]
3745 = this_alternative_earlyclobber[i];
3747 goal_alternative_swapped = swapped;
3748 best = losers;
3749 goal_alternative_number = this_alternative_number;
3750 goal_earlyclobber = this_earlyclobber;
3754 if (swapped)
3756 /* If the commutative operands have been swapped, swap
3757 them back in order to check the next alternative. */
3758 recog_data.operand[commutative] = substed_operand[commutative];
3759 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3760 /* Unswap the duplicates too. */
3761 for (i = 0; i < recog_data.n_dups; i++)
3762 if (recog_data.dup_num[i] == commutative
3763 || recog_data.dup_num[i] == commutative + 1)
3764 *recog_data.dup_loc[i]
3765 = recog_data.operand[(int) recog_data.dup_num[i]];
3767 /* Unswap the operand related information as well. */
3768 std::swap (preferred_class[commutative],
3769 preferred_class[commutative + 1]);
3770 std::swap (pref_or_nothing[commutative],
3771 pref_or_nothing[commutative + 1]);
3772 std::swap (address_reloaded[commutative],
3773 address_reloaded[commutative + 1]);
3778 /* The operands don't meet the constraints.
3779 goal_alternative describes the alternative
3780 that we could reach by reloading the fewest operands.
3781 Reload so as to fit it. */
3783 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3785 /* No alternative works with reloads?? */
3786 if (insn_code_number >= 0)
3787 fatal_insn ("unable to generate reloads for:", insn);
3788 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3789 /* Avoid further trouble with this insn. */
3790 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3791 n_reloads = 0;
3792 return 0;
3795 /* Jump to `finish' from above if all operands are valid already.
3796 In that case, goal_alternative_win is all 1. */
3797 finish:
3799 /* Right now, for any pair of operands I and J that are required to match,
3800 with I < J,
3801 goal_alternative_matches[J] is I.
3802 Set up goal_alternative_matched as the inverse function:
3803 goal_alternative_matched[I] = J. */
3805 for (i = 0; i < noperands; i++)
3806 goal_alternative_matched[i] = -1;
3808 for (i = 0; i < noperands; i++)
3809 if (! goal_alternative_win[i]
3810 && goal_alternative_matches[i] >= 0)
3811 goal_alternative_matched[goal_alternative_matches[i]] = i;
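 /* From here on, an operand that was satisfied only through a matching
    constraint counts as a full win.  */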
3813 for (i = 0; i < noperands; i++)
3814 goal_alternative_win[i] |= goal_alternative_match_win[i];
3816 /* If the best alternative is with operands 1 and 2 swapped,
3817 consider them swapped before reporting the reloads. Update the
3818 operand numbers of any reloads already pushed. */
3820 if (goal_alternative_swapped)
3822 std::swap (substed_operand[commutative],
3823 substed_operand[commutative + 1]);
3824 std::swap (recog_data.operand[commutative],
3825 recog_data.operand[commutative + 1]);
3826 std::swap (*recog_data.operand_loc[commutative],
3827 *recog_data.operand_loc[commutative + 1]);
3829 for (i = 0; i < recog_data.n_dups; i++)
3830 if (recog_data.dup_num[i] == commutative
3831 || recog_data.dup_num[i] == commutative + 1)
3832 *recog_data.dup_loc[i]
3833 = recog_data.operand[(int) recog_data.dup_num[i]];
3835 for (i = 0; i < n_reloads; i++)
3837 if (rld[i].opnum == commutative)
3838 rld[i].opnum = commutative + 1;
3839 else if (rld[i].opnum == commutative + 1)
3840 rld[i].opnum = commutative;
3844 for (i = 0; i < noperands; i++)
3846 operand_reloadnum[i] = -1;
3848 /* If this is an earlyclobber operand, we need to widen the scope.
3849 The reload must remain valid from the start of the insn being
3850 reloaded until after the operand is stored into its destination.
3851 We approximate this with RELOAD_OTHER even though we know that we
3852 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3854 One special case that is worth checking is when we have an
3855 output that is earlyclobber but isn't used past the insn (typically
3856 a SCRATCH). In this case, we only need have the reload live
3857 through the insn itself, but not for any of our input or output
3858 reloads.
3859 But we must not accidentally narrow the scope of an existing
3860 RELOAD_OTHER reload - leave these alone.
3862 In any case, any reloads needed to address this operand can remain
3863 however they were previously categorized. */
3865 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3866 operand_type[i]
3867 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3868 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3871 /* Any constants that aren't allowed and can't be reloaded
3872 into registers are here changed into memory references. */
3873 for (i = 0; i < noperands; i++)
3874 if (! goal_alternative_win[i])
3876 rtx op = recog_data.operand[i];
3877 rtx subreg = NULL_RTX;
3878 rtx plus = NULL_RTX;
3879 machine_mode mode = operand_mode[i];
3881 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3882 push_reload so we have to let them pass here. */
3883 if (GET_CODE (op) == SUBREG)
3885 subreg = op;
3886 op = SUBREG_REG (op);
3887 mode = GET_MODE (op);
3890 if (GET_CODE (op) == PLUS)
3892 plus = op;
3893 op = XEXP (op, 1);
3896 if (CONST_POOL_OK_P (mode, op)
3897 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3898 == NO_REGS)
3899 || no_input_reloads))
3901 int this_address_reloaded;
3902 rtx tem = force_const_mem (mode, op);
3904 /* If we stripped a SUBREG or a PLUS above add it back. */
3905 if (plus != NULL_RTX)
3906 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3908 if (subreg != NULL_RTX)
3909 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3911 this_address_reloaded = 0;
3912 substed_operand[i] = recog_data.operand[i]
3913 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3914 0, insn, &this_address_reloaded);
3916 /* If the alternative accepts constant pool refs directly
3917 there will be no reload needed at all. */
3918 if (plus == NULL_RTX
3919 && subreg == NULL_RTX
3920 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3921 ? substed_operand[i]
3922 : NULL,
3923 recog_data.constraints[i],
3924 goal_alternative_number))
3925 goal_alternative_win[i] = 1;
3929 /* Record the values of the earlyclobber operands for the caller. */
3930 if (goal_earlyclobber)
3931 for (i = 0; i < noperands; i++)
3932 if (goal_alternative_earlyclobber[i])
3933 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3935 /* Now record reloads for all the operands that need them. */
3936 for (i = 0; i < noperands; i++)
3937 if (! goal_alternative_win[i])
3939 /* Operands that match previous ones have already been handled. */
3940 if (goal_alternative_matches[i] >= 0)
3942 /* Handle an operand with a nonoffsettable address
3943 appearing where an offsettable address will do
3944 by reloading the address into a base register.
3946 ??? We can also do this when the operand is a register and
3947 reg_equiv_mem is not offsettable, but this is a bit tricky,
3948 so we don't bother with it. It may not be worth doing. */
3949 else if (goal_alternative_matched[i] == -1
3950 && goal_alternative_offmemok[i]
3951 && MEM_P (recog_data.operand[i]))
3953 /* If the address to be reloaded is a VOIDmode constant,
3954 use the default address mode as the mode of the reload register,
3955 as would have been done by find_reloads_address. */
3956 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
3957 machine_mode address_mode;
3959 address_mode = get_address_mode (recog_data.operand[i]);
3960 operand_reloadnum[i]
3961 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
3962 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
3963 base_reg_class (VOIDmode, as, MEM, SCRATCH),
3964 address_mode,
3965 VOIDmode, 0, 0, i, RELOAD_OTHER);
3966 rld[operand_reloadnum[i]].inc
3967 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
3969 /* If this operand is an output, we will have made any
3970 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
3971 now we are treating part of the operand as an input, so
3972 we must change these to RELOAD_FOR_OTHER_ADDRESS. */
3974 if (modified[i] == RELOAD_WRITE)
3976 for (j = 0; j < n_reloads; j++)
3978 if (rld[j].opnum == i)
3980 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
3981 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
3982 else if (rld[j].when_needed
3983 == RELOAD_FOR_OUTADDR_ADDRESS)
3984 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
3989 else if (goal_alternative_matched[i] == -1)
3991 operand_reloadnum[i]
3992 = push_reload ((modified[i] != RELOAD_WRITE
3993 ? recog_data.operand[i] : 0),
3994 (modified[i] != RELOAD_READ
3995 ? recog_data.operand[i] : 0),
3996 (modified[i] != RELOAD_WRITE
3997 ? recog_data.operand_loc[i] : 0),
3998 (modified[i] != RELOAD_READ
3999 ? recog_data.operand_loc[i] : 0),
4000 (enum reg_class) goal_alternative[i],
4001 (modified[i] == RELOAD_WRITE
4002 ? VOIDmode : operand_mode[i]),
4003 (modified[i] == RELOAD_READ
4004 ? VOIDmode : operand_mode[i]),
4005 (insn_code_number < 0 ? 0
4006 : insn_data[insn_code_number].operand[i].strict_low),
4007 0, i, operand_type[i]);
4009 /* In a matching pair of operands, one must be input only
4010 and the other must be output only.
4011 Pass the input operand as IN and the other as OUT. */
4012 else if (modified[i] == RELOAD_READ
4013 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4015 operand_reloadnum[i]
4016 = push_reload (recog_data.operand[i],
4017 recog_data.operand[goal_alternative_matched[i]],
4018 recog_data.operand_loc[i],
4019 recog_data.operand_loc[goal_alternative_matched[i]],
4020 (enum reg_class) goal_alternative[i],
4021 operand_mode[i],
4022 operand_mode[goal_alternative_matched[i]],
4023 0, 0, i, RELOAD_OTHER);
4024 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4026 else if (modified[i] == RELOAD_WRITE
4027 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4029 operand_reloadnum[goal_alternative_matched[i]]
4030 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4031 recog_data.operand[i],
4032 recog_data.operand_loc[goal_alternative_matched[i]],
4033 recog_data.operand_loc[i],
4034 (enum reg_class) goal_alternative[i],
4035 operand_mode[goal_alternative_matched[i]],
4036 operand_mode[i],
4037 0, 0, i, RELOAD_OTHER);
4038 operand_reloadnum[i] = output_reloadnum;
4040 else
4042 gcc_assert (insn_code_number < 0);
4043 error_for_asm (insn, "inconsistent operand constraints "
4044 "in an %<asm%>");
4045 /* Avoid further trouble with this insn. */
4046 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4047 n_reloads = 0;
4048 return 0;
4051 else if (goal_alternative_matched[i] < 0
4052 && goal_alternative_matches[i] < 0
4053 && address_operand_reloaded[i] != 1
4054 && optimize)
4056 /* For each non-matching operand that's a MEM or a pseudo-register
4057 that didn't get a hard register, make an optional reload.
4058 This may get done even if the insn needs no reloads otherwise. */
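 /* The 1 passed as the OPTIONAL argument of push_reload below marks the
    reload as optional: later processing is free to drop it if no
    convenient reload register is found, so this is only a potential
    improvement, never a requirement.  */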
4060 rtx operand = recog_data.operand[i];
4062 while (GET_CODE (operand) == SUBREG)
4063 operand = SUBREG_REG (operand);
4064 if ((MEM_P (operand)
4065 || (REG_P (operand)
4066 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4067 /* If this is only for an output, the optional reload would not
4068 actually cause us to use a register now, just note that
4069 something is stored here. */
4070 && (goal_alternative[i] != NO_REGS
4071 || modified[i] == RELOAD_WRITE)
4072 && ! no_input_reloads
4073 /* An optional output reload might allow us to delete INSN later.
4074 We mustn't make in-out reloads on insns that do not permit
4075 output reloads.
4076 If this is an asm, we can't delete it; we must not even call
4077 push_reload for an optional output reload in this case,
4078 because we can't be sure that the constraint allows a register,
4079 and push_reload verifies the constraints for asms. */
4080 && (modified[i] == RELOAD_READ
4081 || (! no_output_reloads && ! this_insn_is_asm)))
4082 operand_reloadnum[i]
4083 = push_reload ((modified[i] != RELOAD_WRITE
4084 ? recog_data.operand[i] : 0),
4085 (modified[i] != RELOAD_READ
4086 ? recog_data.operand[i] : 0),
4087 (modified[i] != RELOAD_WRITE
4088 ? recog_data.operand_loc[i] : 0),
4089 (modified[i] != RELOAD_READ
4090 ? recog_data.operand_loc[i] : 0),
4091 (enum reg_class) goal_alternative[i],
4092 (modified[i] == RELOAD_WRITE
4093 ? VOIDmode : operand_mode[i]),
4094 (modified[i] == RELOAD_READ
4095 ? VOIDmode : operand_mode[i]),
4096 (insn_code_number < 0 ? 0
4097 : insn_data[insn_code_number].operand[i].strict_low),
4098 1, i, operand_type[i]);
4099 /* If a memory reference remains (either as a MEM or a pseudo that
4100 did not get a hard register), yet we can't make an optional
4101 reload, check if this is actually a pseudo register reference;
4102 we then need to emit a USE and/or a CLOBBER so that reload
4103 inheritance will do the right thing. */
4104 else if (replace
4105 && (MEM_P (operand)
4106 || (REG_P (operand)
4107 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4108 && reg_renumber [REGNO (operand)] < 0)))
4110 operand = *recog_data.operand_loc[i];
4112 while (GET_CODE (operand) == SUBREG)
4113 operand = SUBREG_REG (operand);
4114 if (REG_P (operand))
4116 if (modified[i] != RELOAD_WRITE)
4117 /* We mark the USE with QImode so that we recognize
4118 it as one that can be safely deleted at the end
4119 of reload. */
4120 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4121 insn), QImode);
4122 if (modified[i] != RELOAD_READ)
4123 emit_insn_after (gen_clobber (operand), insn);
4127 else if (goal_alternative_matches[i] >= 0
4128 && goal_alternative_win[goal_alternative_matches[i]]
4129 && modified[i] == RELOAD_READ
4130 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4131 && ! no_input_reloads && ! no_output_reloads
4132 && optimize)
4134 /* Similarly, make an optional reload for a pair of matching
4135 objects that are in MEM or a pseudo that didn't get a hard reg. */
4137 rtx operand = recog_data.operand[i];
4139 while (GET_CODE (operand) == SUBREG)
4140 operand = SUBREG_REG (operand);
4141 if ((MEM_P (operand)
4142 || (REG_P (operand)
4143 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4144 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4145 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4146 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4147 recog_data.operand[i],
4148 recog_data.operand_loc[goal_alternative_matches[i]],
4149 recog_data.operand_loc[i],
4150 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4151 operand_mode[goal_alternative_matches[i]],
4152 operand_mode[i],
4153 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4156 /* Perform whatever substitutions on the operands we are supposed
4157 to make due to commutativity or replacement of registers
4158 with equivalent constants or memory slots. */
4160 for (i = 0; i < noperands; i++)
4162 /* We only do this on the last pass through reload, because it is
4163 possible for some data (like reg_equiv_address) to be changed during
4164 later passes. Moreover, we lose the opportunity to get a useful
4165 reload_{in,out}_reg when we do these replacements. */
4167 if (replace)
4169 rtx substitution = substed_operand[i];
4171 *recog_data.operand_loc[i] = substitution;
4173 /* If we're replacing an operand with a LABEL_REF, we need to
4174 make sure that there's a REG_LABEL_OPERAND note attached to
4175 this instruction. */
4176 if (GET_CODE (substitution) == LABEL_REF
4177 && !find_reg_note (insn, REG_LABEL_OPERAND,
4178 label_ref_label (substitution))
4179 /* For a JUMP_P, if it was a branch target it must have
4180 already been recorded as such. */
4181 && (!JUMP_P (insn)
4182 || !label_is_jump_target_p (label_ref_label (substitution),
4183 insn)))
4185 add_reg_note (insn, REG_LABEL_OPERAND,
4186 label_ref_label (substitution));
4187 if (LABEL_P (label_ref_label (substitution)))
4188 ++LABEL_NUSES (label_ref_label (substitution));
4192 else
4193 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4196 /* If this insn pattern contains any MATCH_DUP's, make sure that
4197 they will be substituted if the operands they match are substituted.
4198 Also do now any substitutions we already did on the operands.
4200 Don't do this if we aren't making replacements because we might be
4201 propagating things allocated by frame pointer elimination into places
4202 it doesn't expect. */
4204 if (insn_code_number >= 0 && replace)
4205 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4207 int opno = recog_data.dup_num[i];
4208 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4209 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4212 #if 0
4213 /* This loses because reloading of prior insns can invalidate the equivalence
4214 (or at least find_equiv_reg isn't smart enough to find it any more),
4215 causing this insn to need more reload regs than it needed before.
4216 It may be too late to make the reload regs available.
4217 Now this optimization is done safely in choose_reload_regs. */
4219 /* For each reload of a reg into some other class of reg,
4220 search for an existing equivalent reg (same value now) in the right class.
4221 We can use it as long as we don't need to change its contents. */
4222 for (i = 0; i < n_reloads; i++)
4223 if (rld[i].reg_rtx == 0
4224 && rld[i].in != 0
4225 && REG_P (rld[i].in)
4226 && rld[i].out == 0)
4228 rld[i].reg_rtx
4229 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4230 static_reload_reg_p, 0, rld[i].inmode);
4231 /* Prevent generation of insn to load the value
4232 because the one we found already has the value. */
4233 if (rld[i].reg_rtx)
4234 rld[i].in = rld[i].reg_rtx;
4236 #endif
4238 /* If we detected error and replaced asm instruction by USE, forget about the
4239 reloads. */
4240 if (GET_CODE (PATTERN (insn)) == USE
4241 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4242 n_reloads = 0;
4244 /* Perhaps an output reload can be combined with another
4245 to reduce needs by one. */
4246 if (!goal_earlyclobber)
4247 combine_reloads ();
4249 /* If we have a pair of reloads for parts of an address, they are reloading
4250 the same object, the operands themselves were not reloaded, and they
4251 are for two operands that are supposed to match, merge the reloads and
4252 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4254 for (i = 0; i < n_reloads; i++)
4256 int k;
4258 for (j = i + 1; j < n_reloads; j++)
4259 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4260 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4261 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4262 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4263 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4264 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4265 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4266 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4267 && rtx_equal_p (rld[i].in, rld[j].in)
4268 && (operand_reloadnum[rld[i].opnum] < 0
4269 || rld[operand_reloadnum[rld[i].opnum]].optional)
4270 && (operand_reloadnum[rld[j].opnum] < 0
4271 || rld[operand_reloadnum[rld[j].opnum]].optional)
4272 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4273 || (goal_alternative_matches[rld[j].opnum]
4274 == rld[i].opnum)))
4276 for (k = 0; k < n_replacements; k++)
4277 if (replacements[k].what == j)
4278 replacements[k].what = i;
4280 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4281 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4282 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4283 else
4284 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4285 rld[j].in = 0;
4289 /* Scan all the reloads and update their type.
4290 If a reload is for the address of an operand and we didn't reload
4291 that operand, change the type. Similarly, change the operand number
4292 of a reload when two operands match. If a reload is optional, treat it
4293 as though the operand isn't reloaded.
4295 ??? This latter case is somewhat odd because if we do the optional
4296 reload, it means the object is hanging around. Thus we need only
4297 do the address reload if the optional reload was NOT done.
4299 Change secondary reloads to be the address type of their operand, not
4300 the normal type.
4302 If an operand's reload is now RELOAD_OTHER, change any
4303 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4304 RELOAD_FOR_OTHER_ADDRESS. */
4306 for (i = 0; i < n_reloads; i++)
4308 if (rld[i].secondary_p
4309 && rld[i].when_needed == operand_type[rld[i].opnum])
4310 rld[i].when_needed = address_type[rld[i].opnum];
4312 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4313 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4314 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4315 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4316 && (operand_reloadnum[rld[i].opnum] < 0
4317 || rld[operand_reloadnum[rld[i].opnum]].optional))
4319 /* If we have a secondary reload to go along with this reload,
4320 change its type to RELOAD_FOR_OPADDR_ADDR. */
4322 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4323 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4324 && rld[i].secondary_in_reload != -1)
4326 int secondary_in_reload = rld[i].secondary_in_reload;
4328 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4330 /* If there's a tertiary reload we have to change it also. */
4331 if (secondary_in_reload > 0
4332 && rld[secondary_in_reload].secondary_in_reload != -1)
4333 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4334 = RELOAD_FOR_OPADDR_ADDR;
4337 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4338 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4339 && rld[i].secondary_out_reload != -1)
4341 int secondary_out_reload = rld[i].secondary_out_reload;
4343 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4345 /* If there's a tertiary reload we have to change it also. */
4346 if (secondary_out_reload
4347 && rld[secondary_out_reload].secondary_out_reload != -1)
4348 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4349 = RELOAD_FOR_OPADDR_ADDR;
4352 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4353 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4354 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4355 else
4356 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4359 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4360 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4361 && operand_reloadnum[rld[i].opnum] >= 0
4362 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4363 == RELOAD_OTHER))
4364 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4366 if (goal_alternative_matches[rld[i].opnum] >= 0)
4367 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4370 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4371 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4372 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4374 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4375 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4376 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4377 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4378 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4379 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4380 This is complicated by the fact that a single operand can have more
4381 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4382 choose_reload_regs without affecting code quality, and cases that
4383 actually fail are extremely rare, so it turns out to be better to fix
4384 the problem here by not generating cases that choose_reload_regs will
4385 fail for. */
4386 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4387 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4388 a single operand.
4389 We can reduce the register pressure by exploiting the fact that a
4390 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4391 does not conflict with any of them, if it is only used for the first of
4392 the RELOAD_FOR_X_ADDRESS reloads. */
4394 int first_op_addr_num = -2;
4395 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4396 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4397 int need_change = 0;
4398 /* We use first_op_addr_num and the contents of the above arrays
4399 first as flags - -2 means no instance encountered, -1 means exactly
4400 one instance encountered.
4401 If more than one instance has been encountered, we store the reload
4402 number of the first reload of the kind in question; reload numbers
4403 are known to be non-negative. */
4404 for (i = 0; i < noperands; i++)
4405 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4406 for (i = n_reloads - 1; i >= 0; i--)
4408 switch (rld[i].when_needed)
4410 case RELOAD_FOR_OPERAND_ADDRESS:
4411 if (++first_op_addr_num >= 0)
4413 first_op_addr_num = i;
4414 need_change = 1;
4416 break;
4417 case RELOAD_FOR_INPUT_ADDRESS:
4418 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4420 first_inpaddr_num[rld[i].opnum] = i;
4421 need_change = 1;
4423 break;
4424 case RELOAD_FOR_OUTPUT_ADDRESS:
4425 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4427 first_outpaddr_num[rld[i].opnum] = i;
4428 need_change = 1;
4430 break;
4431 default:
4432 break;
4436 if (need_change)
4438 for (i = 0; i < n_reloads; i++)
4440 int first_num;
4441 enum reload_type type;
4443 switch (rld[i].when_needed)
4445 case RELOAD_FOR_OPADDR_ADDR:
4446 first_num = first_op_addr_num;
4447 type = RELOAD_FOR_OPERAND_ADDRESS;
4448 break;
4449 case RELOAD_FOR_INPADDR_ADDRESS:
4450 first_num = first_inpaddr_num[rld[i].opnum];
4451 type = RELOAD_FOR_INPUT_ADDRESS;
4452 break;
4453 case RELOAD_FOR_OUTADDR_ADDRESS:
4454 first_num = first_outpaddr_num[rld[i].opnum];
4455 type = RELOAD_FOR_OUTPUT_ADDRESS;
4456 break;
4457 default:
4458 continue;
4460 if (first_num < 0)
4461 continue;
4462 else if (i > first_num)
4463 rld[i].when_needed = type;
4464 else
4466 /* Check if the only TYPE reload that uses reload I is
4467 reload FIRST_NUM. */
4468 for (j = n_reloads - 1; j > first_num; j--)
4470 if (rld[j].when_needed == type
4471 && (rld[i].secondary_p
4472 ? rld[j].secondary_in_reload == i
4473 : reg_mentioned_p (rld[i].in, rld[j].in)))
4475 rld[i].when_needed = type;
4476 break;
4484 /* See if we have any reloads that are now allowed to be merged
4485 because we've changed when the reload is needed to
4486 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4487 check for the most common cases. */
4489 for (i = 0; i < n_reloads; i++)
4490 if (rld[i].in != 0 && rld[i].out == 0
4491 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4492 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4493 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4494 for (j = 0; j < n_reloads; j++)
4495 if (i != j && rld[j].in != 0 && rld[j].out == 0
4496 && rld[j].when_needed == rld[i].when_needed
4497 && MATCHES (rld[i].in, rld[j].in)
4498 && rld[i].rclass == rld[j].rclass
4499 && !rld[i].nocombine && !rld[j].nocombine
4500 && rld[i].reg_rtx == rld[j].reg_rtx)
4502 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4503 transfer_replacements (i, j);
4504 rld[j].in = 0;
4507 /* If we made any reloads for addresses, see if they violate a
4508 "no input reloads" requirement for this insn. But loads that we
4509 do after the insn (such as for output addresses) are fine. */
4510 if (HAVE_cc0 && no_input_reloads)
4511 for (i = 0; i < n_reloads; i++)
4512 gcc_assert (rld[i].in == 0
4513 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS
4514 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS);
4516 /* Compute reload_mode and reload_nregs. */
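 /* The reload register must be wide enough for whichever of the input
    and output modes is wider, so the output mode is used when the input
    mode is missing or narrower.  */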
4517 for (i = 0; i < n_reloads; i++)
4519 rld[i].mode = rld[i].inmode;
4520 if (rld[i].mode == VOIDmode
4521 || partial_subreg_p (rld[i].mode, rld[i].outmode))
4522 rld[i].mode = rld[i].outmode;
4524 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
4527 /* Special case a simple move with an input reload and a
4528 destination of a hard reg: if the hard reg is ok, use it. */
4529 for (i = 0; i < n_reloads; i++)
4530 if (rld[i].when_needed == RELOAD_FOR_INPUT
4531 && GET_CODE (PATTERN (insn)) == SET
4532 && REG_P (SET_DEST (PATTERN (insn)))
4533 && (SET_SRC (PATTERN (insn)) == rld[i].in
4534 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4535 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4537 rtx dest = SET_DEST (PATTERN (insn));
4538 unsigned int regno = REGNO (dest);
4540 if (regno < FIRST_PSEUDO_REGISTER
4541 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4542 && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4544 int nr = hard_regno_nregs (regno, rld[i].mode);
4545 int ok = 1, nri;
4547 for (nri = 1; nri < nr; nri ++)
4548 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4550 ok = 0;
4551 break;
4554 if (ok)
4555 rld[i].reg_rtx = dest;
4559 return retval;
4562 /* Return true if alternative number ALTNUM in constraint-string
4563 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4564 MEM gives the reference if its address hasn't been fully reloaded,
4565 otherwise it is NULL. */
4567 static bool
4568 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4569 const char *constraint, int altnum)
4571 int c;
4573 /* Skip alternatives before the one requested. */
4574 while (altnum > 0)
4576 while (*constraint++ != ',')
4578 altnum--;
4580 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4581 If one of them is present, this alternative accepts the result of
4582 passing a constant-pool reference through find_reloads_toplev.
4584 The same is true of extra memory constraints if the address
4585 was reloaded into a register. However, the target may elect
4586 to disallow the original constant address, forcing it to be
4587 reloaded into a register instead. */
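 /* A null MEM below means the address was fully reloaded, in which case
    any memory constraint in this alternative is acceptable; otherwise the
    constraint is asked whether it accepts the particular reference.  */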
4588 for (; (c = *constraint) && c != ',' && c != '#';
4589 constraint += CONSTRAINT_LEN (c, constraint))
4591 enum constraint_num cn = lookup_constraint (constraint);
4592 if (insn_extra_memory_constraint (cn)
4593 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4594 return true;
4596 return false;
4599 /* Scan X for memory references and scan the addresses for reloading.
4600 Also check for references to "constant" regs that we want to eliminate
4601 and replaces them with the values they stand for.
4602 We may alter X destructively if it contains a reference to such.
4603 If X is just a constant reg, we return the equivalent value
4604 instead of X.
4606 IND_LEVELS says how many levels of indirect addressing this machine
4607 supports.
4609 OPNUM and TYPE identify the purpose of the reload.
4611 IS_SET_DEST is true if X is the destination of a SET, which is not
4612 appropriate to be replaced by a constant.
4614 INSN, if nonzero, is the insn in which we do the reload. It is used
4615 to determine if we may generate output reloads, and where to put USEs
4616 for pseudos that we have to replace with stack slots.
4618 ADDRESS_RELOADED. If nonzero, is a pointer to where we put the
4619 result of find_reloads_address. */
4621 static rtx
4622 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4623 int ind_levels, int is_set_dest, rtx_insn *insn,
4624 int *address_reloaded)
4626 RTX_CODE code = GET_CODE (x);
4628 const char *fmt = GET_RTX_FORMAT (code);
4629 int i;
4630 int copied;
4632 if (code == REG)
4634 /* This code is duplicated for speed in find_reloads. */
4635 int regno = REGNO (x);
4636 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4637 x = reg_equiv_constant (regno);
4638 #if 0
4639 /* This creates (subreg (mem...)) which would cause an unnecessary
4640 reload of the mem. */
4641 else if (reg_equiv_mem (regno) != 0)
4642 x = reg_equiv_mem (regno);
4643 #endif
4644 else if (reg_equiv_memory_loc (regno)
4645 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4647 rtx mem = make_memloc (x, regno);
4648 if (reg_equiv_address (regno)
4649 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4651 /* If this is not a toplevel operand, find_reloads doesn't see
4652 this substitution. We have to emit a USE of the pseudo so
4653 that delete_output_reload can see it. */
4654 if (replace_reloads && recog_data.operand[opnum] != x)
4655 /* We mark the USE with QImode so that we recognize it
4656 as one that can be safely deleted at the end of
4657 reload. */
4658 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4659 QImode);
4660 x = mem;
4661 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4662 opnum, type, ind_levels, insn);
4663 if (!rtx_equal_p (x, mem))
4664 push_reg_equiv_alt_mem (regno, x);
4665 if (address_reloaded)
4666 *address_reloaded = i;
4669 return x;
4671 if (code == MEM)
4673 rtx tem = x;
4675 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4676 opnum, type, ind_levels, insn);
4677 if (address_reloaded)
4678 *address_reloaded = i;
4680 return tem;
4683 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4685 /* Check for SUBREG containing a REG that's equivalent to a
4686 constant. If the constant has a known value, truncate it
4687 right now. Similarly if we are extracting a single-word of a
4688 multi-word constant. If the constant is symbolic, allow it
4689 to be substituted normally. push_reload will strip the
4690 subreg later. The constant must not be VOIDmode, because we
4691 will lose the mode of the register (this should never happen
4692 because one of the cases above should handle it). */
4694 int regno = REGNO (SUBREG_REG (x));
4695 rtx tem;
4697 if (regno >= FIRST_PSEUDO_REGISTER
4698 && reg_renumber[regno] < 0
4699 && reg_equiv_constant (regno) != 0)
4701 tem =
4702 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4703 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4704 gcc_assert (tem);
4705 if (CONSTANT_P (tem)
4706 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4708 tem = force_const_mem (GET_MODE (x), tem);
4709 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4710 &XEXP (tem, 0), opnum, type,
4711 ind_levels, insn);
4712 if (address_reloaded)
4713 *address_reloaded = i;
4715 return tem;
4718 /* If the subreg contains a reg that will be converted to a mem,
4719 attempt to convert the whole subreg to a (narrower or wider)
4720 memory reference instead. If this succeeds, we're done --
4721 otherwise fall through to check whether the inner reg still
4722 needs address reloads anyway. */
4724 if (regno >= FIRST_PSEUDO_REGISTER
4725 && reg_equiv_memory_loc (regno) != 0)
4727 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4728 insn, address_reloaded);
4729 if (tem)
4730 return tem;
4734 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4736 if (fmt[i] == 'e')
4738 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4739 ind_levels, is_set_dest, insn,
4740 address_reloaded);
4741 /* If we have replaced a reg with its equivalent memory loc -
4742 that can still be handled here e.g. if it's in a paradoxical
4743 subreg - we must make the change in a copy, rather than using
4744 a destructive change. This way, find_reloads can still elect
4745 not to do the change. */
4746 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4748 x = shallow_copy_rtx (x);
4749 copied = 1;
4751 XEXP (x, i) = new_part;
4754 return x;
4757 /* Return a mem ref for the memory equivalent of reg REGNO.
4758 This mem ref is not shared with anything. */
4760 static rtx
4761 make_memloc (rtx ad, int regno)
4763 /* We must rerun eliminate_regs, in case the elimination
4764 offsets have changed. */
4765 rtx tem
4766 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4769 /* If TEM might contain a pseudo, we must copy it to avoid
4770 modifying it when we do the substitution for the reload. */
4771 if (rtx_varies_p (tem, 0))
4772 tem = copy_rtx (tem);
4774 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4775 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4777 /* Copy the result if it's still the same as the equivalence, to avoid
4778 modifying it when we do the substitution for the reload. */
4779 if (tem == reg_equiv_memory_loc (regno))
4780 tem = copy_rtx (tem);
4781 return tem;
4784 /* Returns true if AD could be turned into a valid memory reference
4785 to mode MODE in address space AS by reloading the part pointed to
4786 by PART into a register. */
4788 static int
4789 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4790 addr_space_t as, rtx *part)
4792 int retv;
4793 rtx tem = *part;
4794 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
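 /* Temporarily replace *PART with a fresh pseudo (a register number past
    every pseudo allocated so far), ask whether the address as a whole
    would then be valid, and restore the original contents before
    returning.  */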
4796 *part = reg;
4797 retv = memory_address_addr_space_p (mode, ad, as);
4798 *part = tem;
4800 return retv;
4803 /* Record all reloads needed for handling memory address AD
4804 which appears in *LOC in a memory reference to mode MODE
4805 which itself is found in location *MEMREFLOC.
4806 Note that we take shortcuts assuming that no multi-reg machine mode
4807 occurs as part of an address.
4809 OPNUM and TYPE specify the purpose of this reload.
4811 IND_LEVELS says how many levels of indirect addressing this machine
4812 supports.
4814 INSN, if nonzero, is the insn in which we do the reload. It is used
4815 to determine if we may generate output reloads, and where to put USEs
4816 for pseudos that we have to replace with stack slots.
4818 Value is one if this address is reloaded or replaced as a whole; it is
4819 zero if the top level of this address was not reloaded or replaced, and
4820 it is -1 if it may or may not have been reloaded or replaced.
4822 Note that there is no verification that the address will be valid after
4823 this routine does its work. Instead, we rely on the fact that the address
4824 was valid when reload started. So we need only undo things that reload
4825 could have broken. These are wrong register types, pseudos not allocated
4826 to a hard register, and frame pointer elimination. */
4828 static int
4829 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4830 rtx *loc, int opnum, enum reload_type type,
4831 int ind_levels, rtx_insn *insn)
4833 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4834 : ADDR_SPACE_GENERIC;
4835 int regno;
4836 int removed_and = 0;
4837 int op_index;
4838 rtx tem;
4840 /* If the address is a register, see if it is a legitimate address and
4841 reload if not. We first handle the cases where we need not reload
4842 or where we must reload in a non-standard way. */
4844 if (REG_P (ad))
4846 regno = REGNO (ad);
4848 if (reg_equiv_constant (regno) != 0)
4850 find_reloads_address_part (reg_equiv_constant (regno), loc,
4851 base_reg_class (mode, as, MEM, SCRATCH),
4852 GET_MODE (ad), opnum, type, ind_levels);
4853 return 1;
4856 tem = reg_equiv_memory_loc (regno);
4857 if (tem != 0)
4859 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4861 tem = make_memloc (ad, regno);
4862 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4863 XEXP (tem, 0),
4864 MEM_ADDR_SPACE (tem)))
4866 rtx orig = tem;
4868 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4869 &XEXP (tem, 0), opnum,
4870 ADDR_TYPE (type), ind_levels, insn);
4871 if (!rtx_equal_p (tem, orig))
4872 push_reg_equiv_alt_mem (regno, tem);
4874 /* We can avoid a reload if the register's equivalent memory
4875 expression is valid as an indirect memory address.
4876 But not all addresses are valid in a mem used as an indirect
4877 address: only reg or reg+constant. */
4879 if (ind_levels > 0
4880 && strict_memory_address_addr_space_p (mode, tem, as)
4881 && (REG_P (XEXP (tem, 0))
4882 || (GET_CODE (XEXP (tem, 0)) == PLUS
4883 && REG_P (XEXP (XEXP (tem, 0), 0))
4884 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4886 /* If TEM is not the same as what we'll be replacing the
4887 pseudo with after reload, put a USE in front of INSN
4888 in the final reload pass. */
4889 if (replace_reloads
4890 && num_not_at_initial_offset
4891 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4893 *loc = tem;
4894 /* We mark the USE with QImode so that we
4895 recognize it as one that can be safely
4896 deleted at the end of reload. */
4897 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4898 insn), QImode);
4900 /* This doesn't really count as replacing the address
4901 as a whole, since it is still a memory access. */
4903 return 0;
4905 ad = tem;
4909 /* The only remaining case where we can avoid a reload is if this is a
4910 hard register that is valid as a base register and which is not the
4911 subject of a CLOBBER in this insn. */
4913 else if (regno < FIRST_PSEUDO_REGISTER
4914 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4915 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4916 return 0;
4918 /* If we do not have one of the cases above, we must do the reload. */
4919 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4920 base_reg_class (mode, as, MEM, SCRATCH),
4921 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4922 return 1;
4925 if (strict_memory_address_addr_space_p (mode, ad, as))
4927 /* The address appears valid, so reloads are not needed.
4928 But the address may contain an eliminable register.
4929 This can happen because a machine with indirect addressing
4930 may consider a pseudo register by itself a valid address even when
4931 it has failed to get a hard reg.
4932 So do a tree-walk to find and eliminate all such regs. */
4934 /* But first quickly dispose of a common case. */
4935 if (GET_CODE (ad) == PLUS
4936 && CONST_INT_P (XEXP (ad, 1))
4937 && REG_P (XEXP (ad, 0))
4938 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4939 return 0;
4941 subst_reg_equivs_changed = 0;
4942 *loc = subst_reg_equivs (ad, insn);
4944 if (! subst_reg_equivs_changed)
4945 return 0;
4947 /* Check result for validity after substitution. */
4948 if (strict_memory_address_addr_space_p (mode, ad, as))
4949 return 0;
4952 #ifdef LEGITIMIZE_RELOAD_ADDRESS
4953 do
4955 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
4957 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
4958 ind_levels, win);
4960 break;
4961 win:
4962 *memrefloc = copy_rtx (*memrefloc);
4963 XEXP (*memrefloc, 0) = ad;
4964 move_replacements (&ad, &XEXP (*memrefloc, 0));
4965 return -1;
4967 while (0);
4968 #endif
4970 /* The address is not valid. We have to figure out why. First see if
4971 we have an outer AND and remove it if so. Then analyze what's inside. */
4973 if (GET_CODE (ad) == AND)
4975 removed_and = 1;
4976 loc = &XEXP (ad, 0);
4977 ad = *loc;
4980 /* One possibility for why the address is invalid is that it is itself
4981 a MEM. This can happen when the frame pointer is being eliminated, a
4982 pseudo is not allocated to a hard register, and the offset between the
4983 frame and stack pointers is not its initial value. In that case the
4984 pseudo will have been replaced by a MEM referring to the
4985 stack pointer. */
4986 if (MEM_P (ad))
4988 /* First ensure that the address in this MEM is valid. Then, unless
4989 indirect addresses are valid, reload the MEM into a register. */
4990 tem = ad;
4991 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
4992 opnum, ADDR_TYPE (type),
4993 ind_levels == 0 ? 0 : ind_levels - 1, insn);
4995 /* If tem was changed, then we must create a new memory reference to
4996 hold it and store it back into memrefloc. */
4997 if (tem != ad && memrefloc)
4999 *memrefloc = copy_rtx (*memrefloc);
5000 copy_replacements (tem, XEXP (*memrefloc, 0));
5001 loc = &XEXP (*memrefloc, 0);
5002 if (removed_and)
5003 loc = &XEXP (*loc, 0);
5006 /* Check cases similar to those for indirect addresses above, except
5007 that we can allow pseudos and a MEM since they should have been
5008 taken care of above. */
5010 if (ind_levels == 0
5011 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5012 || MEM_P (XEXP (tem, 0))
5013 || ! (REG_P (XEXP (tem, 0))
5014 || (GET_CODE (XEXP (tem, 0)) == PLUS
5015 && REG_P (XEXP (XEXP (tem, 0), 0))
5016 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5018 /* Must use TEM here, not AD, since it is the one that will
5019 have any subexpressions reloaded, if needed. */
5020 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5021 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5022 VOIDmode, 0,
5023 0, opnum, type);
5024 return ! removed_and;
5026 else
5027 return 0;
5030 /* If we have address of a stack slot but it's not valid because the
5031 displacement is too large, compute the sum in a register.
5032 Handle all base registers here, not just fp/ap/sp, because on some
5033 targets (namely SH) we can also get too large displacements from
5034 big-endian corrections. */
5035 else if (GET_CODE (ad) == PLUS
5036 && REG_P (XEXP (ad, 0))
5037 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5038 && CONST_INT_P (XEXP (ad, 1))
5039 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5040 CONST_INT)
5041 /* Similarly, if we were to reload the base register and the
5042 mem+offset address is still invalid, then we want to reload
5043 the whole address, not just the base register. */
5044 || ! maybe_memory_address_addr_space_p
5045 (mode, ad, as, &(XEXP (ad, 0)))))
5048 /* Unshare the MEM rtx so we can safely alter it. */
5049 if (memrefloc)
5051 *memrefloc = copy_rtx (*memrefloc);
5052 loc = &XEXP (*memrefloc, 0);
5053 if (removed_and)
5054 loc = &XEXP (*loc, 0);
5057 if (double_reg_address_ok[mode]
5058 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5059 PLUS, CONST_INT))
5061 /* Unshare the sum as well. */
5062 *loc = ad = copy_rtx (ad);
5064 /* Reload the displacement into an index reg.
5065 We assume the frame pointer or arg pointer is a base reg. */
5066 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5067 INDEX_REG_CLASS, GET_MODE (ad), opnum,
5068 type, ind_levels);
5069 return 0;
5071 else
5073 /* If the sum of two regs is not necessarily valid,
5074 reload the sum into a base reg.
5075 That will at least work. */
5076 find_reloads_address_part (ad, loc,
5077 base_reg_class (mode, as, MEM, SCRATCH),
5078 GET_MODE (ad), opnum, type, ind_levels);
5080 return ! removed_and;
5083 /* If we have an indexed stack slot, there are three possible reasons why
5084 it might be invalid: The index might need to be reloaded, the address
5085 might have been made by frame pointer elimination and hence have a
5086 constant out of range, or both reasons might apply.
5088 We can easily check for an index needing reload, but even if that is the
5089 case, we might also have an invalid constant. To avoid making the
5090 conservative assumption and requiring two reloads, we see if this address
5091 is valid when not interpreted strictly. If it is, the only problem is
5092 that the index needs a reload and find_reloads_address_1 will take care
5093 of it.
5095 Handle all base registers here, not just fp/ap/sp, because on some
5096 targets (namely SPARC) we can also get invalid addresses from preventive
5097 subreg big-endian corrections made by find_reloads_toplev. We
5098 can also get expressions involving LO_SUM (rather than PLUS) from
5099 find_reloads_subreg_address.
5101 If we decide to do something, it must be that `double_reg_address_ok'
5102 is true. We generate a reload of the base register + constant and
5103 rework the sum so that the reload register will be added to the index.
5104 This is safe because we know the address isn't shared.
5106 We check for the base register as both the first and second operand of
5107 the innermost PLUS and/or LO_SUM. */
5109 for (op_index = 0; op_index < 2; ++op_index)
5111 rtx operand, addend;
5112 enum rtx_code inner_code;
5114 if (GET_CODE (ad) != PLUS)
5115 continue;
5117 inner_code = GET_CODE (XEXP (ad, 0));
5118 if (!(GET_CODE (ad) == PLUS
5119 && CONST_INT_P (XEXP (ad, 1))
5120 && (inner_code == PLUS || inner_code == LO_SUM)))
5121 continue;
5123 operand = XEXP (XEXP (ad, 0), op_index);
5124 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5125 continue;
5127 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5129 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5130 GET_CODE (addend))
5131 || operand == frame_pointer_rtx
5132 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5133 && operand == hard_frame_pointer_rtx)
5134 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5135 && operand == arg_pointer_rtx)
5136 || operand == stack_pointer_rtx)
5137 && ! maybe_memory_address_addr_space_p
5138 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5140 rtx offset_reg;
5141 enum reg_class cls;
5143 offset_reg = plus_constant (GET_MODE (ad), operand,
5144 INTVAL (XEXP (ad, 1)));
5146 /* Form the adjusted address. */
5147 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5148 ad = gen_rtx_PLUS (GET_MODE (ad),
5149 op_index == 0 ? offset_reg : addend,
5150 op_index == 0 ? addend : offset_reg);
5151 else
5152 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5153 op_index == 0 ? offset_reg : addend,
5154 op_index == 0 ? addend : offset_reg);
5155 *loc = ad;
5157 cls = base_reg_class (mode, as, MEM, GET_CODE (addend));
5158 find_reloads_address_part (XEXP (ad, op_index),
5159 &XEXP (ad, op_index), cls,
5160 GET_MODE (ad), opnum, type, ind_levels);
5161 find_reloads_address_1 (mode, as,
5162 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5163 GET_CODE (XEXP (ad, op_index)),
5164 &XEXP (ad, 1 - op_index), opnum,
5165 type, 0, insn);
5167 return 0;
5171 /* See if address becomes valid when an eliminable register
5172 in a sum is replaced. */
5174 tem = ad;
5175 if (GET_CODE (ad) == PLUS)
5176 tem = subst_indexed_address (ad);
5177 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5179 /* Ok, we win that way. Replace any additional eliminable
5180 registers. */
5182 subst_reg_equivs_changed = 0;
5183 tem = subst_reg_equivs (tem, insn);
5185 /* Make sure that didn't make the address invalid again. */
5187 if (! subst_reg_equivs_changed
5188 || strict_memory_address_addr_space_p (mode, tem, as))
5190 *loc = tem;
5191 return 0;
5195 /* If constants aren't valid addresses, reload the constant address
5196 into a register. */
5197 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5199 machine_mode address_mode = GET_MODE (ad);
5200 if (address_mode == VOIDmode)
5201 address_mode = targetm.addr_space.address_mode (as);
5203 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5204 Unshare it so we can safely alter it. */
5205 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5206 && CONSTANT_POOL_ADDRESS_P (ad))
5208 *memrefloc = copy_rtx (*memrefloc);
5209 loc = &XEXP (*memrefloc, 0);
5210 if (removed_and)
5211 loc = &XEXP (*loc, 0);
5214 find_reloads_address_part (ad, loc,
5215 base_reg_class (mode, as, MEM, SCRATCH),
5216 address_mode, opnum, type, ind_levels);
5217 return ! removed_and;
5220 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5221 opnum, type, ind_levels, insn);
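/* Illustrative sketch added for exposition (not part of the original file):
   the shape of a typical call for a memory operand of the insn being
   scanned.  In the real callers MEMREFLOC points at the operand's slot
   inside the insn (e.g. recog_data.operand_loc[OPNUM]); here a generic
   pointer is used, and one level of indirect addressing is assumed.  */
#if 0
static void
example_reload_operand_address (rtx *memloc, int opnum,
                                enum reload_type type, rtx_insn *insn)
{
  rtx mem = *memloc;
  if (MEM_P (mem))
    find_reloads_address (GET_MODE (mem), memloc, XEXP (mem, 0),
                          &XEXP (mem, 0), opnum, type, 1, insn);
}
#endif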
5224 /* Find all pseudo regs appearing in AD
5225 that are eliminable in favor of equivalent values
5226 and do not have hard regs; replace them by their equivalents.
5227 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5228 front of it for pseudos that we have to replace with stack slots. */
5230 static rtx
5231 subst_reg_equivs (rtx ad, rtx_insn *insn)
5233 RTX_CODE code = GET_CODE (ad);
5234 int i;
5235 const char *fmt;
5237 switch (code)
5239 case HIGH:
5240 case CONST:
5241 CASE_CONST_ANY:
5242 case SYMBOL_REF:
5243 case LABEL_REF:
5244 case PC:
5245 case CC0:
5246 return ad;
5248 case REG:
5250 int regno = REGNO (ad);
5252 if (reg_equiv_constant (regno) != 0)
5254 subst_reg_equivs_changed = 1;
5255 return reg_equiv_constant (regno);
5257 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5259 rtx mem = make_memloc (ad, regno);
5260 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5262 subst_reg_equivs_changed = 1;
5263 /* We mark the USE with QImode so that we recognize it
5264 as one that can be safely deleted at the end of
5265 reload. */
5266 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5267 QImode);
5268 return mem;
5272 return ad;
5274 case PLUS:
5275 /* Quickly dispose of a common case. */
5276 if (XEXP (ad, 0) == frame_pointer_rtx
5277 && CONST_INT_P (XEXP (ad, 1)))
5278 return ad;
5279 break;
5281 default:
5282 break;
5285 fmt = GET_RTX_FORMAT (code);
5286 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5287 if (fmt[i] == 'e')
5288 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5289 return ad;
5292 /* Compute the sum of X and Y, making canonicalizations assumed in an
5293 address, namely: sum constant integers, surround the sum of two
5294 constants with a CONST, put the constant as the second operand, and
5295 group the constant on the outermost sum.
5297 This routine assumes both inputs are already in canonical form. */
5299 rtx
5300 form_sum (machine_mode mode, rtx x, rtx y)
5302 rtx tem;
5304 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5305 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5307 if (CONST_INT_P (x))
5308 return plus_constant (mode, y, INTVAL (x));
5309 else if (CONST_INT_P (y))
5310 return plus_constant (mode, x, INTVAL (y));
5311 else if (CONSTANT_P (x))
5312 tem = x, x = y, y = tem;
5314 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5315 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5317 /* Note that if the operands of Y are specified in the opposite
5318 order in the recursive calls below, infinite recursion will occur. */
5319 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5320 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5322 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5323 constant will have been placed second. */
5324 if (CONSTANT_P (x) && CONSTANT_P (y))
5326 if (GET_CODE (x) == CONST)
5327 x = XEXP (x, 0);
5328 if (GET_CODE (y) == CONST)
5329 y = XEXP (y, 0);
5331 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5334 return gen_rtx_PLUS (mode, x, y);
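/* Illustrative sketch added for exposition (not part of the original file):
   one concrete canonicalization performed by form_sum.  Adding a constant
   to a sum that already ends in a constant folds the two constants; two
   pure constants would instead be wrapped in a CONST.  R is assumed to be
   a Pmode register.  */
#if 0
static rtx
example_form_sum (rtx r)
{
  /* ((r + 4) + 8) becomes (plus r (const_int 12)).  */
  return form_sum (Pmode, gen_rtx_PLUS (Pmode, r, GEN_INT (4)),
                   GEN_INT (8));
}
#endif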
5337 /* If ADDR is a sum containing a pseudo register that should be
5338 replaced with a constant (from reg_equiv_constant),
5339 return the result of doing so, and also apply the associative
5340 law so that the result is more likely to be a valid address.
5341 (But it is not guaranteed to be one.)
5343 Note that at most one register is replaced, even if more are
5344 replaceable. Also, we try to put the result into a canonical form
5345 so it is more likely to be a valid address.
5347 In all other cases, return ADDR. */
5349 static rtx
5350 subst_indexed_address (rtx addr)
5352 rtx op0 = 0, op1 = 0, op2 = 0;
5353 rtx tem;
5354 int regno;
5356 if (GET_CODE (addr) == PLUS)
5358 /* Try to find a register to replace. */
5359 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5360 if (REG_P (op0)
5361 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5362 && reg_renumber[regno] < 0
5363 && reg_equiv_constant (regno) != 0)
5364 op0 = reg_equiv_constant (regno);
5365 else if (REG_P (op1)
5366 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5367 && reg_renumber[regno] < 0
5368 && reg_equiv_constant (regno) != 0)
5369 op1 = reg_equiv_constant (regno);
5370 else if (GET_CODE (op0) == PLUS
5371 && (tem = subst_indexed_address (op0)) != op0)
5372 op0 = tem;
5373 else if (GET_CODE (op1) == PLUS
5374 && (tem = subst_indexed_address (op1)) != op1)
5375 op1 = tem;
5376 else
5377 return addr;
5379 /* Pick out up to three things to add. */
5380 if (GET_CODE (op1) == PLUS)
5381 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5382 else if (GET_CODE (op0) == PLUS)
5383 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5385 /* Compute the sum. */
5386 if (op2 != 0)
5387 op1 = form_sum (GET_MODE (addr), op1, op2);
5388 if (op1 != 0)
5389 op0 = form_sum (GET_MODE (addr), op0, op1);
5391 return op0;
5393 return addr;
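/* Illustrative sketch added for exposition (not part of the original file):
   assuming pseudo 500 got no hard register and is equivalent to
   (const_int 16), while pseudo 501 has no constant equivalence, the
   routine above rewrites
     (plus (plus (reg 501) (reg 500)) (const_int 4))
   into (plus (reg 501) (const_int 20)), folding the constants through
   form_sum.  The register numbers are arbitrary.  */
#if 0
static rtx
example_subst_indexed_address (void)
{
  rtx base = gen_rtx_REG (Pmode, 501);
  rtx pseudo = gen_rtx_REG (Pmode, 500);  /* reg_equiv_constant == 16 */
  rtx addr = gen_rtx_PLUS (Pmode,
                           gen_rtx_PLUS (Pmode, base, pseudo),
                           GEN_INT (4));
  return subst_indexed_address (addr);
}
#endif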
5396 /* Update the REG_INC notes for an insn. It updates all REG_INC
5397 notes for the instruction which refer to REGNO so that they refer
5398 to the reload number.
5400 INSN is the insn for which any REG_INC notes need updating.
5402 REGNO is the register number which has been reloaded.
5404 RELOADNUM is the reload number. */
5406 static void
5407 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5408 int reloadnum ATTRIBUTE_UNUSED)
5410 if (!AUTO_INC_DEC)
5411 return;
5413 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5414 if (REG_NOTE_KIND (link) == REG_INC
5415 && (int) REGNO (XEXP (link, 0)) == regno)
5416 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5419 /* Record the pseudo registers we must reload into hard registers in a
5420 subexpression of a would-be memory address, X referring to a value
5421 in mode MODE. (This function is not called if the address we find
5422 is strictly valid.)
5424 CONTEXT = 1 means we are considering regs as index regs,
5425 = 0 means we are considering them as base regs.
5426 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5427 or an autoinc code.
5428 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5429 is the code of the index part of the address. Otherwise, pass SCRATCH
5430 for this argument.
5431 OPNUM and TYPE specify the purpose of any reloads made.
5433 IND_LEVELS says how many levels of indirect addressing are
5434 supported at this point in the address.
5436 INSN, if nonzero, is the insn in which we do the reload. It is used
5437 to determine if we may generate output reloads.
5439 We return nonzero if X, as a whole, is reloaded or replaced. */
5441 /* Note that we take shortcuts assuming that no multi-reg machine mode
5442 occurs as part of an address.
5443 Also, this is not fully machine-customizable; it works for machines
5444 such as VAXen and 68000's and 32000's, but other possible machines
5445 could have addressing modes that this does not handle right.
5446 If you add push_reload calls here, you need to make sure gen_reload
5447 handles those cases gracefully. */
5449 static int
5450 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5451 rtx x, int context,
5452 enum rtx_code outer_code, enum rtx_code index_code,
5453 rtx *loc, int opnum, enum reload_type type,
5454 int ind_levels, rtx_insn *insn)
5456 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5457 ((CONTEXT) == 0 \
5458 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5459 : REGNO_OK_FOR_INDEX_P (REGNO))
5461 enum reg_class context_reg_class;
5462 RTX_CODE code = GET_CODE (x);
5463 bool reloaded_inner_of_autoinc = false;
5465 if (context == 1)
5466 context_reg_class = INDEX_REG_CLASS;
5467 else
5468 context_reg_class = base_reg_class (mode, as, outer_code, index_code);
5470 switch (code)
5472 case PLUS:
5474 rtx orig_op0 = XEXP (x, 0);
5475 rtx orig_op1 = XEXP (x, 1);
5476 RTX_CODE code0 = GET_CODE (orig_op0);
5477 RTX_CODE code1 = GET_CODE (orig_op1);
5478 rtx op0 = orig_op0;
5479 rtx op1 = orig_op1;
5481 if (GET_CODE (op0) == SUBREG)
5483 op0 = SUBREG_REG (op0);
5484 code0 = GET_CODE (op0);
5485 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5486 op0 = gen_rtx_REG (word_mode,
5487 (REGNO (op0) +
5488 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5489 GET_MODE (SUBREG_REG (orig_op0)),
5490 SUBREG_BYTE (orig_op0),
5491 GET_MODE (orig_op0))));
5494 if (GET_CODE (op1) == SUBREG)
5496 op1 = SUBREG_REG (op1);
5497 code1 = GET_CODE (op1);
5498 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5499 /* ??? Why is this given op1's mode and above for
5500 ??? op0 SUBREGs we use word_mode? */
5501 op1 = gen_rtx_REG (GET_MODE (op1),
5502 (REGNO (op1) +
5503 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5504 GET_MODE (SUBREG_REG (orig_op1)),
5505 SUBREG_BYTE (orig_op1),
5506 GET_MODE (orig_op1))));
5508 /* Plus in the index register may be created only as a result of
5509 register rematerialization for expressions like &localvar*4. Reload it.
5510 It may be possible to combine the displacement on the outer level,
5511 but it is probably not worthwhile to do so. */
5512 if (context == 1)
5514 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5515 opnum, ADDR_TYPE (type), ind_levels, insn);
5516 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5517 context_reg_class,
5518 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5519 return 1;
5522 if (code0 == MULT || code0 == SIGN_EXTEND || code0 == TRUNCATE
5523 || code0 == ZERO_EXTEND || code1 == MEM)
5525 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5526 &XEXP (x, 0), opnum, type, ind_levels,
5527 insn);
5528 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5529 &XEXP (x, 1), opnum, type, ind_levels,
5530 insn);
5533 else if (code1 == MULT || code1 == SIGN_EXTEND || code1 == TRUNCATE
5534 || code1 == ZERO_EXTEND || code0 == MEM)
5536 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5537 &XEXP (x, 0), opnum, type, ind_levels,
5538 insn);
5539 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5540 &XEXP (x, 1), opnum, type, ind_levels,
5541 insn);
5544 else if (code0 == CONST_INT || code0 == CONST
5545 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5546 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5547 &XEXP (x, 1), opnum, type, ind_levels,
5548 insn);
5550 else if (code1 == CONST_INT || code1 == CONST
5551 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5552 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5553 &XEXP (x, 0), opnum, type, ind_levels,
5554 insn);
5556 else if (code0 == REG && code1 == REG)
5558 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5559 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5560 return 0;
5561 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5562 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5563 return 0;
5564 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5565 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5566 &XEXP (x, 1), opnum, type, ind_levels,
5567 insn);
5568 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5569 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5570 &XEXP (x, 0), opnum, type, ind_levels,
5571 insn);
5572 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5573 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5574 &XEXP (x, 0), opnum, type, ind_levels,
5575 insn);
5576 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5577 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5578 &XEXP (x, 1), opnum, type, ind_levels,
5579 insn);
5580 else
5582 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5583 &XEXP (x, 0), opnum, type, ind_levels,
5584 insn);
5585 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5586 &XEXP (x, 1), opnum, type, ind_levels,
5587 insn);
5591 else if (code0 == REG)
5593 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5594 &XEXP (x, 0), opnum, type, ind_levels,
5595 insn);
5596 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5597 &XEXP (x, 1), opnum, type, ind_levels,
5598 insn);
5601 else if (code1 == REG)
5603 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5604 &XEXP (x, 1), opnum, type, ind_levels,
5605 insn);
5606 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5607 &XEXP (x, 0), opnum, type, ind_levels,
5608 insn);
5612 return 0;
5614 case POST_MODIFY:
5615 case PRE_MODIFY:
5617 rtx op0 = XEXP (x, 0);
5618 rtx op1 = XEXP (x, 1);
5619 enum rtx_code index_code;
5620 int regno;
5621 int reloadnum;
5623 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5624 return 0;
5626 /* Currently, we only support {PRE,POST}_MODIFY constructs
5627 where a base register is {inc,dec}remented by the contents
5628 of another register or by a constant value. Thus, these
5629 operands must match. */
5630 gcc_assert (op0 == XEXP (op1, 0));
5632 /* Require index register (or constant). Let's just handle the
5633 register case in the meantime... If the target allows
5634 auto-modify by a constant then we could try replacing a pseudo
5635 register with its equivalent constant where applicable.
5637 We also handle the case where the register was eliminated
5638 resulting in a PLUS subexpression.
5640 If we later decide to reload the whole PRE_MODIFY or
5641 POST_MODIFY, inc_for_reload might clobber the reload register
5642 before reading the index. The index register might therefore
5643 need to live longer than a TYPE reload normally would, so be
5644 conservative and class it as RELOAD_OTHER. */
5645 if ((REG_P (XEXP (op1, 1))
5646 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5647 || GET_CODE (XEXP (op1, 1)) == PLUS)
5648 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5649 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5650 ind_levels, insn);
5652 gcc_assert (REG_P (XEXP (op1, 0)));
5654 regno = REGNO (XEXP (op1, 0));
5655 index_code = GET_CODE (XEXP (op1, 1));
5657 /* A register that is incremented cannot be constant! */
5658 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5659 || reg_equiv_constant (regno) == 0);
5661 /* Handle a register that is equivalent to a memory location
5662 which cannot be addressed directly. */
5663 if (reg_equiv_memory_loc (regno) != 0
5664 && (reg_equiv_address (regno) != 0
5665 || num_not_at_initial_offset))
5667 rtx tem = make_memloc (XEXP (x, 0), regno);
5669 if (reg_equiv_address (regno)
5670 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5672 rtx orig = tem;
5674 /* First reload the memory location's address.
5675 We can't use ADDR_TYPE (type) here, because we need to
5676 write back the value after reading it, hence we actually
5677 need two registers. */
5678 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5679 &XEXP (tem, 0), opnum,
5680 RELOAD_OTHER,
5681 ind_levels, insn);
5683 if (!rtx_equal_p (tem, orig))
5684 push_reg_equiv_alt_mem (regno, tem);
5686 /* Then reload the memory location into a base
5687 register. */
5688 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5689 &XEXP (op1, 0),
5690 base_reg_class (mode, as,
5691 code, index_code),
5692 GET_MODE (x), GET_MODE (x), 0,
5693 0, opnum, RELOAD_OTHER);
5695 update_auto_inc_notes (this_insn, regno, reloadnum);
5696 return 0;
5700 if (reg_renumber[regno] >= 0)
5701 regno = reg_renumber[regno];
5703 /* We require a base register here... */
5704 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5706 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5707 &XEXP (op1, 0), &XEXP (x, 0),
5708 base_reg_class (mode, as,
5709 code, index_code),
5710 GET_MODE (x), GET_MODE (x), 0, 0,
5711 opnum, RELOAD_OTHER);
5713 update_auto_inc_notes (this_insn, regno, reloadnum);
5714 return 0;
5717 return 0;
5719 case POST_INC:
5720 case POST_DEC:
5721 case PRE_INC:
5722 case PRE_DEC:
5723 if (REG_P (XEXP (x, 0)))
5725 int regno = REGNO (XEXP (x, 0));
5726 int value = 0;
5727 rtx x_orig = x;
5729 /* A register that is incremented cannot be constant! */
5730 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5731 || reg_equiv_constant (regno) == 0);
5733 /* Handle a register that is equivalent to a memory location
5734 which cannot be addressed directly. */
5735 if (reg_equiv_memory_loc (regno) != 0
5736 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5738 rtx tem = make_memloc (XEXP (x, 0), regno);
5739 if (reg_equiv_address (regno)
5740 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5742 rtx orig = tem;
5744 /* First reload the memory location's address.
5745 We can't use ADDR_TYPE (type) here, because we need to
5746 write back the value after reading it, hence we actually
5747 need two registers. */
5748 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5749 &XEXP (tem, 0), opnum, type,
5750 ind_levels, insn);
5751 reloaded_inner_of_autoinc = true;
5752 if (!rtx_equal_p (tem, orig))
5753 push_reg_equiv_alt_mem (regno, tem);
5754 /* Put this inside a new increment-expression. */
5755 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5756 /* Proceed to reload that, as if it contained a register. */
5760 /* If we have a hard register that is ok in this incdec context,
5761 don't make a reload. If the register isn't nice enough for
5762 autoincdec, we can reload it. But if an autoincrement of a
5763 register that we have just verified as acceptable still isn't
5764 "valid" in the surrounding context, it must be that no autoincrement is "valid".
5765 If that is true and something made an autoincrement anyway,
5766 this must be a special context where one is allowed.
5767 (For example, a "push" instruction.)
5768 We can't improve this address, so leave it alone. */
5770 /* Otherwise, reload the autoincrement into a suitable hard reg
5771 and record how much to increment by. */
5773 if (reg_renumber[regno] >= 0)
5774 regno = reg_renumber[regno];
5775 if (regno >= FIRST_PSEUDO_REGISTER
5776 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5777 index_code))
5779 int reloadnum;
5781 /* If we can output the register afterwards, do so, this
5782 saves the extra update.
5783 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5784 CALL_INSN - and it does not set CC0.
5785 But don't do this if we cannot directly address the
5786 memory location, since this will make it harder to
5787 reuse address reloads, and increases register pressure.
5788 Also don't do this if we can probably update x directly. */
5789 rtx equiv = (MEM_P (XEXP (x, 0))
5790 ? XEXP (x, 0)
5791 : reg_equiv_mem (regno));
5792 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5793 if (insn && NONJUMP_INSN_P (insn)
5794 #if HAVE_cc0
5795 && ! sets_cc0_p (PATTERN (insn))
5796 #endif
5797 && (regno < FIRST_PSEUDO_REGISTER
5798 || (equiv
5799 && memory_operand (equiv, GET_MODE (equiv))
5800 && ! (icode != CODE_FOR_nothing
5801 && insn_operand_matches (icode, 0, equiv)
5802 && insn_operand_matches (icode, 1, equiv))))
5803 /* Using RELOAD_OTHER means we emit this and the reload we
5804 made earlier in the wrong order. */
5805 && !reloaded_inner_of_autoinc)
5807 /* We use the original pseudo for loc, so that
5808 emit_reload_insns() knows which pseudo this
5809 reload refers to and updates the pseudo rtx, not
5810 its equivalent memory location, as well as the
5811 corresponding entry in reg_last_reload_reg. */
5812 loc = &XEXP (x_orig, 0);
5813 x = XEXP (x, 0);
5814 reloadnum
5815 = push_reload (x, x, loc, loc,
5816 context_reg_class,
5817 GET_MODE (x), GET_MODE (x), 0, 0,
5818 opnum, RELOAD_OTHER);
5820 else
5822 reloadnum
5823 = push_reload (x, x, loc, (rtx*) 0,
5824 context_reg_class,
5825 GET_MODE (x), GET_MODE (x), 0, 0,
5826 opnum, type);
5827 rld[reloadnum].inc
5828 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5830 value = 1;
5833 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5834 reloadnum);
5836 return value;
5838 return 0;
5840 case TRUNCATE:
5841 case SIGN_EXTEND:
5842 case ZERO_EXTEND:
5843 /* Look for parts to reload in the inner expression and reload them
5844 too, in addition to this operation. Reloading all inner parts in
5845 addition to this one shouldn't be necessary, but at this point,
5846 we don't know if we can possibly omit any part that *can* be
5847 reloaded. Targets that are better off reloading just either part
5848 (or perhaps even a different part of an outer expression), should
5849 define LEGITIMIZE_RELOAD_ADDRESS. */
5850 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5851 context, code, SCRATCH, &XEXP (x, 0), opnum,
5852 type, ind_levels, insn);
5853 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5854 context_reg_class,
5855 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5856 return 1;
5858 case MEM:
5859 /* This is probably the result of a substitution, by eliminate_regs, of
5860 an equivalent address for a pseudo that was not allocated to a hard
5861 register. Verify that the specified address is valid and reload it
5862 into a register.
5864 Since we know we are going to reload this item, don't decrement for
5865 the indirection level.
5867 Note that this is actually conservative: it would be slightly more
5868 efficient to use the value of SPILL_INDIRECT_LEVELS from
5869 reload1.c here. */
5871 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5872 opnum, ADDR_TYPE (type), ind_levels, insn);
5873 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5874 context_reg_class,
5875 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5876 return 1;
5878 case REG:
5880 int regno = REGNO (x);
5882 if (reg_equiv_constant (regno) != 0)
5884 find_reloads_address_part (reg_equiv_constant (regno), loc,
5885 context_reg_class,
5886 GET_MODE (x), opnum, type, ind_levels);
5887 return 1;
5890 #if 0 /* This might screw code in reload1.c to delete prior output-reload
5891 that feeds this insn. */
5892 if (reg_equiv_mem (regno) != 0)
5894 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5895 context_reg_class,
5896 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5897 return 1;
5899 #endif
5901 if (reg_equiv_memory_loc (regno)
5902 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5904 rtx tem = make_memloc (x, regno);
5905 if (reg_equiv_address (regno) != 0
5906 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5908 x = tem;
5909 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5910 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5911 ind_levels, insn);
5912 if (!rtx_equal_p (x, tem))
5913 push_reg_equiv_alt_mem (regno, x);
5917 if (reg_renumber[regno] >= 0)
5918 regno = reg_renumber[regno];
5920 if (regno >= FIRST_PSEUDO_REGISTER
5921 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5922 index_code))
5924 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5925 context_reg_class,
5926 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5927 return 1;
5930 /* If a register appearing in an address is the subject of a CLOBBER
5931 in this insn, reload it into some other register to be safe.
5932 The CLOBBER is supposed to make the register unavailable
5933 from before this insn to after it. */
5934 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5936 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5937 context_reg_class,
5938 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5939 return 1;
5942 return 0;
5944 case SUBREG:
5945 if (REG_P (SUBREG_REG (x)))
5947 /* If this is a SUBREG of a hard register and the resulting register
5948 is of the wrong class, reload the whole SUBREG. This avoids
5949 needless copies if SUBREG_REG is multi-word. */
5950 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
5952 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
5954 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5955 index_code))
5957 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5958 context_reg_class,
5959 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5960 return 1;
5963 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
5964 is larger than the class size, then reload the whole SUBREG. */
5965 else
5967 enum reg_class rclass = context_reg_class;
5968 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
5969 > reg_class_size[(int) rclass])
5971 /* If the inner register will be replaced by a memory
5972 reference, we can do this only if we can replace the
5973 whole subreg by a (narrower) memory reference. If
5974 this is not possible, fall through and reload just
5975 the inner register (including address reloads). */
5976 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
5978 rtx tem = find_reloads_subreg_address (x, opnum,
5979 ADDR_TYPE (type),
5980 ind_levels, insn,
5981 NULL);
5982 if (tem)
5984 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
5985 GET_MODE (tem), VOIDmode, 0, 0,
5986 opnum, type);
5987 return 1;
5990 else
5992 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
5993 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5994 return 1;
5999 break;
6001 default:
6002 break;
6006 const char *fmt = GET_RTX_FORMAT (code);
6007 int i;
6009 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6011 if (fmt[i] == 'e')
6012 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6013 we get here. */
6014 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6015 code, SCRATCH, &XEXP (x, i),
6016 opnum, type, ind_levels, insn);
6020 #undef REG_OK_FOR_CONTEXT
6021 return 0;
6024 /* X, which is found at *LOC, is a part of an address that needs to be
6025 reloaded into a register of class RCLASS. If X is a constant, or if
6026 X is a PLUS that contains a constant, check that the constant is a
6027 legitimate operand and that we are supposed to be able to load
6028 it into the register.
6030 If not, force the constant into memory and reload the MEM instead.
6032 MODE is the mode to use, in case X is an integer constant.
6034 OPNUM and TYPE describe the purpose of any reloads made.
6036 IND_LEVELS says how many levels of indirect addressing this machine
6037 supports. */
6039 static void
6040 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6041 machine_mode mode, int opnum,
6042 enum reload_type type, int ind_levels)
6044 if (CONSTANT_P (x)
6045 && (!targetm.legitimate_constant_p (mode, x)
6046 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6048 x = force_const_mem (mode, x);
6049 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6050 opnum, type, ind_levels, 0);
6053 else if (GET_CODE (x) == PLUS
6054 && CONSTANT_P (XEXP (x, 1))
6055 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6056 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6057 == NO_REGS))
6059 rtx tem;
6061 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6062 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6063 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6064 opnum, type, ind_levels, 0);
6067 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6068 mode, VOIDmode, 0, 0, opnum, type);
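/* Illustrative sketch added for exposition (not part of the original file):
   reloading a bare constant used as an address.  If the target cannot load
   the constant directly (or its preferred reload class is NO_REGS), the
   routine above first forces it into the constant pool and reloads the
   resulting MEM's address instead.  The mode, reload type and indirection
   level below are placeholders.  */
#if 0
static void
example_reload_constant_address (rtx *loc, int opnum)
{
  find_reloads_address_part (*loc, loc,
                             base_reg_class (SImode, ADDR_SPACE_GENERIC,
                                             MEM, SCRATCH),
                             Pmode, opnum, RELOAD_FOR_INPUT_ADDRESS, 0);
}
#endif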
6071 /* X, a subreg of a pseudo, is a part of an address that needs to be
6072 reloaded, and the pseudo is equivalent to a memory location.
6074 Attempt to replace the whole subreg by a (possibly narrower or wider)
6075 memory reference. If this is possible, return this new memory
6076 reference, and push all required address reloads. Otherwise,
6077 return NULL.
6079 OPNUM and TYPE identify the purpose of the reload.
6081 IND_LEVELS says how many levels of indirect addressing are
6082 supported at this point in the address.
6084 INSN, if nonzero, is the insn in which we do the reload. It is used
6085 to determine where to put USEs for pseudos that we have to replace with
6086 stack slots. */
6088 static rtx
6089 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6090 int ind_levels, rtx_insn *insn,
6091 int *address_reloaded)
6093 machine_mode outer_mode = GET_MODE (x);
6094 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6095 int regno = REGNO (SUBREG_REG (x));
6096 int reloaded = 0;
6097 rtx tem, orig;
6098 int offset;
6100 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6102 /* We cannot replace the subreg with a modified memory reference if:
6104 - we have a paradoxical subreg that implicitly acts as a zero or
6105 sign extension operation due to LOAD_EXTEND_OP;
6107 - we have a subreg that is implicitly supposed to act on the full
6108 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6110 - the address of the equivalent memory location is mode-dependent; or
6112 - we have a paradoxical subreg and the resulting memory is not
6113 sufficiently aligned to allow access in the wider mode.
6115 In addition, we choose not to perform the replacement for *any*
6116 paradoxical subreg, even if it were possible in principle. This
6117 is to avoid generating wider memory references than necessary.
6119 This corresponds to how previous versions of reload used to handle
6120 paradoxical subregs where no address reload was required. */
6122 if (paradoxical_subreg_p (x))
6123 return NULL;
6125 if (WORD_REGISTER_OPERATIONS
6126 && partial_subreg_p (outer_mode, inner_mode)
6127 && ((GET_MODE_SIZE (outer_mode) - 1) / UNITS_PER_WORD
6128 == (GET_MODE_SIZE (inner_mode) - 1) / UNITS_PER_WORD))
6129 return NULL;
6131 /* Since we don't attempt to handle paradoxical subregs, we can just
6132 call into simplify_subreg, which will handle all remaining checks
6133 for us. */
6134 orig = make_memloc (SUBREG_REG (x), regno);
6135 offset = SUBREG_BYTE (x);
6136 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6137 if (!tem || !MEM_P (tem))
6138 return NULL;
6140 /* Now push all required address reloads, if any. */
6141 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6142 XEXP (tem, 0), &XEXP (tem, 0),
6143 opnum, type, ind_levels, insn);
6144 /* ??? Do we need to handle nonzero offsets somehow? */
6145 if (!offset && !rtx_equal_p (tem, orig))
6146 push_reg_equiv_alt_mem (regno, tem);
6148 /* For some processors an address may be valid in the original mode but
6149 not in a smaller mode. For example, ARM accepts a scaled index register
6150 in SImode but not in HImode. Note that this is only a problem if the
6151 address in reg_equiv_mem is already invalid in the new mode; other
6152 cases would be fixed by find_reloads_address as usual.
6154 ??? We attempt to handle such cases here by doing an additional reload
6155 of the full address after the usual processing by find_reloads_address.
6156 Note that this may not work in the general case, but it seems to cover
6157 the cases where this situation currently occurs. A more general fix
6158 might be to reload the *value* instead of the address, but this would
6159 not be expected by the callers of this routine as-is.
6161 If find_reloads_address already completely replaced the address, there
6162 is nothing further to do. */
6163 if (reloaded == 0
6164 && reg_equiv_mem (regno) != 0
6165 && !strict_memory_address_addr_space_p
6166 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6167 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6169 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6170 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6171 MEM, SCRATCH),
6172 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6173 reloaded = 1;
6176 /* If this is not a toplevel operand, find_reloads doesn't see this
6177 substitution. We have to emit a USE of the pseudo so that
6178 delete_output_reload can see it. */
6179 if (replace_reloads && recog_data.operand[opnum] != x)
6180 /* We mark the USE with QImode so that we recognize it as one that
6181 can be safely deleted at the end of reload. */
6182 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6183 QImode);
6185 if (address_reloaded)
6186 *address_reloaded = reloaded;
6188 return tem;
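/* Illustrative sketch added for exposition (not part of the original file):
   assuming SImode pseudo 500 got no hard register and lives in a stack
   slot, the routine above turns (subreg:HI (reg:SI 500) 2) into an HImode
   MEM at the corresponding offset within that slot (the exact offset
   depends on endianness), pushing any address reloads the new MEM needs.
   The register number and byte offset are arbitrary.  */
#if 0
static rtx
example_narrow_subreg_of_spilled_pseudo (int opnum, enum reload_type type,
                                         rtx_insn *insn)
{
  rtx pseudo = gen_rtx_REG (SImode, 500);
  rtx x = gen_rtx_SUBREG (HImode, pseudo, 2);
  int addr_reloaded;
  return find_reloads_subreg_address (x, opnum, type, 1, insn,
                                      &addr_reloaded);
}
#endif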
6191 /* Substitute into the current INSN the registers into which we have reloaded
6192 the things that need reloading. The array `replacements'
6193 contains the locations of all pointers that must be changed
6194 and says what to replace them with.
6196 Return the rtx that X translates into; usually X, but modified. */
6198 void
6199 subst_reloads (rtx_insn *insn)
6201 int i;
6203 for (i = 0; i < n_replacements; i++)
6205 struct replacement *r = &replacements[i];
6206 rtx reloadreg = rld[r->what].reg_rtx;
6207 if (reloadreg)
6209 #ifdef DEBUG_RELOAD
6210 /* This checking takes a very long time on some platforms
6211 causing the gcc.c-torture/compile/limits-fnargs.c test
6212 to time out during testing. See PR 31850.
6214 Internal consistency test. Check that we don't modify
6215 anything in the equivalence arrays. Whenever something from
6216 those arrays needs to be reloaded, it must be unshared before
6217 being substituted into; the equivalence must not be modified.
6218 Otherwise, if the equivalence is used after that, it will
6219 have been modified, and the thing substituted (probably a
6220 register) is likely overwritten and not a usable equivalence. */
6221 int check_regno;
6223 for (check_regno = 0; check_regno < max_regno; check_regno++)
6225 #define CHECK_MODF(ARRAY) \
6226 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6227 || !loc_mentioned_in_p (r->where, \
6228 (*reg_equivs)[check_regno].ARRAY))
6230 CHECK_MODF (constant);
6231 CHECK_MODF (memory_loc);
6232 CHECK_MODF (address);
6233 CHECK_MODF (mem);
6234 #undef CHECK_MODF
6236 #endif /* DEBUG_RELOAD */
6238 /* If we're replacing a LABEL_REF with a register, there must
6239 already be an indication (to e.g. flow) which label this
6240 register refers to. */
6241 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6242 || !JUMP_P (insn)
6243 || find_reg_note (insn,
6244 REG_LABEL_OPERAND,
6245 XEXP (*r->where, 0))
6246 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6248 /* Encapsulate RELOADREG so its machine mode matches what
6249 used to be there. Note that gen_lowpart_common will
6250 do the wrong thing if RELOADREG is multi-word. RELOADREG
6251 will always be a REG here. */
6252 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6253 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6255 *r->where = reloadreg;
6257 /* If reload got no reg and isn't optional, something's wrong. */
6258 else
6259 gcc_assert (rld[r->what].optional);
6263 /* Make a copy of any replacements being done into X and move those
6264 copies to locations in Y, a copy of X. */
6266 void
6267 copy_replacements (rtx x, rtx y)
6269 copy_replacements_1 (&x, &y, n_replacements);
6272 static void
6273 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6275 int i, j;
6276 rtx x, y;
6277 struct replacement *r;
6278 enum rtx_code code;
6279 const char *fmt;
6281 for (j = 0; j < orig_replacements; j++)
6282 if (replacements[j].where == px)
6284 r = &replacements[n_replacements++];
6285 r->where = py;
6286 r->what = replacements[j].what;
6287 r->mode = replacements[j].mode;
6290 x = *px;
6291 y = *py;
6292 code = GET_CODE (x);
6293 fmt = GET_RTX_FORMAT (code);
6295 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6297 if (fmt[i] == 'e')
6298 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6299 else if (fmt[i] == 'E')
6300 for (j = XVECLEN (x, i); --j >= 0; )
6301 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6302 orig_replacements);
6306 /* Change any replacements being done to *X to be done to *Y. */
6308 void
6309 move_replacements (rtx *x, rtx *y)
6311 int i;
6313 for (i = 0; i < n_replacements; i++)
6314 if (replacements[i].where == x)
6315 replacements[i].where = y;
6318 /* If LOC was scheduled to be replaced by something, return the replacement.
6319 Otherwise, return *LOC. */
6321 rtx
6322 find_replacement (rtx *loc)
6324 struct replacement *r;
6326 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6328 rtx reloadreg = rld[r->what].reg_rtx;
6330 if (reloadreg && r->where == loc)
6332 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6333 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6335 return reloadreg;
6337 else if (reloadreg && GET_CODE (*loc) == SUBREG
6338 && r->where == &SUBREG_REG (*loc))
6340 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6341 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6343 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6344 GET_MODE (SUBREG_REG (*loc)),
6345 SUBREG_BYTE (*loc));
6349 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6350 what's inside and make a new rtl if so. */
6351 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6352 || GET_CODE (*loc) == MULT)
6354 rtx x = find_replacement (&XEXP (*loc, 0));
6355 rtx y = find_replacement (&XEXP (*loc, 1));
6357 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6358 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6361 return *loc;
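/* Illustrative sketch added for exposition (not part of the original file):
   the usual way code that builds reload insns consults the replacement
   table.  The helper name is made up.  */
#if 0
static rtx
example_reloaded_address (rtx mem)
{
  /* Either the original address or the reload register chosen for it.  */
  return find_replacement (&XEXP (mem, 0));
}
#endif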
6364 /* Return nonzero if register in range [REGNO, ENDREGNO)
6365 appears either explicitly or implicitly in X
6366 other than being stored into (except for earlyclobber operands).
6368 References contained within the substructure at LOC do not count.
6369 LOC may be zero, meaning don't ignore anything.
6371 This is similar to refers_to_regno_p in rtlanal.c except that we
6372 look at equivalences for pseudos that didn't get hard registers. */
6374 static int
6375 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6376 rtx x, rtx *loc)
6378 int i;
6379 unsigned int r;
6380 RTX_CODE code;
6381 const char *fmt;
6383 if (x == 0)
6384 return 0;
6386 repeat:
6387 code = GET_CODE (x);
6389 switch (code)
6391 case REG:
6392 r = REGNO (x);
6394 /* If this is a pseudo, a hard register must not have been allocated.
6395 X must therefore either be a constant or be in memory. */
6396 if (r >= FIRST_PSEUDO_REGISTER)
6398 if (reg_equiv_memory_loc (r))
6399 return refers_to_regno_for_reload_p (regno, endregno,
6400 reg_equiv_memory_loc (r),
6401 (rtx*) 0);
6403 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6404 return 0;
6407 return endregno > r && regno < END_REGNO (x);
6409 case SUBREG:
6410 /* If this is a SUBREG of a hard reg, we can see exactly which
6411 registers are being modified. Otherwise, handle normally. */
6412 if (REG_P (SUBREG_REG (x))
6413 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6415 unsigned int inner_regno = subreg_regno (x);
6416 unsigned int inner_endregno
6417 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6418 ? subreg_nregs (x) : 1);
6420 return endregno > inner_regno && regno < inner_endregno;
6422 break;
6424 case CLOBBER:
6425 case SET:
6426 if (&SET_DEST (x) != loc
6427 /* Note setting a SUBREG counts as referring to the REG it is in for
6428 a pseudo but not for hard registers since we can
6429 treat each word individually. */
6430 && ((GET_CODE (SET_DEST (x)) == SUBREG
6431 && loc != &SUBREG_REG (SET_DEST (x))
6432 && REG_P (SUBREG_REG (SET_DEST (x)))
6433 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6434 && refers_to_regno_for_reload_p (regno, endregno,
6435 SUBREG_REG (SET_DEST (x)),
6436 loc))
6437 /* If the output is an earlyclobber operand, this is
6438 a conflict. */
6439 || ((!REG_P (SET_DEST (x))
6440 || earlyclobber_operand_p (SET_DEST (x)))
6441 && refers_to_regno_for_reload_p (regno, endregno,
6442 SET_DEST (x), loc))))
6443 return 1;
6445 if (code == CLOBBER || loc == &SET_SRC (x))
6446 return 0;
6447 x = SET_SRC (x);
6448 goto repeat;
6450 default:
6451 break;
6454 /* X does not match, so try its subexpressions. */
6456 fmt = GET_RTX_FORMAT (code);
6457 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6459 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6461 if (i == 0)
6463 x = XEXP (x, 0);
6464 goto repeat;
6466 else
6467 if (refers_to_regno_for_reload_p (regno, endregno,
6468 XEXP (x, i), loc))
6469 return 1;
6471 else if (fmt[i] == 'E')
6473 int j;
6474 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6475 if (loc != &XVECEXP (x, i, j)
6476 && refers_to_regno_for_reload_p (regno, endregno,
6477 XVECEXP (x, i, j), loc))
6478 return 1;
6481 return 0;
6484 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6485 we check if any register number in X conflicts with the relevant register
6486 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6487 contains a MEM (we don't bother checking for memory addresses that can't
6488 conflict because we expect this to be a rare case).
6490 This function is similar to reg_overlap_mentioned_p in rtlanal.c except
6491 that we look at equivalences for pseudos that didn't get hard registers. */
6493 int
6494 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6496 int regno, endregno;
6498 /* Overly conservative. */
6499 if (GET_CODE (x) == STRICT_LOW_PART
6500 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6501 x = XEXP (x, 0);
6503 /* If either argument is a constant, then modifying X can not affect IN. */
6504 if (CONSTANT_P (x) || CONSTANT_P (in))
6505 return 0;
6506 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6507 return refers_to_mem_for_reload_p (in);
6508 else if (GET_CODE (x) == SUBREG)
6510 regno = REGNO (SUBREG_REG (x));
6511 if (regno < FIRST_PSEUDO_REGISTER)
6512 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6513 GET_MODE (SUBREG_REG (x)),
6514 SUBREG_BYTE (x),
6515 GET_MODE (x));
6516 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6517 ? subreg_nregs (x) : 1);
6519 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6521 else if (REG_P (x))
6523 regno = REGNO (x);
6525 /* If this is a pseudo, it must not have been assigned a hard register.
6526 Therefore, it must either be in memory or be a constant. */
6528 if (regno >= FIRST_PSEUDO_REGISTER)
6530 if (reg_equiv_memory_loc (regno))
6531 return refers_to_mem_for_reload_p (in);
6532 gcc_assert (reg_equiv_constant (regno));
6533 return 0;
6536 endregno = END_REGNO (x);
6538 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6540 else if (MEM_P (x))
6541 return refers_to_mem_for_reload_p (in);
6542 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC
6543 || GET_CODE (x) == CC0)
6544 return reg_mentioned_p (x, in);
6545 else
6547 gcc_assert (GET_CODE (x) == PLUS);
6549 /* We actually want to know if X is mentioned somewhere inside IN.
6550 We must not say that (plus (sp) (const_int 124)) is in
6551 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6552 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6553 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6554 while (MEM_P (in))
6555 in = XEXP (in, 0);
6556 if (REG_P (in))
6557 return 0;
6558 else if (GET_CODE (in) == PLUS)
6559 return (rtx_equal_p (x, in)
6560 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6561 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6562 else return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6563 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6566 gcc_unreachable ();
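/* Illustrative sketch added for exposition (not part of the original file):
   modifying hard register 3 conflicts with an input that addresses memory
   through that register, so the call below returns nonzero.  The register
   number is arbitrary.  */
#if 0
static int
example_overlap (void)
{
  rtx r3 = gen_rtx_REG (Pmode, 3);
  rtx in = gen_rtx_MEM (SImode, gen_rtx_PLUS (Pmode, r3, GEN_INT (8)));
  return reg_overlap_mentioned_for_reload_p (r3, in);
}
#endif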
6569 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6570 registers. */
6572 static int
6573 refers_to_mem_for_reload_p (rtx x)
6575 const char *fmt;
6576 int i;
6578 if (MEM_P (x))
6579 return 1;
6581 if (REG_P (x))
6582 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6583 && reg_equiv_memory_loc (REGNO (x)));
6585 fmt = GET_RTX_FORMAT (GET_CODE (x));
6586 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6587 if (fmt[i] == 'e'
6588 && (MEM_P (XEXP (x, i))
6589 || refers_to_mem_for_reload_p (XEXP (x, i))))
6590 return 1;
6592 return 0;
6595 /* Check the insns before INSN to see if there is a suitable register
6596 containing the same value as GOAL.
6597 If OTHER is -1, look for a register in class RCLASS.
6598 Otherwise, just see if register number OTHER shares GOAL's value.
6600 Return an rtx for the register found, or zero if none is found.
6602 If RELOAD_REG_P is (short *)1,
6603 we reject any hard reg that appears in reload_reg_rtx
6604 because such a hard reg is also needed coming into this insn.
6606 If RELOAD_REG_P is any other nonzero value,
6607 it is a vector indexed by hard reg number
6608 and we reject any hard reg whose element in the vector is nonnegative
6609 as well as any that appears in reload_reg_rtx.
6611 If GOAL is zero, then GOALREG is a register number; we look
6612 for an equivalent for that register.
6614 MODE is the machine mode of the value we want an equivalence for.
6615 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6617 This function is used by jump.c as well as in the reload pass.
6619 If GOAL is the sum of the stack pointer and a constant, we treat it
6620 as if it were a constant except that sp is required to be unchanging. */
6622 rtx
6623 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6624 short *reload_reg_p, int goalreg, machine_mode mode)
6626 rtx_insn *p = insn;
6627 rtx goaltry, valtry, value;
6628 rtx_insn *where;
6629 rtx pat;
6630 int regno = -1;
6631 int valueno;
6632 int goal_mem = 0;
6633 int goal_const = 0;
6634 int goal_mem_addr_varies = 0;
6635 int need_stable_sp = 0;
6636 int nregs;
6637 int valuenregs;
6638 int num = 0;
6640 if (goal == 0)
6641 regno = goalreg;
6642 else if (REG_P (goal))
6643 regno = REGNO (goal);
6644 else if (MEM_P (goal))
6646 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6647 if (MEM_VOLATILE_P (goal))
6648 return 0;
6649 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6650 return 0;
6651 /* An address with side effects must be reexecuted. */
6652 switch (code)
6654 case POST_INC:
6655 case PRE_INC:
6656 case POST_DEC:
6657 case PRE_DEC:
6658 case POST_MODIFY:
6659 case PRE_MODIFY:
6660 return 0;
6661 default:
6662 break;
6664 goal_mem = 1;
6666 else if (CONSTANT_P (goal))
6667 goal_const = 1;
6668 else if (GET_CODE (goal) == PLUS
6669 && XEXP (goal, 0) == stack_pointer_rtx
6670 && CONSTANT_P (XEXP (goal, 1)))
6671 goal_const = need_stable_sp = 1;
6672 else if (GET_CODE (goal) == PLUS
6673 && XEXP (goal, 0) == frame_pointer_rtx
6674 && CONSTANT_P (XEXP (goal, 1)))
6675 goal_const = 1;
6676 else
6677 return 0;
6679 num = 0;
6680 /* Scan insns back from INSN, looking for one that copies
6681 a value into or out of GOAL.
6682 Stop and give up if we reach a label. */
6684 while (1)
6686 p = PREV_INSN (p);
6687 if (p && DEBUG_INSN_P (p))
6688 continue;
6689 num++;
6690 if (p == 0 || LABEL_P (p)
6691 || num > PARAM_VALUE (PARAM_MAX_RELOAD_SEARCH_INSNS))
6692 return 0;
6694 /* Don't reuse register contents from before a setjmp-type
6695 function call; on the second return (from the longjmp) it
6696 might have been clobbered by a later reuse. It doesn't
6697 seem worthwhile to go and check whether it really is reused,
6698 even though that information would be readily available;
6699 just don't reuse it across the setjmp call. */
6700 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6701 return 0;
6703 if (NONJUMP_INSN_P (p)
6704 /* If we don't want spill regs ... */
6705 && (! (reload_reg_p != 0
6706 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6707 /* ... then ignore insns introduced by reload; they aren't
6708 useful and can cause results in reload_as_needed to be
6709 different from what they were when calculating the need for
6710 spills. If we notice an input-reload insn here, we will
6711 reject it below, but it might hide a usable equivalent.
6712 That makes bad code. It may even fail: perhaps no reg was
6713 spilled for this insn because it was assumed we would find
6714 that equivalent. */
6715 || INSN_UID (p) < reload_first_uid))
6717 rtx tem;
6718 pat = single_set (p);
6720 /* First check for something that sets some reg equal to GOAL. */
6721 if (pat != 0
6722 && ((regno >= 0
6723 && true_regnum (SET_SRC (pat)) == regno
6724 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6725 ||
6726 (regno >= 0
6727 && true_regnum (SET_DEST (pat)) == regno
6728 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6729 ||
6730 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6731 /* When looking for stack pointer + const,
6732 make sure we don't use a stack adjust. */
6733 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6734 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6735 || (goal_mem
6736 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6737 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6738 || (goal_mem
6739 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6740 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6741 /* If we are looking for a constant,
6742 and something equivalent to that constant was copied
6743 into a reg, we can use that reg. */
6744 || (goal_const && REG_NOTES (p) != 0
6745 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6746 && ((rtx_equal_p (XEXP (tem, 0), goal)
6747 && (valueno
6748 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6749 || (REG_P (SET_DEST (pat))
6750 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6751 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6752 && CONST_INT_P (goal)
6753 && 0 != (goaltry
6754 = operand_subword (XEXP (tem, 0), 0, 0,
6755 VOIDmode))
6756 && rtx_equal_p (goal, goaltry)
6757 && (valtry
6758 = operand_subword (SET_DEST (pat), 0, 0,
6759 VOIDmode))
6760 && (valueno = true_regnum (valtry)) >= 0)))
6761 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6762 NULL_RTX))
6763 && REG_P (SET_DEST (pat))
6764 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6765 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6766 && CONST_INT_P (goal)
6767 && 0 != (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6768 VOIDmode))
6769 && rtx_equal_p (goal, goaltry)
6770 && (valtry
6771 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6772 && (valueno = true_regnum (valtry)) >= 0)))
6774 if (other >= 0)
6776 if (valueno != other)
6777 continue;
6779 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6780 continue;
6781 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6782 mode, valueno))
6783 continue;
6784 value = valtry;
6785 where = p;
6786 break;
6791 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6792 (or copying VALUE into GOAL, if GOAL is also a register).
6793 Now verify that VALUE is really valid. */
6795 /* VALUENO is the register number of VALUE; a hard register. */
6797 /* Don't try to re-use something that is killed in this insn. We want
6798 to be able to trust REG_UNUSED notes. */
6799 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6800 return 0;
6802 /* If we propose to get the value from the stack pointer or if GOAL is
6803 a MEM based on the stack pointer, we need a stable SP. */
6804 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6805 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6806 goal)))
6807 need_stable_sp = 1;
6809 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6810 if (GET_MODE (value) != mode)
6811 return 0;
6813 /* Reject VALUE if it was loaded from GOAL
6814 and is also a register that appears in the address of GOAL. */
6816 if (goal_mem && value == SET_DEST (single_set (where))
6817 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6818 goal, (rtx*) 0))
6819 return 0;
6821 /* Reject registers that overlap GOAL. */
6823 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6824 nregs = hard_regno_nregs (regno, mode);
6825 else
6826 nregs = 1;
6827 valuenregs = hard_regno_nregs (valueno, mode);
6829 if (!goal_mem && !goal_const
6830 && regno + nregs > valueno && regno < valueno + valuenregs)
6831 return 0;
6833 /* Reject VALUE if it is one of the regs reserved for reloads.
6834 Reload1 knows how to reuse them anyway, and it would get
6835 confused if we allocated one without its knowledge.
6836 (Now that insns introduced by reload are ignored above,
6837 this case shouldn't happen, but I'm not positive.) */
6839 if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6841 int i;
6842 for (i = 0; i < valuenregs; ++i)
6843 if (reload_reg_p[valueno + i] >= 0)
6844 return 0;
6847 /* Reject VALUE if it is a register being used for an input reload
6848 even if it is not one of those reserved. */
6850 if (reload_reg_p != 0)
6852 int i;
6853 for (i = 0; i < n_reloads; i++)
6854 if (rld[i].reg_rtx != 0
6855 && rld[i].in
6856 && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6857 && (int) END_REGNO (rld[i].reg_rtx) > valueno)
6858 return 0;
6861 if (goal_mem)
6862 /* We must treat frame pointer as varying here,
6863 since it can vary--in a nonlocal goto as generated by expand_goto. */
6864 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6866 /* Now verify that the values of GOAL and VALUE remain unaltered
6867 until INSN is reached. */
6869 p = insn;
6870 while (1)
6872 p = PREV_INSN (p);
6873 if (p == where)
6874 return value;
6876 /* Don't trust the conversion past a function call
6877 if either of the two is in a call-clobbered register, or memory. */
6878 if (CALL_P (p))
6880 int i;
6882 if (goal_mem || need_stable_sp)
6883 return 0;
6885 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6886 for (i = 0; i < nregs; ++i)
6887 if (call_used_regs[regno + i]
6888 || targetm.hard_regno_call_part_clobbered (regno + i, mode))
6889 return 0;
6891 if (valueno >= 0 && valueno < FIRST_PSEUDO_REGISTER)
6892 for (i = 0; i < valuenregs; ++i)
6893 if (call_used_regs[valueno + i]
6894 || targetm.hard_regno_call_part_clobbered (valueno + i,
6895 mode))
6896 return 0;
6899 if (INSN_P (p))
6901 pat = PATTERN (p);
6903 /* Watch out for unspec_volatile, and volatile asms. */
6904 if (volatile_insn_p (pat))
6905 return 0;
6907 /* If this insn P stores in either GOAL or VALUE, return 0.
6908 If GOAL is a memory ref and this insn writes memory, return 0.
6909 If GOAL is a memory ref and its address is not constant,
6910 and this insn P changes a register used in GOAL, return 0. */
6912 if (GET_CODE (pat) == COND_EXEC)
6913 pat = COND_EXEC_CODE (pat);
6914 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6916 rtx dest = SET_DEST (pat);
6917 while (GET_CODE (dest) == SUBREG
6918 || GET_CODE (dest) == ZERO_EXTRACT
6919 || GET_CODE (dest) == STRICT_LOW_PART)
6920 dest = XEXP (dest, 0);
6921 if (REG_P (dest))
6923 int xregno = REGNO (dest);
6924 int end_xregno = END_REGNO (dest);
6925 if (xregno < regno + nregs && end_xregno > regno)
6926 return 0;
6927 if (xregno < valueno + valuenregs
6928 && end_xregno > valueno)
6929 return 0;
6930 if (goal_mem_addr_varies
6931 && reg_overlap_mentioned_for_reload_p (dest, goal))
6932 return 0;
6933 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6934 return 0;
6936 else if (goal_mem && MEM_P (dest)
6937 && ! push_operand (dest, GET_MODE (dest)))
6938 return 0;
6939 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6940 && reg_equiv_memory_loc (regno) != 0)
6941 return 0;
6942 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6943 return 0;
6945 else if (GET_CODE (pat) == PARALLEL)
6947 int i;
6948 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6950 rtx v1 = XVECEXP (pat, 0, i);
6951 if (GET_CODE (v1) == COND_EXEC)
6952 v1 = COND_EXEC_CODE (v1);
6953 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
6955 rtx dest = SET_DEST (v1);
6956 while (GET_CODE (dest) == SUBREG
6957 || GET_CODE (dest) == ZERO_EXTRACT
6958 || GET_CODE (dest) == STRICT_LOW_PART)
6959 dest = XEXP (dest, 0);
6960 if (REG_P (dest))
6962 int xregno = REGNO (dest);
6963 int end_xregno = END_REGNO (dest);
6964 if (xregno < regno + nregs
6965 && end_xregno > regno)
6966 return 0;
6967 if (xregno < valueno + valuenregs
6968 && end_xregno > valueno)
6969 return 0;
6970 if (goal_mem_addr_varies
6971 && reg_overlap_mentioned_for_reload_p (dest,
6972 goal))
6973 return 0;
6974 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6975 return 0;
6977 else if (goal_mem && MEM_P (dest)
6978 && ! push_operand (dest, GET_MODE (dest)))
6979 return 0;
6980 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6981 && reg_equiv_memory_loc (regno) != 0)
6982 return 0;
6983 else if (need_stable_sp
6984 && push_operand (dest, GET_MODE (dest)))
6985 return 0;
6990 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
6992 rtx link;
6994 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
6995 link = XEXP (link, 1))
6997 pat = XEXP (link, 0);
6998 if (GET_CODE (pat) == CLOBBER)
7000 rtx dest = SET_DEST (pat);
7002 if (REG_P (dest))
7004 int xregno = REGNO (dest);
7005 int end_xregno = END_REGNO (dest);
7007 if (xregno < regno + nregs
7008 && end_xregno > regno)
7009 return 0;
7010 else if (xregno < valueno + valuenregs
7011 && end_xregno > valueno)
7012 return 0;
7013 else if (goal_mem_addr_varies
7014 && reg_overlap_mentioned_for_reload_p (dest,
7015 goal))
7016 return 0;
7019 else if (goal_mem && MEM_P (dest)
7020 && ! push_operand (dest, GET_MODE (dest)))
7021 return 0;
7022 else if (need_stable_sp
7023 && push_operand (dest, GET_MODE (dest)))
7024 return 0;
7029 #if AUTO_INC_DEC
7030 /* If this insn auto-increments or auto-decrements
7031 either regno or valueno, return 0 now.
7032 If GOAL is a memory ref and its address is not constant,
7033 and this insn P increments a register used in GOAL, return 0. */
7035 rtx link;
7037 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7038 if (REG_NOTE_KIND (link) == REG_INC
7039 && REG_P (XEXP (link, 0)))
7041 int incno = REGNO (XEXP (link, 0));
7042 if (incno < regno + nregs && incno >= regno)
7043 return 0;
7044 if (incno < valueno + valuenregs && incno >= valueno)
7045 return 0;
7046 if (goal_mem_addr_varies
7047 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7048 goal))
7049 return 0;
7052 #endif
7057 /* Find a place where INCED appears in an increment or decrement operator
7058 within X, and return the amount INCED is incremented or decremented by.
7059 The value is always positive. */
7061 static int
7062 find_inc_amount (rtx x, rtx inced)
7064 enum rtx_code code = GET_CODE (x);
7065 const char *fmt;
7066 int i;
7068 if (code == MEM)
7070 rtx addr = XEXP (x, 0);
7071 if ((GET_CODE (addr) == PRE_DEC
7072 || GET_CODE (addr) == POST_DEC
7073 || GET_CODE (addr) == PRE_INC
7074 || GET_CODE (addr) == POST_INC)
7075 && XEXP (addr, 0) == inced)
7076 return GET_MODE_SIZE (GET_MODE (x));
7077 else if ((GET_CODE (addr) == PRE_MODIFY
7078 || GET_CODE (addr) == POST_MODIFY)
7079 && GET_CODE (XEXP (addr, 1)) == PLUS
7080 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7081 && XEXP (addr, 0) == inced
7082 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7084 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7085 return i < 0 ? -i : i;
7089 fmt = GET_RTX_FORMAT (code);
7090 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7092 if (fmt[i] == 'e')
7094 int tem = find_inc_amount (XEXP (x, i), inced);
7095 if (tem != 0)
7096 return tem;
7098 if (fmt[i] == 'E')
7100 int j;
7101 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7103 int tem = find_inc_amount (XVECEXP (x, i, j), inced);
7104 if (tem != 0)
7105 return tem;
7110 return 0;
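/* For example (hypothetical rtl, assuming a 4-byte SImode):

     find_inc_amount ((mem:SI (post_inc:SI (reg:SI 4))), (reg:SI 4))

   returns 4, i.e. GET_MODE_SIZE (SImode), while

     find_inc_amount ((mem:SI (pre_modify:SI (reg:SI 4)
				(plus:SI (reg:SI 4) (const_int -12)))),
		      (reg:SI 4))

   returns 12: for {PRE,POST}_MODIFY the constant term is taken from the
   PLUS and its absolute value is returned.  */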
7113 /* Return 1 if any hard register in the range [REGNO, ENDREGNO) is the
7114 subject of a REG_INC note in insn INSN. REGNO must refer to a hard register. */
7116 static int
7117 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7118 rtx insn)
7120 rtx link;
7122 if (!AUTO_INC_DEC)
7123 return 0;
7125 gcc_assert (insn);
7127 if (! INSN_P (insn))
7128 return 0;
7130 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7131 if (REG_NOTE_KIND (link) == REG_INC)
7133 unsigned int test = (int) REGNO (XEXP (link, 0));
7134 if (test >= regno && test < endregno)
7135 return 1;
7137 return 0;
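/* For instance (the insn and register numbers are hypothetical): if INSN
   carries the note (expr_list:REG_INC (reg:SI 4) ...), then

     reg_inc_found_and_valid_p (3, 5, insn)   returns 1   (4 is in [3, 5))
     reg_inc_found_and_valid_p (5, 7, insn)   returns 0

   and when AUTO_INC_DEC is not enabled the function always returns 0.  */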
7140 /* Return 1 if register REGNO (or any hard register it occupies in mode
7141 MODE) is the subject of a clobber in insn INSN. If SETS is 1, also
7142 consider SETs; if SETS is 2, REG_INC notes are checked as well. REGNO must refer to a hard register. */
7144 int
7145 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7146 int sets)
7148 /* regno must be a hard register. */
7149 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7151 unsigned int endregno = end_hard_regno (mode, regno);
7153 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7154 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7155 && REG_P (XEXP (PATTERN (insn), 0)))
7157 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7159 return test >= regno && test < endregno;
7162 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7163 return 1;
7165 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7167 int i = XVECLEN (PATTERN (insn), 0) - 1;
7169 for (; i >= 0; i--)
7171 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7172 if ((GET_CODE (elt) == CLOBBER
7173 || (sets == 1 && GET_CODE (elt) == SET))
7174 && REG_P (XEXP (elt, 0)))
7176 unsigned int test = REGNO (XEXP (elt, 0));
7178 if (test >= regno && test < endregno)
7179 return 1;
7181 if (sets == 2
7182 && reg_inc_found_and_valid_p (regno, endregno, elt))
7183 return 1;
7187 return 0;
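/* A hypothetical use (the insn and register numbers are invented): on a
   target where DImode occupies two word-sized hard registers,

     regno_clobbered_p (2, insn, DImode, 0)

   returns 1 if INSN's pattern is a CLOBBER of hard reg 2 or 3, or a
   PARALLEL containing such a CLOBBER.  With SETS == 1 a plain SET of
   either register counts as well; with SETS == 2, REG_INC notes on INSN
   are also taken into account.  */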
7190 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
7191 rtx
7192 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7194 int regno;
7196 if (GET_MODE (reloadreg) == mode)
7197 return reloadreg;
7199 regno = REGNO (reloadreg);
7201 if (REG_WORDS_BIG_ENDIAN)
7202 regno += ((int) REG_NREGS (reloadreg)
7203 - (int) hard_regno_nregs (regno, mode));
7205 return gen_rtx_REG (mode, regno);
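/* Worked example with hypothetical numbers: if RELOADREG is (reg:DI 10),
   occupying hard regs 10 and 11, and MODE is SImode (one hard reg), the
   result is (reg:SI 10) on a !REG_WORDS_BIG_ENDIAN target.  On a
   REG_WORDS_BIG_ENDIAN target the low part lives in the higher-numbered
   register, so the adjustment above gives

     regno = 10 + REG_NREGS (reloadreg) - hard_regno_nregs (10, SImode)
	   = 10 + 2 - 1 = 11

   and the result is (reg:SI 11).  */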
7208 static const char *const reload_when_needed_name[] =
7210 "RELOAD_FOR_INPUT",
7211 "RELOAD_FOR_OUTPUT",
7212 "RELOAD_FOR_INSN",
7213 "RELOAD_FOR_INPUT_ADDRESS",
7214 "RELOAD_FOR_INPADDR_ADDRESS",
7215 "RELOAD_FOR_OUTPUT_ADDRESS",
7216 "RELOAD_FOR_OUTADDR_ADDRESS",
7217 "RELOAD_FOR_OPERAND_ADDRESS",
7218 "RELOAD_FOR_OPADDR_ADDR",
7219 "RELOAD_OTHER",
7220 "RELOAD_FOR_OTHER_ADDRESS"
7223 /* These functions are used to print the variables set by 'find_reloads'. */
7225 DEBUG_FUNCTION void
7226 debug_reload_to_stream (FILE *f)
7228 int r;
7229 const char *prefix;
7231 if (! f)
7232 f = stderr;
7233 for (r = 0; r < n_reloads; r++)
7235 fprintf (f, "Reload %d: ", r);
7237 if (rld[r].in != 0)
7239 fprintf (f, "reload_in (%s) = ",
7240 GET_MODE_NAME (rld[r].inmode));
7241 print_inline_rtx (f, rld[r].in, 24);
7242 fprintf (f, "\n\t");
7245 if (rld[r].out != 0)
7247 fprintf (f, "reload_out (%s) = ",
7248 GET_MODE_NAME (rld[r].outmode));
7249 print_inline_rtx (f, rld[r].out, 24);
7250 fprintf (f, "\n\t");
7253 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7255 fprintf (f, "%s (opnum = %d)",
7256 reload_when_needed_name[(int) rld[r].when_needed],
7257 rld[r].opnum);
7259 if (rld[r].optional)
7260 fprintf (f, ", optional");
7262 if (rld[r].nongroup)
7263 fprintf (f, ", nongroup");
7265 if (rld[r].inc != 0)
7266 fprintf (f, ", inc by %d", rld[r].inc);
7268 if (rld[r].nocombine)
7269 fprintf (f, ", can't combine");
7271 if (rld[r].secondary_p)
7272 fprintf (f, ", secondary_reload_p");
7274 if (rld[r].in_reg != 0)
7276 fprintf (f, "\n\treload_in_reg: ");
7277 print_inline_rtx (f, rld[r].in_reg, 24);
7280 if (rld[r].out_reg != 0)
7282 fprintf (f, "\n\treload_out_reg: ");
7283 print_inline_rtx (f, rld[r].out_reg, 24);
7286 if (rld[r].reg_rtx != 0)
7288 fprintf (f, "\n\treload_reg_rtx: ");
7289 print_inline_rtx (f, rld[r].reg_rtx, 24);
7292 prefix = "\n\t";
7293 if (rld[r].secondary_in_reload != -1)
7295 fprintf (f, "%ssecondary_in_reload = %d",
7296 prefix, rld[r].secondary_in_reload);
7297 prefix = ", ";
7300 if (rld[r].secondary_out_reload != -1)
7301 fprintf (f, "%ssecondary_out_reload = %d\n",
7302 prefix, rld[r].secondary_out_reload);
7304 prefix = "\n\t";
7305 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7307 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7308 insn_data[rld[r].secondary_in_icode].name);
7309 prefix = ", ";
7312 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7313 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7314 insn_data[rld[r].secondary_out_icode].name);
7316 fprintf (f, "\n");
7320 DEBUG_FUNCTION void
7321 debug_reload (void)
7323 debug_reload_to_stream (stderr);
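/* These two entry points are mainly intended for interactive use from a
   debugger; for example, while stopped inside find_reloads one can issue

     (gdb) call debug_reload ()

   to dump every rld[] entry for the current insn to stderr in the format
   produced by debug_reload_to_stream above.  */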