1 /* Search an insn for pseudo regs that must be in hard regs and are not.
2 Copyright (C) 1987-2023 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* This file contains subroutines used only from the file reload1.cc.
21 It knows how to scan one insn for operands and values
22 that need to be copied into registers to make valid code.
23 It also finds other operands and values which are valid
24 but for which equivalent values in registers exist and
25 ought to be used instead.
27 Before processing the first insn of the function, call `init_reload'.
28 init_reload actually has to be called earlier anyway.
30 To scan an insn, call `find_reloads'. This does two things:
31 1. sets up tables describing which values must be reloaded
32 for this insn, and what kind of hard regs they must be reloaded into;
33       2. optionally records the locations where those values appear in
34 the data, so they can be replaced properly later.
35 This is done only if the second arg to `find_reloads' is nonzero.
37 The third arg to `find_reloads' specifies the number of levels
38 of indirect addressing supported by the machine. If it is zero,
39 indirect addressing is not valid. If it is one, (MEM (REG n))
40 is valid even if (REG n) did not get a hard register; if it is two,
41 (MEM (MEM (REG n))) is also valid even if (REG n) did not get a
42 hard register, and similarly for higher values.
44 Then you must choose the hard regs to reload those pseudo regs into,
45 and generate appropriate load insns before this insn and perhaps
46 also store insns after this insn. Set up the array `reload_reg_rtx'
47 to contain the REG rtx's for the registers you used. In some
48 cases `find_reloads' will return a nonzero value in `reload_reg_rtx'
49 for certain reloads. Then that tells you which register to use,
50 so you do not need to allocate one. But you still do need to add extra
51 instructions to copy the value into and out of that register.
53 Finally you must call `subst_reloads' to substitute the reload reg rtx's
54 into the locations already recorded.
56 NOTE SIDE EFFECTS:
58 find_reloads can alter the operands of the instruction it is called on.
60 1. Two operands of any sort may be interchanged, if they are in a
61 commutative instruction.
62 This happens only if find_reloads thinks the instruction will compile
63 better that way.
65 2. Pseudo-registers that are equivalent to constants are replaced
66 with those constants if they are not in hard registers.
68 1 happens every time find_reloads is called.
69 2 happens only when REPLACE is 1, which is only when
70 actually doing the reloads, not when just counting them.
72 Using a reload register for several reloads in one insn:
74 When an insn has reloads, it is considered as having three parts:
75 the input reloads, the insn itself after reloading, and the output reloads.
76 Reloads of values used in memory addresses are often needed for only one part.
78 When this is so, reload_when_needed records which part needs the reload.
79 Two reloads for different parts of the insn can share the same reload
80 register.
82 When a reload is used for addresses in multiple parts, or when it is
83 an ordinary operand, it is classified as RELOAD_OTHER, and cannot share
84 a register with any other reload. */
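/* Illustrative sketch only (the real driver lives in reload1.cc): the
   calling sequence described above looks roughly like

	init_reload ();
	for each insn needing reloads:
	  find_reloads (insn, replace, ind_levels, live_known, reload_reg_p);
	  for each reload r with rld[r].reg_rtx == 0:
	    ... pick a hard register, store it in rld[r].reg_rtx, and emit
	    the load insns before the insn and any store insns after it ...
	  subst_reloads (insn);

   The argument names follow the find_reloads prototype in reload.h; the
   loop structure is a simplification, not the actual implementation.  */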
86 #define REG_OK_STRICT
88 /* We do not enable this with CHECKING_P, since it is awfully slow. */
89 #undef DEBUG_RELOAD
91 #include "config.h"
92 #include "system.h"
93 #include "coretypes.h"
94 #include "backend.h"
95 #include "target.h"
96 #include "rtl.h"
97 #include "tree.h"
98 #include "df.h"
99 #include "memmodel.h"
100 #include "tm_p.h"
101 #include "optabs.h"
102 #include "regs.h"
103 #include "ira.h"
104 #include "recog.h"
105 #include "rtl-error.h"
106 #include "reload.h"
107 #include "addresses.h"
108 #include "function-abi.h"
110 /* True if X is a constant that can be forced into the constant pool.
111 MODE is the mode of the operand, or VOIDmode if not known. */
112 #define CONST_POOL_OK_P(MODE, X) \
113 ((MODE) != VOIDmode \
114 && CONSTANT_P (X) \
115 && GET_CODE (X) != HIGH \
116 && !targetm.cannot_force_const_mem (MODE, X))
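/* Worked example (illustration only): for an operand X that is a DFmode
   CONST_DOUBLE, CONST_POOL_OK_P (DFmode, X) is true unless the target's
   cannot_force_const_mem hook rejects it, while CONST_POOL_OK_P (VOIDmode, X)
   is always false because the operand's mode is unknown.  */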
118 /* True if RCLASS is a non-empty register class that has too few registers
119    to be safely used as a reload target class.  */
121 static inline bool
122 small_register_class_p (reg_class_t rclass)
124 return (reg_class_size [(int) rclass] == 1
125 || (reg_class_size [(int) rclass] >= 1
126 && targetm.class_likely_spilled_p (rclass)));
130 /* All reloads of the current insn are recorded here. See reload.h for
131 comments. */
132 int n_reloads;
133 struct reload rld[MAX_RELOADS];
135 /* All the "earlyclobber" operands of the current insn
136 are recorded here. */
137 int n_earlyclobbers;
138 rtx reload_earlyclobbers[MAX_RECOG_OPERANDS];
140 int reload_n_operands;
142 /* Replacing reloads.
144 If `replace_reloads' is nonzero, then as each reload is recorded
145 an entry is made for it in the table `replacements'.
146 Then later `subst_reloads' can look through that table and
147 perform all the replacements needed. */
149 /* Nonzero means record the places to replace. */
150 static int replace_reloads;
152 /* Each replacement is recorded with a structure like this. */
153 struct replacement
155 rtx *where; /* Location to store in */
156 int what; /* which reload this is for */
157 machine_mode mode; /* mode it must have */
160 static struct replacement replacements[MAX_RECOG_OPERANDS * ((MAX_REGS_PER_ADDRESS * 2) + 1)];
162 /* Number of replacements currently recorded. */
163 static int n_replacements;
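/* Conceptual sketch only (the actual code is push_replacement and
   subst_reloads further below): recording and later applying a
   replacement amounts to

	replacements[n_replacements].what = reload_number;
	replacements[n_replacements].where = loc;
	replacements[n_replacements].mode = mode;
	n_replacements++;
	...
	for (i = 0; i < n_replacements; i++)
	  *replacements[i].where = <reload reg of rld[replacements[i].what],
				    adjusted to replacements[i].mode>;

   where the angle brackets stand for details handled by subst_reloads.  */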
165 /* Used to track what is modified by an operand. */
166 struct decomposition
168 int reg_flag; /* Nonzero if referencing a register. */
169 int safe; /* Nonzero if this can't conflict with anything. */
170 rtx base; /* Base address for MEM. */
171 poly_int64 start; /* Starting offset or register number. */
172 poly_int64 end; /* Ending offset or register number. */
175 /* Save MEMs needed to copy from one class of registers to another. One MEM
176 is used per mode, but normally only one or two modes are ever used.
178    We keep two versions, before and after register elimination.  The one
179    after register elimination is recorded separately for each operand.  This
180    is done in case the address is not valid, to be sure that we reload
181    each one separately.  */
183 static rtx secondary_memlocs[NUM_MACHINE_MODES];
184 static rtx secondary_memlocs_elim[NUM_MACHINE_MODES][MAX_RECOG_OPERANDS];
185 static int secondary_memlocs_elim_used = 0;
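/* Usage sketch (illustration only): when a copy between two register
   classes has to go through memory, get_secondary_mem (x, mode, opnum, type)
   below hands out and caches secondary_memlocs_elim[(int) mode][opnum], so
   every reload of operand OPNUM in mode MODE reuses one stack slot;
   clear_secondary_mem resets the pre-elimination cache.  */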
187 /* The instruction we are doing reloads for;
188 so we can test whether a register dies in it. */
189 static rtx_insn *this_insn;
191 /* Nonzero if this instruction is a user-specified asm with operands. */
192 static int this_insn_is_asm;
194 /* If hard_regs_live_known is nonzero,
195 we can tell which hard regs are currently live,
196 at least enough to succeed in choosing dummy reloads. */
197 static int hard_regs_live_known;
199 /* Indexed by hard reg number,
200 element is nonnegative if hard reg has been spilled.
201 This vector is passed to `find_reloads' as an argument
202 and is not changed here. */
203 static short *static_reload_reg_p;
205 /* Set to 1 in subst_reg_equivs if it changes anything. */
206 static int subst_reg_equivs_changed;
208 /* On return from push_reload, holds the reload-number for the OUT
209    operand, which can be different from the one for the IN operand.  */
210 static int output_reloadnum;
212 /* Compare two RTX's. */
213 #define MATCHES(x, y) \
214 (x == y || (x != 0 && (REG_P (x) \
215 ? REG_P (y) && REGNO (x) == REGNO (y) \
216 : rtx_equal_p (x, y) && ! side_effects_p (x))))
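/* For instance (illustration only): two distinct REG rtxes that name hard
   register 3 satisfy MATCHES, because only the register numbers are
   compared; two structurally identical MEMs whose address contains a
   POST_INC do not, because side_effects_p is true for them.  */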
218 /* Indicates whether the purposes of two reloads are similar enough that
219    we can merge them.  */
220 #define MERGABLE_RELOADS(when1, when2, op1, op2) \
221 ((when1) == RELOAD_OTHER || (when2) == RELOAD_OTHER \
222 || ((when1) == (when2) && (op1) == (op2)) \
223 || ((when1) == RELOAD_FOR_INPUT && (when2) == RELOAD_FOR_INPUT) \
224 || ((when1) == RELOAD_FOR_OPERAND_ADDRESS \
225 && (when2) == RELOAD_FOR_OPERAND_ADDRESS) \
226 || ((when1) == RELOAD_FOR_OTHER_ADDRESS \
227 && (when2) == RELOAD_FOR_OTHER_ADDRESS))
229 /* Nonzero if these two reload purposes produce RELOAD_OTHER when merged. */
230 #define MERGE_TO_OTHER(when1, when2, op1, op2) \
231 ((when1) != (when2) \
232 || ! ((op1) == (op2) \
233 || (when1) == RELOAD_FOR_INPUT \
234 || (when1) == RELOAD_FOR_OPERAND_ADDRESS \
235 || (when1) == RELOAD_FOR_OTHER_ADDRESS))
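/* Worked example (illustration only): two RELOAD_FOR_INPUT reloads for
   different operands are MERGABLE_RELOADS and MERGE_TO_OTHER is false for
   them, so they may share a register without being reclassified; merging
   a RELOAD_FOR_INPUT_ADDRESS reload with a RELOAD_OTHER reload is also
   allowed, but MERGE_TO_OTHER is true, so the merged reload becomes
   RELOAD_OTHER.  */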
237 /* If we are going to reload an address, compute the reload type to
238 use. */
239 #define ADDR_TYPE(type) \
240 ((type) == RELOAD_FOR_INPUT_ADDRESS \
241 ? RELOAD_FOR_INPADDR_ADDRESS \
242 : ((type) == RELOAD_FOR_OUTPUT_ADDRESS \
243 ? RELOAD_FOR_OUTADDR_ADDRESS \
244 : (type)))
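/* For example (illustration only):
   ADDR_TYPE (RELOAD_FOR_INPUT_ADDRESS) == RELOAD_FOR_INPADDR_ADDRESS,
   ADDR_TYPE (RELOAD_FOR_OUTPUT_ADDRESS) == RELOAD_FOR_OUTADDR_ADDRESS,
   and any other type, e.g. RELOAD_FOR_OPERAND_ADDRESS, is passed through
   unchanged.  */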
246 static int push_secondary_reload (int, rtx, int, int, enum reg_class,
247 machine_mode, enum reload_type,
248 enum insn_code *, secondary_reload_info *);
249 static enum reg_class find_valid_class (machine_mode, machine_mode,
250 int, unsigned int);
251 static void push_replacement (rtx *, int, machine_mode);
252 static void dup_replacements (rtx *, rtx *);
253 static void combine_reloads (void);
254 static int find_reusable_reload (rtx *, rtx, enum reg_class,
255 enum reload_type, int, int);
256 static rtx find_dummy_reload (rtx, rtx, rtx *, rtx *, machine_mode,
257 machine_mode, reg_class_t, int, int);
258 static int hard_reg_set_here_p (unsigned int, unsigned int, rtx);
259 static struct decomposition decompose (rtx);
260 static int immune_p (rtx, rtx, struct decomposition);
261 static bool alternative_allows_const_pool_ref (rtx, const char *, int);
262 static rtx find_reloads_toplev (rtx, int, enum reload_type, int, int,
263 rtx_insn *, int *);
264 static rtx make_memloc (rtx, int);
265 static bool maybe_memory_address_addr_space_p (machine_mode, rtx,
266 addr_space_t, rtx *);
267 static int find_reloads_address (machine_mode, rtx *, rtx, rtx *,
268 int, enum reload_type, int, rtx_insn *);
269 static rtx subst_reg_equivs (rtx, rtx_insn *);
270 static rtx subst_indexed_address (rtx);
271 static void update_auto_inc_notes (rtx_insn *, int, int);
272 static int find_reloads_address_1 (machine_mode, addr_space_t, rtx, int,
273 enum rtx_code, enum rtx_code, rtx *,
274 int, enum reload_type,int, rtx_insn *);
275 static void find_reloads_address_part (rtx, rtx *, enum reg_class,
276 machine_mode, int,
277 enum reload_type, int);
278 static rtx find_reloads_subreg_address (rtx, int, enum reload_type,
279 int, rtx_insn *, int *);
280 static void copy_replacements_1 (rtx *, rtx *, int);
281 static poly_int64 find_inc_amount (rtx, rtx);
282 static int refers_to_mem_for_reload_p (rtx);
283 static int refers_to_regno_for_reload_p (unsigned int, unsigned int,
284 rtx, rtx *);
286 /* Add MEM to reg_equiv_alt_mem_list[REGNO] if it is not present in the
287    list yet.  */
289 static void
290 push_reg_equiv_alt_mem (int regno, rtx mem)
292 rtx it;
294 for (it = reg_equiv_alt_mem_list (regno); it; it = XEXP (it, 1))
295 if (rtx_equal_p (XEXP (it, 0), mem))
296 return;
298 reg_equiv_alt_mem_list (regno)
299 = alloc_EXPR_LIST (REG_EQUIV, mem,
300 reg_equiv_alt_mem_list (regno));
303 /* Determine if any secondary reloads are needed for loading (if IN_P is
304 nonzero) or storing (if IN_P is zero) X to or from a reload register of
305 register class RELOAD_CLASS in mode RELOAD_MODE. If secondary reloads
306 are needed, push them.
308 Return the reload number of the secondary reload we made, or -1 if
309 we didn't need one. *PICODE is set to the insn_code to use if we do
310 need a secondary reload. */
312 static int
313 push_secondary_reload (int in_p, rtx x, int opnum, int optional,
314 enum reg_class reload_class,
315 machine_mode reload_mode, enum reload_type type,
316 enum insn_code *picode, secondary_reload_info *prev_sri)
318 enum reg_class rclass = NO_REGS;
319 enum reg_class scratch_class;
320 machine_mode mode = reload_mode;
321 enum insn_code icode = CODE_FOR_nothing;
322 enum insn_code t_icode = CODE_FOR_nothing;
323 enum reload_type secondary_type;
324 int s_reload, t_reload = -1;
325 const char *scratch_constraint;
326 secondary_reload_info sri;
328 if (type == RELOAD_FOR_INPUT_ADDRESS
329 || type == RELOAD_FOR_OUTPUT_ADDRESS
330 || type == RELOAD_FOR_INPADDR_ADDRESS
331 || type == RELOAD_FOR_OUTADDR_ADDRESS)
332 secondary_type = type;
333 else
334 secondary_type = in_p ? RELOAD_FOR_INPUT_ADDRESS : RELOAD_FOR_OUTPUT_ADDRESS;
336 *picode = CODE_FOR_nothing;
338 /* If X is a paradoxical SUBREG, use the inner value to determine both the
339 mode and object being reloaded. */
340 if (paradoxical_subreg_p (x))
342 x = SUBREG_REG (x);
343 reload_mode = GET_MODE (x);
346 /* If X is a pseudo-register that has an equivalent MEM (actually, if it
347 is still a pseudo-register by now, it *must* have an equivalent MEM
348 but we don't want to assume that), use that equivalent when seeing if
349 a secondary reload is needed since whether or not a reload is needed
350 might be sensitive to the form of the MEM. */
352 if (REG_P (x) && REGNO (x) >= FIRST_PSEUDO_REGISTER
353 && reg_equiv_mem (REGNO (x)))
354 x = reg_equiv_mem (REGNO (x));
356 sri.icode = CODE_FOR_nothing;
357 sri.prev_sri = prev_sri;
358 rclass = (enum reg_class) targetm.secondary_reload (in_p, x, reload_class,
359 reload_mode, &sri);
360 icode = (enum insn_code) sri.icode;
362 /* If we don't need any secondary registers, done. */
363 if (rclass == NO_REGS && icode == CODE_FOR_nothing)
364 return -1;
366 if (rclass != NO_REGS)
367 t_reload = push_secondary_reload (in_p, x, opnum, optional, rclass,
368 reload_mode, type, &t_icode, &sri);
370 /* If we will be using an insn, the secondary reload is for a
371 scratch register. */
373 if (icode != CODE_FOR_nothing)
375 /* If IN_P is nonzero, the reload register will be the output in
376 operand 0. If IN_P is zero, the reload register will be the input
377 in operand 1. Outputs should have an initial "=", which we must
378 skip. */
380 /* ??? It would be useful to be able to handle only two, or more than
381 three, operands, but for now we can only handle the case of having
382 exactly three: output, input and one temp/scratch. */
383 gcc_assert (insn_data[(int) icode].n_operands == 3);
385 /* ??? We currently have no way to represent a reload that needs
386 an icode to reload from an intermediate tertiary reload register.
387 We should probably have a new field in struct reload to tag a
388 chain of scratch operand reloads onto. */
389 gcc_assert (rclass == NO_REGS);
391 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
392 gcc_assert (*scratch_constraint == '=');
393 scratch_constraint++;
394 if (*scratch_constraint == '&')
395 scratch_constraint++;
396 scratch_class = (reg_class_for_constraint
397 (lookup_constraint (scratch_constraint)));
399 rclass = scratch_class;
400 mode = insn_data[(int) icode].operand[2].mode;
403 /* This case isn't valid, so fail. Reload is allowed to use the same
404 register for RELOAD_FOR_INPUT_ADDRESS and RELOAD_FOR_INPUT reloads, but
405 in the case of a secondary register, we actually need two different
406 registers for correct code. We fail here to prevent the possibility of
407 silently generating incorrect code later.
409 The convention is that secondary input reloads are valid only if the
410 secondary_class is different from class. If you have such a case, you
411 cannot use secondary reloads, you must work around the problem some
412 other way.
414 Allow this when a reload_in/out pattern is being used. I.e. assume
415 that the generated code handles this case. */
417 gcc_assert (!in_p || rclass != reload_class || icode != CODE_FOR_nothing
418 || t_icode != CODE_FOR_nothing);
420 /* See if we can reuse an existing secondary reload. */
421 for (s_reload = 0; s_reload < n_reloads; s_reload++)
422 if (rld[s_reload].secondary_p
423 && (reg_class_subset_p (rclass, rld[s_reload].rclass)
424 || reg_class_subset_p (rld[s_reload].rclass, rclass))
425 && ((in_p && rld[s_reload].inmode == mode)
426 || (! in_p && rld[s_reload].outmode == mode))
427 && ((in_p && rld[s_reload].secondary_in_reload == t_reload)
428 || (! in_p && rld[s_reload].secondary_out_reload == t_reload))
429 && ((in_p && rld[s_reload].secondary_in_icode == t_icode)
430 || (! in_p && rld[s_reload].secondary_out_icode == t_icode))
431 && (small_register_class_p (rclass)
432 || targetm.small_register_classes_for_mode_p (VOIDmode))
433 && MERGABLE_RELOADS (secondary_type, rld[s_reload].when_needed,
434 opnum, rld[s_reload].opnum))
436 if (in_p)
437 rld[s_reload].inmode = mode;
438 if (! in_p)
439 rld[s_reload].outmode = mode;
441 if (reg_class_subset_p (rclass, rld[s_reload].rclass))
442 rld[s_reload].rclass = rclass;
444 rld[s_reload].opnum = MIN (rld[s_reload].opnum, opnum);
445 rld[s_reload].optional &= optional;
446 rld[s_reload].secondary_p = 1;
447 if (MERGE_TO_OTHER (secondary_type, rld[s_reload].when_needed,
448 opnum, rld[s_reload].opnum))
449 rld[s_reload].when_needed = RELOAD_OTHER;
451 break;
454 if (s_reload == n_reloads)
456 /* If we need a memory location to copy between the two reload regs,
457 set it up now. Note that we do the input case before making
458 the reload and the output case after. This is due to the
459 way reloads are output. */
461 if (in_p && icode == CODE_FOR_nothing
462 && targetm.secondary_memory_needed (mode, rclass, reload_class))
464 get_secondary_mem (x, reload_mode, opnum, type);
466 /* We may have just added new reloads. Make sure we add
467 the new reload at the end. */
468 s_reload = n_reloads;
471 /* We need to make a new secondary reload for this register class. */
472 rld[s_reload].in = rld[s_reload].out = 0;
473 rld[s_reload].rclass = rclass;
475 rld[s_reload].inmode = in_p ? mode : VOIDmode;
476 rld[s_reload].outmode = ! in_p ? mode : VOIDmode;
477 rld[s_reload].reg_rtx = 0;
478 rld[s_reload].optional = optional;
479 rld[s_reload].inc = 0;
480 /* Maybe we could combine these, but it seems too tricky. */
481 rld[s_reload].nocombine = 1;
482 rld[s_reload].in_reg = 0;
483 rld[s_reload].out_reg = 0;
484 rld[s_reload].opnum = opnum;
485 rld[s_reload].when_needed = secondary_type;
486 rld[s_reload].secondary_in_reload = in_p ? t_reload : -1;
487 rld[s_reload].secondary_out_reload = ! in_p ? t_reload : -1;
488 rld[s_reload].secondary_in_icode = in_p ? t_icode : CODE_FOR_nothing;
489 rld[s_reload].secondary_out_icode
490 = ! in_p ? t_icode : CODE_FOR_nothing;
491 rld[s_reload].secondary_p = 1;
493 n_reloads++;
495 if (! in_p && icode == CODE_FOR_nothing
496 && targetm.secondary_memory_needed (mode, reload_class, rclass))
497 get_secondary_mem (x, mode, opnum, type);
500 *picode = icode;
501 return s_reload;
504 /* If a secondary reload is needed, return its class.  If both an intermediate
505    register and a scratch register are needed, we return the class of the
506    intermediate register.  */
507 reg_class_t
508 secondary_reload_class (bool in_p, reg_class_t rclass, machine_mode mode,
509 rtx x)
511 enum insn_code icode;
512 secondary_reload_info sri;
514 sri.icode = CODE_FOR_nothing;
515 sri.prev_sri = NULL;
516 rclass
517 = (enum reg_class) targetm.secondary_reload (in_p, x, rclass, mode, &sri);
518 icode = (enum insn_code) sri.icode;
520 /* If there are no secondary reloads at all, we return NO_REGS.
521 If an intermediate register is needed, we return its class. */
522 if (icode == CODE_FOR_nothing || rclass != NO_REGS)
523 return rclass;
525 /* No intermediate register is needed, but we have a special reload
526 pattern, which we assume for now needs a scratch register. */
527 return scratch_reload_class (icode);
530 /* ICODE is the insn_code of a reload pattern. Check that it has exactly
531 three operands, verify that operand 2 is an output operand, and return
532 its register class.
533 ??? We'd like to be able to handle any pattern with at least 2 operands,
534 for zero or more scratch registers, but that needs more infrastructure. */
535 enum reg_class
536 scratch_reload_class (enum insn_code icode)
538 const char *scratch_constraint;
539 enum reg_class rclass;
541 gcc_assert (insn_data[(int) icode].n_operands == 3);
542 scratch_constraint = insn_data[(int) icode].operand[2].constraint;
543 gcc_assert (*scratch_constraint == '=');
544 scratch_constraint++;
545 if (*scratch_constraint == '&')
546 scratch_constraint++;
547 rclass = reg_class_for_constraint (lookup_constraint (scratch_constraint));
548 gcc_assert (rclass != NO_REGS);
549 return rclass;
552 /* Return a memory location that will be used to copy X in mode MODE.
553 If we haven't already made a location for this mode in this insn,
554 call find_reloads_address on the location being returned. */
557 get_secondary_mem (rtx x ATTRIBUTE_UNUSED, machine_mode mode,
558 int opnum, enum reload_type type)
560 rtx loc;
561 int mem_valid;
563 /* By default, if MODE is narrower than a word, widen it to a word.
564 This is required because most machines that require these memory
565    locations do not support short loads and stores from all registers
566 (e.g., FP registers). */
568 mode = targetm.secondary_memory_needed_mode (mode);
570 /* If we already have made a MEM for this operand in MODE, return it. */
571 if (secondary_memlocs_elim[(int) mode][opnum] != 0)
572 return secondary_memlocs_elim[(int) mode][opnum];
574 /* If this is the first time we've tried to get a MEM for this mode,
575 allocate a new one. `something_changed' in reload will get set
576 by noticing that the frame size has changed. */
578 if (secondary_memlocs[(int) mode] == 0)
580 #ifdef SECONDARY_MEMORY_NEEDED_RTX
581 secondary_memlocs[(int) mode] = SECONDARY_MEMORY_NEEDED_RTX (mode);
582 #else
583 secondary_memlocs[(int) mode]
584 = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
585 #endif
588 /* Get a version of the address doing any eliminations needed. If that
589 didn't give us a new MEM, make a new one if it isn't valid. */
591 loc = eliminate_regs (secondary_memlocs[(int) mode], VOIDmode, NULL_RTX);
592 mem_valid = strict_memory_address_addr_space_p (mode, XEXP (loc, 0),
593 MEM_ADDR_SPACE (loc));
595 if (! mem_valid && loc == secondary_memlocs[(int) mode])
596 loc = copy_rtx (loc);
598 /* The only time the call below will do anything is if the stack
599 offset is too large. In that case IND_LEVELS doesn't matter, so we
600 can just pass a zero. Adjust the type to be the address of the
601 corresponding object. If the address was valid, save the eliminated
602 address. If it wasn't valid, we need to make a reload each time, so
603 don't save it. */
605 if (! mem_valid)
607 type = (type == RELOAD_FOR_INPUT ? RELOAD_FOR_INPUT_ADDRESS
608 : type == RELOAD_FOR_OUTPUT ? RELOAD_FOR_OUTPUT_ADDRESS
609 : RELOAD_OTHER);
611 find_reloads_address (mode, &loc, XEXP (loc, 0), &XEXP (loc, 0),
612 opnum, type, 0, 0);
615 secondary_memlocs_elim[(int) mode][opnum] = loc;
616 if (secondary_memlocs_elim_used <= (int)mode)
617 secondary_memlocs_elim_used = (int)mode + 1;
618 return loc;
621 /* Clear any secondary memory locations we've made. */
623 void
624 clear_secondary_mem (void)
626 memset (secondary_memlocs, 0, sizeof secondary_memlocs);
630 /* Find the largest class which has at least one register valid in
631 mode INNER, and which for every such register, that register number
632 plus N is also valid in OUTER (if in range) and is cheap to move
633 into REGNO. Such a class must exist. */
635 static enum reg_class
636 find_valid_class (machine_mode outer ATTRIBUTE_UNUSED,
637 machine_mode inner ATTRIBUTE_UNUSED, int n,
638 unsigned int dest_regno ATTRIBUTE_UNUSED)
640 int best_cost = -1;
641 int rclass;
642 int regno;
643 enum reg_class best_class = NO_REGS;
644 enum reg_class dest_class ATTRIBUTE_UNUSED = REGNO_REG_CLASS (dest_regno);
645 unsigned int best_size = 0;
646 int cost;
648 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
650 int bad = 0;
651 int good = 0;
652 for (regno = 0; regno < FIRST_PSEUDO_REGISTER - n && ! bad; regno++)
653 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno))
655 if (targetm.hard_regno_mode_ok (regno, inner))
657 good = 1;
658 if (TEST_HARD_REG_BIT (reg_class_contents[rclass], regno + n)
659 && !targetm.hard_regno_mode_ok (regno + n, outer))
660 bad = 1;
664 if (bad || !good)
665 continue;
666 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
668 if ((reg_class_size[rclass] > best_size
669 && (best_cost < 0 || best_cost >= cost))
670 || best_cost > cost)
672 best_class = (enum reg_class) rclass;
673 best_size = reg_class_size[rclass];
674 best_cost = register_move_cost (outer, (enum reg_class) rclass,
675 dest_class);
679 gcc_assert (best_size != 0);
681 return best_class;
684 /* We are trying to reload a subreg of something that is not a register.
685 Find the largest class which contains only registers valid in
686 mode MODE. OUTER is the mode of the subreg, DEST_CLASS the class in
687 which we would eventually like to obtain the object. */
689 static enum reg_class
690 find_valid_class_1 (machine_mode outer ATTRIBUTE_UNUSED,
691 machine_mode mode ATTRIBUTE_UNUSED,
692 enum reg_class dest_class ATTRIBUTE_UNUSED)
694 int best_cost = -1;
695 int rclass;
696 int regno;
697 enum reg_class best_class = NO_REGS;
698 unsigned int best_size = 0;
699 int cost;
701 for (rclass = 1; rclass < N_REG_CLASSES; rclass++)
703 unsigned int computed_rclass_size = 0;
705 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
707 if (in_hard_reg_set_p (reg_class_contents[rclass], mode, regno)
708 && targetm.hard_regno_mode_ok (regno, mode))
709 computed_rclass_size++;
712 cost = register_move_cost (outer, (enum reg_class) rclass, dest_class);
714 if ((computed_rclass_size > best_size
715 && (best_cost < 0 || best_cost >= cost))
716 || best_cost > cost)
718 best_class = (enum reg_class) rclass;
719 best_size = computed_rclass_size;
720 best_cost = register_move_cost (outer, (enum reg_class) rclass,
721 dest_class);
725 gcc_assert (best_size != 0);
727 #ifdef LIMIT_RELOAD_CLASS
728 best_class = LIMIT_RELOAD_CLASS (mode, best_class);
729 #endif
730 return best_class;
733 /* Return the number of a previously made reload that can be combined with
734 a new one, or n_reloads if none of the existing reloads can be used.
735 OUT, RCLASS, TYPE and OPNUM are the same arguments as passed to
736 push_reload, they determine the kind of the new reload that we try to
737 combine. P_IN points to the corresponding value of IN, which can be
738 modified by this function.
739 DONT_SHARE is nonzero if we can't share any input-only reload for IN. */
741 static int
742 find_reusable_reload (rtx *p_in, rtx out, enum reg_class rclass,
743 enum reload_type type, int opnum, int dont_share)
745 rtx in = *p_in;
746 int i;
747 /* We can't merge two reloads if the output of either one is
748 earlyclobbered. */
750 if (earlyclobber_operand_p (out))
751 return n_reloads;
753 /* We can use an existing reload if the class is right
754 and at least one of IN and OUT is a match
755 and the other is at worst neutral.
756 (A zero compared against anything is neutral.)
758 For targets with small register classes, don't use existing reloads
759 unless they are for the same thing since that can cause us to need
760 more reload registers than we otherwise would. */
762 for (i = 0; i < n_reloads; i++)
763 if ((reg_class_subset_p (rclass, rld[i].rclass)
764 || reg_class_subset_p (rld[i].rclass, rclass))
765 /* If the existing reload has a register, it must fit our class. */
766 && (rld[i].reg_rtx == 0
767 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
768 true_regnum (rld[i].reg_rtx)))
769 && ((in != 0 && MATCHES (rld[i].in, in) && ! dont_share
770 && (out == 0 || rld[i].out == 0 || MATCHES (rld[i].out, out)))
771 || (out != 0 && MATCHES (rld[i].out, out)
772 && (in == 0 || rld[i].in == 0 || MATCHES (rld[i].in, in))))
773 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
774 && (small_register_class_p (rclass)
775 || targetm.small_register_classes_for_mode_p (VOIDmode))
776 && MERGABLE_RELOADS (type, rld[i].when_needed, opnum, rld[i].opnum))
777 return i;
779 /* Reloading a plain reg for input can match a reload to postincrement
780 that reg, since the postincrement's value is the right value.
781 Likewise, it can match a preincrement reload, since we regard
782 the preincrementation as happening before any ref in this insn
783 to that register. */
784 for (i = 0; i < n_reloads; i++)
785 if ((reg_class_subset_p (rclass, rld[i].rclass)
786 || reg_class_subset_p (rld[i].rclass, rclass))
787 /* If the existing reload has a register, it must fit our
788 class. */
789 && (rld[i].reg_rtx == 0
790 || TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
791 true_regnum (rld[i].reg_rtx)))
792 && out == 0 && rld[i].out == 0 && rld[i].in != 0
793 && ((REG_P (in)
794 && GET_RTX_CLASS (GET_CODE (rld[i].in)) == RTX_AUTOINC
795 && MATCHES (XEXP (rld[i].in, 0), in))
796 || (REG_P (rld[i].in)
797 && GET_RTX_CLASS (GET_CODE (in)) == RTX_AUTOINC
798 && MATCHES (XEXP (in, 0), rld[i].in)))
799 && (rld[i].out == 0 || ! earlyclobber_operand_p (rld[i].out))
800 && (small_register_class_p (rclass)
801 || targetm.small_register_classes_for_mode_p (VOIDmode))
802 && MERGABLE_RELOADS (type, rld[i].when_needed,
803 opnum, rld[i].opnum))
805 /* Make sure reload_in ultimately has the increment,
806 not the plain register. */
807 if (REG_P (in))
808 *p_in = rld[i].in;
809 return i;
811 return n_reloads;
814 /* Return true if:
816 (a) (subreg:OUTER_MODE REG ...) represents a word or subword subreg
817 of a multiword value; and
819 (b) the number of *words* in REG does not match the number of *registers*
820 in REG. */
822 static bool
823 complex_word_subreg_p (machine_mode outer_mode, rtx reg)
825 machine_mode inner_mode = GET_MODE (reg);
826 poly_uint64 reg_words = REG_NREGS (reg) * UNITS_PER_WORD;
827 return (known_le (GET_MODE_SIZE (outer_mode), UNITS_PER_WORD)
828 && maybe_gt (GET_MODE_SIZE (inner_mode), UNITS_PER_WORD)
829 && !known_equal_after_align_up (GET_MODE_SIZE (inner_mode),
830 reg_words, UNITS_PER_WORD));
833 /* Return true if X is a SUBREG that will need reloading of its SUBREG_REG
834 expression. MODE is the mode that X will be used in. OUTPUT is true if
835 the function is invoked for the output part of an enclosing reload. */
837 static bool
838 reload_inner_reg_of_subreg (rtx x, machine_mode mode, bool output)
840 rtx inner;
842 /* Only SUBREGs are problematical. */
843 if (GET_CODE (x) != SUBREG)
844 return false;
846 inner = SUBREG_REG (x);
848 /* If INNER is a constant or PLUS, then INNER will need reloading. */
849 if (CONSTANT_P (inner) || GET_CODE (inner) == PLUS)
850 return true;
852 /* If INNER is not a hard register, then INNER will not need reloading. */
853 if (!(REG_P (inner) && HARD_REGISTER_P (inner)))
854 return false;
856 /* If INNER is not ok for MODE, then INNER will need reloading. */
857 if (!targetm.hard_regno_mode_ok (subreg_regno (x), mode))
858 return true;
860 /* If this is for an output, and the outer part is a word or smaller,
861 INNER is larger than a word and the number of registers in INNER is
862 not the same as the number of words in INNER, then INNER will need
863 reloading (with an in-out reload). */
864 return output && complex_word_subreg_p (mode, inner);
867 /* Return nonzero if IN can be reloaded into REGNO with mode MODE without
868 requiring an extra reload register. The caller has already found that
869 IN contains some reference to REGNO, so check that we can produce the
870 new value in a single step. E.g. if we have
871 (set (reg r13) (plus (reg r13) (const int 1))), and there is an
872 instruction that adds one to a register, this should succeed.
873 However, if we have something like
874 (set (reg r13) (plus (reg r13) (const int 999))), and the constant 999
875 needs to be loaded into a register first, we need a separate reload
876 register.
877    Such PLUS reloads are generated by find_reloads_address_part.
878 The out-of-range PLUS expressions are usually introduced in the instruction
879 patterns by register elimination and substituting pseudos without a home
880 by their function-invariant equivalences. */
881 static int
882 can_reload_into (rtx in, int regno, machine_mode mode)
884 rtx dst;
885 rtx_insn *test_insn;
886 int r = 0;
887 struct recog_data_d save_recog_data;
889 /* For matching constraints, we often get notional input reloads where
890 we want to use the original register as the reload register. I.e.
891 technically this is a non-optional input-output reload, but IN is
892 already a valid register, and has been chosen as the reload register.
893 Speed this up, since it trivially works. */
894 if (REG_P (in))
895 return 1;
897 /* To test MEMs properly, we'd have to take into account all the reloads
898 that are already scheduled, which can become quite complicated.
899 And since we've already handled address reloads for this MEM, it
900 should always succeed anyway. */
901 if (MEM_P (in))
902 return 1;
904 /* If we can make a simple SET insn that does the job, everything should
905 be fine. */
906 dst = gen_rtx_REG (mode, regno);
907 test_insn = make_insn_raw (gen_rtx_SET (dst, in));
908 save_recog_data = recog_data;
909 if (recog_memoized (test_insn) >= 0)
911 extract_insn (test_insn);
912 r = constrain_operands (1, get_enabled_alternatives (test_insn));
914 recog_data = save_recog_data;
915 return r;
918 /* Record one reload that needs to be performed.
919 IN is an rtx saying where the data are to be found before this instruction.
920 OUT says where they must be stored after the instruction.
921 (IN is zero for data not read, and OUT is zero for data not written.)
922 INLOC and OUTLOC point to the places in the instructions where
923 IN and OUT were found.
924 If IN and OUT are both nonzero, it means the same register must be used
925 to reload both IN and OUT.
927 RCLASS is a register class required for the reloaded data.
928 INMODE is the machine mode that the instruction requires
929 for the reg that replaces IN and OUTMODE is likewise for OUT.
931 If IN is zero, then OUT's location and mode should be passed as
932 INLOC and INMODE.
934    STRICT_LOW is 1 if there is a containing STRICT_LOW_PART rtx.
936 OPTIONAL nonzero means this reload does not need to be performed:
937 it can be discarded if that is more convenient.
939 OPNUM and TYPE say what the purpose of this reload is.
941 The return value is the reload-number for this reload.
943 If both IN and OUT are nonzero, in some rare cases we might
944 want to make two separate reloads. (Actually we never do this now.)
945 Therefore, the reload-number for OUT is stored in
946 output_reloadnum when we return; the return value applies to IN.
947 Usually (presently always), when IN and OUT are nonzero,
948 the two reload-numbers are equal, but the caller should be careful to
949 distinguish them. */
952 push_reload (rtx in, rtx out, rtx *inloc, rtx *outloc,
953 enum reg_class rclass, machine_mode inmode,
954 machine_mode outmode, int strict_low, int optional,
955 int opnum, enum reload_type type)
957 int i;
958 int dont_share = 0;
959 int dont_remove_subreg = 0;
960 #ifdef LIMIT_RELOAD_CLASS
961 rtx *in_subreg_loc = 0, *out_subreg_loc = 0;
962 #endif
963 int secondary_in_reload = -1, secondary_out_reload = -1;
964 enum insn_code secondary_in_icode = CODE_FOR_nothing;
965 enum insn_code secondary_out_icode = CODE_FOR_nothing;
966 enum reg_class subreg_in_class ATTRIBUTE_UNUSED;
967 subreg_in_class = NO_REGS;
969 /* INMODE and/or OUTMODE could be VOIDmode if no mode
970 has been specified for the operand. In that case,
971 use the operand's mode as the mode to reload. */
972 if (inmode == VOIDmode && in != 0)
973 inmode = GET_MODE (in);
974 if (outmode == VOIDmode && out != 0)
975 outmode = GET_MODE (out);
977   /* If find_reloads and friends have so far failed to replace a pseudo
978      with its reg_equiv_constant, something went wrong
979      beforehand.
980 Note that it can't simply be done here if we missed it earlier
981 since the constant might need to be pushed into the literal pool
982 and the resulting memref would probably need further
983 reloading. */
984 if (in != 0 && REG_P (in))
986 int regno = REGNO (in);
988 gcc_assert (regno < FIRST_PSEUDO_REGISTER
989 || reg_renumber[regno] >= 0
990 || reg_equiv_constant (regno) == NULL_RTX);
993 /* reg_equiv_constant only contains constants which are obviously
994 not appropriate as destination. So if we would need to replace
995 the destination pseudo with a constant we are in real
996 trouble. */
997 if (out != 0 && REG_P (out))
999 int regno = REGNO (out);
1001 gcc_assert (regno < FIRST_PSEUDO_REGISTER
1002 || reg_renumber[regno] >= 0
1003 || reg_equiv_constant (regno) == NULL_RTX);
1006 /* If we have a read-write operand with an address side-effect,
1007 change either IN or OUT so the side-effect happens only once. */
1008 if (in != 0 && out != 0 && MEM_P (in) && rtx_equal_p (in, out))
1009 switch (GET_CODE (XEXP (in, 0)))
1011 case POST_INC: case POST_DEC: case POST_MODIFY:
1012 in = replace_equiv_address_nv (in, XEXP (XEXP (in, 0), 0));
1013 break;
1015 case PRE_INC: case PRE_DEC: case PRE_MODIFY:
1016 out = replace_equiv_address_nv (out, XEXP (XEXP (out, 0), 0));
1017 break;
1019 default:
1020 break;
1023 /* If we are reloading a (SUBREG constant ...), really reload just the
1024 inside expression in its own mode. Similarly for (SUBREG (PLUS ...)).
1025 If we have (SUBREG:M1 (MEM:M2 ...) ...) (or an inner REG that is still
1026 a pseudo and hence will become a MEM) with M1 wider than M2 and the
1027 register is a pseudo, also reload the inside expression.
1028 For machines that extend byte loads, do this for any SUBREG of a pseudo
1029 where both M1 and M2 are a word or smaller, M1 is wider than M2, and
1030 M2 is an integral mode that gets extended when loaded.
1031 Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1032 where either M1 is not valid for R or M2 is wider than a word but we
1033 only need one register to store an M2-sized quantity in R.
1034 (However, if OUT is nonzero, we need to reload the reg *and*
1035 the subreg, so do nothing here, and let following statement handle it.)
1037 Note that the case of (SUBREG (CONST_INT...)...) is handled elsewhere;
1038 we can't handle it here because CONST_INT does not indicate a mode.
1040 Similarly, we must reload the inside expression if we have a
1041 STRICT_LOW_PART (presumably, in == out in this case).
1043 Also reload the inner expression if it does not require a secondary
1044 reload but the SUBREG does.
1046 Also reload the inner expression if it is a register that is in
1047 the class whose registers cannot be referenced in a different size
1048 and M1 is not the same size as M2. If subreg_lowpart_p is false, we
1049 cannot reload just the inside since we might end up with the wrong
1050 register class. But if it is inside a STRICT_LOW_PART, we have
1051 no choice, so we hope we do get the right register class there.
1053 Finally, reload the inner expression if it is a pseudo that will
1054 become a MEM and the MEM has a mode-dependent address, as in that
1055 case we obviously cannot change the mode of the MEM to that of the
1056 containing SUBREG as that would change the interpretation of the
1057 address. */
1059 scalar_int_mode inner_mode;
1060 if (in != 0 && GET_CODE (in) == SUBREG
1061 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (in)),
1062 inmode, rclass)
1063 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (in))]
1064 && (strict_low
1065 || (subreg_lowpart_p (in)
1066 && (CONSTANT_P (SUBREG_REG (in))
1067 || GET_CODE (SUBREG_REG (in)) == PLUS
1068 || (((REG_P (SUBREG_REG (in))
1069 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER)
1070 || MEM_P (SUBREG_REG (in)))
1071 && (paradoxical_subreg_p (inmode,
1072 GET_MODE (SUBREG_REG (in)))
1073 || (known_le (GET_MODE_SIZE (inmode), UNITS_PER_WORD)
1074 && is_a <scalar_int_mode> (GET_MODE (SUBREG_REG
1075 (in)),
1076 &inner_mode)
1077 && GET_MODE_SIZE (inner_mode) <= UNITS_PER_WORD
1078 && paradoxical_subreg_p (inmode, inner_mode)
1079 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)
1080 || (WORD_REGISTER_OPERATIONS
1081 && partial_subreg_p (inmode,
1082 GET_MODE (SUBREG_REG (in)))
1083 && (known_equal_after_align_down
1084 (GET_MODE_SIZE (inmode) - 1,
1085 GET_MODE_SIZE (GET_MODE (SUBREG_REG
1086 (in))) - 1,
1087 UNITS_PER_WORD)))))
1088 || (REG_P (SUBREG_REG (in))
1089 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1090 /* The case where out is nonzero
1091 is handled differently in the following statement. */
1092 && (out == 0 || subreg_lowpart_p (in))
1093 && (complex_word_subreg_p (inmode, SUBREG_REG (in))
1094 || !targetm.hard_regno_mode_ok (subreg_regno (in),
1095 inmode)))
1096 || (secondary_reload_class (1, rclass, inmode, in) != NO_REGS
1097 && (secondary_reload_class (1, rclass,
1098 GET_MODE (SUBREG_REG (in)),
1099 SUBREG_REG (in))
1100 == NO_REGS))
1101 || (REG_P (SUBREG_REG (in))
1102 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1103 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (in)),
1104 GET_MODE (SUBREG_REG (in)),
1105 inmode))))
1106 || (REG_P (SUBREG_REG (in))
1107 && REGNO (SUBREG_REG (in)) >= FIRST_PSEUDO_REGISTER
1108 && reg_equiv_mem (REGNO (SUBREG_REG (in)))
1109 && (mode_dependent_address_p
1110 (XEXP (reg_equiv_mem (REGNO (SUBREG_REG (in))), 0),
1111 MEM_ADDR_SPACE (reg_equiv_mem (REGNO (SUBREG_REG (in)))))))))
1113 #ifdef LIMIT_RELOAD_CLASS
1114 in_subreg_loc = inloc;
1115 #endif
1116 inloc = &SUBREG_REG (in);
1117 in = *inloc;
1119 if (!WORD_REGISTER_OPERATIONS
1120 && LOAD_EXTEND_OP (GET_MODE (in)) == UNKNOWN
1121 && MEM_P (in))
1122 /* This is supposed to happen only for paradoxical subregs made by
1123 combine.cc. (SUBREG (MEM)) isn't supposed to occur other ways. */
1124 gcc_assert (known_le (GET_MODE_SIZE (GET_MODE (in)),
1125 GET_MODE_SIZE (inmode)));
1127 inmode = GET_MODE (in);
1130 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1131 where M1 is not valid for R if it was not handled by the code above.
1133 Similar issue for (SUBREG constant ...) if it was not handled by the
1134 code above. This can happen if SUBREG_BYTE != 0.
1136 However, we must reload the inner reg *as well as* the subreg in
1137 that case. */
1139 if (in != 0 && reload_inner_reg_of_subreg (in, inmode, false))
1141 if (REG_P (SUBREG_REG (in)))
1142 subreg_in_class
1143 = find_valid_class (inmode, GET_MODE (SUBREG_REG (in)),
1144 subreg_regno_offset (REGNO (SUBREG_REG (in)),
1145 GET_MODE (SUBREG_REG (in)),
1146 SUBREG_BYTE (in),
1147 GET_MODE (in)),
1148 REGNO (SUBREG_REG (in)));
1149 else if (CONSTANT_P (SUBREG_REG (in))
1150 || GET_CODE (SUBREG_REG (in)) == PLUS)
1151 subreg_in_class = find_valid_class_1 (inmode,
1152 GET_MODE (SUBREG_REG (in)),
1153 rclass);
1155 /* This relies on the fact that emit_reload_insns outputs the
1156 instructions for input reloads of type RELOAD_OTHER in the same
1157 order as the reloads. Thus if the outer reload is also of type
1158 RELOAD_OTHER, we are guaranteed that this inner reload will be
1159 output before the outer reload. */
1160 push_reload (SUBREG_REG (in), NULL_RTX, &SUBREG_REG (in), (rtx *) 0,
1161 subreg_in_class, VOIDmode, VOIDmode, 0, 0, opnum, type);
1162 dont_remove_subreg = 1;
1165 /* Similarly for paradoxical and problematical SUBREGs on the output.
1166 Note that there is no reason we need worry about the previous value
1167 of SUBREG_REG (out); even if wider than out, storing in a subreg is
1168 entitled to clobber it all (except in the case of a word mode subreg
1169 or of a STRICT_LOW_PART, in that latter case the constraint should
1170 label it input-output.) */
1171 if (out != 0 && GET_CODE (out) == SUBREG
1172 && (subreg_lowpart_p (out) || strict_low)
1173 && targetm.can_change_mode_class (GET_MODE (SUBREG_REG (out)),
1174 outmode, rclass)
1175 && contains_allocatable_reg_of_mode[rclass][GET_MODE (SUBREG_REG (out))]
1176 && (CONSTANT_P (SUBREG_REG (out))
1177 || strict_low
1178 || (((REG_P (SUBREG_REG (out))
1179 && REGNO (SUBREG_REG (out)) >= FIRST_PSEUDO_REGISTER)
1180 || MEM_P (SUBREG_REG (out)))
1181 && (paradoxical_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1182 || (WORD_REGISTER_OPERATIONS
1183 && partial_subreg_p (outmode, GET_MODE (SUBREG_REG (out)))
1184 && (known_equal_after_align_down
1185 (GET_MODE_SIZE (outmode) - 1,
1186 GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))) - 1,
1187 UNITS_PER_WORD)))))
1188 || (REG_P (SUBREG_REG (out))
1189 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1190 /* The case of a word mode subreg
1191 is handled differently in the following statement. */
1192 && ! (known_le (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1193 && maybe_gt (GET_MODE_SIZE (GET_MODE (SUBREG_REG (out))),
1194 UNITS_PER_WORD))
1195 && !targetm.hard_regno_mode_ok (subreg_regno (out), outmode))
1196 || (secondary_reload_class (0, rclass, outmode, out) != NO_REGS
1197 && (secondary_reload_class (0, rclass, GET_MODE (SUBREG_REG (out)),
1198 SUBREG_REG (out))
1199 == NO_REGS))
1200 || (REG_P (SUBREG_REG (out))
1201 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1202 && !REG_CAN_CHANGE_MODE_P (REGNO (SUBREG_REG (out)),
1203 GET_MODE (SUBREG_REG (out)),
1204 outmode))))
1206 #ifdef LIMIT_RELOAD_CLASS
1207 out_subreg_loc = outloc;
1208 #endif
1209 outloc = &SUBREG_REG (out);
1210 out = *outloc;
1211 gcc_assert (WORD_REGISTER_OPERATIONS || !MEM_P (out)
1212 || known_le (GET_MODE_SIZE (GET_MODE (out)),
1213 GET_MODE_SIZE (outmode)));
1214 outmode = GET_MODE (out);
1217 /* Similar issue for (SUBREG:M1 (REG:M2 ...) ...) for a hard register R
1218 where either M1 is not valid for R or M2 is wider than a word but we
1219 only need one register to store an M2-sized quantity in R.
1221 However, we must reload the inner reg *as well as* the subreg in
1222 that case and the inner reg is an in-out reload. */
1224 if (out != 0 && reload_inner_reg_of_subreg (out, outmode, true))
1226 enum reg_class in_out_class
1227 = find_valid_class (outmode, GET_MODE (SUBREG_REG (out)),
1228 subreg_regno_offset (REGNO (SUBREG_REG (out)),
1229 GET_MODE (SUBREG_REG (out)),
1230 SUBREG_BYTE (out),
1231 GET_MODE (out)),
1232 REGNO (SUBREG_REG (out)));
1234 /* This relies on the fact that emit_reload_insns outputs the
1235 instructions for output reloads of type RELOAD_OTHER in reverse
1236 order of the reloads. Thus if the outer reload is also of type
1237 RELOAD_OTHER, we are guaranteed that this inner reload will be
1238 output after the outer reload. */
1239 push_reload (SUBREG_REG (out), SUBREG_REG (out), &SUBREG_REG (out),
1240 &SUBREG_REG (out), in_out_class, VOIDmode, VOIDmode,
1241 0, 0, opnum, RELOAD_OTHER);
1242 dont_remove_subreg = 1;
1245 /* If IN appears in OUT, we can't share any input-only reload for IN. */
1246 if (in != 0 && out != 0 && MEM_P (out)
1247 && (REG_P (in) || MEM_P (in) || GET_CODE (in) == PLUS)
1248 && reg_overlap_mentioned_for_reload_p (in, XEXP (out, 0)))
1249 dont_share = 1;
1251 /* If IN is a SUBREG of a hard register, make a new REG. This
1252 simplifies some of the cases below. */
1254 if (in != 0 && GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))
1255 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER
1256 && ! dont_remove_subreg)
1257 in = gen_rtx_REG (GET_MODE (in), subreg_regno (in));
1259 /* Similarly for OUT. */
1260 if (out != 0 && GET_CODE (out) == SUBREG
1261 && REG_P (SUBREG_REG (out))
1262 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER
1263 && ! dont_remove_subreg)
1264 out = gen_rtx_REG (GET_MODE (out), subreg_regno (out));
1266 /* Narrow down the class of register wanted if that is
1267 desirable on this machine for efficiency. */
1269 reg_class_t preferred_class = rclass;
1271 if (in != 0)
1272 preferred_class = targetm.preferred_reload_class (in, rclass);
1274 /* Output reloads may need analogous treatment, different in detail. */
1275 if (out != 0)
1276 preferred_class
1277 = targetm.preferred_output_reload_class (out, preferred_class);
1279 /* Discard what the target said if we cannot do it. */
1280 if (preferred_class != NO_REGS
1281 || (optional && type == RELOAD_FOR_OUTPUT))
1282 rclass = (enum reg_class) preferred_class;
1285 /* Make sure we use a class that can handle the actual pseudo
1286 inside any subreg. For example, on the 386, QImode regs
1287 can appear within SImode subregs. Although GENERAL_REGS
1288 can handle SImode, QImode needs a smaller class. */
1289 #ifdef LIMIT_RELOAD_CLASS
1290 if (in_subreg_loc)
1291 rclass = LIMIT_RELOAD_CLASS (inmode, rclass);
1292 else if (in != 0 && GET_CODE (in) == SUBREG)
1293 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (in)), rclass);
1295 if (out_subreg_loc)
1296 rclass = LIMIT_RELOAD_CLASS (outmode, rclass);
1297 if (out != 0 && GET_CODE (out) == SUBREG)
1298 rclass = LIMIT_RELOAD_CLASS (GET_MODE (SUBREG_REG (out)), rclass);
1299 #endif
1301 /* Verify that this class is at least possible for the mode that
1302 is specified. */
1303 if (this_insn_is_asm)
1305 machine_mode mode;
1306 if (paradoxical_subreg_p (inmode, outmode))
1307 mode = inmode;
1308 else
1309 mode = outmode;
1310 if (mode == VOIDmode)
1312 error_for_asm (this_insn, "cannot reload integer constant "
1313 "operand in %<asm%>");
1314 mode = word_mode;
1315 if (in != 0)
1316 inmode = word_mode;
1317 if (out != 0)
1318 outmode = word_mode;
1320 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
1321 if (targetm.hard_regno_mode_ok (i, mode)
1322 && in_hard_reg_set_p (reg_class_contents[(int) rclass], mode, i))
1323 break;
1324 if (i == FIRST_PSEUDO_REGISTER)
1326 error_for_asm (this_insn, "impossible register constraint "
1327 "in %<asm%>");
1328 /* Avoid further trouble with this insn. */
1329 PATTERN (this_insn) = gen_rtx_USE (VOIDmode, const0_rtx);
1330 /* We used to continue here setting class to ALL_REGS, but it triggers
1331 sanity check on i386 for:
1332        void foo(long double d)
1333        {
1334          asm("" :: "a" (d));
1335        }
1336 Returning zero here ought to be safe as we take care in
1337 find_reloads to not process the reloads when instruction was
1338 replaced by USE. */
1340 return 0;
1344 /* Optional output reloads are always OK even if we have no register class,
1345 since the function of these reloads is only to have spill_reg_store etc.
1346 set, so that the storing insn can be deleted later. */
1347 gcc_assert (rclass != NO_REGS
1348 || (optional != 0 && type == RELOAD_FOR_OUTPUT));
1350 i = find_reusable_reload (&in, out, rclass, type, opnum, dont_share);
1352 if (i == n_reloads)
1354       /* See if we need a secondary reload register to move between CLASS
1355 	 and IN or CLASS and OUT.  Get the icode and push any reloads
1356 	 needed for each of them if so.  */
1358 if (in != 0)
1359 secondary_in_reload
1360 = push_secondary_reload (1, in, opnum, optional, rclass, inmode, type,
1361 &secondary_in_icode, NULL);
1362 if (out != 0 && GET_CODE (out) != SCRATCH)
1363 secondary_out_reload
1364 = push_secondary_reload (0, out, opnum, optional, rclass, outmode,
1365 type, &secondary_out_icode, NULL);
1367 /* We found no existing reload suitable for re-use.
1368 So add an additional reload. */
1370 if (subreg_in_class == NO_REGS
1371 && in != 0
1372 && (REG_P (in)
1373 || (GET_CODE (in) == SUBREG && REG_P (SUBREG_REG (in))))
1374 && reg_or_subregno (in) < FIRST_PSEUDO_REGISTER)
1375 subreg_in_class = REGNO_REG_CLASS (reg_or_subregno (in));
1376 /* If a memory location is needed for the copy, make one. */
1377 if (subreg_in_class != NO_REGS
1378 && targetm.secondary_memory_needed (inmode, subreg_in_class, rclass))
1379 get_secondary_mem (in, inmode, opnum, type);
1381 i = n_reloads;
1382 rld[i].in = in;
1383 rld[i].out = out;
1384 rld[i].rclass = rclass;
1385 rld[i].inmode = inmode;
1386 rld[i].outmode = outmode;
1387 rld[i].reg_rtx = 0;
1388 rld[i].optional = optional;
1389 rld[i].inc = 0;
1390 rld[i].nocombine = 0;
1391 rld[i].in_reg = inloc ? *inloc : 0;
1392 rld[i].out_reg = outloc ? *outloc : 0;
1393 rld[i].opnum = opnum;
1394 rld[i].when_needed = type;
1395 rld[i].secondary_in_reload = secondary_in_reload;
1396 rld[i].secondary_out_reload = secondary_out_reload;
1397 rld[i].secondary_in_icode = secondary_in_icode;
1398 rld[i].secondary_out_icode = secondary_out_icode;
1399 rld[i].secondary_p = 0;
1401 n_reloads++;
1403 if (out != 0
1404 && (REG_P (out)
1405 || (GET_CODE (out) == SUBREG && REG_P (SUBREG_REG (out))))
1406 && reg_or_subregno (out) < FIRST_PSEUDO_REGISTER
1407 && (targetm.secondary_memory_needed
1408 (outmode, rclass, REGNO_REG_CLASS (reg_or_subregno (out)))))
1409 get_secondary_mem (out, outmode, opnum, type);
1411 else
1413 /* We are reusing an existing reload,
1414 but we may have additional information for it.
1415 For example, we may now have both IN and OUT
1416 while the old one may have just one of them. */
1418 /* The modes can be different. If they are, we want to reload in
1419 the larger mode, so that the value is valid for both modes. */
1420 if (inmode != VOIDmode
1421 && partial_subreg_p (rld[i].inmode, inmode))
1422 rld[i].inmode = inmode;
1423 if (outmode != VOIDmode
1424 && partial_subreg_p (rld[i].outmode, outmode))
1425 rld[i].outmode = outmode;
1426 if (in != 0)
1428 rtx in_reg = inloc ? *inloc : 0;
1429 /* If we merge reloads for two distinct rtl expressions that
1430 are identical in content, there might be duplicate address
1431 reloads. Remove the extra set now, so that if we later find
1432 that we can inherit this reload, we can get rid of the
1433 address reloads altogether.
1435 Do not do this if both reloads are optional since the result
1436 would be an optional reload which could potentially leave
1437 unresolved address replacements.
1439 It is not sufficient to call transfer_replacements since
1440 choose_reload_regs will remove the replacements for address
1441 reloads of inherited reloads which results in the same
1442 problem. */
1443 if (rld[i].in != in && rtx_equal_p (in, rld[i].in)
1444 && ! (rld[i].optional && optional))
1446 /* We must keep the address reload with the lower operand
1447 number alive. */
1448 if (opnum > rld[i].opnum)
1450 remove_address_replacements (in);
1451 in = rld[i].in;
1452 in_reg = rld[i].in_reg;
1454 else
1455 remove_address_replacements (rld[i].in);
1457 /* When emitting reloads we don't necessarily look at the in-
1458 and outmode, but also directly at the operands (in and out).
1459 So we can't simply overwrite them with whatever we have found
1460 for this (to-be-merged) reload, we have to "merge" that too.
1461 Reusing another reload already verified that we deal with the
1462 same operands, just possibly in different modes. So we
1463 overwrite the operands only when the new mode is larger.
1464 See also PR33613. */
1465 if (!rld[i].in
1466 || partial_subreg_p (GET_MODE (rld[i].in), GET_MODE (in)))
1467 rld[i].in = in;
1468 if (!rld[i].in_reg
1469 || (in_reg
1470 && partial_subreg_p (GET_MODE (rld[i].in_reg),
1471 GET_MODE (in_reg))))
1472 rld[i].in_reg = in_reg;
1474 if (out != 0)
1476 if (!rld[i].out
1477 || (out
1478 && partial_subreg_p (GET_MODE (rld[i].out),
1479 GET_MODE (out))))
1480 rld[i].out = out;
1481 if (outloc
1482 && (!rld[i].out_reg
1483 || partial_subreg_p (GET_MODE (rld[i].out_reg),
1484 GET_MODE (*outloc))))
1485 rld[i].out_reg = *outloc;
1487 if (reg_class_subset_p (rclass, rld[i].rclass))
1488 rld[i].rclass = rclass;
1489 rld[i].optional &= optional;
1490 if (MERGE_TO_OTHER (type, rld[i].when_needed,
1491 opnum, rld[i].opnum))
1492 rld[i].when_needed = RELOAD_OTHER;
1493 rld[i].opnum = MIN (rld[i].opnum, opnum);
1496 /* If the ostensible rtx being reloaded differs from the rtx found
1497 in the location to substitute, this reload is not safe to combine
1498 because we cannot reliably tell whether it appears in the insn. */
1500 if (in != 0 && in != *inloc)
1501 rld[i].nocombine = 1;
1503 /* If we will replace IN and OUT with the reload-reg,
1504 record where they are located so that substitution need
1505 not do a tree walk. */
1507 if (replace_reloads)
1509 if (inloc != 0)
1511 struct replacement *r = &replacements[n_replacements++];
1512 r->what = i;
1513 r->where = inloc;
1514 r->mode = inmode;
1516 if (outloc != 0 && outloc != inloc)
1518 struct replacement *r = &replacements[n_replacements++];
1519 r->what = i;
1520 r->where = outloc;
1521 r->mode = outmode;
1525 /* If this reload is just being introduced and it has both
1526 an incoming quantity and an outgoing quantity that are
1527 supposed to be made to match, see if either one of the two
1528 can serve as the place to reload into.
1530 If one of them is acceptable, set rld[i].reg_rtx
1531 to that one. */
1533 if (in != 0 && out != 0 && in != out && rld[i].reg_rtx == 0)
1535 rld[i].reg_rtx = find_dummy_reload (in, out, inloc, outloc,
1536 inmode, outmode,
1537 rld[i].rclass, i,
1538 earlyclobber_operand_p (out));
1540 /* If the outgoing register already contains the same value
1541 as the incoming one, we can dispense with loading it.
1542 The easiest way to tell the caller that is to give a phony
1543 value for the incoming operand (same as outgoing one). */
1544 if (rld[i].reg_rtx == out
1545 && (REG_P (in) || CONSTANT_P (in))
1546 && find_equiv_reg (in, this_insn, NO_REGS, REGNO (out),
1547 static_reload_reg_p, i, inmode) != 0)
1548 rld[i].in = out;
1551 /* If this is an input reload and the operand contains a register that
1552 dies in this insn and is used nowhere else, see if it is the right class
1553 to be used for this reload. Use it if so. (This occurs most commonly
1554 in the case of paradoxical SUBREGs and in-out reloads). We cannot do
1555 this if it is also an output reload that mentions the register unless
1556 the output is a SUBREG that clobbers an entire register.
1558 Note that the operand might be one of the spill regs, if it is a
1559 pseudo reg and we are in a block where spilling has not taken place.
1560 But if there is no spilling in this block, that is OK.
1561 An explicitly used hard reg cannot be a spill reg. */
1563 if (rld[i].reg_rtx == 0 && in != 0 && hard_regs_live_known)
1565 rtx note;
1566 int regno;
1567 machine_mode rel_mode = inmode;
1569 if (out && partial_subreg_p (rel_mode, outmode))
1570 rel_mode = outmode;
1572 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1573 if (REG_NOTE_KIND (note) == REG_DEAD
1574 && REG_P (XEXP (note, 0))
1575 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1576 && reg_mentioned_p (XEXP (note, 0), in)
1577 /* Check that a former pseudo is valid; see find_dummy_reload. */
1578 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1579 || (! bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1580 ORIGINAL_REGNO (XEXP (note, 0)))
1581 && REG_NREGS (XEXP (note, 0)) == 1))
1582 && ! refers_to_regno_for_reload_p (regno,
1583 end_hard_regno (rel_mode,
1584 regno),
1585 PATTERN (this_insn), inloc)
1586 && ! find_reg_fusage (this_insn, USE, XEXP (note, 0))
1587 /* If this is also an output reload, IN cannot be used as
1588 the reload register if it is set in this insn unless IN
1589 is also OUT. */
1590 && (out == 0 || in == out
1591 || ! hard_reg_set_here_p (regno,
1592 end_hard_regno (rel_mode, regno),
1593 PATTERN (this_insn)))
1594 /* ??? Why is this code so different from the previous?
1595 Is there any simple coherent way to describe the two together?
1596 What's going on here? */
1597 && (in != out
1598 || (GET_CODE (in) == SUBREG
1599 && (known_equal_after_align_up
1600 (GET_MODE_SIZE (GET_MODE (in)),
1601 GET_MODE_SIZE (GET_MODE (SUBREG_REG (in))),
1602 UNITS_PER_WORD))))
1603 /* Make sure the operand fits in the reg that dies. */
1604 && known_le (GET_MODE_SIZE (rel_mode),
1605 GET_MODE_SIZE (GET_MODE (XEXP (note, 0))))
1606 && targetm.hard_regno_mode_ok (regno, inmode)
1607 && targetm.hard_regno_mode_ok (regno, outmode))
1609 unsigned int offs;
1610 unsigned int nregs = MAX (hard_regno_nregs (regno, inmode),
1611 hard_regno_nregs (regno, outmode));
1613 for (offs = 0; offs < nregs; offs++)
1614 if (fixed_regs[regno + offs]
1615 || ! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
1616 regno + offs))
1617 break;
1619 if (offs == nregs
1620 && (! (refers_to_regno_for_reload_p
1621 (regno, end_hard_regno (inmode, regno), in, (rtx *) 0))
1622 || can_reload_into (in, regno, inmode)))
1624 rld[i].reg_rtx = gen_rtx_REG (rel_mode, regno);
1625 break;
1630 if (out)
1631 output_reloadnum = i;
1633 return i;
1636 /* Record an additional place we must replace a value
1637 for which we have already recorded a reload.
1638 RELOADNUM is the value returned by push_reload
1639 when the reload was recorded.
1640 This is used in insn patterns that use match_dup. */
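/* For example (the pattern here is made up for illustration): if an insn
   pattern contains (match_operand:SI 1 "register_operand" "r") and later
   a (match_dup 1), both locations must end up naming the same reload
   register.  push_reload records the first location; the duplicate is
   recorded under the same reload number with push_replacement (or with
   dup_replacements below), so that subst_reloads rewrites both.  */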
1642 static void
1643 push_replacement (rtx *loc, int reloadnum, machine_mode mode)
1645 if (replace_reloads)
1647 struct replacement *r = &replacements[n_replacements++];
1648 r->what = reloadnum;
1649 r->where = loc;
1650 r->mode = mode;
1654 /* Duplicate any replacement we have recorded to apply at
1655 location ORIG_LOC to also be performed at DUP_LOC.
1656 This is used in insn patterns that use match_dup. */
1658 static void
1659 dup_replacements (rtx *dup_loc, rtx *orig_loc)
1661 int i, n = n_replacements;
1663 for (i = 0; i < n; i++)
1665 struct replacement *r = &replacements[i];
1666 if (r->where == orig_loc)
1667 push_replacement (dup_loc, r->what, r->mode);
1671 /* Transfer all replacements that used to be in reload FROM to be in
1672 reload TO. */
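/* A usage sketch (the reload numbers are hypothetical): if the reloads
   numbered 2 and 5 turn out to describe the same value and reload 2 is
   dropped in favor of reload 5, transfer_replacements (5, 2) re-points
   the recorded replacements at reload 5; the recorded locations
   themselves are left untouched.  */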
1674 void
1675 transfer_replacements (int to, int from)
1677 int i;
1679 for (i = 0; i < n_replacements; i++)
1680 if (replacements[i].what == from)
1681 replacements[i].what = to;
1684 /* IN_RTX is the value loaded by a reload that we now decided to inherit,
1685 or a subpart of it. If we have any replacements registered for IN_RTX,
1686 cancel the reloads that were supposed to load them.
1687 Return nonzero if we canceled any reloads. */
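/* An illustrative case (register numbers are made up): suppose IN_RTX is
   (mem:SI (plus:SI (reg:SI 65) (const_int 4))) and a separate reload was
   pushed only to reload the address register (reg:SI 65).  Once the load
   of IN_RTX is inherited, that address reload serves no purpose; this
   routine drops its replacements, frees its reload register and clears
   its IN so that it emits nothing.  */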
1689 remove_address_replacements (rtx in_rtx)
1691 int i, j;
1692 char reload_flags[MAX_RELOADS];
1693 int something_changed = 0;
1695 memset (reload_flags, 0, sizeof reload_flags);
1696 for (i = 0, j = 0; i < n_replacements; i++)
1698 if (loc_mentioned_in_p (replacements[i].where, in_rtx))
1699 reload_flags[replacements[i].what] |= 1;
1700 else
1702 replacements[j++] = replacements[i];
1703 reload_flags[replacements[i].what] |= 2;
1706 /* Note that the following store must be done before the recursive calls. */
1707 n_replacements = j;
1709 for (i = n_reloads - 1; i >= 0; i--)
1711 if (reload_flags[i] == 1)
1713 deallocate_reload_reg (i);
1714 remove_address_replacements (rld[i].in);
1715 rld[i].in = 0;
1716 something_changed = 1;
1719 return something_changed;
1722 /* If there is only one output reload, and it is not for an earlyclobber
1723 operand, try to combine it with a (logically unrelated) input reload
1724 to reduce the number of reload registers needed.
1726 This is safe if the input reload does not appear in
1727 the value being output-reloaded, because this implies
1728 it is not needed any more once the original insn completes.
1730 If that doesn't work, see if we can use any of the registers that
1731 die in this insn as a reload register. We can if it is of the right
1732 class and does not appear in the value being output-reloaded. */
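/* A sketch of the idea (the pseudo register numbers are invented): in
   (set (reg:SI 65) (plus:SI (reg:SI 66) (const_int 1))), if neither
   pseudo received a hard register, the input reload of (reg:SI 66) and
   the output reload of (reg:SI 65) can share one reload register: the
   input value is dead once the insn has executed, and (reg:SI 66) does
   not appear in the value being output-reloaded.  */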
1734 static void
1735 combine_reloads (void)
1737 int i, regno;
1738 int output_reload = -1;
1739 int secondary_out = -1;
1740 rtx note;
1742 /* Find the output reload; return unless there is exactly one
1743 and that one is mandatory. */
1745 for (i = 0; i < n_reloads; i++)
1746 if (rld[i].out != 0)
1748 if (output_reload >= 0)
1749 return;
1750 output_reload = i;
1753 if (output_reload < 0 || rld[output_reload].optional)
1754 return;
1756 /* An input-output reload isn't combinable. */
1758 if (rld[output_reload].in != 0)
1759 return;
1761 /* If this reload is for an earlyclobber operand, we can't do anything. */
1762 if (earlyclobber_operand_p (rld[output_reload].out))
1763 return;
1765 /* If there is a reload for part of the address of this operand, we would
1766 need to change it to RELOAD_FOR_OTHER_ADDRESS. But that would extend
1767 its life to the point where doing this combine would not lower the
1768 number of spill registers needed. */
1769 for (i = 0; i < n_reloads; i++)
1770 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
1771 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
1772 && rld[i].opnum == rld[output_reload].opnum)
1773 return;
1775 /* Check each input reload; can we combine it? */
1777 for (i = 0; i < n_reloads; i++)
1778 if (rld[i].in && ! rld[i].optional && ! rld[i].nocombine
1779 /* Life span of this reload must not extend past main insn. */
1780 && rld[i].when_needed != RELOAD_FOR_OUTPUT_ADDRESS
1781 && rld[i].when_needed != RELOAD_FOR_OUTADDR_ADDRESS
1782 && rld[i].when_needed != RELOAD_OTHER
1783 && (ira_reg_class_max_nregs [(int)rld[i].rclass][(int) rld[i].inmode]
1784 == ira_reg_class_max_nregs [(int) rld[output_reload].rclass]
1785 [(int) rld[output_reload].outmode])
1786 && known_eq (rld[i].inc, 0)
1787 && rld[i].reg_rtx == 0
1788 /* Don't combine two reloads with different secondary
1789 memory locations. */
1790 && (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum] == 0
1791 || secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] == 0
1792 || rtx_equal_p (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum],
1793 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum]))
1794 && (targetm.small_register_classes_for_mode_p (VOIDmode)
1795 ? (rld[i].rclass == rld[output_reload].rclass)
1796 : (reg_class_subset_p (rld[i].rclass,
1797 rld[output_reload].rclass)
1798 || reg_class_subset_p (rld[output_reload].rclass,
1799 rld[i].rclass)))
1800 && (MATCHES (rld[i].in, rld[output_reload].out)
1801 /* Args reversed because the first arg seems to be
1802 the one that we imagine being modified
1803 while the second is the one that might be affected. */
1804 || (! reg_overlap_mentioned_for_reload_p (rld[output_reload].out,
1805 rld[i].in)
1806 /* However, if the input is a register that appears inside
1807 the output, then we also can't share.
1808 Imagine (set (mem (reg 69)) (plus (reg 69) ...)).
1809 If the same reload reg is used for both reg 69 and the
1810 result to be stored in memory, then that result
1811 will clobber the address of the memory ref. */
1812 && ! (REG_P (rld[i].in)
1813 && reg_overlap_mentioned_for_reload_p (rld[i].in,
1814 rld[output_reload].out))))
1815 && ! reload_inner_reg_of_subreg (rld[i].in, rld[i].inmode,
1816 rld[i].when_needed != RELOAD_FOR_INPUT)
1817 && (reg_class_size[(int) rld[i].rclass]
1818 || targetm.small_register_classes_for_mode_p (VOIDmode))
1819 /* We will allow making things slightly worse by combining an
1820 input and an output, but no worse than that. */
1821 && (rld[i].when_needed == RELOAD_FOR_INPUT
1822 || rld[i].when_needed == RELOAD_FOR_OUTPUT))
1824 int j;
1826 /* We have found a reload to combine with! */
1827 rld[i].out = rld[output_reload].out;
1828 rld[i].out_reg = rld[output_reload].out_reg;
1829 rld[i].outmode = rld[output_reload].outmode;
1830 /* Mark the old output reload as inoperative. */
1831 rld[output_reload].out = 0;
1832 /* The combined reload is needed for the entire insn. */
1833 rld[i].when_needed = RELOAD_OTHER;
1834 /* If the output reload had a secondary reload, copy it. */
1835 if (rld[output_reload].secondary_out_reload != -1)
1837 rld[i].secondary_out_reload
1838 = rld[output_reload].secondary_out_reload;
1839 rld[i].secondary_out_icode
1840 = rld[output_reload].secondary_out_icode;
1843 /* Copy any secondary MEM. */
1844 if (secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum] != 0)
1845 secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[i].opnum]
1846 = secondary_memlocs_elim[(int) rld[output_reload].outmode][rld[output_reload].opnum];
1847 /* If required, minimize the register class. */
1848 if (reg_class_subset_p (rld[output_reload].rclass,
1849 rld[i].rclass))
1850 rld[i].rclass = rld[output_reload].rclass;
1852 /* Transfer all replacements from the old reload to the combined. */
1853 for (j = 0; j < n_replacements; j++)
1854 if (replacements[j].what == output_reload)
1855 replacements[j].what = i;
1857 return;
1860 /* If this insn has only one operand that is modified or written (assumed
1861 to be the first), it must be the one corresponding to this reload. It
1862 is safe to use anything that dies in this insn for that output provided
1863 that it does not occur in the output (we already know it isn't an
1864 earlyclobber). If this is an asm insn, give up. */
1866 if (INSN_CODE (this_insn) == -1)
1867 return;
1869 for (i = 1; i < insn_data[INSN_CODE (this_insn)].n_operands; i++)
1870 if (insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '='
1871 || insn_data[INSN_CODE (this_insn)].operand[i].constraint[0] == '+')
1872 return;
1874 /* See if some hard register that dies in this insn and is not used in
1875 the output is the right class. Only works if the register we pick
1876 up can fully hold our output reload. */
1877 for (note = REG_NOTES (this_insn); note; note = XEXP (note, 1))
1878 if (REG_NOTE_KIND (note) == REG_DEAD
1879 && REG_P (XEXP (note, 0))
1880 && !reg_overlap_mentioned_for_reload_p (XEXP (note, 0),
1881 rld[output_reload].out)
1882 && (regno = REGNO (XEXP (note, 0))) < FIRST_PSEUDO_REGISTER
1883 && targetm.hard_regno_mode_ok (regno, rld[output_reload].outmode)
1884 && TEST_HARD_REG_BIT (reg_class_contents[(int) rld[output_reload].rclass],
1885 regno)
1886 && (hard_regno_nregs (regno, rld[output_reload].outmode)
1887 <= REG_NREGS (XEXP (note, 0)))
1888 /* Ensure that a secondary or tertiary reload for this output
1889 won't want this register. */
1890 && ((secondary_out = rld[output_reload].secondary_out_reload) == -1
1891 || (!(TEST_HARD_REG_BIT
1892 (reg_class_contents[(int) rld[secondary_out].rclass], regno))
1893 && ((secondary_out = rld[secondary_out].secondary_out_reload) == -1
1894 || !(TEST_HARD_REG_BIT
1895 (reg_class_contents[(int) rld[secondary_out].rclass],
1896 regno)))))
1897 && !fixed_regs[regno]
1898 /* Check that a former pseudo is valid; see find_dummy_reload. */
1899 && (ORIGINAL_REGNO (XEXP (note, 0)) < FIRST_PSEUDO_REGISTER
1900 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
1901 ORIGINAL_REGNO (XEXP (note, 0)))
1902 && REG_NREGS (XEXP (note, 0)) == 1)))
1904 rld[output_reload].reg_rtx
1905 = gen_rtx_REG (rld[output_reload].outmode, regno);
1906 return;
1910 /* Try to find a reload register for an in-out reload (expressions IN and OUT).
1911 See if one of IN and OUT is a register that may be used;
1912 this is desirable since a spill-register won't be needed.
1913 If so, return the register rtx that proves acceptable.
1915 INLOC and OUTLOC are locations where IN and OUT appear in the insn.
1916 RCLASS is the register class required for the reload.
1918 If FOR_REAL is >= 0, it is the number of the reload,
1919 and in some cases when it can be discovered that OUT doesn't need
1920 to be computed, clear out rld[FOR_REAL].out.
1922 If FOR_REAL is -1, this should not be done, because this call
1923 is just to see if a register can be found, not to find and install it.
1925 EARLYCLOBBER is nonzero if OUT is an earlyclobber operand. This
1926 puts an additional constraint on being able to use IN for OUT since
1927 IN must not appear elsewhere in the insn (it is assumed that IN itself
1928 is safe from the earlyclobber). */
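/* For instance (the register number is only illustrative): if OUT is hard
   register 2, register 2 belongs to RCLASS, is not fixed, and is not
   referenced elsewhere in the insn, then IN can be reloaded straight
   into register 2 itself and no separate spill register is needed.  */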
1930 static rtx
1931 find_dummy_reload (rtx real_in, rtx real_out, rtx *inloc, rtx *outloc,
1932 machine_mode inmode, machine_mode outmode,
1933 reg_class_t rclass, int for_real, int earlyclobber)
1935 rtx in = real_in;
1936 rtx out = real_out;
1937 int in_offset = 0;
1938 int out_offset = 0;
1939 rtx value = 0;
1941 /* If operands exceed a word, we can't use either of them
1942 unless they have the same size. */
1943 if (maybe_ne (GET_MODE_SIZE (outmode), GET_MODE_SIZE (inmode))
1944 && (maybe_gt (GET_MODE_SIZE (outmode), UNITS_PER_WORD)
1945 || maybe_gt (GET_MODE_SIZE (inmode), UNITS_PER_WORD)))
1946 return 0;
1948 /* Note that {in,out}_offset are needed only when 'in' or 'out'
1949 respectively refers to a hard register. */
1951 /* Find the inside of any subregs. */
1952 while (GET_CODE (out) == SUBREG)
1954 if (REG_P (SUBREG_REG (out))
1955 && REGNO (SUBREG_REG (out)) < FIRST_PSEUDO_REGISTER)
1956 out_offset += subreg_regno_offset (REGNO (SUBREG_REG (out)),
1957 GET_MODE (SUBREG_REG (out)),
1958 SUBREG_BYTE (out),
1959 GET_MODE (out));
1960 out = SUBREG_REG (out);
1962 while (GET_CODE (in) == SUBREG)
1964 if (REG_P (SUBREG_REG (in))
1965 && REGNO (SUBREG_REG (in)) < FIRST_PSEUDO_REGISTER)
1966 in_offset += subreg_regno_offset (REGNO (SUBREG_REG (in)),
1967 GET_MODE (SUBREG_REG (in)),
1968 SUBREG_BYTE (in),
1969 GET_MODE (in));
1970 in = SUBREG_REG (in);
1973 /* Narrow down the reg class, the same way push_reload will;
1974 otherwise we might find a dummy now, but push_reload won't. */
1976 reg_class_t preferred_class = targetm.preferred_reload_class (in, rclass);
1977 if (preferred_class != NO_REGS)
1978 rclass = (enum reg_class) preferred_class;
1981 /* See if OUT will do. */
1982 if (REG_P (out)
1983 && REGNO (out) < FIRST_PSEUDO_REGISTER)
1985 unsigned int regno = REGNO (out) + out_offset;
1986 unsigned int nwords = hard_regno_nregs (regno, outmode);
1987 rtx saved_rtx;
1989 /* When we consider whether the insn uses OUT,
1990 ignore references within IN. They don't prevent us
1991 from copying IN into OUT, because those refs would
1992 move into the insn that reloads IN.
1994 However, we only ignore IN in its role as this reload.
1995 If the insn uses IN elsewhere and it contains OUT,
1996 that counts. We can't be sure it's the "same" operand
1997 so it might not go through this reload.
1999 We also need to avoid using OUT if it, or part of it, is a
2000 fixed register. Modifying such registers, even transiently,
2001 may have undefined effects on the machine, such as modifying
2002 the stack pointer. */
2003 saved_rtx = *inloc;
2004 *inloc = const0_rtx;
2006 if (regno < FIRST_PSEUDO_REGISTER
2007 && targetm.hard_regno_mode_ok (regno, outmode)
2008 && ! refers_to_regno_for_reload_p (regno, regno + nwords,
2009 PATTERN (this_insn), outloc))
2011 unsigned int i;
2013 for (i = 0; i < nwords; i++)
2014 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2015 regno + i)
2016 || fixed_regs[regno + i])
2017 break;
2019 if (i == nwords)
2021 if (REG_P (real_out))
2022 value = real_out;
2023 else
2024 value = gen_rtx_REG (outmode, regno);
2028 *inloc = saved_rtx;
2031 /* Consider using IN if OUT was not acceptable
2032 or if OUT dies in this insn (like the quotient in a divmod insn).
2033 We can't use IN unless it dies in this insn,
2034 which means we must know accurately which hard regs are live.
2035 Also, the result can't go in IN if IN is used within OUT,
2036 or if OUT is an earlyclobber and IN appears elsewhere in the insn. */
2037 if (hard_regs_live_known
2038 && REG_P (in)
2039 && REGNO (in) < FIRST_PSEUDO_REGISTER
2040 && (value == 0
2041 || find_reg_note (this_insn, REG_UNUSED, real_out))
2042 && find_reg_note (this_insn, REG_DEAD, real_in)
2043 && !fixed_regs[REGNO (in)]
2044 && targetm.hard_regno_mode_ok (REGNO (in),
2045 /* The only case where out and real_out
2046 might have different modes is where
2047 real_out is a subreg, and in that
2048 case, out has a real mode. */
2049 (GET_MODE (out) != VOIDmode
2050 ? GET_MODE (out) : outmode))
2051 && (ORIGINAL_REGNO (in) < FIRST_PSEUDO_REGISTER
2052 /* However only do this if we can be sure that this input
2053 operand doesn't correspond with an uninitialized pseudo.
2054 global can assign some hardreg to it that is the same as
2055 the one assigned to a different, also live pseudo (as it
2056 can ignore the conflict). We must never introduce writes
2057 to such hardregs, as they would clobber the other live
2058 pseudo. See PR 20973. */
2059 || (!bitmap_bit_p (DF_LR_OUT (ENTRY_BLOCK_PTR_FOR_FN (cfun)),
2060 ORIGINAL_REGNO (in))
2061 /* Similarly, only do this if we can be sure that the death
2062 note is still valid. global can assign some hardreg to
2063 the pseudo referenced in the note and simultaneously a
2064 subword of this hardreg to a different, also live pseudo,
2065 because only another subword of the hardreg is actually
2066 used in the insn. This cannot happen if the pseudo has
2067 been assigned exactly one hardreg. See PR 33732. */
2068 && REG_NREGS (in) == 1)))
2070 unsigned int regno = REGNO (in) + in_offset;
2071 unsigned int nwords = hard_regno_nregs (regno, inmode);
2073 if (! refers_to_regno_for_reload_p (regno, regno + nwords, out, (rtx*) 0)
2074 && ! hard_reg_set_here_p (regno, regno + nwords,
2075 PATTERN (this_insn))
2076 && (! earlyclobber
2077 || ! refers_to_regno_for_reload_p (regno, regno + nwords,
2078 PATTERN (this_insn), inloc)))
2080 unsigned int i;
2082 for (i = 0; i < nwords; i++)
2083 if (! TEST_HARD_REG_BIT (reg_class_contents[(int) rclass],
2084 regno + i))
2085 break;
2087 if (i == nwords)
2089 /* If we were going to use OUT as the reload reg
2090 and changed our mind, it means OUT is a dummy that
2091 dies here. So don't bother copying value to it. */
2092 if (for_real >= 0 && value == real_out)
2093 rld[for_real].out = 0;
2094 if (REG_P (real_in))
2095 value = real_in;
2096 else
2097 value = gen_rtx_REG (inmode, regno);
2102 return value;
2105 /* This page contains subroutines used mainly for determining
2106 whether the IN or an OUT of a reload can serve as the
2107 reload register. */
2109 /* Return 1 if X is an operand of an insn that is being earlyclobbered. */
2112 earlyclobber_operand_p (rtx x)
2114 int i;
2116 for (i = 0; i < n_earlyclobbers; i++)
2117 if (reload_earlyclobbers[i] == x)
2118 return 1;
2120 return 0;
2123 /* Return 1 if expression X alters a hard reg in the range
2124 from BEG_REGNO (inclusive) to END_REGNO (exclusive),
2125 either explicitly or in the guise of a pseudo-reg allocated to REGNO.
2126 X should be the body of an instruction. */
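/* For example, with X = (set (reg:SI 3) (const_int 0)) and assuming
   SImode occupies a single hard register, hard_reg_set_here_p (3, 4, X)
   returns 1 while hard_reg_set_here_p (4, 6, X) returns 0, since only
   hard register 3 is altered.  (The register numbers are invented.)  */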
2128 static int
2129 hard_reg_set_here_p (unsigned int beg_regno, unsigned int end_regno, rtx x)
2131 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
2133 rtx op0 = SET_DEST (x);
2135 while (GET_CODE (op0) == SUBREG)
2136 op0 = SUBREG_REG (op0);
2137 if (REG_P (op0))
2139 unsigned int r = REGNO (op0);
2141 /* See if this reg overlaps range under consideration. */
2142 if (r < end_regno
2143 && end_hard_regno (GET_MODE (op0), r) > beg_regno)
2144 return 1;
2147 else if (GET_CODE (x) == PARALLEL)
2149 int i = XVECLEN (x, 0) - 1;
2151 for (; i >= 0; i--)
2152 if (hard_reg_set_here_p (beg_regno, end_regno, XVECEXP (x, 0, i)))
2153 return 1;
2156 return 0;
2159 /* Return true if ADDR is a valid memory address for mode MODE
2160 in address space AS, and check that each pseudo reg has the
2161 proper kind of hard reg. */
2163 bool
2164 strict_memory_address_addr_space_p (machine_mode mode ATTRIBUTE_UNUSED,
2165 rtx addr, addr_space_t as, code_helper)
2167 #ifdef GO_IF_LEGITIMATE_ADDRESS
2168 gcc_assert (ADDR_SPACE_GENERIC_P (as));
2169 GO_IF_LEGITIMATE_ADDRESS (mode, addr, win);
2170 return false;
2172 win:
2173 return true;
2174 #else
2175 return targetm.addr_space.legitimate_address_p (mode, addr, 1, as,
2176 ERROR_MARK);
2177 #endif
2180 /* Like rtx_equal_p except that it allows a REG and a SUBREG to match
2181 if they are the same hard reg, and has special hacks for
2182 autoincrement and autodecrement.
2183 This is specifically intended for find_reloads to use
2184 in determining whether two operands match.
2185 X is the operand whose number is the lower of the two.
2187 The value is 2 if Y contains a pre-increment that matches
2188 a non-incrementing address in X. */
2190 /* ??? To be completely correct, we should arrange to pass
2191 for X the output operand and for Y the input operand.
2192 For now, we assume that the output operand has the lower number
2193 because that is natural in (SET output (... input ...)). */
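/* Two small examples of the conventions above (register numbers are
   invented): (reg:SI 3) and (subreg:SI (reg:DI 3) 0) match when both
   resolve to hard register 3; and comparing X = (mem:SI (reg:SI 5))
   with Y = (mem:SI (pre_inc:SI (reg:SI 5))) yields 2, the "matching
   pre-increment" case described above.  */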
2196 operands_match_p (rtx x, rtx y)
2198 int i;
2199 RTX_CODE code = GET_CODE (x);
2200 const char *fmt;
2201 int success_2;
2203 if (x == y)
2204 return 1;
2205 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
2206 && (REG_P (y) || (GET_CODE (y) == SUBREG
2207 && REG_P (SUBREG_REG (y)))))
2209 int j;
2211 if (code == SUBREG)
2213 i = REGNO (SUBREG_REG (x));
2214 if (i >= FIRST_PSEUDO_REGISTER)
2215 goto slow;
2216 i += subreg_regno_offset (REGNO (SUBREG_REG (x)),
2217 GET_MODE (SUBREG_REG (x)),
2218 SUBREG_BYTE (x),
2219 GET_MODE (x));
2221 else
2222 i = REGNO (x);
2224 if (GET_CODE (y) == SUBREG)
2226 j = REGNO (SUBREG_REG (y));
2227 if (j >= FIRST_PSEUDO_REGISTER)
2228 goto slow;
2229 j += subreg_regno_offset (REGNO (SUBREG_REG (y)),
2230 GET_MODE (SUBREG_REG (y)),
2231 SUBREG_BYTE (y),
2232 GET_MODE (y));
2234 else
2235 j = REGNO (y);
2237 /* On a REG_WORDS_BIG_ENDIAN machine, point to the last register of a
2238 multiple hard register group of scalar integer registers, so that
2239 for example (reg:DI 0) and (reg:SI 1) will be considered the same
2240 register. */
2241 scalar_int_mode xmode;
2242 if (REG_WORDS_BIG_ENDIAN
2243 && is_a <scalar_int_mode> (GET_MODE (x), &xmode)
2244 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD
2245 && i < FIRST_PSEUDO_REGISTER)
2246 i += hard_regno_nregs (i, xmode) - 1;
2247 scalar_int_mode ymode;
2248 if (REG_WORDS_BIG_ENDIAN
2249 && is_a <scalar_int_mode> (GET_MODE (y), &ymode)
2250 && GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2251 && j < FIRST_PSEUDO_REGISTER)
2252 j += hard_regno_nregs (j, ymode) - 1;
2254 return i == j;
2256 /* If two operands must match, because they are really a single
2257 operand of an assembler insn, then two postincrements are invalid
2258 because the assembler insn would increment only once.
2259 On the other hand, a postincrement matches ordinary indexing
2260 if the postincrement is the output operand. */
2261 if (code == POST_DEC || code == POST_INC || code == POST_MODIFY)
2262 return operands_match_p (XEXP (x, 0), y);
2263 /* Two preincrements are invalid
2264 because the assembler insn would increment only once.
2265 On the other hand, a preincrement matches ordinary indexing
2266 if the preincrement is the input operand.
2267 In this case, return 2, since some callers need to do special
2268 things when this happens. */
2269 if (GET_CODE (y) == PRE_DEC || GET_CODE (y) == PRE_INC
2270 || GET_CODE (y) == PRE_MODIFY)
2271 return operands_match_p (x, XEXP (y, 0)) ? 2 : 0;
2273 slow:
2275 /* Now we have disposed of all the cases in which different rtx codes
2276 can match. */
2277 if (code != GET_CODE (y))
2278 return 0;
2280 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2281 if (GET_MODE (x) != GET_MODE (y))
2282 return 0;
2284 /* MEMs referring to different address spaces are not equivalent. */
2285 if (code == MEM && MEM_ADDR_SPACE (x) != MEM_ADDR_SPACE (y))
2286 return 0;
2288 switch (code)
2290 CASE_CONST_UNIQUE:
2291 return 0;
2293 case CONST_VECTOR:
2294 if (!same_vector_encodings_p (x, y))
2295 return false;
2296 break;
2298 case LABEL_REF:
2299 return label_ref_label (x) == label_ref_label (y);
2300 case SYMBOL_REF:
2301 return XSTR (x, 0) == XSTR (y, 0);
2303 default:
2304 break;
2307 /* Compare the elements. If any pair of corresponding elements
2308 fail to match, return 0 for the whole thing. */
2310 success_2 = 0;
2311 fmt = GET_RTX_FORMAT (code);
2312 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2314 int val, j;
2315 switch (fmt[i])
2317 case 'w':
2318 if (XWINT (x, i) != XWINT (y, i))
2319 return 0;
2320 break;
2322 case 'i':
2323 if (XINT (x, i) != XINT (y, i))
2324 return 0;
2325 break;
2327 case 'p':
2328 if (maybe_ne (SUBREG_BYTE (x), SUBREG_BYTE (y)))
2329 return 0;
2330 break;
2332 case 'e':
2333 val = operands_match_p (XEXP (x, i), XEXP (y, i));
2334 if (val == 0)
2335 return 0;
2336 /* If any subexpression returns 2,
2337 we should return 2 if we are successful. */
2338 if (val == 2)
2339 success_2 = 1;
2340 break;
2342 case '0':
2343 break;
2345 case 'E':
2346 if (XVECLEN (x, i) != XVECLEN (y, i))
2347 return 0;
2348 for (j = XVECLEN (x, i) - 1; j >= 0; --j)
2350 val = operands_match_p (XVECEXP (x, i, j), XVECEXP (y, i, j));
2351 if (val == 0)
2352 return 0;
2353 if (val == 2)
2354 success_2 = 1;
2356 break;
2358 /* It is believed that rtx's at this level will never
2359 contain anything but integers and other rtx's,
2360 except for within LABEL_REFs and SYMBOL_REFs. */
2361 default:
2362 gcc_unreachable ();
2365 return 1 + success_2;
2368 /* Describe the range of registers or memory referenced by X.
2369 If X is a register, set REG_FLAG and put the first register
2370 number into START and the last plus one into END.
2371 If X is a memory reference, put a base address into BASE
2372 and a range of integer offsets into START and END.
2373 If X is pushing on the stack, we can assume it causes no trouble,
2374 so we set the SAFE field. */
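/* A worked instance (assuming a 4-byte SImode): decomposing
   (mem:SI (plus:SI (reg fp) (const_int 8))) gives BASE = the frame
   pointer rtx, START = 8 and END = 12 (START plus the mode size), while
   decomposing a pseudo register with no hard reg sets REG_FLAG and makes
   [START, END) cover just that register number.  */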
2376 static struct decomposition
2377 decompose (rtx x)
2379 struct decomposition val;
2380 int all_const = 0, regno;
2382 memset (&val, 0, sizeof (val));
2384 switch (GET_CODE (x))
2386 case MEM:
2388 rtx base = NULL_RTX, offset = 0;
2389 rtx addr = XEXP (x, 0);
2391 if (GET_CODE (addr) == PRE_DEC || GET_CODE (addr) == PRE_INC
2392 || GET_CODE (addr) == POST_DEC || GET_CODE (addr) == POST_INC)
2394 val.base = XEXP (addr, 0);
2395 val.start = -GET_MODE_SIZE (GET_MODE (x));
2396 val.end = GET_MODE_SIZE (GET_MODE (x));
2397 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2398 return val;
2401 if (GET_CODE (addr) == PRE_MODIFY || GET_CODE (addr) == POST_MODIFY)
2403 if (GET_CODE (XEXP (addr, 1)) == PLUS
2404 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
2405 && CONSTANT_P (XEXP (XEXP (addr, 1), 1)))
2407 val.base = XEXP (addr, 0);
2408 val.start = -INTVAL (XEXP (XEXP (addr, 1), 1));
2409 val.end = INTVAL (XEXP (XEXP (addr, 1), 1));
2410 val.safe = REGNO (val.base) == STACK_POINTER_REGNUM;
2411 return val;
2415 if (GET_CODE (addr) == CONST)
2417 addr = XEXP (addr, 0);
2418 all_const = 1;
2420 if (GET_CODE (addr) == PLUS)
2422 if (CONSTANT_P (XEXP (addr, 0)))
2424 base = XEXP (addr, 1);
2425 offset = XEXP (addr, 0);
2427 else if (CONSTANT_P (XEXP (addr, 1)))
2429 base = XEXP (addr, 0);
2430 offset = XEXP (addr, 1);
2434 if (offset == 0)
2436 base = addr;
2437 offset = const0_rtx;
2439 if (GET_CODE (offset) == CONST)
2440 offset = XEXP (offset, 0);
2441 if (GET_CODE (offset) == PLUS)
2443 if (CONST_INT_P (XEXP (offset, 0)))
2445 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 1));
2446 offset = XEXP (offset, 0);
2448 else if (CONST_INT_P (XEXP (offset, 1)))
2450 base = gen_rtx_PLUS (GET_MODE (base), base, XEXP (offset, 0));
2451 offset = XEXP (offset, 1);
2453 else
2455 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2456 offset = const0_rtx;
2459 else if (!CONST_INT_P (offset))
2461 base = gen_rtx_PLUS (GET_MODE (base), base, offset);
2462 offset = const0_rtx;
2465 if (all_const && GET_CODE (base) == PLUS)
2466 base = gen_rtx_CONST (GET_MODE (base), base);
2468 gcc_assert (CONST_INT_P (offset));
2470 val.start = INTVAL (offset);
2471 val.end = val.start + GET_MODE_SIZE (GET_MODE (x));
2472 val.base = base;
2474 break;
2476 case REG:
2477 val.reg_flag = 1;
2478 regno = true_regnum (x);
2479 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2481 /* A pseudo with no hard reg. */
2482 val.start = REGNO (x);
2483 val.end = val.start + 1;
2485 else
2487 /* A hard reg. */
2488 val.start = regno;
2489 val.end = end_hard_regno (GET_MODE (x), regno);
2491 break;
2493 case SUBREG:
2494 if (!REG_P (SUBREG_REG (x)))
2495 /* This could be more precise, but it's good enough. */
2496 return decompose (SUBREG_REG (x));
2497 regno = true_regnum (x);
2498 if (regno < 0 || regno >= FIRST_PSEUDO_REGISTER)
2499 return decompose (SUBREG_REG (x));
2501 /* A hard reg. */
2502 val.reg_flag = 1;
2503 val.start = regno;
2504 val.end = regno + subreg_nregs (x);
2505 break;
2507 case SCRATCH:
2508 /* This hasn't been assigned yet, so it can't conflict yet. */
2509 val.safe = 1;
2510 break;
2512 default:
2513 gcc_assert (CONSTANT_P (x));
2514 val.safe = 1;
2515 break;
2517 return val;
2520 /* Return 1 if altering Y will not modify the value of X.
2521 Y is also described by YDATA, which should be decompose (Y). */
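/* For instance (assuming a 4-byte SImode), the stack slots
   (mem:SI (plus (reg fp) (const_int -8))) and
   (mem:SI (plus (reg fp) (const_int -4))) decompose to the disjoint
   ranges [-8, -4) and [-4, 0) off the same base, so each is immune to a
   store into the other; when the bases differ and either one is
   variable, the answer is a conservative 0.  */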
2523 static int
2524 immune_p (rtx x, rtx y, struct decomposition ydata)
2526 struct decomposition xdata;
2528 if (ydata.reg_flag)
2529 /* In this case the decomposition structure contains register
2530 numbers rather than byte offsets. */
2531 return !refers_to_regno_for_reload_p (ydata.start.to_constant (),
2532 ydata.end.to_constant (),
2533 x, (rtx *) 0);
2534 if (ydata.safe)
2535 return 1;
2537 gcc_assert (MEM_P (y));
2538 /* If Y is memory and X is not, Y can't affect X. */
2539 if (!MEM_P (x))
2540 return 1;
2542 xdata = decompose (x);
2544 if (! rtx_equal_p (xdata.base, ydata.base))
2546 /* If bases are distinct symbolic constants, there is no overlap. */
2547 if (CONSTANT_P (xdata.base) && CONSTANT_P (ydata.base))
2548 return 1;
2549 /* Constants and stack slots never overlap. */
2550 if (CONSTANT_P (xdata.base)
2551 && (ydata.base == frame_pointer_rtx
2552 || ydata.base == hard_frame_pointer_rtx
2553 || ydata.base == stack_pointer_rtx))
2554 return 1;
2555 if (CONSTANT_P (ydata.base)
2556 && (xdata.base == frame_pointer_rtx
2557 || xdata.base == hard_frame_pointer_rtx
2558 || xdata.base == stack_pointer_rtx))
2559 return 1;
2560 /* If either base is variable, we don't know anything. */
2561 return 0;
2564 return known_ge (xdata.start, ydata.end) || known_ge (ydata.start, xdata.end);
2567 /* Similar, but calls decompose. */
2570 safe_from_earlyclobber (rtx op, rtx clobber)
2572 struct decomposition early_data;
2574 early_data = decompose (clobber);
2575 return immune_p (op, clobber, early_data);
2578 /* Main entry point of this file: search the body of INSN
2579 for values that need reloading and record them with push_reload.
2580 REPLACE nonzero means record also where the values occur
2581 so that subst_reloads can be used.
2583 IND_LEVELS says how many levels of indirection are supported by this
2584 machine; a value of zero means that a memory reference is not a valid
2585 memory address.
2587 LIVE_KNOWN says we have valid information about which hard
2588 regs are live at each point in the program; this is true when
2589 we are called from global_alloc but false when stupid register
2590 allocation has been done.
2592 RELOAD_REG_P if nonzero is a vector indexed by hard reg number
2593 which is nonnegative if the reg has been commandeered for reloading into.
2594 It is copied into STATIC_RELOAD_REG_P and referenced from there
2595 by various subroutines.
2597 Return TRUE if some operands need to be changed, because of swapping
2598 commutative operands, reg_equiv_address substitution, or whatever. */
2601 find_reloads (rtx_insn *insn, int replace, int ind_levels, int live_known,
2602 short *reload_reg_p)
2604 int insn_code_number;
2605 int i, j;
2606 int noperands;
2607 /* These start out as the constraints for the insn
2608 and they are chewed up as we consider alternatives. */
2609 const char *constraints[MAX_RECOG_OPERANDS];
2610 /* These are the preferred classes for an operand, or NO_REGS if it isn't
2611 a register. */
2612 enum reg_class preferred_class[MAX_RECOG_OPERANDS];
2613 char pref_or_nothing[MAX_RECOG_OPERANDS];
2614 /* Nonzero for a MEM operand whose entire address needs a reload.
2615 May be -1 to indicate the entire address may or may not need a reload. */
2616 int address_reloaded[MAX_RECOG_OPERANDS];
2617 /* Nonzero for an address operand that needs to be completely reloaded.
2618 May be -1 to indicate the entire operand may or may not need a reload. */
2619 int address_operand_reloaded[MAX_RECOG_OPERANDS];
2620 /* Value of enum reload_type to use for operand. */
2621 enum reload_type operand_type[MAX_RECOG_OPERANDS];
2622 /* Value of enum reload_type to use within address of operand. */
2623 enum reload_type address_type[MAX_RECOG_OPERANDS];
2624 /* Save the usage of each operand. */
2625 enum reload_usage { RELOAD_READ, RELOAD_READ_WRITE, RELOAD_WRITE } modified[MAX_RECOG_OPERANDS];
2626 int no_input_reloads = 0, no_output_reloads = 0;
2627 int n_alternatives;
2628 reg_class_t this_alternative[MAX_RECOG_OPERANDS];
2629 char this_alternative_match_win[MAX_RECOG_OPERANDS];
2630 char this_alternative_win[MAX_RECOG_OPERANDS];
2631 char this_alternative_offmemok[MAX_RECOG_OPERANDS];
2632 char this_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2633 int this_alternative_matches[MAX_RECOG_OPERANDS];
2634 reg_class_t goal_alternative[MAX_RECOG_OPERANDS];
2635 int this_alternative_number;
2636 int goal_alternative_number = 0;
2637 int operand_reloadnum[MAX_RECOG_OPERANDS];
2638 int goal_alternative_matches[MAX_RECOG_OPERANDS];
2639 int goal_alternative_matched[MAX_RECOG_OPERANDS];
2640 char goal_alternative_match_win[MAX_RECOG_OPERANDS];
2641 char goal_alternative_win[MAX_RECOG_OPERANDS];
2642 char goal_alternative_offmemok[MAX_RECOG_OPERANDS];
2643 char goal_alternative_earlyclobber[MAX_RECOG_OPERANDS];
2644 int goal_alternative_swapped;
2645 int best;
2646 int commutative;
2647 char operands_match[MAX_RECOG_OPERANDS][MAX_RECOG_OPERANDS];
2648 rtx substed_operand[MAX_RECOG_OPERANDS];
2649 rtx body = PATTERN (insn);
2650 rtx set = single_set (insn);
2651 int goal_earlyclobber = 0, this_earlyclobber;
2652 machine_mode operand_mode[MAX_RECOG_OPERANDS];
2653 int retval = 0;
2655 this_insn = insn;
2656 n_reloads = 0;
2657 n_replacements = 0;
2658 n_earlyclobbers = 0;
2659 replace_reloads = replace;
2660 hard_regs_live_known = live_known;
2661 static_reload_reg_p = reload_reg_p;
2663 if (JUMP_P (insn) && INSN_CODE (insn) < 0)
2665 extract_insn (insn);
2666 for (i = 0; i < recog_data.n_operands; i++)
2667 if (recog_data.operand_type[i] != OP_IN)
2668 break;
2669 if (i < recog_data.n_operands)
2671 error_for_asm (insn,
2672 "the target does not support %<asm goto%> "
2673 "with outputs in %<asm%>");
2674 ira_nullify_asm_goto (insn);
2675 return 0;
2679 /* JUMP_INSNs and CALL_INSNs are not allowed to have any output reloads. */
2680 if (JUMP_P (insn) || CALL_P (insn))
2681 no_output_reloads = 1;
2683 /* The eliminated forms of any secondary memory locations are per-insn, so
2684 clear them out here. */
2686 if (secondary_memlocs_elim_used)
2688 memset (secondary_memlocs_elim, 0,
2689 sizeof (secondary_memlocs_elim[0]) * secondary_memlocs_elim_used);
2690 secondary_memlocs_elim_used = 0;
2693 /* Dispose quickly of (set (reg..) (reg..)) if both have hard regs and it
2694 is cheap to move between them. If it is not, there may not be an insn
2695 to do the copy, so we may need a reload. */
2696 if (GET_CODE (body) == SET
2697 && REG_P (SET_DEST (body))
2698 && REGNO (SET_DEST (body)) < FIRST_PSEUDO_REGISTER
2699 && REG_P (SET_SRC (body))
2700 && REGNO (SET_SRC (body)) < FIRST_PSEUDO_REGISTER
2701 && register_move_cost (GET_MODE (SET_SRC (body)),
2702 REGNO_REG_CLASS (REGNO (SET_SRC (body))),
2703 REGNO_REG_CLASS (REGNO (SET_DEST (body)))) == 2)
2704 return 0;
2706 extract_insn (insn);
2708 noperands = reload_n_operands = recog_data.n_operands;
2709 n_alternatives = recog_data.n_alternatives;
2711 /* Just return "no reloads" if insn has no operands with constraints. */
2712 if (noperands == 0 || n_alternatives == 0)
2713 return 0;
2715 insn_code_number = INSN_CODE (insn);
2716 this_insn_is_asm = insn_code_number < 0;
2718 memcpy (operand_mode, recog_data.operand_mode,
2719 noperands * sizeof (machine_mode));
2720 memcpy (constraints, recog_data.constraints,
2721 noperands * sizeof (const char *));
2723 commutative = -1;
2725 /* If we will need to know, later, whether some pair of operands
2726 are the same, we must compare them now and save the result.
2727 Reloading the base and index registers will clobber them
2728 and afterward they will fail to match. */
2730 for (i = 0; i < noperands; i++)
2732 const char *p;
2733 int c;
2734 char *end;
2736 substed_operand[i] = recog_data.operand[i];
2737 p = constraints[i];
2739 modified[i] = RELOAD_READ;
2741 /* Scan this operand's constraint to see if it is an output operand,
2742 an in-out operand, is commutative, or should match another. */
2744 while ((c = *p))
2746 p += CONSTRAINT_LEN (c, p);
2747 switch (c)
2749 case '=':
2750 modified[i] = RELOAD_WRITE;
2751 break;
2752 case '+':
2753 modified[i] = RELOAD_READ_WRITE;
2754 break;
2755 case '%':
2757 /* The last operand should not be marked commutative. */
2758 gcc_assert (i != noperands - 1);
2760 /* We currently only support one commutative pair of
2761 operands. Some existing asm code currently uses more
2762 than one pair. Previously, that would usually work,
2763 but sometimes it would crash the compiler. We
2764 continue supporting that case as well as we can by
2765 silently ignoring all but the first pair. In the
2766 future we may handle it correctly. */
2767 if (commutative < 0)
2768 commutative = i;
2769 else
2770 gcc_assert (this_insn_is_asm);
2772 break;
2773 /* Use of ISDIGIT is tempting here, but it may get expensive because
2774 of locale support we don't want. */
2775 case '0': case '1': case '2': case '3': case '4':
2776 case '5': case '6': case '7': case '8': case '9':
2778 c = strtoul (p - 1, &end, 10);
2779 p = end;
2781 operands_match[c][i]
2782 = operands_match_p (recog_data.operand[c],
2783 recog_data.operand[i]);
2785 /* An operand may not match itself. */
2786 gcc_assert (c != i);
2788 /* If C can be commuted with C+1, and C might need to match I,
2789 then C+1 might also need to match I. */
2790 if (commutative >= 0)
2792 if (c == commutative || c == commutative + 1)
2794 int other = c + (c == commutative ? 1 : -1);
2795 operands_match[other][i]
2796 = operands_match_p (recog_data.operand[other],
2797 recog_data.operand[i]);
2799 if (i == commutative || i == commutative + 1)
2801 int other = i + (i == commutative ? 1 : -1);
2802 operands_match[c][other]
2803 = operands_match_p (recog_data.operand[c],
2804 recog_data.operand[other]);
2806 /* Note that C is supposed to be less than I.
2807 No need to consider altering both C and I because in
2808 that case we would alter one into the other. */
2815 /* Examine each operand that is a memory reference or memory address
2816 and reload parts of the addresses into index registers.
2817 Also here any references to pseudo regs that didn't get hard regs
2818 but are equivalent to constants get replaced in the insn itself
2819 with those constants. Nobody will ever see them again.
2821 Finally, set up the preferred classes of each operand. */
2823 for (i = 0; i < noperands; i++)
2825 RTX_CODE code = GET_CODE (recog_data.operand[i]);
2827 address_reloaded[i] = 0;
2828 address_operand_reloaded[i] = 0;
2829 operand_type[i] = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT
2830 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT
2831 : RELOAD_OTHER);
2832 address_type[i]
2833 = (modified[i] == RELOAD_READ ? RELOAD_FOR_INPUT_ADDRESS
2834 : modified[i] == RELOAD_WRITE ? RELOAD_FOR_OUTPUT_ADDRESS
2835 : RELOAD_OTHER);
2837 if (*constraints[i] == 0)
2838 /* Ignore things like match_operator operands. */
2840 else if (insn_extra_address_constraint
2841 (lookup_constraint (constraints[i])))
2843 address_operand_reloaded[i]
2844 = find_reloads_address (recog_data.operand_mode[i], (rtx*) 0,
2845 recog_data.operand[i],
2846 recog_data.operand_loc[i],
2847 i, operand_type[i], ind_levels, insn);
2849 /* If we now have a simple operand where we used to have a
2850 PLUS or MULT or ASHIFT, re-recognize and try again. */
2851 if ((OBJECT_P (*recog_data.operand_loc[i])
2852 || GET_CODE (*recog_data.operand_loc[i]) == SUBREG)
2853 && (GET_CODE (recog_data.operand[i]) == MULT
2854 || GET_CODE (recog_data.operand[i]) == ASHIFT
2855 || GET_CODE (recog_data.operand[i]) == PLUS))
2857 INSN_CODE (insn) = -1;
2858 retval = find_reloads (insn, replace, ind_levels, live_known,
2859 reload_reg_p);
2860 return retval;
2863 recog_data.operand[i] = *recog_data.operand_loc[i];
2864 substed_operand[i] = recog_data.operand[i];
2866 /* Address operands are reloaded in their existing mode,
2867 no matter what is specified in the machine description. */
2868 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2870 /* If the address is a single CONST_INT, pick the address mode
2871 instead; otherwise we will later not know in which mode
2872 the reload should be performed. */
2873 if (operand_mode[i] == VOIDmode)
2874 operand_mode[i] = Pmode;
2877 else if (code == MEM)
2879 address_reloaded[i]
2880 = find_reloads_address (GET_MODE (recog_data.operand[i]),
2881 recog_data.operand_loc[i],
2882 XEXP (recog_data.operand[i], 0),
2883 &XEXP (recog_data.operand[i], 0),
2884 i, address_type[i], ind_levels, insn);
2885 recog_data.operand[i] = *recog_data.operand_loc[i];
2886 substed_operand[i] = recog_data.operand[i];
2888 else if (code == SUBREG)
2890 rtx reg = SUBREG_REG (recog_data.operand[i]);
2891 rtx op
2892 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2893 ind_levels,
2894 set != 0
2895 && &SET_DEST (set) == recog_data.operand_loc[i],
2896 insn,
2897 &address_reloaded[i]);
2899 /* If we made a MEM to load (a part of) the stackslot of a pseudo
2900 that didn't get a hard register, emit a USE with a REG_EQUAL
2901 note in front so that we might inherit a previous, possibly
2902 wider reload. */
2904 if (replace
2905 && MEM_P (op)
2906 && REG_P (reg)
2907 && known_ge (GET_MODE_SIZE (GET_MODE (reg)),
2908 GET_MODE_SIZE (GET_MODE (op)))
2909 && reg_equiv_constant (REGNO (reg)) == 0)
2910 set_unique_reg_note (emit_insn_before (gen_rtx_USE (VOIDmode, reg),
2911 insn),
2912 REG_EQUAL, reg_equiv_memory_loc (REGNO (reg)));
2914 substed_operand[i] = recog_data.operand[i] = op;
2916 else if (code == PLUS || GET_RTX_CLASS (code) == RTX_UNARY)
2917 /* We can get a PLUS as an "operand" as a result of register
2918 elimination. See eliminate_regs and gen_reload. We handle
2919 a unary operator by reloading the operand. */
2920 substed_operand[i] = recog_data.operand[i]
2921 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2922 ind_levels, 0, insn,
2923 &address_reloaded[i]);
2924 else if (code == REG)
2926 /* This is equivalent to calling find_reloads_toplev.
2927 The code is duplicated for speed.
2928 When we find a pseudo always equivalent to a constant,
2929 we replace it by the constant. We must be sure, however,
2930 that we don't try to replace it in the insn in which it
2931 is being set. */
2932 int regno = REGNO (recog_data.operand[i]);
2933 if (reg_equiv_constant (regno) != 0
2934 && (set == 0 || &SET_DEST (set) != recog_data.operand_loc[i]))
2936 /* Record the existing mode so that the check if constants are
2937 allowed will work when operand_mode isn't specified. */
2939 if (operand_mode[i] == VOIDmode)
2940 operand_mode[i] = GET_MODE (recog_data.operand[i]);
2942 substed_operand[i] = recog_data.operand[i]
2943 = reg_equiv_constant (regno);
2945 if (reg_equiv_memory_loc (regno) != 0
2946 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
2947 /* We need not give a valid is_set_dest argument since the case
2948 of a constant equivalence was checked above. */
2949 substed_operand[i] = recog_data.operand[i]
2950 = find_reloads_toplev (recog_data.operand[i], i, address_type[i],
2951 ind_levels, 0, insn,
2952 &address_reloaded[i]);
2954 /* If the operand is still a register (we didn't replace it with an
2955 equivalent), get the preferred class to reload it into. */
2956 code = GET_CODE (recog_data.operand[i]);
2957 preferred_class[i]
2958 = ((code == REG && REGNO (recog_data.operand[i])
2959 >= FIRST_PSEUDO_REGISTER)
2960 ? reg_preferred_class (REGNO (recog_data.operand[i]))
2961 : NO_REGS);
2962 pref_or_nothing[i]
2963 = (code == REG
2964 && REGNO (recog_data.operand[i]) >= FIRST_PSEUDO_REGISTER
2965 && reg_alternate_class (REGNO (recog_data.operand[i])) == NO_REGS);
2968 /* If this is simply a copy from operand 1 to operand 0, merge the
2969 preferred classes for the operands. */
2970 if (set != 0 && noperands >= 2 && recog_data.operand[0] == SET_DEST (set)
2971 && recog_data.operand[1] == SET_SRC (set))
2973 preferred_class[0] = preferred_class[1]
2974 = reg_class_subunion[(int) preferred_class[0]][(int) preferred_class[1]];
2975 pref_or_nothing[0] |= pref_or_nothing[1];
2976 pref_or_nothing[1] |= pref_or_nothing[0];
2979 /* Now see what we need for pseudo-regs that didn't get hard regs
2980 or got the wrong kind of hard reg. For this, we must consider
2981 all the operands together against the register constraints. */
2983 best = MAX_RECOG_OPERANDS * 2 + 600;
2985 goal_alternative_swapped = 0;
2987 /* The constraints are made of several alternatives.
2988 Each operand's constraint looks like foo,bar,... with commas
2989 separating the alternatives. The first alternatives for all
2990 operands go together, the second alternatives go together, etc.
2992 First loop over alternatives. */
2994 alternative_mask enabled = get_enabled_alternatives (insn);
2995 for (this_alternative_number = 0;
2996 this_alternative_number < n_alternatives;
2997 this_alternative_number++)
2999 int swapped;
3001 if (!TEST_BIT (enabled, this_alternative_number))
3003 int i;
3005 for (i = 0; i < recog_data.n_operands; i++)
3006 constraints[i] = skip_alternative (constraints[i]);
3008 continue;
3011 /* If insn is commutative (it's safe to exchange a certain pair
3012 of operands) then we need to try each alternative twice, the
3013 second time matching those two operands as if we had
3014 exchanged them. To do this, really exchange them in
3015 operands. */
3016 for (swapped = 0; swapped < (commutative >= 0 ? 2 : 1); swapped++)
3018 /* Loop over operands for one constraint alternative. */
3019 /* LOSERS counts those that don't fit this alternative
3020 and would require loading. */
3021 int losers = 0;
3022 /* BAD is set to 1 if some operand can't fit this alternative
3023 even after reloading. */
3024 int bad = 0;
3025 /* REJECT is a count of how undesirable this alternative says it is
3026 if any reloading is required. If the alternative matches exactly
3027 then REJECT is ignored, but otherwise it gets this much
3028 counted against it in addition to the reloading needed. Each
3029 ? counts three times here since we want the disparaging caused by
3030 a bad register class to only count 1/3 as much. */
3031 int reject = 0;
3033 if (swapped)
3035 recog_data.operand[commutative] = substed_operand[commutative + 1];
3036 recog_data.operand[commutative + 1] = substed_operand[commutative];
3037 /* Swap the duplicates too. */
3038 for (i = 0; i < recog_data.n_dups; i++)
3039 if (recog_data.dup_num[i] == commutative
3040 || recog_data.dup_num[i] == commutative + 1)
3041 *recog_data.dup_loc[i]
3042 = recog_data.operand[(int) recog_data.dup_num[i]];
3044 std::swap (preferred_class[commutative],
3045 preferred_class[commutative + 1]);
3046 std::swap (pref_or_nothing[commutative],
3047 pref_or_nothing[commutative + 1]);
3048 std::swap (address_reloaded[commutative],
3049 address_reloaded[commutative + 1]);
3052 this_earlyclobber = 0;
3054 for (i = 0; i < noperands; i++)
3056 const char *p = constraints[i];
3057 char *end;
3058 int len;
3059 int win = 0;
3060 int did_match = 0;
3061 /* 0 => this operand can be reloaded somehow for this alternative. */
3062 int badop = 1;
3063 /* 0 => this operand can be reloaded if the alternative allows regs. */
3064 int winreg = 0;
3065 int c;
3066 int m;
3067 rtx operand = recog_data.operand[i];
3068 int offset = 0;
3069 /* Nonzero means this is a MEM that must be reloaded into a reg
3070 regardless of what the constraint says. */
3071 int force_reload = 0;
3072 int offmemok = 0;
3073 /* Nonzero if a constant forced into memory would be OK for this
3074 operand. */
3075 int constmemok = 0;
3076 int earlyclobber = 0;
3077 enum constraint_num cn;
3078 enum reg_class cl;
3080 /* If the predicate accepts a unary operator, it means that
3081 we need to reload the operand, but do not do this for
3082 match_operator and friends. */
3083 if (UNARY_P (operand) && *p != 0)
3084 operand = XEXP (operand, 0);
3086 /* If the operand is a SUBREG, extract
3087 the REG or MEM (or maybe even a constant) within.
3088 (Constants can occur as a result of reg_equiv_constant.) */
3090 while (GET_CODE (operand) == SUBREG)
3092 /* Offset only matters when operand is a REG and
3093 it is a hard reg. This is because it is passed
3094 to reg_fits_class_p if it is a REG and all pseudos
3095 return 0 from that function. */
3096 if (REG_P (SUBREG_REG (operand))
3097 && REGNO (SUBREG_REG (operand)) < FIRST_PSEUDO_REGISTER)
3099 if (simplify_subreg_regno (REGNO (SUBREG_REG (operand)),
3100 GET_MODE (SUBREG_REG (operand)),
3101 SUBREG_BYTE (operand),
3102 GET_MODE (operand)) < 0)
3103 force_reload = 1;
3104 offset += subreg_regno_offset (REGNO (SUBREG_REG (operand)),
3105 GET_MODE (SUBREG_REG (operand)),
3106 SUBREG_BYTE (operand),
3107 GET_MODE (operand));
3109 operand = SUBREG_REG (operand);
3110 /* Force reload if this is a constant or PLUS or if there may
3111 be a problem accessing OPERAND in the outer mode. */
3112 scalar_int_mode inner_mode;
3113 if (CONSTANT_P (operand)
3114 || GET_CODE (operand) == PLUS
3115 /* We must force a reload of paradoxical SUBREGs
3116 of a MEM because the alignment of the inner value
3117 may not be enough to do the outer reference. On
3118 big-endian machines, it may also reference outside
3119 the object.
3121 On machines that extend byte operations and we have a
3122 SUBREG where both the inner and outer modes are no wider
3123 than a word and the inner mode is narrower, is integral,
3124 and gets extended when loaded from memory, combine.cc has
3125 made assumptions about the behavior of the machine in such
3126 register access. If the data is, in fact, in memory we
3127 must always load using the size assumed to be in the
3128 register and let the insn do the different-sized
3129 accesses.
3131 This is doubly true if WORD_REGISTER_OPERATIONS. In
3132 this case eliminate_regs has left non-paradoxical
3133 subregs for push_reload to see. Make sure it does
3134 by forcing the reload.
3136 ??? When is it right at this stage to have a subreg
3137 of a mem that is _not_ to be handled specially? IMO
3138 those should have been reduced to just a mem. */
3139 || ((MEM_P (operand)
3140 || (REG_P (operand)
3141 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
3142 && (WORD_REGISTER_OPERATIONS
3143 || (((maybe_lt
3144 (GET_MODE_BITSIZE (GET_MODE (operand)),
3145 BIGGEST_ALIGNMENT))
3146 && (paradoxical_subreg_p
3147 (operand_mode[i], GET_MODE (operand)))))
3148 || BYTES_BIG_ENDIAN
3149 || (known_le (GET_MODE_SIZE (operand_mode[i]),
3150 UNITS_PER_WORD)
3151 && (is_a <scalar_int_mode>
3152 (GET_MODE (operand), &inner_mode))
3153 && (GET_MODE_SIZE (inner_mode)
3154 <= UNITS_PER_WORD)
3155 && paradoxical_subreg_p (operand_mode[i],
3156 inner_mode)
3157 && LOAD_EXTEND_OP (inner_mode) != UNKNOWN)))
3158 /* We must force a reload of a SUBREG's inner expression
3159 if it is a pseudo that will become a MEM and the MEM
3160 has a mode-dependent address, as in that case we
3161 obviously cannot change the mode of the MEM to that
3162 of the containing SUBREG as that would change the
3163 interpretation of the address. */
3164 || (REG_P (operand)
3165 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3166 && reg_equiv_mem (REGNO (operand))
3167 && (mode_dependent_address_p
3168 (XEXP (reg_equiv_mem (REGNO (operand)), 0),
3169 (MEM_ADDR_SPACE
3170 (reg_equiv_mem (REGNO (operand)))))))
3172 force_reload = 1;
3175 this_alternative[i] = NO_REGS;
3176 this_alternative_win[i] = 0;
3177 this_alternative_match_win[i] = 0;
3178 this_alternative_offmemok[i] = 0;
3179 this_alternative_earlyclobber[i] = 0;
3180 this_alternative_matches[i] = -1;
3182 /* An empty constraint or empty alternative
3183 allows anything which matched the pattern. */
3184 if (*p == 0 || *p == ',')
3185 win = 1, badop = 0;
3187 /* Scan this alternative's specs for this operand;
3188 set WIN if the operand fits any letter in this alternative.
3189 Otherwise, clear BADOP if this operand could
3190 fit some letter after reloads,
3191 or set WINREG if this operand could fit after reloads
3192 provided the constraint allows some registers. */
3195 switch ((c = *p, len = CONSTRAINT_LEN (c, p)), c)
3197 case '\0':
3198 len = 0;
3199 break;
3200 case ',':
3201 c = '\0';
3202 break;
3204 case '?':
3205 reject += 6;
3206 break;
3208 case '!':
3209 reject = 600;
3210 break;
3212 case '#':
3213 /* Ignore rest of this alternative as far as
3214 reloading is concerned. */
3216 p++;
3217 while (*p && *p != ',');
3218 len = 0;
3219 break;
3221 case '0': case '1': case '2': case '3': case '4':
3222 case '5': case '6': case '7': case '8': case '9':
3223 m = strtoul (p, &end, 10);
3224 p = end;
3225 len = 0;
3227 this_alternative_matches[i] = m;
3228 /* We are supposed to match a previous operand.
3229 If we do, we win if that one did.
3230 If we do not, count both of the operands as losers.
3231 (This is too conservative, since most of the time
3232 only a single reload insn will be needed to make
3233 the two operands win. As a result, this alternative
3234 may be rejected when it is actually desirable.) */
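/* Illustrative note (not part of the original source): a constraint
   such as "0" on operand 1 means operand 1 must match operand 0, so M
   is 0 here and, in the unswapped case, the test below simply consults
   operands_match[0][1].  */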
3235 if ((swapped && (m != commutative || i != commutative + 1))
3236 /* If we are matching as if two operands were swapped,
3237 also pretend that operands_match had been computed
3238 with swapped.
3239 But if I is the second of those and C is the first,
3240 don't exchange them, because operands_match is valid
3241 only on one side of its diagonal. */
3242 ? (operands_match
3243 [(m == commutative || m == commutative + 1)
3244 ? 2 * commutative + 1 - m : m]
3245 [(i == commutative || i == commutative + 1)
3246 ? 2 * commutative + 1 - i : i])
3247 : operands_match[m][i])
3249 /* If we are matching a non-offsettable address where an
3250 offsettable address was expected, then we must reject
3251 this combination, because we can't reload it. */
3252 if (this_alternative_offmemok[m]
3253 && MEM_P (recog_data.operand[m])
3254 && this_alternative[m] == NO_REGS
3255 && ! this_alternative_win[m])
3256 bad = 1;
3258 did_match = this_alternative_win[m];
3260 else
3262 /* Operands don't match. */
3263 rtx value;
3264 int loc1, loc2;
3265 /* Retroactively mark the operand we had to match
3266 as a loser, if it wasn't already. */
3267 if (this_alternative_win[m])
3268 losers++;
3269 this_alternative_win[m] = 0;
3270 if (this_alternative[m] == NO_REGS)
3271 bad = 1;
3272 /* But count the pair only once in the total badness of
3273 this alternative, if the pair can be a dummy reload.
3274 The pointers in operand_loc are not swapped; swap
3275 them by hand if necessary. */
3276 if (swapped && i == commutative)
3277 loc1 = commutative + 1;
3278 else if (swapped && i == commutative + 1)
3279 loc1 = commutative;
3280 else
3281 loc1 = i;
3282 if (swapped && m == commutative)
3283 loc2 = commutative + 1;
3284 else if (swapped && m == commutative + 1)
3285 loc2 = commutative;
3286 else
3287 loc2 = m;
3288 value
3289 = find_dummy_reload (recog_data.operand[i],
3290 recog_data.operand[m],
3291 recog_data.operand_loc[loc1],
3292 recog_data.operand_loc[loc2],
3293 operand_mode[i], operand_mode[m],
3294 this_alternative[m], -1,
3295 this_alternative_earlyclobber[m]);
3297 if (value != 0)
3298 losers--;
3300 /* This can be fixed with reloads if the operand
3301 we are supposed to match can be fixed with reloads. */
3302 badop = 0;
3303 this_alternative[i] = this_alternative[m];
3305 /* If we have to reload this operand and some previous
3306 operand also had to match the same thing as this
3307 operand, we don't know how to do that. So reject this
3308 alternative. */
3309 if (! did_match || force_reload)
3310 for (j = 0; j < i; j++)
3311 if (this_alternative_matches[j]
3312 == this_alternative_matches[i])
3314 badop = 1;
3315 break;
3317 break;
3319 case 'p':
3320 /* All necessary reloads for an address_operand
3321 were handled in find_reloads_address. */
3322 this_alternative[i]
3323 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3324 ADDRESS, SCRATCH, insn);
3325 win = 1;
3326 badop = 0;
3327 break;
3329 case TARGET_MEM_CONSTRAINT:
3330 if (force_reload)
3331 break;
3332 if (MEM_P (operand)
3333 || (REG_P (operand)
3334 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3335 && reg_renumber[REGNO (operand)] < 0))
3336 win = 1;
3337 if (CONST_POOL_OK_P (operand_mode[i], operand))
3338 badop = 0;
3339 constmemok = 1;
3340 break;
3342 case '<':
3343 if (MEM_P (operand)
3344 && ! address_reloaded[i]
3345 && (GET_CODE (XEXP (operand, 0)) == PRE_DEC
3346 || GET_CODE (XEXP (operand, 0)) == POST_DEC))
3347 win = 1;
3348 break;
3350 case '>':
3351 if (MEM_P (operand)
3352 && ! address_reloaded[i]
3353 && (GET_CODE (XEXP (operand, 0)) == PRE_INC
3354 || GET_CODE (XEXP (operand, 0)) == POST_INC))
3355 win = 1;
3356 break;
3358 /* Memory operand whose address is not offsettable. */
3359 case 'V':
3360 if (force_reload)
3361 break;
3362 if (MEM_P (operand)
3363 && ! (ind_levels ? offsettable_memref_p (operand)
3364 : offsettable_nonstrict_memref_p (operand))
3365 /* Certain mem addresses will become offsettable
3366 after they themselves are reloaded. This is important;
3367 we don't want our own handling of unoffsettables
3368 to override the handling of reg_equiv_address. */
3369 && !(REG_P (XEXP (operand, 0))
3370 && (ind_levels == 0
3371 || reg_equiv_address (REGNO (XEXP (operand, 0))) != 0)))
3372 win = 1;
3373 break;
3375 /* Memory operand whose address is offsettable. */
3376 case 'o':
3377 if (force_reload)
3378 break;
3379 if ((MEM_P (operand)
3380 /* If IND_LEVELS, find_reloads_address won't reload a
3381 pseudo that didn't get a hard reg, so we have to
3382 reject that case. */
3383 && ((ind_levels ? offsettable_memref_p (operand)
3384 : offsettable_nonstrict_memref_p (operand))
3385 /* A reloaded address is offsettable because it is now
3386 just a simple register indirect. */
3387 || address_reloaded[i] == 1))
3388 || (REG_P (operand)
3389 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3390 && reg_renumber[REGNO (operand)] < 0
3391 /* If reg_equiv_address is nonzero, we will be
3392 loading it into a register; hence it will be
3393 offsettable, but we cannot say that reg_equiv_mem
3394 is offsettable without checking. */
3395 && ((reg_equiv_mem (REGNO (operand)) != 0
3396 && offsettable_memref_p (reg_equiv_mem (REGNO (operand))))
3397 || (reg_equiv_address (REGNO (operand)) != 0))))
3398 win = 1;
3399 if (CONST_POOL_OK_P (operand_mode[i], operand)
3400 || MEM_P (operand))
3401 badop = 0;
3402 constmemok = 1;
3403 offmemok = 1;
3404 break;
3406 case '&':
3407 /* Output operand that is stored before the need for the
3408 input operands (and their index registers) is over. */
3409 earlyclobber = 1, this_earlyclobber = 1;
3410 break;
3412 case 'X':
3413 force_reload = 0;
3414 win = 1;
3415 break;
3417 case 'g':
3418 if (! force_reload
3419 /* A PLUS is never a valid operand, but reload can make
3420 it from a register when eliminating registers. */
3421 && GET_CODE (operand) != PLUS
3422 /* A SCRATCH is not a valid operand. */
3423 && GET_CODE (operand) != SCRATCH
3424 && (! CONSTANT_P (operand)
3425 || ! flag_pic
3426 || LEGITIMATE_PIC_OPERAND_P (operand))
3427 && (GENERAL_REGS == ALL_REGS
3428 || !REG_P (operand)
3429 || (REGNO (operand) >= FIRST_PSEUDO_REGISTER
3430 && reg_renumber[REGNO (operand)] < 0)))
3431 win = 1;
3432 cl = GENERAL_REGS;
3433 goto reg;
3435 default:
3436 cn = lookup_constraint (p);
3437 switch (get_constraint_type (cn))
3439 case CT_REGISTER:
3440 cl = reg_class_for_constraint (cn);
3441 if (cl != NO_REGS)
3442 goto reg;
3443 break;
3445 case CT_CONST_INT:
3446 if (CONST_INT_P (operand)
3447 && (insn_const_int_ok_for_constraint
3448 (INTVAL (operand), cn)))
3449 win = true;
3450 break;
3452 case CT_MEMORY:
3453 case CT_RELAXED_MEMORY:
3454 if (force_reload)
3455 break;
3456 if (constraint_satisfied_p (operand, cn))
3457 win = 1;
3458 /* If the address was already reloaded,
3459 we win as well. */
3460 else if (MEM_P (operand) && address_reloaded[i] == 1)
3461 win = 1;
3462 /* Likewise if the address will be reloaded because
3463 reg_equiv_address is nonzero. For reg_equiv_mem
3464 we have to check. */
3465 else if (REG_P (operand)
3466 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3467 && reg_renumber[REGNO (operand)] < 0
3468 && ((reg_equiv_mem (REGNO (operand)) != 0
3469 && (constraint_satisfied_p
3470 (reg_equiv_mem (REGNO (operand)),
3471 cn)))
3472 || (reg_equiv_address (REGNO (operand))
3473 != 0)))
3474 win = 1;
3476 /* If we didn't already win, we can reload
3477 constants via force_const_mem, and other
3478 MEMs by reloading the address like for 'o'. */
3479 if (CONST_POOL_OK_P (operand_mode[i], operand)
3480 || MEM_P (operand))
3481 badop = 0;
3482 constmemok = 1;
3483 offmemok = 1;
3484 break;
3486 case CT_SPECIAL_MEMORY:
3487 if (force_reload)
3488 break;
3489 if (constraint_satisfied_p (operand, cn))
3490 win = 1;
3491 /* Likewise if the address will be reloaded because
3492 reg_equiv_address is nonzero. For reg_equiv_mem
3493 we have to check. */
3494 else if (REG_P (operand)
3495 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
3496 && reg_renumber[REGNO (operand)] < 0
3497 && reg_equiv_mem (REGNO (operand)) != 0
3498 && (constraint_satisfied_p
3499 (reg_equiv_mem (REGNO (operand)), cn)))
3500 win = 1;
3501 break;
3503 case CT_ADDRESS:
3504 if (constraint_satisfied_p (operand, cn))
3505 win = 1;
3507 /* If we didn't already win, we can reload
3508 the address into a base register. */
3509 this_alternative[i]
3510 = base_reg_class (VOIDmode, ADDR_SPACE_GENERIC,
3511 ADDRESS, SCRATCH, insn);
3512 badop = 0;
3513 break;
3515 case CT_FIXED_FORM:
3516 if (constraint_satisfied_p (operand, cn))
3517 win = 1;
3518 break;
3520 break;
3522 reg:
3523 this_alternative[i]
3524 = reg_class_subunion[this_alternative[i]][cl];
3525 if (GET_MODE (operand) == BLKmode)
3526 break;
3527 winreg = 1;
3528 if (REG_P (operand)
3529 && reg_fits_class_p (operand, this_alternative[i],
3530 offset, GET_MODE (recog_data.operand[i])))
3531 win = 1;
3532 break;
3534 while ((p += len), c);
3536 if (swapped == (commutative >= 0 ? 1 : 0))
3537 constraints[i] = p;
3539 /* If this operand could be handled with a reg,
3540 and some reg is allowed, then this operand can be handled. */
3541 if (winreg && this_alternative[i] != NO_REGS
3542 && (win || !class_only_fixed_regs[this_alternative[i]]))
3543 badop = 0;
3545 /* Record which operands fit this alternative. */
3546 this_alternative_earlyclobber[i] = earlyclobber;
3547 if (win && ! force_reload)
3548 this_alternative_win[i] = 1;
3549 else if (did_match && ! force_reload)
3550 this_alternative_match_win[i] = 1;
3551 else
3553 int const_to_mem = 0;
3555 this_alternative_offmemok[i] = offmemok;
3556 losers++;
3557 if (badop)
3558 bad = 1;
3559 /* Alternative loses if it has no regs for a reg operand. */
3560 if (REG_P (operand)
3561 && this_alternative[i] == NO_REGS
3562 && this_alternative_matches[i] < 0)
3563 bad = 1;
3565 /* If this is a constant that is reloaded into the desired
3566 class by copying it to memory first, count that as another
3567 reload. This is consistent with other code and is
3568 required to avoid choosing another alternative when
3569 the constant is moved into memory by this function on
3570 an early reload pass. Note that the test here is
3571 precisely the same as in the code below that calls
3572 force_const_mem. */
3573 if (CONST_POOL_OK_P (operand_mode[i], operand)
3574 && ((targetm.preferred_reload_class (operand,
3575 this_alternative[i])
3576 == NO_REGS)
3577 || no_input_reloads))
3579 const_to_mem = 1;
3580 if (this_alternative[i] != NO_REGS)
3581 losers++;
3584 /* Alternative loses if it requires a type of reload not
3585 permitted for this insn. We can always reload SCRATCH
3586 and objects with a REG_UNUSED note. */
3587 if (GET_CODE (operand) != SCRATCH
3588 && modified[i] != RELOAD_READ && no_output_reloads
3589 && ! find_reg_note (insn, REG_UNUSED, operand))
3590 bad = 1;
3591 else if (modified[i] != RELOAD_WRITE && no_input_reloads
3592 && ! const_to_mem)
3593 bad = 1;
3595 /* If we can't reload this value at all, reject this
3596 alternative. Note that we could also lose due to
3597 LIMIT_RELOAD_CLASS, but we don't check that
3598 here. */
3600 if (! CONSTANT_P (operand) && this_alternative[i] != NO_REGS)
3602 if (targetm.preferred_reload_class (operand,
3603 this_alternative[i])
3604 == NO_REGS)
3605 reject = 600;
3607 if (operand_type[i] == RELOAD_FOR_OUTPUT
3608 && (targetm.preferred_output_reload_class (operand,
3609 this_alternative[i])
3610 == NO_REGS))
3611 reject = 600;
3614 /* We prefer to reload pseudos over reloading other things,
3615 since such reloads may be able to be eliminated later.
3616 If we are reloading a SCRATCH, we won't be generating any
3617 insns, just using a register, so it is also preferred.
3618 So bump REJECT in other cases.  Don't do this when we
3619 are forcing a constant into memory and it will then win,
3620 since we don't want a different alternative to match
3621 in that case. */
3622 if (! (REG_P (operand)
3623 && REGNO (operand) >= FIRST_PSEUDO_REGISTER)
3624 && GET_CODE (operand) != SCRATCH
3625 && ! (const_to_mem && constmemok))
3626 reject += 2;
3628 /* Input reloads can be inherited more often than output
3629 reloads can be removed, so penalize output reloads. */
3630 if (operand_type[i] != RELOAD_FOR_INPUT
3631 && GET_CODE (operand) != SCRATCH)
3632 reject++;
3635 /* If this operand is a pseudo register that didn't get
3636 a hard reg and this alternative accepts some
3637 register, see if the class that we want is a subset
3638 of the preferred class for this register. If not,
3639 but it intersects that class, we'd like to use the
3640 intersection, but the best we can do is to use the
3641 preferred class, if it is instead a subset of the
3642 class we want in this alternative. If we can't use
3643 it, show that usage of this alternative should be
3644 discouraged; it will be discouraged more still if the
3645 register is `preferred or nothing'. We do this
3646 because it increases the chance of reusing our spill
3647 register in a later insn and avoiding a pair of
3648 memory stores and loads.
3650 Don't bother with this if this alternative will
3651 accept this operand.
3653 Don't do this for a multiword operand, since it is
3654 only a small win and has the risk of requiring more
3655 spill registers, which could cause a large loss.
3657 Don't do this if the preferred class has only one
3658 register because we might otherwise exhaust the
3659 class. */
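/* Illustrative note: suppose this alternative asks for GENERAL_REGS
   but the pseudo's preferred class is a smaller base-register class
   on the target.  If that preferred class is contained in
   GENERAL_REGS, the code below narrows this_alternative[i] to it; if
   the two classes merely intersect, we keep GENERAL_REGS and just
   bump REJECT.  */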
3661 if (! win && ! did_match
3662 && this_alternative[i] != NO_REGS
3663 && known_le (GET_MODE_SIZE (operand_mode[i]), UNITS_PER_WORD)
3664 && reg_class_size [(int) preferred_class[i]] > 0
3665 && ! small_register_class_p (preferred_class[i]))
3667 if (! reg_class_subset_p (this_alternative[i],
3668 preferred_class[i]))
3670 /* Since we don't have a way of forming a register
3671 class for the intersection, we just do
3672 something special if the preferred class is a
3673 subset of the class we have; that's the most
3674 common case anyway. */
3675 if (reg_class_subset_p (preferred_class[i],
3676 this_alternative[i]))
3677 this_alternative[i] = preferred_class[i];
3678 else
3679 reject += (2 + 2 * pref_or_nothing[i]);
3684 /* Now see if any output operands that are marked "earlyclobber"
3685 in this alternative conflict with any input operands
3686 or any memory addresses. */
3688 for (i = 0; i < noperands; i++)
3689 if (this_alternative_earlyclobber[i]
3690 && (this_alternative_win[i] || this_alternative_match_win[i]))
3692 struct decomposition early_data;
3694 early_data = decompose (recog_data.operand[i]);
3696 gcc_assert (modified[i] != RELOAD_READ);
3698 if (this_alternative[i] == NO_REGS)
3700 this_alternative_earlyclobber[i] = 0;
3701 gcc_assert (this_insn_is_asm);
3702 error_for_asm (this_insn,
3703 "%<&%> constraint used with no register class");
3706 for (j = 0; j < noperands; j++)
3707 /* Is this an input operand or a memory ref? */
3708 if ((MEM_P (recog_data.operand[j])
3709 || modified[j] != RELOAD_WRITE)
3710 && j != i
3711 /* Ignore things like match_operator operands. */
3712 && !recog_data.is_operator[j]
3713 /* Don't count an input operand that is constrained to match
3714 the early clobber operand. */
3715 && ! (this_alternative_matches[j] == i
3716 && rtx_equal_p (recog_data.operand[i],
3717 recog_data.operand[j]))
3718 /* Is it altered by storing the earlyclobber operand? */
3719 && !immune_p (recog_data.operand[j], recog_data.operand[i],
3720 early_data))
3722 /* If the output is in a non-empty few-regs class,
3723 it's costly to reload it, so reload the input instead. */
3724 if (small_register_class_p (this_alternative[i])
3725 && (REG_P (recog_data.operand[j])
3726 || GET_CODE (recog_data.operand[j]) == SUBREG))
3728 losers++;
3729 this_alternative_win[j] = 0;
3730 this_alternative_match_win[j] = 0;
3732 else
3733 break;
3735 /* If an earlyclobber operand conflicts with something,
3736 it must be reloaded, so request this and count the cost. */
3737 if (j != noperands)
3739 losers++;
3740 this_alternative_win[i] = 0;
3741 this_alternative_match_win[j] = 0;
3742 for (j = 0; j < noperands; j++)
3743 if (this_alternative_matches[j] == i
3744 && this_alternative_match_win[j])
3746 this_alternative_win[j] = 0;
3747 this_alternative_match_win[j] = 0;
3748 losers++;
3753 /* If one alternative accepts all the operands, no reload required,
3754 choose that alternative; don't consider the remaining ones. */
3755 if (losers == 0)
3757 /* Unswap these so that they are never swapped at `finish'. */
3758 if (swapped)
3760 recog_data.operand[commutative] = substed_operand[commutative];
3761 recog_data.operand[commutative + 1]
3762 = substed_operand[commutative + 1];
3764 for (i = 0; i < noperands; i++)
3766 goal_alternative_win[i] = this_alternative_win[i];
3767 goal_alternative_match_win[i] = this_alternative_match_win[i];
3768 goal_alternative[i] = this_alternative[i];
3769 goal_alternative_offmemok[i] = this_alternative_offmemok[i];
3770 goal_alternative_matches[i] = this_alternative_matches[i];
3771 goal_alternative_earlyclobber[i]
3772 = this_alternative_earlyclobber[i];
3774 goal_alternative_number = this_alternative_number;
3775 goal_alternative_swapped = swapped;
3776 goal_earlyclobber = this_earlyclobber;
3777 goto finish;
3780 /* REJECT, set by the ! and ? constraint characters and when a register
3781 would be reloaded into a non-preferred class, discourages the use of
3782 this alternative for a reload goal. REJECT is incremented by six
3783 for each ? and two for each non-preferred class. */
3784 losers = losers * 6 + reject;
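/* Worked example (illustrative, not from the original source): with
   this weighting an alternative needing one reload but carrying a
   single '?' scores 1*6 + 6 == 12, the same as an alternative needing
   two reloads and no '?' at all (2*6 + 0 == 12), so a '?' costs about
   as much as one extra reload.  */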
3786 /* If this alternative can be made to work by reloading,
3787 and it needs less reloading than the others checked so far,
3788 record it as the chosen goal for reloading. */
3789 if (! bad)
3791 if (best > losers)
3793 for (i = 0; i < noperands; i++)
3795 goal_alternative[i] = this_alternative[i];
3796 goal_alternative_win[i] = this_alternative_win[i];
3797 goal_alternative_match_win[i]
3798 = this_alternative_match_win[i];
3799 goal_alternative_offmemok[i]
3800 = this_alternative_offmemok[i];
3801 goal_alternative_matches[i] = this_alternative_matches[i];
3802 goal_alternative_earlyclobber[i]
3803 = this_alternative_earlyclobber[i];
3805 goal_alternative_swapped = swapped;
3806 best = losers;
3807 goal_alternative_number = this_alternative_number;
3808 goal_earlyclobber = this_earlyclobber;
3812 if (swapped)
3814 /* If the commutative operands have been swapped, swap
3815 them back in order to check the next alternative. */
3816 recog_data.operand[commutative] = substed_operand[commutative];
3817 recog_data.operand[commutative + 1] = substed_operand[commutative + 1];
3818 /* Unswap the duplicates too. */
3819 for (i = 0; i < recog_data.n_dups; i++)
3820 if (recog_data.dup_num[i] == commutative
3821 || recog_data.dup_num[i] == commutative + 1)
3822 *recog_data.dup_loc[i]
3823 = recog_data.operand[(int) recog_data.dup_num[i]];
3825 /* Unswap the operand related information as well. */
3826 std::swap (preferred_class[commutative],
3827 preferred_class[commutative + 1]);
3828 std::swap (pref_or_nothing[commutative],
3829 pref_or_nothing[commutative + 1]);
3830 std::swap (address_reloaded[commutative],
3831 address_reloaded[commutative + 1]);
3836 /* The operands don't meet the constraints.
3837 goal_alternative describes the alternative
3838 that we could reach by reloading the fewest operands.
3839 Reload so as to fit it. */
3841 if (best == MAX_RECOG_OPERANDS * 2 + 600)
3843 /* No alternative works with reloads?? */
3844 if (insn_code_number >= 0)
3845 fatal_insn ("unable to generate reloads for:", insn);
3846 error_for_asm (insn, "inconsistent operand constraints in an %<asm%>");
3847 /* Avoid further trouble with this insn. */
3848 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
3849 n_reloads = 0;
3850 return 0;
3853 /* Jump to `finish' from above if all operands are valid already.
3854 In that case, goal_alternative_win is all 1. */
3855 finish:
3857 /* Right now, for any pair of operands I and J that are required to match,
3858 with I < J,
3859 goal_alternative_matches[J] is I.
3860 Set up goal_alternative_matched as the inverse function:
3861 goal_alternative_matched[I] = J. */
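/* For example (illustrative): if operand 2 carried the matching
   constraint "0" and still needs a reload, goal_alternative_matches[2]
   is 0, and the loop below that checks goal_alternative_win records
   goal_alternative_matched[0] = 2.  */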
3863 for (i = 0; i < noperands; i++)
3864 goal_alternative_matched[i] = -1;
3866 for (i = 0; i < noperands; i++)
3867 if (! goal_alternative_win[i]
3868 && goal_alternative_matches[i] >= 0)
3869 goal_alternative_matched[goal_alternative_matches[i]] = i;
3871 for (i = 0; i < noperands; i++)
3872 goal_alternative_win[i] |= goal_alternative_match_win[i];
3874 /* If the best alternative is with operands 1 and 2 swapped,
3875 consider them swapped before reporting the reloads. Update the
3876 operand numbers of any reloads already pushed. */
3878 if (goal_alternative_swapped)
3880 std::swap (substed_operand[commutative],
3881 substed_operand[commutative + 1]);
3882 std::swap (recog_data.operand[commutative],
3883 recog_data.operand[commutative + 1]);
3884 std::swap (*recog_data.operand_loc[commutative],
3885 *recog_data.operand_loc[commutative + 1]);
3887 for (i = 0; i < recog_data.n_dups; i++)
3888 if (recog_data.dup_num[i] == commutative
3889 || recog_data.dup_num[i] == commutative + 1)
3890 *recog_data.dup_loc[i]
3891 = recog_data.operand[(int) recog_data.dup_num[i]];
3893 for (i = 0; i < n_reloads; i++)
3895 if (rld[i].opnum == commutative)
3896 rld[i].opnum = commutative + 1;
3897 else if (rld[i].opnum == commutative + 1)
3898 rld[i].opnum = commutative;
3902 for (i = 0; i < noperands; i++)
3904 operand_reloadnum[i] = -1;
3906 /* If this is an earlyclobber operand, we need to widen the scope.
3907 The reload must remain valid from the start of the insn being
3908 reloaded until after the operand is stored into its destination.
3909 We approximate this with RELOAD_OTHER even though we know that we
3910 do not conflict with RELOAD_FOR_INPUT_ADDRESS reloads.
3912 One special case that is worth checking is when we have an
3913 output that is earlyclobber but isn't used past the insn (typically
3914 a SCRATCH).  In this case, we need only have the reload live
3915 through the insn itself, but not for any of our input or output
3916 reloads.
3917 But we must not accidentally narrow the scope of an existing
3918 RELOAD_OTHER reload - leave these alone.
3920 In any case, anything needed to address this operand can remain
3921 categorized however it was previously. */
3923 if (goal_alternative_earlyclobber[i] && operand_type[i] != RELOAD_OTHER)
3924 operand_type[i]
3925 = (find_reg_note (insn, REG_UNUSED, recog_data.operand[i])
3926 ? RELOAD_FOR_INSN : RELOAD_OTHER);
3929 /* Any constants that aren't allowed and can't be reloaded
3930 into registers are here changed into memory references. */
3931 for (i = 0; i < noperands; i++)
3932 if (! goal_alternative_win[i])
3934 rtx op = recog_data.operand[i];
3935 rtx subreg = NULL_RTX;
3936 rtx plus = NULL_RTX;
3937 machine_mode mode = operand_mode[i];
3939 /* Reloads of SUBREGs of CONSTANT RTXs are handled later in
3940 push_reload so we have to let them pass here. */
3941 if (GET_CODE (op) == SUBREG)
3943 subreg = op;
3944 op = SUBREG_REG (op);
3945 mode = GET_MODE (op);
3948 if (GET_CODE (op) == PLUS)
3950 plus = op;
3951 op = XEXP (op, 1);
3954 if (CONST_POOL_OK_P (mode, op)
3955 && ((targetm.preferred_reload_class (op, goal_alternative[i])
3956 == NO_REGS)
3957 || no_input_reloads))
3959 int this_address_reloaded;
3960 rtx tem = force_const_mem (mode, op);
3962 /* If we stripped a SUBREG or a PLUS above add it back. */
3963 if (plus != NULL_RTX)
3964 tem = gen_rtx_PLUS (mode, XEXP (plus, 0), tem);
3966 if (subreg != NULL_RTX)
3967 tem = gen_rtx_SUBREG (operand_mode[i], tem, SUBREG_BYTE (subreg));
3969 this_address_reloaded = 0;
3970 substed_operand[i] = recog_data.operand[i]
3971 = find_reloads_toplev (tem, i, address_type[i], ind_levels,
3972 0, insn, &this_address_reloaded);
3974 /* If the alternative accepts constant pool refs directly
3975 there will be no reload needed at all. */
3976 if (plus == NULL_RTX
3977 && subreg == NULL_RTX
3978 && alternative_allows_const_pool_ref (this_address_reloaded != 1
3979 ? substed_operand[i]
3980 : NULL,
3981 recog_data.constraints[i],
3982 goal_alternative_number))
3983 goal_alternative_win[i] = 1;
3987 /* Record the values of the earlyclobber operands for the caller. */
3988 if (goal_earlyclobber)
3989 for (i = 0; i < noperands; i++)
3990 if (goal_alternative_earlyclobber[i])
3991 reload_earlyclobbers[n_earlyclobbers++] = recog_data.operand[i];
3993 /* Now record reloads for all the operands that need them. */
3994 for (i = 0; i < noperands; i++)
3995 if (! goal_alternative_win[i])
3997 /* Operands that match previous ones have already been handled. */
3998 if (goal_alternative_matches[i] >= 0)
4000 /* Handle an operand with a nonoffsettable address
4001 appearing where an offsettable address will do
4002 by reloading the address into a base register.
4004 ??? We can also do this when the operand is a register and
4005 reg_equiv_mem is not offsettable, but this is a bit tricky,
4006 so we don't bother with it. It may not be worth doing. */
4007 else if (goal_alternative_matched[i] == -1
4008 && goal_alternative_offmemok[i]
4009 && MEM_P (recog_data.operand[i]))
4011 /* If the address to be reloaded is a VOIDmode constant,
4012 use the default address mode as mode of the reload register,
4013 as would have been done by find_reloads_address. */
4014 addr_space_t as = MEM_ADDR_SPACE (recog_data.operand[i]);
4015 machine_mode address_mode;
4017 address_mode = get_address_mode (recog_data.operand[i]);
4018 operand_reloadnum[i]
4019 = push_reload (XEXP (recog_data.operand[i], 0), NULL_RTX,
4020 &XEXP (recog_data.operand[i], 0), (rtx*) 0,
4021 base_reg_class (VOIDmode, as, MEM, SCRATCH, insn),
4022 address_mode,
4023 VOIDmode, 0, 0, i, RELOAD_OTHER);
4024 rld[operand_reloadnum[i]].inc
4025 = GET_MODE_SIZE (GET_MODE (recog_data.operand[i]));
4027 /* If this operand is an output, we will have made any
4028 reloads for its address as RELOAD_FOR_OUTPUT_ADDRESS, but
4029 now we are treating part of the operand as an input, so
4030 we must change these to RELOAD_FOR_OTHER_ADDRESS. */
4032 if (modified[i] == RELOAD_WRITE)
4034 for (j = 0; j < n_reloads; j++)
4036 if (rld[j].opnum == i)
4038 if (rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS)
4039 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4040 else if (rld[j].when_needed
4041 == RELOAD_FOR_OUTADDR_ADDRESS)
4042 rld[j].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4047 else if (goal_alternative_matched[i] == -1)
4049 operand_reloadnum[i]
4050 = push_reload ((modified[i] != RELOAD_WRITE
4051 ? recog_data.operand[i] : 0),
4052 (modified[i] != RELOAD_READ
4053 ? recog_data.operand[i] : 0),
4054 (modified[i] != RELOAD_WRITE
4055 ? recog_data.operand_loc[i] : 0),
4056 (modified[i] != RELOAD_READ
4057 ? recog_data.operand_loc[i] : 0),
4058 (enum reg_class) goal_alternative[i],
4059 (modified[i] == RELOAD_WRITE
4060 ? VOIDmode : operand_mode[i]),
4061 (modified[i] == RELOAD_READ
4062 ? VOIDmode : operand_mode[i]),
4063 (insn_code_number < 0 ? 0
4064 : insn_data[insn_code_number].operand[i].strict_low),
4065 0, i, operand_type[i]);
4067 /* In a matching pair of operands, one must be input only
4068 and the other must be output only.
4069 Pass the input operand as IN and the other as OUT. */
4070 else if (modified[i] == RELOAD_READ
4071 && modified[goal_alternative_matched[i]] == RELOAD_WRITE)
4073 operand_reloadnum[i]
4074 = push_reload (recog_data.operand[i],
4075 recog_data.operand[goal_alternative_matched[i]],
4076 recog_data.operand_loc[i],
4077 recog_data.operand_loc[goal_alternative_matched[i]],
4078 (enum reg_class) goal_alternative[i],
4079 operand_mode[i],
4080 operand_mode[goal_alternative_matched[i]],
4081 0, 0, i, RELOAD_OTHER);
4082 operand_reloadnum[goal_alternative_matched[i]] = output_reloadnum;
4084 else if (modified[i] == RELOAD_WRITE
4085 && modified[goal_alternative_matched[i]] == RELOAD_READ)
4087 operand_reloadnum[goal_alternative_matched[i]]
4088 = push_reload (recog_data.operand[goal_alternative_matched[i]],
4089 recog_data.operand[i],
4090 recog_data.operand_loc[goal_alternative_matched[i]],
4091 recog_data.operand_loc[i],
4092 (enum reg_class) goal_alternative[i],
4093 operand_mode[goal_alternative_matched[i]],
4094 operand_mode[i],
4095 0, 0, i, RELOAD_OTHER);
4096 operand_reloadnum[i] = output_reloadnum;
4098 else
4100 gcc_assert (insn_code_number < 0);
4101 error_for_asm (insn, "inconsistent operand constraints "
4102 "in an %<asm%>");
4103 /* Avoid further trouble with this insn. */
4104 PATTERN (insn) = gen_rtx_USE (VOIDmode, const0_rtx);
4105 n_reloads = 0;
4106 return 0;
4109 else if (goal_alternative_matched[i] < 0
4110 && goal_alternative_matches[i] < 0
4111 && address_operand_reloaded[i] != 1
4112 && optimize)
4114 /* For each non-matching operand that's a MEM or a pseudo-register
4115 that didn't get a hard register, make an optional reload.
4116 This may get done even if the insn needs no reloads otherwise. */
4118 rtx operand = recog_data.operand[i];
4120 while (GET_CODE (operand) == SUBREG)
4121 operand = SUBREG_REG (operand);
4122 if ((MEM_P (operand)
4123 || (REG_P (operand)
4124 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4125 /* If this is only for an output, the optional reload would not
4126 actually cause us to use a register now, just note that
4127 something is stored here. */
4128 && (goal_alternative[i] != NO_REGS
4129 || modified[i] == RELOAD_WRITE)
4130 && ! no_input_reloads
4131 /* An optional output reload might allow INSN to be deleted later.
4132 We must not make in-out reloads on insns that are not permitted
4133 to have output reloads.
4134 If this is an asm, we can't delete it; we must not even call
4135 push_reload for an optional output reload in this case,
4136 because we can't be sure that the constraint allows a register,
4137 and push_reload verifies the constraints for asms. */
4138 && (modified[i] == RELOAD_READ
4139 || (! no_output_reloads && ! this_insn_is_asm)))
4140 operand_reloadnum[i]
4141 = push_reload ((modified[i] != RELOAD_WRITE
4142 ? recog_data.operand[i] : 0),
4143 (modified[i] != RELOAD_READ
4144 ? recog_data.operand[i] : 0),
4145 (modified[i] != RELOAD_WRITE
4146 ? recog_data.operand_loc[i] : 0),
4147 (modified[i] != RELOAD_READ
4148 ? recog_data.operand_loc[i] : 0),
4149 (enum reg_class) goal_alternative[i],
4150 (modified[i] == RELOAD_WRITE
4151 ? VOIDmode : operand_mode[i]),
4152 (modified[i] == RELOAD_READ
4153 ? VOIDmode : operand_mode[i]),
4154 (insn_code_number < 0 ? 0
4155 : insn_data[insn_code_number].operand[i].strict_low),
4156 1, i, operand_type[i]);
4157 /* If a memory reference remains (either as a MEM or a pseudo that
4158 did not get a hard register), yet we can't make an optional
4159 reload, check if this is actually a pseudo register reference;
4160 we then need to emit a USE and/or a CLOBBER so that reload
4161 inheritance will do the right thing. */
4162 else if (replace
4163 && (MEM_P (operand)
4164 || (REG_P (operand)
4165 && REGNO (operand) >= FIRST_PSEUDO_REGISTER
4166 && reg_renumber [REGNO (operand)] < 0)))
4168 operand = *recog_data.operand_loc[i];
4170 while (GET_CODE (operand) == SUBREG)
4171 operand = SUBREG_REG (operand);
4172 if (REG_P (operand))
4174 if (modified[i] != RELOAD_WRITE)
4175 /* We mark the USE with QImode so that we recognize
4176 it as one that can be safely deleted at the end
4177 of reload. */
4178 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, operand),
4179 insn), QImode);
4180 if (modified[i] != RELOAD_READ)
4181 emit_insn_after (gen_clobber (operand), insn);
4185 else if (goal_alternative_matches[i] >= 0
4186 && goal_alternative_win[goal_alternative_matches[i]]
4187 && modified[i] == RELOAD_READ
4188 && modified[goal_alternative_matches[i]] == RELOAD_WRITE
4189 && ! no_input_reloads && ! no_output_reloads
4190 && optimize)
4192 /* Similarly, make an optional reload for a pair of matching
4193 objects that are in MEM or a pseudo that didn't get a hard reg. */
4195 rtx operand = recog_data.operand[i];
4197 while (GET_CODE (operand) == SUBREG)
4198 operand = SUBREG_REG (operand);
4199 if ((MEM_P (operand)
4200 || (REG_P (operand)
4201 && REGNO (operand) >= FIRST_PSEUDO_REGISTER))
4202 && (goal_alternative[goal_alternative_matches[i]] != NO_REGS))
4203 operand_reloadnum[i] = operand_reloadnum[goal_alternative_matches[i]]
4204 = push_reload (recog_data.operand[goal_alternative_matches[i]],
4205 recog_data.operand[i],
4206 recog_data.operand_loc[goal_alternative_matches[i]],
4207 recog_data.operand_loc[i],
4208 (enum reg_class) goal_alternative[goal_alternative_matches[i]],
4209 operand_mode[goal_alternative_matches[i]],
4210 operand_mode[i],
4211 0, 1, goal_alternative_matches[i], RELOAD_OTHER);
4214 /* Perform whatever substitutions on the operands we are supposed
4215 to make due to commutativity or replacement of registers
4216 with equivalent constants or memory slots. */
4218 for (i = 0; i < noperands; i++)
4220 /* We only do this on the last pass through reload, because it is
4221 possible for some data (like reg_equiv_address) to be changed during
4222 later passes. Moreover, we lose the opportunity to get a useful
4223 reload_{in,out}_reg when we do these replacements. */
4225 if (replace)
4227 rtx substitution = substed_operand[i];
4229 *recog_data.operand_loc[i] = substitution;
4231 /* If we're replacing an operand with a LABEL_REF, we need to
4232 make sure that there's a REG_LABEL_OPERAND note attached to
4233 this instruction. */
4234 if (GET_CODE (substitution) == LABEL_REF
4235 && !find_reg_note (insn, REG_LABEL_OPERAND,
4236 label_ref_label (substitution))
4237 /* For a JUMP_P, if it was a branch target it must have
4238 already been recorded as such. */
4239 && (!JUMP_P (insn)
4240 || !label_is_jump_target_p (label_ref_label (substitution),
4241 insn)))
4243 add_reg_note (insn, REG_LABEL_OPERAND,
4244 label_ref_label (substitution));
4245 if (LABEL_P (label_ref_label (substitution)))
4246 ++LABEL_NUSES (label_ref_label (substitution));
4250 else
4251 retval |= (substed_operand[i] != *recog_data.operand_loc[i]);
4254 /* If this insn pattern contains any MATCH_DUP's, make sure that
4255 they will be substituted if the operands they match are substituted.
4256 Also do now any substitutions we already did on the operands.
4258 Don't do this if we aren't making replacements because we might be
4259 propagating things allocated by frame pointer elimination into places
4260 it doesn't expect. */
4262 if (insn_code_number >= 0 && replace)
4263 for (i = insn_data[insn_code_number].n_dups - 1; i >= 0; i--)
4265 int opno = recog_data.dup_num[i];
4266 *recog_data.dup_loc[i] = *recog_data.operand_loc[opno];
4267 dup_replacements (recog_data.dup_loc[i], recog_data.operand_loc[opno]);
4270 #if 0
4271 /* This loses because reloading of prior insns can invalidate the equivalence
4272 (or at least find_equiv_reg isn't smart enough to find it any more),
4273 causing this insn to need more reload regs than it needed before.
4274 It may be too late to make the reload regs available.
4275 Now this optimization is done safely in choose_reload_regs. */
4277 /* For each reload of a reg into some other class of reg,
4278 search for an existing equivalent reg (same value now) in the right class.
4279 We can use it as long as we don't need to change its contents. */
4280 for (i = 0; i < n_reloads; i++)
4281 if (rld[i].reg_rtx == 0
4282 && rld[i].in != 0
4283 && REG_P (rld[i].in)
4284 && rld[i].out == 0)
4286 rld[i].reg_rtx
4287 = find_equiv_reg (rld[i].in, insn, rld[i].rclass, -1,
4288 static_reload_reg_p, 0, rld[i].inmode);
4289 /* Prevent generation of insn to load the value
4290 because the one we found already has the value. */
4291 if (rld[i].reg_rtx)
4292 rld[i].in = rld[i].reg_rtx;
4294 #endif
4296 /* If we detected error and replaced asm instruction by USE, forget about the
4297 reloads. */
4298 if (GET_CODE (PATTERN (insn)) == USE
4299 && CONST_INT_P (XEXP (PATTERN (insn), 0)))
4300 n_reloads = 0;
4302 /* Perhaps an output reload can be combined with another
4303 to reduce needs by one. */
4304 if (!goal_earlyclobber)
4305 combine_reloads ();
4307 /* If we have a pair of reloads for parts of an address, they are reloading
4308 the same object, the operands themselves were not reloaded, and they
4309 are for two operands that are supposed to match, merge the reloads and
4310 change the type of the surviving reload to RELOAD_FOR_OPERAND_ADDRESS. */
4312 for (i = 0; i < n_reloads; i++)
4314 int k;
4316 for (j = i + 1; j < n_reloads; j++)
4317 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4318 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4319 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4320 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4321 && (rld[j].when_needed == RELOAD_FOR_INPUT_ADDRESS
4322 || rld[j].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4323 || rld[j].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4324 || rld[j].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4325 && rtx_equal_p (rld[i].in, rld[j].in)
4326 && (operand_reloadnum[rld[i].opnum] < 0
4327 || rld[operand_reloadnum[rld[i].opnum]].optional)
4328 && (operand_reloadnum[rld[j].opnum] < 0
4329 || rld[operand_reloadnum[rld[j].opnum]].optional)
4330 && (goal_alternative_matches[rld[i].opnum] == rld[j].opnum
4331 || (goal_alternative_matches[rld[j].opnum]
4332 == rld[i].opnum)))
4334 for (k = 0; k < n_replacements; k++)
4335 if (replacements[k].what == j)
4336 replacements[k].what = i;
4338 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4339 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4340 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4341 else
4342 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4343 rld[j].in = 0;
4347 /* Scan all the reloads and update their type.
4348 If a reload is for the address of an operand and we didn't reload
4349 that operand, change the type. Similarly, change the operand number
4350 of a reload when two operands match. If a reload is optional, treat it
4351 as though the operand isn't reloaded.
4353 ??? This latter case is somewhat odd because if we do the optional
4354 reload, it means the object is hanging around. Thus we need only
4355 do the address reload if the optional reload was NOT done.
4357 Change secondary reloads to be the address type of their operand, not
4358 the normal type.
4360 If an operand's reload is now RELOAD_OTHER, change any
4361 RELOAD_FOR_INPUT_ADDRESS reloads of that operand to
4362 RELOAD_FOR_OTHER_ADDRESS. */
4364 for (i = 0; i < n_reloads; i++)
4366 if (rld[i].secondary_p
4367 && rld[i].when_needed == operand_type[rld[i].opnum])
4368 rld[i].when_needed = address_type[rld[i].opnum];
4370 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4371 || rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4372 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4373 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4374 && (operand_reloadnum[rld[i].opnum] < 0
4375 || rld[operand_reloadnum[rld[i].opnum]].optional))
4377 /* If we have a secondary reload to go along with this reload,
4378 change its type to RELOAD_FOR_OPADDR_ADDR. */
4380 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4381 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4382 && rld[i].secondary_in_reload != -1)
4384 int secondary_in_reload = rld[i].secondary_in_reload;
4386 rld[secondary_in_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4388 /* If there's a tertiary reload we have to change it also. */
4389 if (secondary_in_reload > 0
4390 && rld[secondary_in_reload].secondary_in_reload != -1)
4391 rld[rld[secondary_in_reload].secondary_in_reload].when_needed
4392 = RELOAD_FOR_OPADDR_ADDR;
4395 if ((rld[i].when_needed == RELOAD_FOR_OUTPUT_ADDRESS
4396 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4397 && rld[i].secondary_out_reload != -1)
4399 int secondary_out_reload = rld[i].secondary_out_reload;
4401 rld[secondary_out_reload].when_needed = RELOAD_FOR_OPADDR_ADDR;
4403 /* If there's a tertiary reload we have to change it also. */
4404 if (secondary_out_reload
4405 && rld[secondary_out_reload].secondary_out_reload != -1)
4406 rld[rld[secondary_out_reload].secondary_out_reload].when_needed
4407 = RELOAD_FOR_OPADDR_ADDR;
4410 if (rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS
4411 || rld[i].when_needed == RELOAD_FOR_OUTADDR_ADDRESS)
4412 rld[i].when_needed = RELOAD_FOR_OPADDR_ADDR;
4413 else
4414 rld[i].when_needed = RELOAD_FOR_OPERAND_ADDRESS;
4417 if ((rld[i].when_needed == RELOAD_FOR_INPUT_ADDRESS
4418 || rld[i].when_needed == RELOAD_FOR_INPADDR_ADDRESS)
4419 && operand_reloadnum[rld[i].opnum] >= 0
4420 && (rld[operand_reloadnum[rld[i].opnum]].when_needed
4421 == RELOAD_OTHER))
4422 rld[i].when_needed = RELOAD_FOR_OTHER_ADDRESS;
4424 if (goal_alternative_matches[rld[i].opnum] >= 0)
4425 rld[i].opnum = goal_alternative_matches[rld[i].opnum];
4428 /* Scan all the reloads, and check for RELOAD_FOR_OPERAND_ADDRESS reloads.
4429 If we have more than one, then convert all RELOAD_FOR_OPADDR_ADDR
4430 reloads to RELOAD_FOR_OPERAND_ADDRESS reloads.
4432 choose_reload_regs assumes that RELOAD_FOR_OPADDR_ADDR reloads never
4433 conflict with RELOAD_FOR_OPERAND_ADDRESS reloads. This is true for a
4434 single pair of RELOAD_FOR_OPADDR_ADDR/RELOAD_FOR_OPERAND_ADDRESS reloads.
4435 However, if there is more than one RELOAD_FOR_OPERAND_ADDRESS reload,
4436 then a RELOAD_FOR_OPADDR_ADDR reload conflicts with all
4437 RELOAD_FOR_OPERAND_ADDRESS reloads other than the one that uses it.
4438 This is complicated by the fact that a single operand can have more
4439 than one RELOAD_FOR_OPERAND_ADDRESS reload. It is very difficult to fix
4440 choose_reload_regs without affecting code quality, and cases that
4441 actually fail are extremely rare, so it turns out to be better to fix
4442 the problem here by not generating cases that choose_reload_regs will
4443 fail for. */
4444 /* There is a similar problem with RELOAD_FOR_INPUT_ADDRESS /
4445 RELOAD_FOR_OUTPUT_ADDRESS when there is more than one of a kind for
4446 a single operand.
4447 We can reduce the register pressure by exploiting that a
4448 RELOAD_FOR_X_ADDR_ADDR that precedes all RELOAD_FOR_X_ADDRESS reloads
4449 does not conflict with any of them, if it is only used for the first of
4450 the RELOAD_FOR_X_ADDRESS reloads. */
4452 int first_op_addr_num = -2;
4453 int first_inpaddr_num[MAX_RECOG_OPERANDS];
4454 int first_outpaddr_num[MAX_RECOG_OPERANDS];
4455 int need_change = 0;
4456 /* We use first_op_addr_num and the contents of the above arrays
4457 first as flags - -2 means no instance encountered, -1 means exactly
4458 one instance encountered.
4459 If more than one instance has been encountered, we store the reload
4460 number of the first reload of the kind in question; reload numbers
4461 are known to be non-negative. */
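/* Illustrative walk-through: if reloads 3 and 7 are both
   RELOAD_FOR_INPUT_ADDRESS for the same operand, the descending scan
   below bumps the flag from -2 to -1 when it reaches reload 7, and at
   reload 3 the increment makes it non-negative, so 3 (the first such
   reload) is recorded and need_change is set.  */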
4462 for (i = 0; i < noperands; i++)
4463 first_inpaddr_num[i] = first_outpaddr_num[i] = -2;
4464 for (i = n_reloads - 1; i >= 0; i--)
4466 switch (rld[i].when_needed)
4468 case RELOAD_FOR_OPERAND_ADDRESS:
4469 if (++first_op_addr_num >= 0)
4471 first_op_addr_num = i;
4472 need_change = 1;
4474 break;
4475 case RELOAD_FOR_INPUT_ADDRESS:
4476 if (++first_inpaddr_num[rld[i].opnum] >= 0)
4478 first_inpaddr_num[rld[i].opnum] = i;
4479 need_change = 1;
4481 break;
4482 case RELOAD_FOR_OUTPUT_ADDRESS:
4483 if (++first_outpaddr_num[rld[i].opnum] >= 0)
4485 first_outpaddr_num[rld[i].opnum] = i;
4486 need_change = 1;
4488 break;
4489 default:
4490 break;
4494 if (need_change)
4496 for (i = 0; i < n_reloads; i++)
4498 int first_num;
4499 enum reload_type type;
4501 switch (rld[i].when_needed)
4503 case RELOAD_FOR_OPADDR_ADDR:
4504 first_num = first_op_addr_num;
4505 type = RELOAD_FOR_OPERAND_ADDRESS;
4506 break;
4507 case RELOAD_FOR_INPADDR_ADDRESS:
4508 first_num = first_inpaddr_num[rld[i].opnum];
4509 type = RELOAD_FOR_INPUT_ADDRESS;
4510 break;
4511 case RELOAD_FOR_OUTADDR_ADDRESS:
4512 first_num = first_outpaddr_num[rld[i].opnum];
4513 type = RELOAD_FOR_OUTPUT_ADDRESS;
4514 break;
4515 default:
4516 continue;
4518 if (first_num < 0)
4519 continue;
4520 else if (i > first_num)
4521 rld[i].when_needed = type;
4522 else
4524 /* Check if the only TYPE reload that uses reload I is
4525 reload FIRST_NUM. */
4526 for (j = n_reloads - 1; j > first_num; j--)
4528 if (rld[j].when_needed == type
4529 && (rld[i].secondary_p
4530 ? rld[j].secondary_in_reload == i
4531 : reg_mentioned_p (rld[i].in, rld[j].in)))
4533 rld[i].when_needed = type;
4534 break;
4542 /* See if we have any reloads that are now allowed to be merged
4543 because we've changed when the reload is needed to
4544 RELOAD_FOR_OPERAND_ADDRESS or RELOAD_FOR_OTHER_ADDRESS. Only
4545 check for the most common cases. */
4547 for (i = 0; i < n_reloads; i++)
4548 if (rld[i].in != 0 && rld[i].out == 0
4549 && (rld[i].when_needed == RELOAD_FOR_OPERAND_ADDRESS
4550 || rld[i].when_needed == RELOAD_FOR_OPADDR_ADDR
4551 || rld[i].when_needed == RELOAD_FOR_OTHER_ADDRESS))
4552 for (j = 0; j < n_reloads; j++)
4553 if (i != j && rld[j].in != 0 && rld[j].out == 0
4554 && rld[j].when_needed == rld[i].when_needed
4555 && MATCHES (rld[i].in, rld[j].in)
4556 && rld[i].rclass == rld[j].rclass
4557 && !rld[i].nocombine && !rld[j].nocombine
4558 && rld[i].reg_rtx == rld[j].reg_rtx)
4560 rld[i].opnum = MIN (rld[i].opnum, rld[j].opnum);
4561 transfer_replacements (i, j);
4562 rld[j].in = 0;
4565 /* Compute reload_mode and reload_nregs. */
4566 for (i = 0; i < n_reloads; i++)
4568 rld[i].mode = rld[i].inmode;
4569 if (rld[i].mode == VOIDmode
4570 || partial_subreg_p (rld[i].mode, rld[i].outmode))
4571 rld[i].mode = rld[i].outmode;
4573 rld[i].nregs = ira_reg_class_max_nregs [rld[i].rclass][rld[i].mode];
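/* Illustrative: on a typical 32-bit target, a DImode reload into a
   class of word-sized registers gives rld[i].nregs == 2 here.  */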
4576 /* Special case a simple move with an input reload and a
4577 destination of a hard reg; if the hard reg is ok, use it. */
4578 for (i = 0; i < n_reloads; i++)
4579 if (rld[i].when_needed == RELOAD_FOR_INPUT
4580 && GET_CODE (PATTERN (insn)) == SET
4581 && REG_P (SET_DEST (PATTERN (insn)))
4582 && (SET_SRC (PATTERN (insn)) == rld[i].in
4583 || SET_SRC (PATTERN (insn)) == rld[i].in_reg)
4584 && !elimination_target_reg_p (SET_DEST (PATTERN (insn))))
4586 rtx dest = SET_DEST (PATTERN (insn));
4587 unsigned int regno = REGNO (dest);
4589 if (regno < FIRST_PSEUDO_REGISTER
4590 && TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno)
4591 && targetm.hard_regno_mode_ok (regno, rld[i].mode))
4593 int nr = hard_regno_nregs (regno, rld[i].mode);
4594 int ok = 1, nri;
4596 for (nri = 1; nri < nr; nri ++)
4597 if (! TEST_HARD_REG_BIT (reg_class_contents[rld[i].rclass], regno + nri))
4599 ok = 0;
4600 break;
4603 if (ok)
4604 rld[i].reg_rtx = dest;
4608 return retval;
4611 /* Return true if alternative number ALTNUM in constraint-string
4612 CONSTRAINT is guaranteed to accept a reloaded constant-pool reference.
4613 MEM gives the reference if its address hasn't been fully reloaded,
4614 otherwise it is NULL. */
4616 static bool
4617 alternative_allows_const_pool_ref (rtx mem ATTRIBUTE_UNUSED,
4618 const char *constraint, int altnum)
4620 int c;
4622 /* Skip alternatives before the one requested. */
4623 while (altnum > 0)
4625 while (*constraint++ != ',')
4627 altnum--;
4629 /* Scan the requested alternative for TARGET_MEM_CONSTRAINT or 'o'.
4630 If one of them is present, this alternative accepts the result of
4631 passing a constant-pool reference through find_reloads_toplev.
4633 The same is true of extra memory constraints if the address
4634 was reloaded into a register. However, the target may elect
4635 to disallow the original constant address, forcing it to be
4636 reloaded into a register instead. */
4637 for (; (c = *constraint) && c != ',' && c != '#';
4638 constraint += CONSTRAINT_LEN (c, constraint))
4640 enum constraint_num cn = lookup_constraint (constraint);
4641 if (insn_extra_memory_constraint (cn)
4642 && (mem == NULL || constraint_satisfied_p (mem, cn)))
4643 return true;
4645 return false;
4648 /* Scan X for memory references and scan the addresses for reloading.
4649 Also checks for references to "constant" regs that we want to eliminate
4650 and replaces them with the values they stand for.
4651 We may alter X destructively if it contains a reference to such.
4652 If X is just a constant reg, we return the equivalent value
4653 instead of X.
4655 IND_LEVELS says how many levels of indirect addressing this machine
4656 supports.
4658 OPNUM and TYPE identify the purpose of the reload.
4660 IS_SET_DEST is true if X is the destination of a SET, which is not
4661 appropriate to be replaced by a constant.
4663 INSN, if nonzero, is the insn in which we do the reload. It is used
4664 to determine if we may generate output reloads, and where to put USEs
4665 for pseudos that we have to replace with stack slots.
4667 ADDRESS_RELOADED, if nonzero, is a pointer to where we put the
4668 result of find_reloads_address. */
4670 static rtx
4671 find_reloads_toplev (rtx x, int opnum, enum reload_type type,
4672 int ind_levels, int is_set_dest, rtx_insn *insn,
4673 int *address_reloaded)
4675 RTX_CODE code = GET_CODE (x);
4677 const char *fmt = GET_RTX_FORMAT (code);
4678 int i;
4679 int copied;
4681 if (code == REG)
4683 /* This code is duplicated for speed in find_reloads. */
4684 int regno = REGNO (x);
4685 if (reg_equiv_constant (regno) != 0 && !is_set_dest)
4686 x = reg_equiv_constant (regno);
4687 #if 0
4688 /* This creates (subreg (mem...)) which would cause an unnecessary
4689 reload of the mem. */
4690 else if (reg_equiv_mem (regno) != 0)
4691 x = reg_equiv_mem (regno);
4692 #endif
4693 else if (reg_equiv_memory_loc (regno)
4694 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
4696 rtx mem = make_memloc (x, regno);
4697 if (reg_equiv_address (regno)
4698 || ! rtx_equal_p (mem, reg_equiv_mem (regno)))
4700 /* If this is not a toplevel operand, find_reloads doesn't see
4701 this substitution. We have to emit a USE of the pseudo so
4702 that delete_output_reload can see it. */
4703 if (replace_reloads && recog_data.operand[opnum] != x)
4704 /* We mark the USE with QImode so that we recognize it
4705 as one that can be safely deleted at the end of
4706 reload. */
4707 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, x), insn),
4708 QImode);
4709 x = mem;
4710 i = find_reloads_address (GET_MODE (x), &x, XEXP (x, 0), &XEXP (x, 0),
4711 opnum, type, ind_levels, insn);
4712 if (!rtx_equal_p (x, mem))
4713 push_reg_equiv_alt_mem (regno, x);
4714 if (address_reloaded)
4715 *address_reloaded = i;
4718 return x;
4720 if (code == MEM)
4722 rtx tem = x;
4724 i = find_reloads_address (GET_MODE (x), &tem, XEXP (x, 0), &XEXP (x, 0),
4725 opnum, type, ind_levels, insn);
4726 if (address_reloaded)
4727 *address_reloaded = i;
4729 return tem;
4732 if (code == SUBREG && REG_P (SUBREG_REG (x)))
4734 /* Check for SUBREG containing a REG that's equivalent to a
4735 constant. If the constant has a known value, truncate it
4736 right now. Similarly if we are extracting a single-word of a
4737 multi-word constant. If the constant is symbolic, allow it
4738 to be substituted normally. push_reload will strip the
4739 subreg later. The constant must not be VOIDmode, because we
4740 will lose the mode of the register (this should never happen
4741 because one of the cases above should handle it). */
4743 int regno = REGNO (SUBREG_REG (x));
4744 rtx tem;
4746 if (regno >= FIRST_PSEUDO_REGISTER
4747 && reg_renumber[regno] < 0
4748 && reg_equiv_constant (regno) != 0)
4750 tem =
4751 simplify_gen_subreg (GET_MODE (x), reg_equiv_constant (regno),
4752 GET_MODE (SUBREG_REG (x)), SUBREG_BYTE (x));
4753 gcc_assert (tem);
4754 if (CONSTANT_P (tem)
4755 && !targetm.legitimate_constant_p (GET_MODE (x), tem))
4757 tem = force_const_mem (GET_MODE (x), tem);
4758 i = find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4759 &XEXP (tem, 0), opnum, type,
4760 ind_levels, insn);
4761 if (address_reloaded)
4762 *address_reloaded = i;
4764 return tem;
4767 /* If the subreg contains a reg that will be converted to a mem,
4768 attempt to convert the whole subreg to a (narrower or wider)
4769 memory reference instead. If this succeeds, we're done --
4770 otherwise fall through to check whether the inner reg still
4771 needs address reloads anyway. */
4773 if (regno >= FIRST_PSEUDO_REGISTER
4774 && reg_equiv_memory_loc (regno) != 0)
4776 tem = find_reloads_subreg_address (x, opnum, type, ind_levels,
4777 insn, address_reloaded);
4778 if (tem)
4779 return tem;
4783 for (copied = 0, i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4785 if (fmt[i] == 'e')
4787 rtx new_part = find_reloads_toplev (XEXP (x, i), opnum, type,
4788 ind_levels, is_set_dest, insn,
4789 address_reloaded);
4790 /* If we have replaced a reg with its equivalent memory loc -
4791 that can still be handled here e.g. if it's in a paradoxical
4792 subreg - we must make the change in a copy, rather than using
4793 a destructive change. This way, find_reloads can still elect
4794 not to do the change. */
4795 if (new_part != XEXP (x, i) && ! CONSTANT_P (new_part) && ! copied)
4797 x = shallow_copy_rtx (x);
4798 copied = 1;
4800 XEXP (x, i) = new_part;
4803 return x;
4806 /* Return a mem ref for the memory equivalent of reg REGNO.
4807 This mem ref is not shared with anything. */
4809 static rtx
4810 make_memloc (rtx ad, int regno)
4812 /* We must rerun eliminate_regs, in case the elimination
4813 offsets have changed. */
4814 rtx tem
4815 = XEXP (eliminate_regs (reg_equiv_memory_loc (regno), VOIDmode, NULL_RTX),
4816 0);
4818 /* If TEM might contain a pseudo, we must copy it to avoid
4819 modifying it when we do the substitution for the reload. */
4820 if (rtx_varies_p (tem, 0))
4821 tem = copy_rtx (tem);
4823 tem = replace_equiv_address_nv (reg_equiv_memory_loc (regno), tem);
4824 tem = adjust_address_nv (tem, GET_MODE (ad), 0);
4826 /* Copy the result if it's still the same as the equivalence, to avoid
4827 modifying it when we do the substitution for the reload. */
4828 if (tem == reg_equiv_memory_loc (regno))
4829 tem = copy_rtx (tem);
4830 return tem;
4833 /* Returns true if AD could be turned into a valid memory reference
4834 to mode MODE in address space AS by reloading the part pointed to
4835 by PART into a register. */
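/* Illustrative usage note: for an address such as
   (plus (mem:SI X) (const_int 4)), passing &XEXP (ad, 0) as PART asks
   whether (plus (reg) (const_int 4)) would be a valid address, i.e.
   whether reloading the inner MEM into a register would make the whole
   address legitimate, without actually generating that reload.  */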
4837 static bool
4838 maybe_memory_address_addr_space_p (machine_mode mode, rtx ad,
4839 addr_space_t as, rtx *part)
4841 bool retv;
4842 rtx tem = *part;
4843 rtx reg = gen_rtx_REG (GET_MODE (tem), max_reg_num ());
4845 *part = reg;
4846 retv = memory_address_addr_space_p (mode, ad, as);
4847 *part = tem;
4849 return retv;
4852 /* Record all reloads needed for handling memory address AD
4853 which appears in *LOC in a memory reference to mode MODE
4854 which itself is found in location *MEMREFLOC.
4855 Note that we take shortcuts assuming that no multi-reg machine mode
4856 occurs as part of an address.
4858 OPNUM and TYPE specify the purpose of this reload.
4860 IND_LEVELS says how many levels of indirect addressing this machine
4861 supports.
4863 INSN, if nonzero, is the insn in which we do the reload. It is used
4864 to determine if we may generate output reloads, and where to put USEs
4865 for pseudos that we have to replace with stack slots.
4867 Value is one if this address is reloaded or replaced as a whole; it is
4868 zero if the top level of this address was not reloaded or replaced, and
4869 it is -1 if it may or may not have been reloaded or replaced.
4871 Note that there is no verification that the address will be valid after
4872 this routine does its work. Instead, we rely on the fact that the address
4873 was valid when reload started. So we need only undo things that reload
4874 could have broken. These are wrong register types, pseudos not allocated
4875 to a hard register, and frame pointer elimination. */
4877 static int
4878 find_reloads_address (machine_mode mode, rtx *memrefloc, rtx ad,
4879 rtx *loc, int opnum, enum reload_type type,
4880 int ind_levels, rtx_insn *insn)
4882 addr_space_t as = memrefloc? MEM_ADDR_SPACE (*memrefloc)
4883 : ADDR_SPACE_GENERIC;
4884 int regno;
4885 int removed_and = 0;
4886 int op_index;
4887 rtx tem;
4889 /* If the address is a register, see if it is a legitimate address and
4890 reload if not. We first handle the cases where we need not reload
4891 or where we must reload in a non-standard way. */
4893 if (REG_P (ad))
4895 regno = REGNO (ad);
4897 if (reg_equiv_constant (regno) != 0)
4899 find_reloads_address_part (reg_equiv_constant (regno), loc,
4900 base_reg_class (mode, as, MEM,
4901 SCRATCH, insn),
4902 GET_MODE (ad), opnum, type, ind_levels);
4903 return 1;
4906 tem = reg_equiv_memory_loc (regno);
4907 if (tem != 0)
4909 if (reg_equiv_address (regno) != 0 || num_not_at_initial_offset)
4911 tem = make_memloc (ad, regno);
4912 if (! strict_memory_address_addr_space_p (GET_MODE (tem),
4913 XEXP (tem, 0),
4914 MEM_ADDR_SPACE (tem)))
4916 rtx orig = tem;
4918 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
4919 &XEXP (tem, 0), opnum,
4920 ADDR_TYPE (type), ind_levels, insn);
4921 if (!rtx_equal_p (tem, orig))
4922 push_reg_equiv_alt_mem (regno, tem);
4924 /* We can avoid a reload if the register's equivalent memory
4925 expression is valid as an indirect memory address.
4926 But not all addresses are valid in a mem used as an indirect
4927 address: only reg or reg+constant. */
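/* Illustrative sketch only (register numbers and offsets are made up): an
   equivalence such as
     (mem:SI (plus:SI (reg:SI fp) (const_int -8)))
   satisfies the reg or reg+constant requirement above, whereas
     (mem:SI (plus:SI (reg:SI fp) (reg:SI 101)))
   does not, and the code then falls through to the push_reload below.  */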
4929 if (ind_levels > 0
4930 && strict_memory_address_addr_space_p (mode, tem, as)
4931 && (REG_P (XEXP (tem, 0))
4932 || (GET_CODE (XEXP (tem, 0)) == PLUS
4933 && REG_P (XEXP (XEXP (tem, 0), 0))
4934 && CONSTANT_P (XEXP (XEXP (tem, 0), 1)))))
4936 /* TEM is not the same as what we'll be replacing the
4937 pseudo with after reload, put a USE in front of INSN
4938 in the final reload pass. */
4939 if (replace_reloads
4940 && num_not_at_initial_offset
4941 && ! rtx_equal_p (tem, reg_equiv_mem (regno)))
4943 *loc = tem;
4944 /* We mark the USE with QImode so that we
4945 recognize it as one that can be safely
4946 deleted at the end of reload. */
4947 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad),
4948 insn), QImode);
4950 /* This doesn't really count as replacing the address
4951 as a whole, since it is still a memory access. */
4953 return 0;
4955 ad = tem;
4959 /* The only remaining case where we can avoid a reload is if this is a
4960 hard register that is valid as a base register and which is not the
4961 subject of a CLOBBER in this insn. */
4963 else if (regno < FIRST_PSEUDO_REGISTER
4964 && regno_ok_for_base_p (regno, mode, as, MEM, SCRATCH)
4965 && ! regno_clobbered_p (regno, this_insn, mode, 0))
4966 return 0;
4968 /* If we do not have one of the cases above, we must do the reload. */
4969 push_reload (ad, NULL_RTX, loc, (rtx*) 0,
4970 base_reg_class (mode, as, MEM, SCRATCH, insn),
4971 GET_MODE (ad), VOIDmode, 0, 0, opnum, type);
4972 return 1;
4975 if (strict_memory_address_addr_space_p (mode, ad, as))
4977 /* The address appears valid, so reloads are not needed.
4978 But the address may contain an eliminable register.
4979 This can happen because a machine with indirect addressing
4980 may consider a pseudo register by itself a valid address even when
4981 it has failed to get a hard reg.
4982 So do a tree-walk to find and eliminate all such regs. */
4984 /* But first quickly dispose of a common case. */
4985 if (GET_CODE (ad) == PLUS
4986 && CONST_INT_P (XEXP (ad, 1))
4987 && REG_P (XEXP (ad, 0))
4988 && reg_equiv_constant (REGNO (XEXP (ad, 0))) == 0)
4989 return 0;
4991 subst_reg_equivs_changed = 0;
4992 *loc = subst_reg_equivs (ad, insn);
4994 if (! subst_reg_equivs_changed)
4995 return 0;
4997 /* Check result for validity after substitution. */
4998 if (strict_memory_address_addr_space_p (mode, ad, as))
4999 return 0;
5002 #ifdef LEGITIMIZE_RELOAD_ADDRESS
5005 if (memrefloc && ADDR_SPACE_GENERIC_P (as))
5007 LEGITIMIZE_RELOAD_ADDRESS (ad, GET_MODE (*memrefloc), opnum, type,
5008 ind_levels, win);
5010 break;
5011 win:
5012 *memrefloc = copy_rtx (*memrefloc);
5013 XEXP (*memrefloc, 0) = ad;
5014 move_replacements (&ad, &XEXP (*memrefloc, 0));
5015 return -1;
5017 while (0);
5018 #endif
5020 /* The address is not valid. We have to figure out why. First see if
5021 we have an outer AND and remove it if so. Then analyze what's inside. */
5023 if (GET_CODE (ad) == AND)
5025 removed_and = 1;
5026 loc = &XEXP (ad, 0);
5027 ad = *loc;
5030 /* One possibility for why the address is invalid is that it is itself
5031 a MEM. This can happen when the frame pointer is being eliminated, a
5032 pseudo is not allocated to a hard register, and the offset between the
5033 frame and stack pointers is not its initial value. In that case the
5034 pseudo will have been replaced by a MEM referring to the
5035 stack pointer. */
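/* Hedged example (register and offset values are hypothetical): after frame
   pointer elimination, a pseudo that received no hard register can leave AD
   looking like
     (mem:SI (plus:SI (reg:SI sp) (const_int 16)))
   i.e. the address is itself a MEM referring to the pseudo's stack slot,
   which is what the MEM_P test below detects.  */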
5036 if (MEM_P (ad))
5038 /* First ensure that the address in this MEM is valid. Then, unless
5039 indirect addresses are valid, reload the MEM into a register. */
5040 tem = ad;
5041 find_reloads_address (GET_MODE (ad), &tem, XEXP (ad, 0), &XEXP (ad, 0),
5042 opnum, ADDR_TYPE (type),
5043 ind_levels == 0 ? 0 : ind_levels - 1, insn);
5045 /* If tem was changed, then we must create a new memory reference to
5046 hold it and store it back into memrefloc. */
5047 if (tem != ad && memrefloc)
5049 *memrefloc = copy_rtx (*memrefloc);
5050 copy_replacements (tem, XEXP (*memrefloc, 0));
5051 loc = &XEXP (*memrefloc, 0);
5052 if (removed_and)
5053 loc = &XEXP (*loc, 0);
5056 /* Check similar cases as for indirect addresses as above except
5057 that we can allow pseudos and a MEM since they should have been
5058 taken care of above. */
5060 if (ind_levels == 0
5061 || (GET_CODE (XEXP (tem, 0)) == SYMBOL_REF && ! indirect_symref_ok)
5062 || MEM_P (XEXP (tem, 0))
5063 || ! (REG_P (XEXP (tem, 0))
5064 || (GET_CODE (XEXP (tem, 0)) == PLUS
5065 && REG_P (XEXP (XEXP (tem, 0), 0))
5066 && CONST_INT_P (XEXP (XEXP (tem, 0), 1)))))
5068 /* Must use TEM here, not AD, since it is the one that will
5069 have any subexpressions reloaded, if needed. */
5070 push_reload (tem, NULL_RTX, loc, (rtx*) 0,
5071 base_reg_class (mode, as, MEM, SCRATCH), GET_MODE (tem),
5072 VOIDmode, 0,
5073 0, opnum, type);
5074 return ! removed_and;
5076 else
5077 return 0;
5080 /* If we have address of a stack slot but it's not valid because the
5081 displacement is too large, compute the sum in a register.
5082 Handle all base registers here, not just fp/ap/sp, because on some
5083 targets (namely SH) we can also get too large displacements from
5084 big-endian corrections. */
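/* Sketch of the situation handled below (the displacement value is made up
   and target-dependent): an address such as
     (plus:SI (reg:SI sp) (const_int 100000))
   may exceed the machine's displacement range.  If reg+reg addressing is
   usable, only the constant is reloaded into an index register; otherwise
   the whole sum is reloaded into a base register.  */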
5085 else if (GET_CODE (ad) == PLUS
5086 && REG_P (XEXP (ad, 0))
5087 && REGNO (XEXP (ad, 0)) < FIRST_PSEUDO_REGISTER
5088 && CONST_INT_P (XEXP (ad, 1))
5089 && (regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as, PLUS,
5090 CONST_INT)
5091 /* Similarly, if we were to reload the base register and the
5092 mem+offset address is still invalid, then we want to reload
5093 the whole address, not just the base register. */
5094 || ! maybe_memory_address_addr_space_p
5095 (mode, ad, as, &(XEXP (ad, 0)))))
5098 /* Unshare the MEM rtx so we can safely alter it. */
5099 if (memrefloc)
5101 *memrefloc = copy_rtx (*memrefloc);
5102 loc = &XEXP (*memrefloc, 0);
5103 if (removed_and)
5104 loc = &XEXP (*loc, 0);
5107 if (double_reg_address_ok[mode]
5108 && regno_ok_for_base_p (REGNO (XEXP (ad, 0)), mode, as,
5109 PLUS, CONST_INT))
5111 /* Unshare the sum as well. */
5112 *loc = ad = copy_rtx (ad);
5114 /* Reload the displacement into an index reg.
5115 We assume the frame pointer or arg pointer is a base reg. */
5116 find_reloads_address_part (XEXP (ad, 1), &XEXP (ad, 1),
5117 index_reg_class (insn), GET_MODE (ad), opnum,
5118 type, ind_levels);
5119 return 0;
5121 else
5123 /* If the sum of two regs is not necessarily valid,
5124 reload the sum into a base reg.
5125 That will at least work. */
5126 find_reloads_address_part (ad, loc,
5127 base_reg_class (mode, as, MEM,
5128 SCRATCH, insn),
5129 GET_MODE (ad), opnum, type, ind_levels);
5131 return ! removed_and;
5134 /* If we have an indexed stack slot, there are three possible reasons why
5135 it might be invalid: The index might need to be reloaded, the address
5136 might have been made by frame pointer elimination and hence have a
5137 constant out of range, or both reasons might apply.
5139 We can easily check for an index needing reload, but even if that is the
5140 case, we might also have an invalid constant. To avoid making the
5141 conservative assumption and requiring two reloads, we see if this address
5142 is valid when not interpreted strictly. If it is, the only problem is
5143 that the index needs a reload and find_reloads_address_1 will take care
5144 of it.
5146 Handle all base registers here, not just fp/ap/sp, because on some
5147 targets (namely SPARC) we can also get invalid addresses from preventive
5148 subreg big-endian corrections made by find_reloads_toplev. We
5149 can also get expressions involving LO_SUM (rather than PLUS) from
5150 find_reloads_subreg_address.
5152 If we decide to do something, it must be that `double_reg_address_ok'
5153 is true. We generate a reload of the base register + constant and
5154 rework the sum so that the reload register will be added to the index.
5155 This is safe because we know the address isn't shared.
5157 We check for the base register as both the first and second operand of
5158 the innermost PLUS and/or LO_SUM. */
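/* Hedged example of the rework described above (all operands made up):
   given
     (plus:SI (plus:SI (reg:SI fp) (reg:SI idx)) (const_int 4096))
   with an out-of-range constant, we form fp+4096 as OFFSET_REG, reload it
   into a base register, and the address effectively becomes
     (plus:SI (reg:SI reload_base) (reg:SI idx)).  */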
5160 for (op_index = 0; op_index < 2; ++op_index)
5162 rtx operand, addend;
5163 enum rtx_code inner_code;
5165 if (GET_CODE (ad) != PLUS)
5166 continue;
5168 inner_code = GET_CODE (XEXP (ad, 0));
5169 if (!(GET_CODE (ad) == PLUS
5170 && CONST_INT_P (XEXP (ad, 1))
5171 && (inner_code == PLUS || inner_code == LO_SUM)))
5172 continue;
5174 operand = XEXP (XEXP (ad, 0), op_index);
5175 if (!REG_P (operand) || REGNO (operand) >= FIRST_PSEUDO_REGISTER)
5176 continue;
5178 addend = XEXP (XEXP (ad, 0), 1 - op_index);
5180 if ((regno_ok_for_base_p (REGNO (operand), mode, as, inner_code,
5181 GET_CODE (addend))
5182 || operand == frame_pointer_rtx
5183 || (!HARD_FRAME_POINTER_IS_FRAME_POINTER
5184 && operand == hard_frame_pointer_rtx)
5185 || (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
5186 && operand == arg_pointer_rtx)
5187 || operand == stack_pointer_rtx)
5188 && ! maybe_memory_address_addr_space_p
5189 (mode, ad, as, &XEXP (XEXP (ad, 0), 1 - op_index)))
5191 rtx offset_reg;
5192 enum reg_class cls;
5194 offset_reg = plus_constant (GET_MODE (ad), operand,
5195 INTVAL (XEXP (ad, 1)));
5197 /* Form the adjusted address. */
5198 if (GET_CODE (XEXP (ad, 0)) == PLUS)
5199 ad = gen_rtx_PLUS (GET_MODE (ad),
5200 op_index == 0 ? offset_reg : addend,
5201 op_index == 0 ? addend : offset_reg);
5202 else
5203 ad = gen_rtx_LO_SUM (GET_MODE (ad),
5204 op_index == 0 ? offset_reg : addend,
5205 op_index == 0 ? addend : offset_reg);
5206 *loc = ad;
5208 cls = base_reg_class (mode, as, MEM, GET_CODE (addend), insn);
5209 find_reloads_address_part (XEXP (ad, op_index),
5210 &XEXP (ad, op_index), cls,
5211 GET_MODE (ad), opnum, type, ind_levels);
5212 find_reloads_address_1 (mode, as,
5213 XEXP (ad, 1 - op_index), 1, GET_CODE (ad),
5214 GET_CODE (XEXP (ad, op_index)),
5215 &XEXP (ad, 1 - op_index), opnum,
5216 type, 0, insn);
5218 return 0;
5222 /* See if address becomes valid when an eliminable register
5223 in a sum is replaced. */
5225 tem = ad;
5226 if (GET_CODE (ad) == PLUS)
5227 tem = subst_indexed_address (ad);
5228 if (tem != ad && strict_memory_address_addr_space_p (mode, tem, as))
5230 /* Ok, we win that way. Replace any additional eliminable
5231 registers. */
5233 subst_reg_equivs_changed = 0;
5234 tem = subst_reg_equivs (tem, insn);
5236 /* Make sure that didn't make the address invalid again. */
5238 if (! subst_reg_equivs_changed
5239 || strict_memory_address_addr_space_p (mode, tem, as))
5241 *loc = tem;
5242 return 0;
5246 /* If constants aren't valid addresses, reload the constant address
5247 into a register. */
5248 if (CONSTANT_P (ad) && ! strict_memory_address_addr_space_p (mode, ad, as))
5250 machine_mode address_mode = GET_MODE (ad);
5251 if (address_mode == VOIDmode)
5252 address_mode = targetm.addr_space.address_mode (as);
5254 /* If AD is an address in the constant pool, the MEM rtx may be shared.
5255 Unshare it so we can safely alter it. */
5256 if (memrefloc && GET_CODE (ad) == SYMBOL_REF
5257 && CONSTANT_POOL_ADDRESS_P (ad))
5259 *memrefloc = copy_rtx (*memrefloc);
5260 loc = &XEXP (*memrefloc, 0);
5261 if (removed_and)
5262 loc = &XEXP (*loc, 0);
5265 find_reloads_address_part (ad, loc,
5266 base_reg_class (mode, as, MEM,
5267 SCRATCH, insn),
5268 address_mode, opnum, type, ind_levels);
5269 return ! removed_and;
5272 return find_reloads_address_1 (mode, as, ad, 0, MEM, SCRATCH, loc,
5273 opnum, type, ind_levels, insn);
5276 /* Find all pseudo regs appearing in AD
5277 that are eliminable in favor of equivalent values
5278 and do not have hard regs; replace them by their equivalents.
5279 INSN, if nonzero, is the insn in which we do the reload. We put USEs in
5280 front of it for pseudos that we have to replace with stack slots. */
5282 static rtx
5283 subst_reg_equivs (rtx ad, rtx_insn *insn)
5285 RTX_CODE code = GET_CODE (ad);
5286 int i;
5287 const char *fmt;
5289 switch (code)
5291 case HIGH:
5292 case CONST:
5293 CASE_CONST_ANY:
5294 case SYMBOL_REF:
5295 case LABEL_REF:
5296 case PC:
5297 return ad;
5299 case REG:
5301 int regno = REGNO (ad);
5303 if (reg_equiv_constant (regno) != 0)
5305 subst_reg_equivs_changed = 1;
5306 return reg_equiv_constant (regno);
5308 if (reg_equiv_memory_loc (regno) && num_not_at_initial_offset)
5310 rtx mem = make_memloc (ad, regno);
5311 if (! rtx_equal_p (mem, reg_equiv_mem (regno)))
5313 subst_reg_equivs_changed = 1;
5314 /* We mark the USE with QImode so that we recognize it
5315 as one that can be safely deleted at the end of
5316 reload. */
5317 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, ad), insn),
5318 QImode);
5319 return mem;
5323 return ad;
5325 case PLUS:
5326 /* Quickly dispose of a common case. */
5327 if (XEXP (ad, 0) == frame_pointer_rtx
5328 && CONST_INT_P (XEXP (ad, 1)))
5329 return ad;
5330 break;
5332 default:
5333 break;
5336 fmt = GET_RTX_FORMAT (code);
5337 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
5338 if (fmt[i] == 'e')
5339 XEXP (ad, i) = subst_reg_equivs (XEXP (ad, i), insn);
5340 return ad;
5343 /* Compute the sum of X and Y, making canonicalizations assumed in an
5344 address, namely: sum constant integers, surround the sum of two
5345 constants with a CONST, put the constant as the second operand, and
5346 group the constant on the outermost sum.
5348 This routine assumes both inputs are already in canonical form. */
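/* Illustrative calls (operands are hypothetical):
     form_sum (SImode, (const_int 4), (reg:SI 100))
       => (plus:SI (reg:SI 100) (const_int 4))
     form_sum (SImode, (plus:SI (reg:SI 100) (const_int 4)), (const_int 8))
       => (plus:SI (reg:SI 100) (const_int 12))  */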
5351 form_sum (machine_mode mode, rtx x, rtx y)
5353 rtx tem;
5355 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
5356 gcc_assert (GET_MODE (y) == mode || GET_MODE (y) == VOIDmode);
5358 if (CONST_INT_P (x))
5359 return plus_constant (mode, y, INTVAL (x));
5360 else if (CONST_INT_P (y))
5361 return plus_constant (mode, x, INTVAL (y));
5362 else if (CONSTANT_P (x))
5363 tem = x, x = y, y = tem;
5365 if (GET_CODE (x) == PLUS && CONSTANT_P (XEXP (x, 1)))
5366 return form_sum (mode, XEXP (x, 0), form_sum (mode, XEXP (x, 1), y));
5368 /* Note that if the operands of Y are specified in the opposite
5369 order in the recursive calls below, infinite recursion will occur. */
5370 if (GET_CODE (y) == PLUS && CONSTANT_P (XEXP (y, 1)))
5371 return form_sum (mode, form_sum (mode, x, XEXP (y, 0)), XEXP (y, 1));
5373 /* If both constant, encapsulate sum. Otherwise, just form sum. A
5374 constant will have been placed second. */
5375 if (CONSTANT_P (x) && CONSTANT_P (y))
5377 if (GET_CODE (x) == CONST)
5378 x = XEXP (x, 0);
5379 if (GET_CODE (y) == CONST)
5380 y = XEXP (y, 0);
5382 return gen_rtx_CONST (VOIDmode, gen_rtx_PLUS (mode, x, y));
5385 return gen_rtx_PLUS (mode, x, y);
5388 /* If ADDR is a sum containing a pseudo register that should be
5389 replaced with a constant (from reg_equiv_constant),
5390 return the result of doing so, and also apply the associative
5391 law so that the result is more likely to be a valid address.
5392 (But it is not guaranteed to be one.)
5394 Note that at most one register is replaced, even if more are
5395 replaceable. Also, we try to put the result into a canonical form
5396 so it is more likely to be a valid address.
5398 In all other cases, return ADDR. */
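/* Hedged example (pseudo 200 is assumed to have no hard register and
   reg_equiv_constant (200) == (const_int 12)):
     (plus:SI (reg:SI 200) (reg:SI 101))
   is rewritten via form_sum into
     (plus:SI (reg:SI 101) (const_int 12)).  */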
5400 static rtx
5401 subst_indexed_address (rtx addr)
5403 rtx op0 = 0, op1 = 0, op2 = 0;
5404 rtx tem;
5405 int regno;
5407 if (GET_CODE (addr) == PLUS)
5409 /* Try to find a register to replace. */
5410 op0 = XEXP (addr, 0), op1 = XEXP (addr, 1), op2 = 0;
5411 if (REG_P (op0)
5412 && (regno = REGNO (op0)) >= FIRST_PSEUDO_REGISTER
5413 && reg_renumber[regno] < 0
5414 && reg_equiv_constant (regno) != 0)
5415 op0 = reg_equiv_constant (regno);
5416 else if (REG_P (op1)
5417 && (regno = REGNO (op1)) >= FIRST_PSEUDO_REGISTER
5418 && reg_renumber[regno] < 0
5419 && reg_equiv_constant (regno) != 0)
5420 op1 = reg_equiv_constant (regno);
5421 else if (GET_CODE (op0) == PLUS
5422 && (tem = subst_indexed_address (op0)) != op0)
5423 op0 = tem;
5424 else if (GET_CODE (op1) == PLUS
5425 && (tem = subst_indexed_address (op1)) != op1)
5426 op1 = tem;
5427 else
5428 return addr;
5430 /* Pick out up to three things to add. */
5431 if (GET_CODE (op1) == PLUS)
5432 op2 = XEXP (op1, 1), op1 = XEXP (op1, 0);
5433 else if (GET_CODE (op0) == PLUS)
5434 op2 = op1, op1 = XEXP (op0, 1), op0 = XEXP (op0, 0);
5436 /* Compute the sum. */
5437 if (op2 != 0)
5438 op1 = form_sum (GET_MODE (addr), op1, op2);
5439 if (op1 != 0)
5440 op0 = form_sum (GET_MODE (addr), op0, op1);
5442 return op0;
5444 return addr;
5447 /* Update the REG_INC notes for an insn. It updates all REG_INC
5448 notes for the instruction which refer to REGNO so that they refer
5449 to the reload number.
5451 INSN is the insn for which any REG_INC notes need updating.
5453 REGNO is the register number which has been reloaded.
5455 RELOADNUM is the reload number. */
5457 static void
5458 update_auto_inc_notes (rtx_insn *insn ATTRIBUTE_UNUSED, int regno ATTRIBUTE_UNUSED,
5459 int reloadnum ATTRIBUTE_UNUSED)
5461 if (!AUTO_INC_DEC)
5462 return;
5464 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
5465 if (REG_NOTE_KIND (link) == REG_INC
5466 && (int) REGNO (XEXP (link, 0)) == regno)
5467 push_replacement (&XEXP (link, 0), reloadnum, VOIDmode);
5470 /* Record the pseudo registers we must reload into hard registers in a
5471 subexpression of a would-be memory address, X referring to a value
5472 in mode MODE. (This function is not called if the address we find
5473 is strictly valid.)
5475 CONTEXT = 1 means we are considering regs as index regs,
5476 = 0 means we are considering them as base regs.
5477 OUTER_CODE is the code of the enclosing RTX, typically a MEM, a PLUS,
5478 or an autoinc code.
5479 If CONTEXT == 0 and OUTER_CODE is a PLUS or LO_SUM, then INDEX_CODE
5480 is the code of the index part of the address. Otherwise, pass SCRATCH
5481 for this argument.
5482 OPNUM and TYPE specify the purpose of any reloads made.
5484 IND_LEVELS says how many levels of indirect addressing are
5485 supported at this point in the address.
5487 INSN, if nonzero, is the insn in which we do the reload. It is used
5488 to determine if we may generate output reloads.
5490 We return nonzero if X, as a whole, is reloaded or replaced. */
5492 /* Note that we take shortcuts assuming that no multi-reg machine mode
5493 occurs as part of an address.
5494 Also, this is not fully machine-customizable; it works for machines
5495 such as VAXen and 68000's and 32000's, but other possible machines
5496 could have addressing modes that this does not handle right.
5497 If you add push_reload calls here, you need to make sure gen_reload
5498 handles those cases gracefully. */
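/* Illustrative example of the CONTEXT distinction (registers made up):
   within
     (plus:SI (mult:SI (reg:SI 101) (const_int 4)) (reg:SI 102))
   reg 101 is processed as an index register (CONTEXT == 1) and reg 102 as
   a base register (CONTEXT == 0), as in the PLUS case below.  */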
5500 static int
5501 find_reloads_address_1 (machine_mode mode, addr_space_t as,
5502 rtx x, int context,
5503 enum rtx_code outer_code, enum rtx_code index_code,
5504 rtx *loc, int opnum, enum reload_type type,
5505 int ind_levels, rtx_insn *insn)
5507 #define REG_OK_FOR_CONTEXT(CONTEXT, REGNO, MODE, AS, OUTER, INDEX) \
5508 ((CONTEXT) == 0 \
5509 ? regno_ok_for_base_p (REGNO, MODE, AS, OUTER, INDEX) \
5510 : REGNO_OK_FOR_INDEX_P (REGNO))
5512 enum reg_class context_reg_class;
5513 RTX_CODE code = GET_CODE (x);
5514 bool reloaded_inner_of_autoinc = false;
5516 if (context == 1)
5517 context_reg_class = index_reg_class (insn);
5518 else
5519 context_reg_class = base_reg_class (mode, as, outer_code, index_code,
5520 insn);
5522 switch (code)
5524 case PLUS:
5526 rtx orig_op0 = XEXP (x, 0);
5527 rtx orig_op1 = XEXP (x, 1);
5528 RTX_CODE code0 = GET_CODE (orig_op0);
5529 RTX_CODE code1 = GET_CODE (orig_op1);
5530 rtx op0 = orig_op0;
5531 rtx op1 = orig_op1;
5533 if (GET_CODE (op0) == SUBREG)
5535 op0 = SUBREG_REG (op0);
5536 code0 = GET_CODE (op0);
5537 if (code0 == REG && REGNO (op0) < FIRST_PSEUDO_REGISTER)
5538 op0 = gen_rtx_REG (word_mode,
5539 (REGNO (op0) +
5540 subreg_regno_offset (REGNO (SUBREG_REG (orig_op0)),
5541 GET_MODE (SUBREG_REG (orig_op0)),
5542 SUBREG_BYTE (orig_op0),
5543 GET_MODE (orig_op0))));
5546 if (GET_CODE (op1) == SUBREG)
5548 op1 = SUBREG_REG (op1);
5549 code1 = GET_CODE (op1);
5550 if (code1 == REG && REGNO (op1) < FIRST_PSEUDO_REGISTER)
5551 /* ??? Why is this given op1's mode, when above for
5552 ??? op0 SUBREGs we use word_mode? */
5553 op1 = gen_rtx_REG (GET_MODE (op1),
5554 (REGNO (op1) +
5555 subreg_regno_offset (REGNO (SUBREG_REG (orig_op1)),
5556 GET_MODE (SUBREG_REG (orig_op1)),
5557 SUBREG_BYTE (orig_op1),
5558 GET_MODE (orig_op1))));
5560 /* Plus in the index register may be created only as a result of
5561 register rematerialization for expression like &localvar*4. Reload it.
5562 It may be possible to combine the displacement on the outer level,
5563 but it is probably not worthwhile to do so. */
5564 if (context == 1)
5566 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5567 opnum, ADDR_TYPE (type), ind_levels, insn);
5568 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5569 context_reg_class,
5570 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5571 return 1;
5574 if (code0 == MULT || code0 == ASHIFT
5575 || code0 == SIGN_EXTEND || code0 == TRUNCATE
5576 || code0 == ZERO_EXTEND || code1 == MEM)
5578 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5579 &XEXP (x, 0), opnum, type, ind_levels,
5580 insn);
5581 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5582 &XEXP (x, 1), opnum, type, ind_levels,
5583 insn);
5586 else if (code1 == MULT || code1 == ASHIFT
5587 || code1 == SIGN_EXTEND || code1 == TRUNCATE
5588 || code1 == ZERO_EXTEND || code0 == MEM)
5590 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5591 &XEXP (x, 0), opnum, type, ind_levels,
5592 insn);
5593 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5594 &XEXP (x, 1), opnum, type, ind_levels,
5595 insn);
5598 else if (code0 == CONST_INT || code0 == CONST
5599 || code0 == SYMBOL_REF || code0 == LABEL_REF)
5600 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, code0,
5601 &XEXP (x, 1), opnum, type, ind_levels,
5602 insn);
5604 else if (code1 == CONST_INT || code1 == CONST
5605 || code1 == SYMBOL_REF || code1 == LABEL_REF)
5606 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, code1,
5607 &XEXP (x, 0), opnum, type, ind_levels,
5608 insn);
5610 else if (code0 == REG && code1 == REG)
5612 if (REGNO_OK_FOR_INDEX_P (REGNO (op1))
5613 && regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5614 return 0;
5615 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0))
5616 && regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5617 return 0;
5618 else if (regno_ok_for_base_p (REGNO (op0), mode, as, PLUS, REG))
5619 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5620 &XEXP (x, 1), opnum, type, ind_levels,
5621 insn);
5622 else if (REGNO_OK_FOR_INDEX_P (REGNO (op1)))
5623 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5624 &XEXP (x, 0), opnum, type, ind_levels,
5625 insn);
5626 else if (regno_ok_for_base_p (REGNO (op1), mode, as, PLUS, REG))
5627 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5628 &XEXP (x, 0), opnum, type, ind_levels,
5629 insn);
5630 else if (REGNO_OK_FOR_INDEX_P (REGNO (op0)))
5631 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5632 &XEXP (x, 1), opnum, type, ind_levels,
5633 insn);
5634 else
5636 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5637 &XEXP (x, 0), opnum, type, ind_levels,
5638 insn);
5639 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5640 &XEXP (x, 1), opnum, type, ind_levels,
5641 insn);
5645 else if (code0 == REG)
5647 find_reloads_address_1 (mode, as, orig_op0, 1, PLUS, SCRATCH,
5648 &XEXP (x, 0), opnum, type, ind_levels,
5649 insn);
5650 find_reloads_address_1 (mode, as, orig_op1, 0, PLUS, REG,
5651 &XEXP (x, 1), opnum, type, ind_levels,
5652 insn);
5655 else if (code1 == REG)
5657 find_reloads_address_1 (mode, as, orig_op1, 1, PLUS, SCRATCH,
5658 &XEXP (x, 1), opnum, type, ind_levels,
5659 insn);
5660 find_reloads_address_1 (mode, as, orig_op0, 0, PLUS, REG,
5661 &XEXP (x, 0), opnum, type, ind_levels,
5662 insn);
5666 return 0;
5668 case POST_MODIFY:
5669 case PRE_MODIFY:
5671 rtx op0 = XEXP (x, 0);
5672 rtx op1 = XEXP (x, 1);
5673 enum rtx_code index_code;
5674 int regno;
5675 int reloadnum;
5677 if (GET_CODE (op1) != PLUS && GET_CODE (op1) != MINUS)
5678 return 0;
5680 /* Currently, we only support {PRE,POST}_MODIFY constructs
5681 where a base register is {inc,dec}remented by the contents
5682 of another register or by a constant value. Thus, these
5683 operands must match. */
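/* E.g. (illustrative only) a supported form is
     (pre_modify:SI (reg:SI 100)
                    (plus:SI (reg:SI 100) (reg:SI 101)))
   where the modified register also appears as the first operand of the
   inner PLUS, which is what the assertion below checks.  */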
5684 gcc_assert (op0 == XEXP (op1, 0));
5686 /* Require index register (or constant). Let's just handle the
5687 register case in the meantime... If the target allows
5688 auto-modify by a constant then we could try replacing a pseudo
5689 register with its equivalent constant where applicable.
5691 We also handle the case where the register was eliminated
5692 resulting in a PLUS subexpression.
5694 If we later decide to reload the whole PRE_MODIFY or
5695 POST_MODIFY, inc_for_reload might clobber the reload register
5696 before reading the index. The index register might therefore
5697 need to live longer than a TYPE reload normally would, so be
5698 conservative and class it as RELOAD_OTHER. */
5699 if ((REG_P (XEXP (op1, 1))
5700 && !REGNO_OK_FOR_INDEX_P (REGNO (XEXP (op1, 1))))
5701 || GET_CODE (XEXP (op1, 1)) == PLUS)
5702 find_reloads_address_1 (mode, as, XEXP (op1, 1), 1, code, SCRATCH,
5703 &XEXP (op1, 1), opnum, RELOAD_OTHER,
5704 ind_levels, insn);
5706 gcc_assert (REG_P (XEXP (op1, 0)));
5708 regno = REGNO (XEXP (op1, 0));
5709 index_code = GET_CODE (XEXP (op1, 1));
5711 /* A register that is incremented cannot be constant! */
5712 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5713 || reg_equiv_constant (regno) == 0);
5715 /* Handle a register that is equivalent to a memory location
5716 which cannot be addressed directly. */
5717 if (reg_equiv_memory_loc (regno) != 0
5718 && (reg_equiv_address (regno) != 0
5719 || num_not_at_initial_offset))
5721 rtx tem = make_memloc (XEXP (x, 0), regno);
5723 if (reg_equiv_address (regno)
5724 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5726 rtx orig = tem;
5728 /* First reload the memory location's address.
5729 We can't use ADDR_TYPE (type) here, because we need to
5730 write back the value after reading it, hence we actually
5731 need two registers. */
5732 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5733 &XEXP (tem, 0), opnum,
5734 RELOAD_OTHER,
5735 ind_levels, insn);
5737 if (!rtx_equal_p (tem, orig))
5738 push_reg_equiv_alt_mem (regno, tem);
5740 /* Then reload the memory location into a base
5741 register. */
5742 reloadnum = push_reload (tem, tem, &XEXP (x, 0),
5743 &XEXP (op1, 0),
5744 base_reg_class (mode, as,
5745 code, index_code,
5746 insn),
5747 GET_MODE (x), GET_MODE (x), 0,
5748 0, opnum, RELOAD_OTHER);
5750 update_auto_inc_notes (this_insn, regno, reloadnum);
5751 return 0;
5755 if (reg_renumber[regno] >= 0)
5756 regno = reg_renumber[regno];
5758 /* We require a base register here... */
5759 if (!regno_ok_for_base_p (regno, GET_MODE (x), as, code, index_code))
5761 reloadnum = push_reload (XEXP (op1, 0), XEXP (x, 0),
5762 &XEXP (op1, 0), &XEXP (x, 0),
5763 base_reg_class (mode, as,
5764 code, index_code,
5765 insn),
5766 GET_MODE (x), GET_MODE (x), 0, 0,
5767 opnum, RELOAD_OTHER);
5769 update_auto_inc_notes (this_insn, regno, reloadnum);
5770 return 0;
5773 return 0;
5775 case POST_INC:
5776 case POST_DEC:
5777 case PRE_INC:
5778 case PRE_DEC:
5779 if (REG_P (XEXP (x, 0)))
5781 int regno = REGNO (XEXP (x, 0));
5782 int value = 0;
5783 rtx x_orig = x;
5785 /* A register that is incremented cannot be constant! */
5786 gcc_assert (regno < FIRST_PSEUDO_REGISTER
5787 || reg_equiv_constant (regno) == 0);
5789 /* Handle a register that is equivalent to a memory location
5790 which cannot be addressed directly. */
5791 if (reg_equiv_memory_loc (regno) != 0
5792 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5794 rtx tem = make_memloc (XEXP (x, 0), regno);
5795 if (reg_equiv_address (regno)
5796 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5798 rtx orig = tem;
5800 /* First reload the memory location's address.
5801 We can't use ADDR_TYPE (type) here, because we need to
5802 write back the value after reading it, hence we actually
5803 need two registers. */
5804 find_reloads_address (GET_MODE (tem), &tem, XEXP (tem, 0),
5805 &XEXP (tem, 0), opnum, type,
5806 ind_levels, insn);
5807 reloaded_inner_of_autoinc = true;
5808 if (!rtx_equal_p (tem, orig))
5809 push_reg_equiv_alt_mem (regno, tem);
5810 /* Put this inside a new increment-expression. */
5811 x = gen_rtx_fmt_e (GET_CODE (x), GET_MODE (x), tem);
5812 /* Proceed to reload that, as if it contained a register. */
5816 /* If we have a hard register that is ok in this incdec context,
5817 don't make a reload. If the register isn't nice enough for
5818 autoincdec, we can reload it. But if an autoincrement of a
5819 register that we have verified here as acceptable is still not
5820 "valid" in the surrounding context, it must be that no autoincrement
5821 is "valid" at all. If that is true and something made an
5822 autoincrement anyway, this must be a special context where one is
5823 allowed. (For example, a "push" instruction.)
5824 We can't improve this address, so leave it alone. */
5826 /* Otherwise, reload the autoincrement into a suitable hard reg
5827 and record how much to increment by. */
5829 if (reg_renumber[regno] >= 0)
5830 regno = reg_renumber[regno];
5831 if (regno >= FIRST_PSEUDO_REGISTER
5832 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, code,
5833 index_code))
5835 int reloadnum;
5837 /* If we can output the register afterwards, do so, this
5838 saves the extra update.
5839 We can do so if we have an INSN - i.e. no JUMP_INSN nor
5840 CALL_INSN.
5841 But don't do this if we cannot directly address the
5842 memory location, since this will make it harder to
5843 reuse address reloads, and increase register pressure.
5844 Also don't do this if we can probably update x directly. */
5845 rtx equiv = (MEM_P (XEXP (x, 0))
5846 ? XEXP (x, 0)
5847 : reg_equiv_mem (regno));
5848 enum insn_code icode = optab_handler (add_optab, GET_MODE (x));
5849 if (insn && NONJUMP_INSN_P (insn)
5850 && (regno < FIRST_PSEUDO_REGISTER
5851 || (equiv
5852 && memory_operand (equiv, GET_MODE (equiv))
5853 && ! (icode != CODE_FOR_nothing
5854 && insn_operand_matches (icode, 0, equiv)
5855 && insn_operand_matches (icode, 1, equiv))))
5856 /* Using RELOAD_OTHER means we emit this and the reload we
5857 made earlier in the wrong order. */
5858 && !reloaded_inner_of_autoinc)
5860 /* We use the original pseudo for loc, so that
5861 emit_reload_insns() knows which pseudo this
5862 reload refers to and updates the pseudo rtx, not
5863 its equivalent memory location, as well as the
5864 corresponding entry in reg_last_reload_reg. */
5865 loc = &XEXP (x_orig, 0);
5866 x = XEXP (x, 0);
5867 reloadnum
5868 = push_reload (x, x, loc, loc,
5869 context_reg_class,
5870 GET_MODE (x), GET_MODE (x), 0, 0,
5871 opnum, RELOAD_OTHER);
5873 else
5875 reloadnum
5876 = push_reload (x, x, loc, (rtx*) 0,
5877 context_reg_class,
5878 GET_MODE (x), GET_MODE (x), 0, 0,
5879 opnum, type);
5880 rld[reloadnum].inc
5881 = find_inc_amount (PATTERN (this_insn), XEXP (x_orig, 0));
5883 value = 1;
5886 update_auto_inc_notes (this_insn, REGNO (XEXP (x_orig, 0)),
5887 reloadnum);
5889 return value;
5891 return 0;
5893 case TRUNCATE:
5894 case SIGN_EXTEND:
5895 case ZERO_EXTEND:
5896 /* Look for parts to reload in the inner expression and reload them
5897 too, in addition to this operation. Reloading all inner parts in
5898 addition to this one shouldn't be necessary, but at this point,
5899 we don't know if we can possibly omit any part that *can* be
5900 reloaded. Targets that are better off reloading just either part
5901 (or perhaps even a different part of an outer expression), should
5902 define LEGITIMIZE_RELOAD_ADDRESS. */
5903 find_reloads_address_1 (GET_MODE (XEXP (x, 0)), as, XEXP (x, 0),
5904 context, code, SCRATCH, &XEXP (x, 0), opnum,
5905 type, ind_levels, insn);
5906 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5907 context_reg_class,
5908 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5909 return 1;
5911 case MEM:
5912 /* This is probably the result of a substitution, by eliminate_regs, of
5913 an equivalent address for a pseudo that was not allocated to a hard
5914 register. Verify that the specified address is valid and reload it
5915 into a register.
5917 Since we know we are going to reload this item, don't decrement for
5918 the indirection level.
5920 Note that this is actually conservative: it would be slightly more
5921 efficient to use the value of SPILL_INDIRECT_LEVELS from
5922 reload1.cc here. */
5924 find_reloads_address (GET_MODE (x), loc, XEXP (x, 0), &XEXP (x, 0),
5925 opnum, ADDR_TYPE (type), ind_levels, insn);
5926 push_reload (*loc, NULL_RTX, loc, (rtx*) 0,
5927 context_reg_class,
5928 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5929 return 1;
5931 case REG:
5933 int regno = REGNO (x);
5935 if (reg_equiv_constant (regno) != 0)
5937 find_reloads_address_part (reg_equiv_constant (regno), loc,
5938 context_reg_class,
5939 GET_MODE (x), opnum, type, ind_levels);
5940 return 1;
5943 #if 0 /* This might screw code in reload1.cc to delete prior output-reload
5944 that feeds this insn. */
5945 if (reg_equiv_mem (regno) != 0)
5947 push_reload (reg_equiv_mem (regno), NULL_RTX, loc, (rtx*) 0,
5948 context_reg_class,
5949 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5950 return 1;
5952 #endif
5954 if (reg_equiv_memory_loc (regno)
5955 && (reg_equiv_address (regno) != 0 || num_not_at_initial_offset))
5957 rtx tem = make_memloc (x, regno);
5958 if (reg_equiv_address (regno) != 0
5959 || ! rtx_equal_p (tem, reg_equiv_mem (regno)))
5961 x = tem;
5962 find_reloads_address (GET_MODE (x), &x, XEXP (x, 0),
5963 &XEXP (x, 0), opnum, ADDR_TYPE (type),
5964 ind_levels, insn);
5965 if (!rtx_equal_p (x, tem))
5966 push_reg_equiv_alt_mem (regno, x);
5970 if (reg_renumber[regno] >= 0)
5971 regno = reg_renumber[regno];
5973 if (regno >= FIRST_PSEUDO_REGISTER
5974 || !REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
5975 index_code))
5977 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5978 context_reg_class,
5979 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5980 return 1;
5983 /* If a register appearing in an address is the subject of a CLOBBER
5984 in this insn, reload it into some other register to be safe.
5985 The CLOBBER is supposed to make the register unavailable
5986 from before this insn to after it. */
5987 if (regno_clobbered_p (regno, this_insn, GET_MODE (x), 0))
5989 push_reload (x, NULL_RTX, loc, (rtx*) 0,
5990 context_reg_class,
5991 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
5992 return 1;
5995 return 0;
5997 case SUBREG:
5998 if (REG_P (SUBREG_REG (x)))
6000 /* If this is a SUBREG of a hard register and the resulting register
6001 is of the wrong class, reload the whole SUBREG. This avoids
6002 needless copies if SUBREG_REG is multi-word. */
6003 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6005 int regno ATTRIBUTE_UNUSED = subreg_regno (x);
6007 if (!REG_OK_FOR_CONTEXT (context, regno, mode, as, outer_code,
6008 index_code))
6010 push_reload (x, NULL_RTX, loc, (rtx*) 0,
6011 context_reg_class,
6012 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6013 return 1;
6016 /* If this is a SUBREG of a pseudo-register, and the pseudo-register
6017 is larger than the class size, then reload the whole SUBREG. */
6018 else
6020 enum reg_class rclass = context_reg_class;
6021 if (ira_reg_class_max_nregs [rclass][GET_MODE (SUBREG_REG (x))]
6022 > reg_class_size[(int) rclass])
6024 /* If the inner register will be replaced by a memory
6025 reference, we can do this only if we can replace the
6026 whole subreg by a (narrower) memory reference. If
6027 this is not possible, fall through and reload just
6028 the inner register (including address reloads). */
6029 if (reg_equiv_memory_loc (REGNO (SUBREG_REG (x))) != 0)
6031 rtx tem = find_reloads_subreg_address (x, opnum,
6032 ADDR_TYPE (type),
6033 ind_levels, insn,
6034 NULL);
6035 if (tem)
6037 push_reload (tem, NULL_RTX, loc, (rtx*) 0, rclass,
6038 GET_MODE (tem), VOIDmode, 0, 0,
6039 opnum, type);
6040 return 1;
6043 else
6045 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6046 GET_MODE (x), VOIDmode, 0, 0, opnum, type);
6047 return 1;
6052 break;
6054 default:
6055 break;
6059 const char *fmt = GET_RTX_FORMAT (code);
6060 int i;
6062 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6064 if (fmt[i] == 'e')
6065 /* Pass SCRATCH for INDEX_CODE, since CODE can never be a PLUS once
6066 we get here. */
6067 find_reloads_address_1 (mode, as, XEXP (x, i), context,
6068 code, SCRATCH, &XEXP (x, i),
6069 opnum, type, ind_levels, insn);
6073 #undef REG_OK_FOR_CONTEXT
6074 return 0;
6077 /* X, which is found at *LOC, is a part of an address that needs to be
6078 reloaded into a register of class RCLASS. If X is a constant, or if
6079 X is a PLUS that contains a constant, check that the constant is a
6080 legitimate operand and that we are supposed to be able to load
6081 it into the register.
6083 If not, force the constant into memory and reload the MEM instead.
6085 MODE is the mode to use, in case X is an integer constant.
6087 OPNUM and TYPE describe the purpose of any reloads made.
6089 IND_LEVELS says how many levels of indirect addressing this machine
6090 supports. */
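/* Hedged sketch: if X is (symbol_ref:SI "foo") and the target neither
   accepts it as a legitimate constant for MODE nor allows it to be loaded
   into RCLASS, it is forced into the constant pool and the resulting
   constant-pool MEM is reloaded instead; "foo" is a made-up name.  */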
6092 static void
6093 find_reloads_address_part (rtx x, rtx *loc, enum reg_class rclass,
6094 machine_mode mode, int opnum,
6095 enum reload_type type, int ind_levels)
6097 if (CONSTANT_P (x)
6098 && (!targetm.legitimate_constant_p (mode, x)
6099 || targetm.preferred_reload_class (x, rclass) == NO_REGS))
6101 x = force_const_mem (mode, x);
6102 find_reloads_address (mode, &x, XEXP (x, 0), &XEXP (x, 0),
6103 opnum, type, ind_levels, 0);
6106 else if (GET_CODE (x) == PLUS
6107 && CONSTANT_P (XEXP (x, 1))
6108 && (!targetm.legitimate_constant_p (GET_MODE (x), XEXP (x, 1))
6109 || targetm.preferred_reload_class (XEXP (x, 1), rclass)
6110 == NO_REGS))
6112 rtx tem;
6114 tem = force_const_mem (GET_MODE (x), XEXP (x, 1));
6115 x = gen_rtx_PLUS (GET_MODE (x), XEXP (x, 0), tem);
6116 find_reloads_address (mode, &XEXP (x, 1), XEXP (tem, 0), &XEXP (tem, 0),
6117 opnum, type, ind_levels, 0);
6120 push_reload (x, NULL_RTX, loc, (rtx*) 0, rclass,
6121 mode, VOIDmode, 0, 0, opnum, type);
6124 /* X, a subreg of a pseudo, is a part of an address that needs to be
6125 reloaded, and the pseudo is equivalent to a memory location.
6127 Attempt to replace the whole subreg by a (possibly narrower or wider)
6128 memory reference. If this is possible, return this new memory
6129 reference, and push all required address reloads. Otherwise,
6130 return NULL.
6132 OPNUM and TYPE identify the purpose of the reload.
6134 IND_LEVELS says how many levels of indirect addressing are
6135 supported at this point in the address.
6137 INSN, if nonzero, is the insn in which we do the reload. It is used
6138 to determine where to put USEs for pseudos that we have to replace with
6139 stack slots. */
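/* Hedged example (pseudo and offsets are hypothetical): for
     (subreg:HI (reg:SI 123) 2)
   where pseudo 123 is equivalent to
     (mem:SI (plus:SI (reg:SI fp) (const_int -16))),
   simplify_subreg can yield something like
     (mem:HI (plus:SI (reg:SI fp) (const_int -14)))
   which then replaces the whole SUBREG, plus any address reloads.  */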
6141 static rtx
6142 find_reloads_subreg_address (rtx x, int opnum, enum reload_type type,
6143 int ind_levels, rtx_insn *insn,
6144 int *address_reloaded)
6146 machine_mode outer_mode = GET_MODE (x);
6147 machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
6148 int regno = REGNO (SUBREG_REG (x));
6149 int reloaded = 0;
6150 rtx tem, orig;
6151 poly_int64 offset;
6153 gcc_assert (reg_equiv_memory_loc (regno) != 0);
6155 /* We cannot replace the subreg with a modified memory reference if:
6157 - we have a paradoxical subreg that implicitly acts as a zero or
6158 sign extension operation due to LOAD_EXTEND_OP;
6160 - we have a subreg that is implicitly supposed to act on the full
6161 register due to WORD_REGISTER_OPERATIONS (see also eliminate_regs);
6163 - the address of the equivalent memory location is mode-dependent; or
6165 - we have a paradoxical subreg and the resulting memory is not
6166 sufficiently aligned to allow access in the wider mode.
6168 In addition, we choose not to perform the replacement for *any*
6169 paradoxical subreg, even if it were possible in principle. This
6170 is to avoid generating wider memory references than necessary.
6172 This corresponds to how previous versions of reload used to handle
6173 paradoxical subregs where no address reload was required. */
6175 if (paradoxical_subreg_p (x))
6176 return NULL;
6178 if (WORD_REGISTER_OPERATIONS
6179 && partial_subreg_p (outer_mode, inner_mode)
6180 && known_equal_after_align_down (GET_MODE_SIZE (outer_mode) - 1,
6181 GET_MODE_SIZE (inner_mode) - 1,
6182 UNITS_PER_WORD))
6183 return NULL;
6185 /* Since we don't attempt to handle paradoxical subregs, we can just
6186 call into simplify_subreg, which will handle all remaining checks
6187 for us. */
6188 orig = make_memloc (SUBREG_REG (x), regno);
6189 offset = SUBREG_BYTE (x);
6190 tem = simplify_subreg (outer_mode, orig, inner_mode, offset);
6191 if (!tem || !MEM_P (tem))
6192 return NULL;
6194 /* Now push all required address reloads, if any. */
6195 reloaded = find_reloads_address (GET_MODE (tem), &tem,
6196 XEXP (tem, 0), &XEXP (tem, 0),
6197 opnum, type, ind_levels, insn);
6198 /* ??? Do we need to handle nonzero offsets somehow? */
6199 if (known_eq (offset, 0) && !rtx_equal_p (tem, orig))
6200 push_reg_equiv_alt_mem (regno, tem);
6202 /* For some processors an address may be valid in the original mode but
6203 not in a smaller mode. For example, ARM accepts a scaled index register
6204 in SImode but not in HImode. Note that this is only a problem if the
6205 address in reg_equiv_mem is already invalid in the new mode; other
6206 cases would be fixed by find_reloads_address as usual.
6208 ??? We attempt to handle such cases here by doing an additional reload
6209 of the full address after the usual processing by find_reloads_address.
6210 Note that this may not work in the general case, but it seems to cover
6211 the cases where this situation currently occurs. A more general fix
6212 might be to reload the *value* instead of the address, but this would
6213 not be expected by the callers of this routine as-is.
6215 If find_reloads_address already completely replaced the address, there
6216 is nothing further to do. */
6217 if (reloaded == 0
6218 && reg_equiv_mem (regno) != 0
6219 && !strict_memory_address_addr_space_p
6220 (GET_MODE (x), XEXP (reg_equiv_mem (regno), 0),
6221 MEM_ADDR_SPACE (reg_equiv_mem (regno))))
6223 push_reload (XEXP (tem, 0), NULL_RTX, &XEXP (tem, 0), (rtx*) 0,
6224 base_reg_class (GET_MODE (tem), MEM_ADDR_SPACE (tem),
6225 MEM, SCRATCH, insn),
6226 GET_MODE (XEXP (tem, 0)), VOIDmode, 0, 0, opnum, type);
6227 reloaded = 1;
6230 /* If this is not a toplevel operand, find_reloads doesn't see this
6231 substitution. We have to emit a USE of the pseudo so that
6232 delete_output_reload can see it. */
6233 if (replace_reloads && recog_data.operand[opnum] != x)
6234 /* We mark the USE with QImode so that we recognize it as one that
6235 can be safely deleted at the end of reload. */
6236 PUT_MODE (emit_insn_before (gen_rtx_USE (VOIDmode, SUBREG_REG (x)), insn),
6237 QImode);
6239 if (address_reloaded)
6240 *address_reloaded = reloaded;
6242 return tem;
6245 /* Substitute into the current INSN the registers into which we have reloaded
6246 the things that need reloading. The array `replacements'
6247 contains the locations of all pointers that must be changed
6248 and says what to replace them with. */
6252 void
6253 subst_reloads (rtx_insn *insn)
6255 int i;
6257 for (i = 0; i < n_replacements; i++)
6259 struct replacement *r = &replacements[i];
6260 rtx reloadreg = rld[r->what].reg_rtx;
6261 if (reloadreg)
6263 #ifdef DEBUG_RELOAD
6264 /* This checking takes a very long time on some platforms
6265 causing the gcc.c-torture/compile/limits-fnargs.c test
6266 to time out during testing. See PR 31850.
6268 Internal consistency test. Check that we don't modify
6269 anything in the equivalence arrays. Whenever something from
6270 those arrays needs to be reloaded, it must be unshared before
6271 being substituted into; the equivalence must not be modified.
6272 Otherwise, if the equivalence is used after that, it will
6273 have been modified, and the thing substituted (probably a
6274 register) is likely overwritten and not a usable equivalence. */
6275 int check_regno;
6277 for (check_regno = 0; check_regno < max_regno; check_regno++)
6279 #define CHECK_MODF(ARRAY) \
6280 gcc_assert (!(*reg_equivs)[check_regno].ARRAY \
6281 || !loc_mentioned_in_p (r->where, \
6282 (*reg_equivs)[check_regno].ARRAY))
6284 CHECK_MODF (constant);
6285 CHECK_MODF (memory_loc);
6286 CHECK_MODF (address);
6287 CHECK_MODF (mem);
6288 #undef CHECK_MODF
6290 #endif /* DEBUG_RELOAD */
6292 /* If we're replacing a LABEL_REF with a register, there must
6293 already be an indication (to e.g. flow) which label this
6294 register refers to. */
6295 gcc_assert (GET_CODE (*r->where) != LABEL_REF
6296 || !JUMP_P (insn)
6297 || find_reg_note (insn,
6298 REG_LABEL_OPERAND,
6299 XEXP (*r->where, 0))
6300 || label_is_jump_target_p (XEXP (*r->where, 0), insn));
6302 /* Encapsulate RELOADREG so its machine mode matches what
6303 used to be there. Note that gen_lowpart_common will
6304 do the wrong thing if RELOADREG is multi-word. RELOADREG
6305 will always be a REG here. */
6306 if (GET_MODE (reloadreg) != r->mode && r->mode != VOIDmode)
6307 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6309 *r->where = reloadreg;
6311 /* If reload got no reg and isn't optional, something's wrong. */
6312 else
6313 gcc_assert (rld[r->what].optional);
6317 /* Make a copy of any replacements being done into X and move those
6318 copies to locations in Y, a copy of X. */
6320 void
6321 copy_replacements (rtx x, rtx y)
6323 copy_replacements_1 (&x, &y, n_replacements);
6326 static void
6327 copy_replacements_1 (rtx *px, rtx *py, int orig_replacements)
6329 int i, j;
6330 rtx x, y;
6331 struct replacement *r;
6332 enum rtx_code code;
6333 const char *fmt;
6335 for (j = 0; j < orig_replacements; j++)
6336 if (replacements[j].where == px)
6338 r = &replacements[n_replacements++];
6339 r->where = py;
6340 r->what = replacements[j].what;
6341 r->mode = replacements[j].mode;
6344 x = *px;
6345 y = *py;
6346 code = GET_CODE (x);
6347 fmt = GET_RTX_FORMAT (code);
6349 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6351 if (fmt[i] == 'e')
6352 copy_replacements_1 (&XEXP (x, i), &XEXP (y, i), orig_replacements);
6353 else if (fmt[i] == 'E')
6354 for (j = XVECLEN (x, i); --j >= 0; )
6355 copy_replacements_1 (&XVECEXP (x, i, j), &XVECEXP (y, i, j),
6356 orig_replacements);
6360 /* Change any replacements being done to *X to be done to *Y. */
6362 void
6363 move_replacements (rtx *x, rtx *y)
6365 int i;
6367 for (i = 0; i < n_replacements; i++)
6368 if (replacements[i].where == x)
6369 replacements[i].where = y;
6372 /* If LOC was scheduled to be replaced by something, return the replacement.
6373 Otherwise, return *LOC. */
6376 find_replacement (rtx *loc)
6378 struct replacement *r;
6380 for (r = &replacements[0]; r < &replacements[n_replacements]; r++)
6382 rtx reloadreg = rld[r->what].reg_rtx;
6384 if (reloadreg && r->where == loc)
6386 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6387 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6389 return reloadreg;
6391 else if (reloadreg && GET_CODE (*loc) == SUBREG
6392 && r->where == &SUBREG_REG (*loc))
6394 if (r->mode != VOIDmode && GET_MODE (reloadreg) != r->mode)
6395 reloadreg = reload_adjust_reg_for_mode (reloadreg, r->mode);
6397 return simplify_gen_subreg (GET_MODE (*loc), reloadreg,
6398 GET_MODE (SUBREG_REG (*loc)),
6399 SUBREG_BYTE (*loc));
6403 /* If *LOC is a PLUS, MINUS, or MULT, see if a replacement is scheduled for
6404 what's inside and make a new rtl if so. */
6405 if (GET_CODE (*loc) == PLUS || GET_CODE (*loc) == MINUS
6406 || GET_CODE (*loc) == MULT)
6408 rtx x = find_replacement (&XEXP (*loc, 0));
6409 rtx y = find_replacement (&XEXP (*loc, 1));
6411 if (x != XEXP (*loc, 0) || y != XEXP (*loc, 1))
6412 return gen_rtx_fmt_ee (GET_CODE (*loc), GET_MODE (*loc), x, y);
6415 return *loc;
6418 /* Return nonzero if register in range [REGNO, ENDREGNO)
6419 appears either explicitly or implicitly in X
6420 other than being stored into (except for earlyclobber operands).
6422 References contained within the substructure at LOC do not count.
6423 LOC may be zero, meaning don't ignore anything.
6425 This is similar to refers_to_regno_p in rtlanal.cc except that we
6426 look at equivalences for pseudos that didn't get hard registers. */
6428 static int
6429 refers_to_regno_for_reload_p (unsigned int regno, unsigned int endregno,
6430 rtx x, rtx *loc)
6432 int i;
6433 unsigned int r;
6434 RTX_CODE code;
6435 const char *fmt;
6437 if (x == 0)
6438 return 0;
6440 repeat:
6441 code = GET_CODE (x);
6443 switch (code)
6445 case REG:
6446 r = REGNO (x);
6448 /* If this is a pseudo, a hard register must not have been allocated.
6449 X must therefore either be a constant or be in memory. */
6450 if (r >= FIRST_PSEUDO_REGISTER)
6452 if (reg_equiv_memory_loc (r))
6453 return refers_to_regno_for_reload_p (regno, endregno,
6454 reg_equiv_memory_loc (r),
6455 (rtx*) 0);
6457 gcc_assert (reg_equiv_constant (r) || reg_equiv_invariant (r));
6458 return 0;
6461 return endregno > r && regno < END_REGNO (x);
6463 case SUBREG:
6464 /* If this is a SUBREG of a hard reg, we can see exactly which
6465 registers are being modified. Otherwise, handle normally. */
6466 if (REG_P (SUBREG_REG (x))
6467 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
6469 unsigned int inner_regno = subreg_regno (x);
6470 unsigned int inner_endregno
6471 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
6472 ? subreg_nregs (x) : 1);
6474 return endregno > inner_regno && regno < inner_endregno;
6476 break;
6478 case CLOBBER:
6479 case SET:
6480 if (&SET_DEST (x) != loc
6481 /* Note setting a SUBREG counts as referring to the REG it is in for
6482 a pseudo but not for hard registers since we can
6483 treat each word individually. */
6484 && ((GET_CODE (SET_DEST (x)) == SUBREG
6485 && loc != &SUBREG_REG (SET_DEST (x))
6486 && REG_P (SUBREG_REG (SET_DEST (x)))
6487 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
6488 && refers_to_regno_for_reload_p (regno, endregno,
6489 SUBREG_REG (SET_DEST (x)),
6490 loc))
6491 /* If the output is an earlyclobber operand, this is
6492 a conflict. */
6493 || ((!REG_P (SET_DEST (x))
6494 || earlyclobber_operand_p (SET_DEST (x)))
6495 && refers_to_regno_for_reload_p (regno, endregno,
6496 SET_DEST (x), loc))))
6497 return 1;
6499 if (code == CLOBBER || loc == &SET_SRC (x))
6500 return 0;
6501 x = SET_SRC (x);
6502 goto repeat;
6504 default:
6505 break;
6508 /* X does not match, so try its subexpressions. */
6510 fmt = GET_RTX_FORMAT (code);
6511 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
6513 if (fmt[i] == 'e' && loc != &XEXP (x, i))
6515 if (i == 0)
6517 x = XEXP (x, 0);
6518 goto repeat;
6520 else
6521 if (refers_to_regno_for_reload_p (regno, endregno,
6522 XEXP (x, i), loc))
6523 return 1;
6525 else if (fmt[i] == 'E')
6527 int j;
6528 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
6529 if (loc != &XVECEXP (x, i, j)
6530 && refers_to_regno_for_reload_p (regno, endregno,
6531 XVECEXP (x, i, j), loc))
6532 return 1;
6535 return 0;
6538 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
6539 we check if any register number in X conflicts with the relevant register
6540 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
6541 contains a MEM (we don't bother checking for memory addresses that can't
6542 conflict because we expect this to be a rare case).
6544 This function is similar to reg_overlap_mentioned_p in rtlanal.cc except
6545 that we look at equivalences for pseudos that didn't get hard registers. */
6548 reg_overlap_mentioned_for_reload_p (rtx x, rtx in)
6550 int regno, endregno;
6552 /* Overly conservative. */
6553 if (GET_CODE (x) == STRICT_LOW_PART
6554 || GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC)
6555 x = XEXP (x, 0);
6557 /* If either argument is a constant, then modifying X cannot affect IN. */
6558 if (CONSTANT_P (x) || CONSTANT_P (in))
6559 return 0;
6560 else if (GET_CODE (x) == SUBREG && MEM_P (SUBREG_REG (x)))
6561 return refers_to_mem_for_reload_p (in);
6562 else if (GET_CODE (x) == SUBREG)
6564 regno = REGNO (SUBREG_REG (x));
6565 if (regno < FIRST_PSEUDO_REGISTER)
6566 regno += subreg_regno_offset (REGNO (SUBREG_REG (x)),
6567 GET_MODE (SUBREG_REG (x)),
6568 SUBREG_BYTE (x),
6569 GET_MODE (x));
6570 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
6571 ? subreg_nregs (x) : 1);
6573 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6575 else if (REG_P (x))
6577 regno = REGNO (x);
6579 /* If this is a pseudo, it must not have been assigned a hard register.
6580 Therefore, it must either be in memory or be a constant. */
6582 if (regno >= FIRST_PSEUDO_REGISTER)
6584 if (reg_equiv_memory_loc (regno))
6585 return refers_to_mem_for_reload_p (in);
6586 gcc_assert (reg_equiv_constant (regno));
6587 return 0;
6590 endregno = END_REGNO (x);
6592 return refers_to_regno_for_reload_p (regno, endregno, in, (rtx*) 0);
6594 else if (MEM_P (x))
6595 return refers_to_mem_for_reload_p (in);
6596 else if (GET_CODE (x) == SCRATCH || GET_CODE (x) == PC)
6597 return reg_mentioned_p (x, in);
6598 else
6600 gcc_assert (GET_CODE (x) == PLUS);
6602 /* We actually want to know if X is mentioned somewhere inside IN.
6603 We must not say that (plus (sp) (const_int 124)) is in
6604 (plus (sp) (const_int 64)), since that can lead to incorrect reload
6605 allocation when spuriously changing a RELOAD_FOR_OUTPUT_ADDRESS
6606 into a RELOAD_OTHER on behalf of another RELOAD_OTHER. */
6607 while (MEM_P (in))
6608 in = XEXP (in, 0);
6609 if (REG_P (in))
6610 return 0;
6611 else if (GET_CODE (in) == PLUS)
6612 return (rtx_equal_p (x, in)
6613 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 0))
6614 || reg_overlap_mentioned_for_reload_p (x, XEXP (in, 1)));
6615 else
6616 return (reg_overlap_mentioned_for_reload_p (XEXP (x, 0), in)
6617 || reg_overlap_mentioned_for_reload_p (XEXP (x, 1), in));
6621 /* Return nonzero if anything in X contains a MEM. Look also for pseudo
6622 registers. */
6624 static int
6625 refers_to_mem_for_reload_p (rtx x)
6627 const char *fmt;
6628 int i;
6630 if (MEM_P (x))
6631 return 1;
6633 if (REG_P (x))
6634 return (REGNO (x) >= FIRST_PSEUDO_REGISTER
6635 && reg_equiv_memory_loc (REGNO (x)));
6637 fmt = GET_RTX_FORMAT (GET_CODE (x));
6638 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
6639 if (fmt[i] == 'e'
6640 && (MEM_P (XEXP (x, i))
6641 || refers_to_mem_for_reload_p (XEXP (x, i))))
6642 return 1;
6644 return 0;
6647 /* Check the insns before INSN to see if there is a suitable register
6648 containing the same value as GOAL.
6649 If OTHER is -1, look for a register in class RCLASS.
6650 Otherwise, just see if register number OTHER shares GOAL's value.
6652 Return an rtx for the register found, or zero if none is found.
6654 If RELOAD_REG_P is (short *)1,
6655 we reject any hard reg that appears in reload_reg_rtx
6656 because such a hard reg is also needed coming into this insn.
6658 If RELOAD_REG_P is any other nonzero value,
6659 it is a vector indexed by hard reg number
6660 and we reject any hard reg whose element in the vector is nonnegative
6661 as well as any that appears in reload_reg_rtx.
6663 If GOAL is zero, then GOALREG is a register number; we look
6664 for an equivalent for that register.
6666 MODE is the machine mode of the value we want an equivalence for.
6667 If GOAL is nonzero and not VOIDmode, then it must have mode MODE.
6669 This function is used by jump.cc as well as in the reload pass.
6671 If GOAL is the sum of the stack pointer and a constant, we treat it
6672 as if it were a constant except that sp is required to be unchanging. */
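/* Illustrative use (insns are hypothetical): if an earlier insn was
     (set (reg:SI 3) (mem:SI (reg:SI 100)))
   and GOAL is that same memory reference, with neither reg 3 nor the
   memory modified in between, (reg:SI 3) can be returned so the load need
   not be repeated.  */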
6675 find_equiv_reg (rtx goal, rtx_insn *insn, enum reg_class rclass, int other,
6676 short *reload_reg_p, int goalreg, machine_mode mode)
6678 rtx_insn *p = insn;
6679 rtx goaltry, valtry, value;
6680 rtx_insn *where;
6681 rtx pat;
6682 int regno = -1;
6683 int valueno;
6684 int goal_mem = 0;
6685 int goal_const = 0;
6686 int goal_mem_addr_varies = 0;
6687 int need_stable_sp = 0;
6688 int nregs;
6689 int valuenregs;
6690 int num = 0;
6692 if (goal == 0)
6693 regno = goalreg;
6694 else if (REG_P (goal))
6695 regno = REGNO (goal);
6696 else if (MEM_P (goal))
6698 enum rtx_code code = GET_CODE (XEXP (goal, 0));
6699 if (MEM_VOLATILE_P (goal))
6700 return 0;
6701 if (flag_float_store && SCALAR_FLOAT_MODE_P (GET_MODE (goal)))
6702 return 0;
6703 /* An address with side effects must be reexecuted. */
6704 switch (code)
6706 case POST_INC:
6707 case PRE_INC:
6708 case POST_DEC:
6709 case PRE_DEC:
6710 case POST_MODIFY:
6711 case PRE_MODIFY:
6712 return 0;
6713 default:
6714 break;
6716 goal_mem = 1;
6718 else if (CONSTANT_P (goal))
6719 goal_const = 1;
6720 else if (GET_CODE (goal) == PLUS
6721 && XEXP (goal, 0) == stack_pointer_rtx
6722 && CONSTANT_P (XEXP (goal, 1)))
6723 goal_const = need_stable_sp = 1;
6724 else if (GET_CODE (goal) == PLUS
6725 && XEXP (goal, 0) == frame_pointer_rtx
6726 && CONSTANT_P (XEXP (goal, 1)))
6727 goal_const = 1;
6728 else
6729 return 0;
6731 num = 0;
6732 /* Scan insns back from INSN, looking for one that copies
6733 a value into or out of GOAL.
6734 Stop and give up if we reach a label. */
6736 while (1)
6738 p = PREV_INSN (p);
6739 if (p && DEBUG_INSN_P (p))
6740 continue;
6741 num++;
6742 if (p == 0 || LABEL_P (p)
6743 || num > param_max_reload_search_insns)
6744 return 0;
6746 /* Don't reuse register contents from before a setjmp-type
6747 function call; on the second return (from the longjmp) it
6748 might have been clobbered by a later reuse. It doesn't
6749 seem worthwhile to go and check whether it really is
6750 reused, even if that information were readily available;
6751 just don't reuse it across the setjmp call. */
6752 if (CALL_P (p) && find_reg_note (p, REG_SETJMP, NULL_RTX))
6753 return 0;
6755 if (NONJUMP_INSN_P (p)
6756 /* If we don't want spill regs ... */
6757 && (! (reload_reg_p != 0
6758 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6759 /* ... then ignore insns introduced by reload; they aren't
6760 useful and can cause results in reload_as_needed to be
6761 different from what they were when calculating the need for
6762 spills. If we notice an input-reload insn here, we will
6763 reject it below, but it might hide a usable equivalent.
6764 That makes bad code. It may even fail: perhaps no reg was
6765 spilled for this insn because it was assumed we would find
6766 that equivalent. */
6767 || INSN_UID (p) < reload_first_uid))
6769 rtx tem;
6770 pat = single_set (p);
6772 /* First check for something that sets some reg equal to GOAL. */
6773 if (pat != 0
6774 && ((regno >= 0
6775 && true_regnum (SET_SRC (pat)) == regno
6776 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6777 ||
6778 (regno >= 0
6779 && true_regnum (SET_DEST (pat)) == regno
6780 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0)
6781 ||
6782 (goal_const && rtx_equal_p (SET_SRC (pat), goal)
6783 /* When looking for stack pointer + const,
6784 make sure we don't use a stack adjust. */
6785 && !reg_overlap_mentioned_for_reload_p (SET_DEST (pat), goal)
6786 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0)
6787 || (goal_mem
6788 && (valueno = true_regnum (valtry = SET_DEST (pat))) >= 0
6789 && rtx_renumbered_equal_p (goal, SET_SRC (pat)))
6790 || (goal_mem
6791 && (valueno = true_regnum (valtry = SET_SRC (pat))) >= 0
6792 && rtx_renumbered_equal_p (goal, SET_DEST (pat)))
6793 /* If we are looking for a constant,
6794 and something equivalent to that constant was copied
6795 into a reg, we can use that reg. */
6796 || (goal_const && REG_NOTES (p) != 0
6797 && (tem = find_reg_note (p, REG_EQUIV, NULL_RTX))
6798 && ((rtx_equal_p (XEXP (tem, 0), goal)
6799 && (valueno
6800 = true_regnum (valtry = SET_DEST (pat))) >= 0)
6801 || (REG_P (SET_DEST (pat))
6802 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6803 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6804 && CONST_INT_P (goal)
6805 && (goaltry = operand_subword (XEXP (tem, 0), 0,
6806 0, VOIDmode)) != 0
6807 && rtx_equal_p (goal, goaltry)
6808 && (valtry
6809 = operand_subword (SET_DEST (pat), 0, 0,
6810 VOIDmode))
6811 && (valueno = true_regnum (valtry)) >= 0)))
6812 || (goal_const && (tem = find_reg_note (p, REG_EQUIV,
6813 NULL_RTX))
6814 && REG_P (SET_DEST (pat))
6815 && CONST_DOUBLE_AS_FLOAT_P (XEXP (tem, 0))
6816 && SCALAR_FLOAT_MODE_P (GET_MODE (XEXP (tem, 0)))
6817 && CONST_INT_P (goal)
6818 && (goaltry = operand_subword (XEXP (tem, 0), 1, 0,
6819 VOIDmode)) != 0
6820 && rtx_equal_p (goal, goaltry)
6821 && (valtry
6822 = operand_subword (SET_DEST (pat), 1, 0, VOIDmode))
6823 && (valueno = true_regnum (valtry)) >= 0)))
6825 if (other >= 0)
6827 if (valueno != other)
6828 continue;
6830 else if ((unsigned) valueno >= FIRST_PSEUDO_REGISTER)
6831 continue;
6832 else if (!in_hard_reg_set_p (reg_class_contents[(int) rclass],
6833 mode, valueno))
6834 continue;
6835 value = valtry;
6836 where = p;
6837 break;
6842 /* We found a previous insn copying GOAL into a suitable other reg VALUE
6843 (or copying VALUE into GOAL, if GOAL is also a register).
6844 Now verify that VALUE is really valid. */
6846 /* VALUENO is the register number of VALUE; a hard register. */
6848 /* Don't try to re-use something that is killed in this insn. We want
6849 to be able to trust REG_UNUSED notes. */
6850 if (REG_NOTES (where) != 0 && find_reg_note (where, REG_UNUSED, value))
6851 return 0;
6853 /* If we propose to get the value from the stack pointer or if GOAL is
6854 a MEM based on the stack pointer, we need a stable SP. */
6855 if (valueno == STACK_POINTER_REGNUM || regno == STACK_POINTER_REGNUM
6856 || (goal_mem && reg_overlap_mentioned_for_reload_p (stack_pointer_rtx,
6857 goal)))
6858 need_stable_sp = 1;
6860 /* Reject VALUE if the copy-insn moved the wrong sort of datum. */
6861 if (GET_MODE (value) != mode)
6862 return 0;
6864 /* Reject VALUE if it was loaded from GOAL
6865 and is also a register that appears in the address of GOAL. */
6867 if (goal_mem && value == SET_DEST (single_set (where))
6868 && refers_to_regno_for_reload_p (valueno, end_hard_regno (mode, valueno),
6869 goal, (rtx*) 0))
6870 return 0;
6872 /* Reject registers that overlap GOAL. */
6874 if (regno >= 0 && regno < FIRST_PSEUDO_REGISTER)
6875 nregs = hard_regno_nregs (regno, mode);
6876 else
6877 nregs = 1;
6878 valuenregs = hard_regno_nregs (valueno, mode);
6880 if (!goal_mem && !goal_const
6881 && regno + nregs > valueno && regno < valueno + valuenregs)
6882 return 0;
6884 /* Reject VALUE if it is one of the regs reserved for reloads.
6885 Reload1 knows how to reuse them anyway, and it would get
6886 confused if we allocated one without its knowledge.
6887 (Now that insns introduced by reload are ignored above,
6888 this case shouldn't happen, but I'm not positive.) */
6890 if (reload_reg_p != 0 && reload_reg_p != (short *) HOST_WIDE_INT_1)
6892 int i;
6893 for (i = 0; i < valuenregs; ++i)
6894 if (reload_reg_p[valueno + i] >= 0)
6895 return 0;
6898 /* Reject VALUE if it is a register being used for an input reload
6899 even if it is not one of those reserved. */
6901 if (reload_reg_p != 0)
6903 int i;
6904 for (i = 0; i < n_reloads; i++)
6905 if (rld[i].reg_rtx != 0
6906 && rld[i].in
6907 && (int) REGNO (rld[i].reg_rtx) < valueno + valuenregs
6908 && (int) END_REGNO (rld[i].reg_rtx) > valueno)
6909 return 0;
6912 if (goal_mem)
6913 /* We must treat the frame pointer as varying here,
6914 since it can vary, for instance in a nonlocal goto as generated by expand_goto. */
6915 goal_mem_addr_varies = !CONSTANT_ADDRESS_P (XEXP (goal, 0));
6917 /* Now verify that the values of GOAL and VALUE remain unaltered
6918 until INSN is reached. */
6920 p = insn;
6921 while (1)
6923 p = PREV_INSN (p);
6924 if (p == where)
6925 return value;
6927 /* Don't trust the conversion past a function call
6928 if either of the two is in a call-clobbered register, or memory. */
6929 if (CALL_P (p))
6931 if (goal_mem || need_stable_sp)
6932 return 0;
6934 function_abi callee_abi = insn_callee_abi (p);
6935 if (regno >= 0
6936 && regno < FIRST_PSEUDO_REGISTER
6937 && callee_abi.clobbers_reg_p (mode, regno))
6938 return 0;
6940 if (valueno >= 0
6941 && valueno < FIRST_PSEUDO_REGISTER
6942 && callee_abi.clobbers_reg_p (mode, valueno))
6943 return 0;
6946 if (INSN_P (p))
6948 pat = PATTERN (p);
6950 /* Watch out for unspec_volatile, and volatile asms. */
6951 if (volatile_insn_p (pat))
6952 return 0;
6954 /* If this insn P stores in either GOAL or VALUE, return 0.
6955 If GOAL is a memory ref and this insn writes memory, return 0.
6956 If GOAL is a memory ref and its address is not constant,
6957 and this insn P changes a register used in GOAL, return 0. */
6959 if (GET_CODE (pat) == COND_EXEC)
6960 pat = COND_EXEC_CODE (pat);
6961 if (GET_CODE (pat) == SET || GET_CODE (pat) == CLOBBER)
6963 rtx dest = SET_DEST (pat);
6964 while (GET_CODE (dest) == SUBREG
6965 || GET_CODE (dest) == ZERO_EXTRACT
6966 || GET_CODE (dest) == STRICT_LOW_PART)
6967 dest = XEXP (dest, 0);
6968 if (REG_P (dest))
6970 int xregno = REGNO (dest);
6971 int end_xregno = END_REGNO (dest);
6972 if (xregno < regno + nregs && end_xregno > regno)
6973 return 0;
6974 if (xregno < valueno + valuenregs
6975 && end_xregno > valueno)
6976 return 0;
6977 if (goal_mem_addr_varies
6978 && reg_overlap_mentioned_for_reload_p (dest, goal))
6979 return 0;
6980 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
6981 return 0;
6983 else if (goal_mem && MEM_P (dest)
6984 && ! push_operand (dest, GET_MODE (dest)))
6985 return 0;
6986 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
6987 && reg_equiv_memory_loc (regno) != 0)
6988 return 0;
6989 else if (need_stable_sp && push_operand (dest, GET_MODE (dest)))
6990 return 0;
6992 else if (GET_CODE (pat) == PARALLEL)
6994 int i;
6995 for (i = XVECLEN (pat, 0) - 1; i >= 0; i--)
6997 rtx v1 = XVECEXP (pat, 0, i);
6998 if (GET_CODE (v1) == COND_EXEC)
6999 v1 = COND_EXEC_CODE (v1);
7000 if (GET_CODE (v1) == SET || GET_CODE (v1) == CLOBBER)
7002 rtx dest = SET_DEST (v1);
7003 while (GET_CODE (dest) == SUBREG
7004 || GET_CODE (dest) == ZERO_EXTRACT
7005 || GET_CODE (dest) == STRICT_LOW_PART)
7006 dest = XEXP (dest, 0);
7007 if (REG_P (dest))
7009 int xregno = REGNO (dest);
7010 int end_xregno = END_REGNO (dest);
7011 if (xregno < regno + nregs
7012 && end_xregno > regno)
7013 return 0;
7014 if (xregno < valueno + valuenregs
7015 && end_xregno > valueno)
7016 return 0;
7017 if (goal_mem_addr_varies
7018 && reg_overlap_mentioned_for_reload_p (dest,
7019 goal))
7020 return 0;
7021 if (xregno == STACK_POINTER_REGNUM && need_stable_sp)
7022 return 0;
7024 else if (goal_mem && MEM_P (dest)
7025 && ! push_operand (dest, GET_MODE (dest)))
7026 return 0;
7027 else if (MEM_P (dest) && regno >= FIRST_PSEUDO_REGISTER
7028 && reg_equiv_memory_loc (regno) != 0)
7029 return 0;
7030 else if (need_stable_sp
7031 && push_operand (dest, GET_MODE (dest)))
7032 return 0;
7037 if (CALL_P (p) && CALL_INSN_FUNCTION_USAGE (p))
7039 rtx link;
7041 for (link = CALL_INSN_FUNCTION_USAGE (p); XEXP (link, 1) != 0;
7042 link = XEXP (link, 1))
7044 pat = XEXP (link, 0);
7045 if (GET_CODE (pat) == CLOBBER)
7047 rtx dest = SET_DEST (pat);
7049 if (REG_P (dest))
7051 int xregno = REGNO (dest);
7052 int end_xregno = END_REGNO (dest);
7054 if (xregno < regno + nregs
7055 && end_xregno > regno)
7056 return 0;
7057 else if (xregno < valueno + valuenregs
7058 && end_xregno > valueno)
7059 return 0;
7060 else if (goal_mem_addr_varies
7061 && reg_overlap_mentioned_for_reload_p (dest,
7062 goal))
7063 return 0;
7066 else if (goal_mem && MEM_P (dest)
7067 && ! push_operand (dest, GET_MODE (dest)))
7068 return 0;
7069 else if (need_stable_sp
7070 && push_operand (dest, GET_MODE (dest)))
7071 return 0;
7076 #if AUTO_INC_DEC
7077 /* If this insn auto-increments or auto-decrements
7078 either regno or valueno, return 0 now.
7079 If GOAL is a memory ref and its address is not constant,
7080 and this insn P increments a register used in GOAL, return 0. */
7082 rtx link;
7084 for (link = REG_NOTES (p); link; link = XEXP (link, 1))
7085 if (REG_NOTE_KIND (link) == REG_INC
7086 && REG_P (XEXP (link, 0)))
7088 int incno = REGNO (XEXP (link, 0));
7089 if (incno < regno + nregs && incno >= regno)
7090 return 0;
7091 if (incno < valueno + valuenregs && incno >= valueno)
7092 return 0;
7093 if (goal_mem_addr_varies
7094 && reg_overlap_mentioned_for_reload_p (XEXP (link, 0),
7095 goal))
7096 return 0;
7099 #endif
7104 /* Find a place where INCED appears in an increment or decrement operator
7105 within X, and return the amount INCED is incremented or decremented by.
7106 The value is always positive. */
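/* A worked example (illustrative; assumes SImode occupies 4 bytes):
   for X = (mem:SI (post_inc:SI (reg:SI 4))) and INCED = (reg:SI 4) the
   function returns 4, the size of the accessed mode.  For a modify-style
   address such as (pre_modify (reg:SI 4) (plus (reg:SI 4) (const_int -8)))
   it returns the absolute value of the constant, here 8.  */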
7108 static poly_int64
7109 find_inc_amount (rtx x, rtx inced)
7111 enum rtx_code code = GET_CODE (x);
7112 const char *fmt;
7113 int i;
7115 if (code == MEM)
7117 rtx addr = XEXP (x, 0);
7118 if ((GET_CODE (addr) == PRE_DEC
7119 || GET_CODE (addr) == POST_DEC
7120 || GET_CODE (addr) == PRE_INC
7121 || GET_CODE (addr) == POST_INC)
7122 && XEXP (addr, 0) == inced)
7123 return GET_MODE_SIZE (GET_MODE (x));
7124 else if ((GET_CODE (addr) == PRE_MODIFY
7125 || GET_CODE (addr) == POST_MODIFY)
7126 && GET_CODE (XEXP (addr, 1)) == PLUS
7127 && XEXP (addr, 0) == XEXP (XEXP (addr, 1), 0)
7128 && XEXP (addr, 0) == inced
7129 && CONST_INT_P (XEXP (XEXP (addr, 1), 1)))
7131 i = INTVAL (XEXP (XEXP (addr, 1), 1));
7132 return i < 0 ? -i : i;
7136 fmt = GET_RTX_FORMAT (code);
7137 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
7139 if (fmt[i] == 'e')
7141 poly_int64 tem = find_inc_amount (XEXP (x, i), inced);
7142 if (maybe_ne (tem, 0))
7143 return tem;
7145 if (fmt[i] == 'E')
7147 int j;
7148 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
7150 poly_int64 tem = find_inc_amount (XVECEXP (x, i, j), inced);
7151 if (maybe_ne (tem, 0))
7152 return tem;
7157 return 0;
7160 /* Return 1 if registers from REGNO to ENDREGNO are the subjects of a
7161 REG_INC note in insn INSN. REGNO must refer to a hard register. */
7163 static int
7164 reg_inc_found_and_valid_p (unsigned int regno, unsigned int endregno,
7165 rtx insn)
7167 rtx link;
7169 if (!AUTO_INC_DEC)
7170 return 0;
7172 gcc_assert (insn);
7174 if (! INSN_P (insn))
7175 return 0;
7177 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
7178 if (REG_NOTE_KIND (link) == REG_INC)
7180 unsigned int test = (int) REGNO (XEXP (link, 0));
7181 if (test >= regno && test < endregno)
7182 return 1;
7184 return 0;
7187 /* Return 1 if register REGNO is the subject of a clobber in insn INSN.
7188 If SETS is 1, also consider SETs. If SETS is 2, enable checking
7189 REG_INC. REGNO must refer to a hard register. */
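/* An illustrative use (not from the original source): with SETS == 0,
   regno_clobbered_p (2, insn, SImode, 0) returns 1 only if INSN's pattern
   is a CLOBBER (possibly inside a PARALLEL) of a hard register overlapping
   regs 2 .. end_hard_regno (SImode, 2) - 1.  With SETS == 1 a plain SET of
   such a register counts too; with SETS == 2, SETs are not considered but
   a REG_INC note naming one of those registers is.  */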
7191 int
7192 regno_clobbered_p (unsigned int regno, rtx_insn *insn, machine_mode mode,
7193 int sets)
7195 /* regno must be a hard register. */
7196 gcc_assert (regno < FIRST_PSEUDO_REGISTER);
7198 unsigned int endregno = end_hard_regno (mode, regno);
7200 if ((GET_CODE (PATTERN (insn)) == CLOBBER
7201 || (sets == 1 && GET_CODE (PATTERN (insn)) == SET))
7202 && REG_P (XEXP (PATTERN (insn), 0)))
7204 unsigned int test = REGNO (XEXP (PATTERN (insn), 0));
7206 return test >= regno && test < endregno;
7209 if (sets == 2 && reg_inc_found_and_valid_p (regno, endregno, insn))
7210 return 1;
7212 if (GET_CODE (PATTERN (insn)) == PARALLEL)
7214 int i = XVECLEN (PATTERN (insn), 0) - 1;
7216 for (; i >= 0; i--)
7218 rtx elt = XVECEXP (PATTERN (insn), 0, i);
7219 if ((GET_CODE (elt) == CLOBBER
7220 || (sets == 1 && GET_CODE (elt) == SET))
7221 && REG_P (XEXP (elt, 0)))
7223 unsigned int test = REGNO (XEXP (elt, 0));
7225 if (test >= regno && test < endregno)
7226 return 1;
7228 if (sets == 2
7229 && reg_inc_found_and_valid_p (regno, endregno, elt))
7230 return 1;
7234 return 0;
7237 /* Find the low part, with mode MODE, of a hard regno RELOADREG. */
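/* A worked example (illustrative; assumes DImode needs two hard registers
   and SImode one): if RELOADREG is (reg:DI 10), occupying hard regs 10 and
   11, then on a !REG_WORDS_BIG_ENDIAN target the function returns
   (reg:SI 10), while with REG_WORDS_BIG_ENDIAN the low-order word is the
   last one, so it returns (reg:SI 11).  */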
7238 rtx
7239 reload_adjust_reg_for_mode (rtx reloadreg, machine_mode mode)
7241 int regno;
7243 if (GET_MODE (reloadreg) == mode)
7244 return reloadreg;
7246 regno = REGNO (reloadreg);
7248 if (REG_WORDS_BIG_ENDIAN)
7249 regno += ((int) REG_NREGS (reloadreg)
7250 - (int) hard_regno_nregs (regno, mode));
7252 return gen_rtx_REG (mode, regno);
7255 static const char *const reload_when_needed_name[] =
7257 "RELOAD_FOR_INPUT",
7258 "RELOAD_FOR_OUTPUT",
7259 "RELOAD_FOR_INSN",
7260 "RELOAD_FOR_INPUT_ADDRESS",
7261 "RELOAD_FOR_INPADDR_ADDRESS",
7262 "RELOAD_FOR_OUTPUT_ADDRESS",
7263 "RELOAD_FOR_OUTADDR_ADDRESS",
7264 "RELOAD_FOR_OPERAND_ADDRESS",
7265 "RELOAD_FOR_OPADDR_ADDR",
7266 "RELOAD_OTHER",
7267 "RELOAD_FOR_OTHER_ADDRESS"
7270 /* These functions are used to print the variables set by 'find_reloads'. */
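/* For example (illustrative; the debugger session shown is hypothetical),
   when stopped inside the reload pass one can type

     (gdb) call debug_reload ()

   to dump every pending reload in rld[] to stderr.  */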
7272 DEBUG_FUNCTION void
7273 debug_reload_to_stream (FILE *f)
7275 int r;
7276 const char *prefix;
7278 if (! f)
7279 f = stderr;
7280 for (r = 0; r < n_reloads; r++)
7282 fprintf (f, "Reload %d: ", r);
7284 if (rld[r].in != 0)
7286 fprintf (f, "reload_in (%s) = ",
7287 GET_MODE_NAME (rld[r].inmode));
7288 print_inline_rtx (f, rld[r].in, 24);
7289 fprintf (f, "\n\t");
7292 if (rld[r].out != 0)
7294 fprintf (f, "reload_out (%s) = ",
7295 GET_MODE_NAME (rld[r].outmode));
7296 print_inline_rtx (f, rld[r].out, 24);
7297 fprintf (f, "\n\t");
7300 fprintf (f, "%s, ", reg_class_names[(int) rld[r].rclass]);
7302 fprintf (f, "%s (opnum = %d)",
7303 reload_when_needed_name[(int) rld[r].when_needed],
7304 rld[r].opnum);
7306 if (rld[r].optional)
7307 fprintf (f, ", optional");
7309 if (rld[r].nongroup)
7310 fprintf (f, ", nongroup");
7312 if (maybe_ne (rld[r].inc, 0))
7314 fprintf (f, ", inc by ");
7315 print_dec (rld[r].inc, f, SIGNED);
7318 if (rld[r].nocombine)
7319 fprintf (f, ", can't combine");
7321 if (rld[r].secondary_p)
7322 fprintf (f, ", secondary_reload_p");
7324 if (rld[r].in_reg != 0)
7326 fprintf (f, "\n\treload_in_reg: ");
7327 print_inline_rtx (f, rld[r].in_reg, 24);
7330 if (rld[r].out_reg != 0)
7332 fprintf (f, "\n\treload_out_reg: ");
7333 print_inline_rtx (f, rld[r].out_reg, 24);
7336 if (rld[r].reg_rtx != 0)
7338 fprintf (f, "\n\treload_reg_rtx: ");
7339 print_inline_rtx (f, rld[r].reg_rtx, 24);
7342 prefix = "\n\t";
7343 if (rld[r].secondary_in_reload != -1)
7345 fprintf (f, "%ssecondary_in_reload = %d",
7346 prefix, rld[r].secondary_in_reload);
7347 prefix = ", ";
7350 if (rld[r].secondary_out_reload != -1)
7351 fprintf (f, "%ssecondary_out_reload = %d\n",
7352 prefix, rld[r].secondary_out_reload);
7354 prefix = "\n\t";
7355 if (rld[r].secondary_in_icode != CODE_FOR_nothing)
7357 fprintf (f, "%ssecondary_in_icode = %s", prefix,
7358 insn_data[rld[r].secondary_in_icode].name);
7359 prefix = ", ";
7362 if (rld[r].secondary_out_icode != CODE_FOR_nothing)
7363 fprintf (f, "%ssecondary_out_icode = %s", prefix,
7364 insn_data[rld[r].secondary_out_icode].name);
7366 fprintf (f, "\n");
7370 DEBUG_FUNCTION void
7371 debug_reload (void)
7373 debug_reload_to_stream (stderr);