official-gcc.git: gcc/struct-equiv.c
1 /* Control flow optimization code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
23 /* Try to match two basic blocks - or their ends - for structural equivalence.
26 We scan the blocks from their ends backwards, hoping to find a match, i.e.
27 insns are identical, except for certain cases involving registers. A
28 mismatch between register number RX (used in block X) and RY (used in the
29 same way in block Y) can be handled in one of the following cases:
30 1. RX and RY are local to their respective blocks; they are set there and
31 die there. If so, they can effectively be ignored.
32 2. RX and RY die in their blocks, but are live at the start. If any path
33 gets redirected through X instead of Y, the caller must emit
34 compensation code to move RY to RX. If there are overlapping inputs,
35 the function resolve_input_conflict ensures that this can be done.
36 Information about these registers is tracked in the X_LOCAL, Y_LOCAL,
37 LOCAL_COUNT and LOCAL_RVALUE fields.
38 3. RX and RY live throughout their blocks, including the start and the end.
39 Either RX and RY must be identical, or we have to replace all uses in
40 block X with a new pseudo, which is stored in the INPUT_REG field. The
41 caller can then use block X instead of block Y by copying RY to the new
42 pseudo.
44 The main entry point to this file is struct_equiv_block_eq. This function
45 uses a struct equiv_info to accept some of its inputs, to keep track of its
46 internal state, to pass down to its helper functions, and to communicate
47 some of the results back to the caller.
49 Most scans will fail to match enough insns to make any optimization
50 worthwhile; therefore the process is geared more towards quick scanning
51 than towards the ability to backtrack exactly when we find a mismatch.
52 The information gathered is still meaningful for a preliminary decision
53 on whether we want to do an optimization: we might only slightly
54 overestimate the number of matchable insns, and underestimate the number
55 of inputs and miss an input conflict. Sufficient information is gathered
56 so that when we make another pass, we won't have to backtrack at the
57 same point.
58 Another issue is that information in memory attributes and/or REG_NOTES
59 might have to be merged or discarded to make a valid match. We don't want
60 to discard such information when we are not certain that we want to merge
61 the two (partial) blocks.
62 For these reasons, struct_equiv_block_eq has to be called first with the
63 STRUCT_EQUIV_START bit set in the mode parameter. This will calculate the
64 number of matched insns and the number and types of inputs. If the
65 need_rerun field is set, the results are only tentative, and the caller
66 has to call again with STRUCT_EQUIV_RERUN till need_rerun is false in
67 order to get a reliable match.
68 To install the changes necessary for the match, the function has to be
69 called again with STRUCT_EQUIV_FINAL.
71 While scanning an insn, we process first all the SET_DESTs, then the
72 SET_SRCs, then the REG_NOTES, in order to keep the register liveness
73 information consistent.
74 If we were to mix up the order for sources / destinations in an insn where
75 a source is also a destination, we would mistakenly conclude that the
76 register is not live in the preceding insn. */
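/* Illustrative sketch, not part of the original file: one way a caller
   (e.g. a cross-jump pass) might drive the calling protocol described
   above.  struct equiv_info, its x_block / y_block / input_cost /
   need_rerun fields, COSTS_N_INSNS and the STRUCT_EQUIV_* mode bits are
   the real declarations from the GCC headers; the function name, the
   zero-initialization and the trivial profitability test are assumptions
   made purely for illustration.  Wrapped in "#if 0" so it is never
   compiled.  */
#if 0
static int
try_match_blocks_sketch (basic_block x_block, basic_block y_block)
{
  struct equiv_info info;
  int nmatch;

  memset (&info, 0, sizeof info);
  info.x_block = x_block;
  info.y_block = y_block;
  info.input_cost = COSTS_N_INSNS (1);

  /* First call: tentative scan, with STRUCT_EQUIV_START set.  */
  nmatch = struct_equiv_block_eq (STRUCT_EQUIV_START, &info);
  /* Rescan with STRUCT_EQUIV_RERUN until the results are reliable.  */
  while (info.need_rerun)
    nmatch = struct_equiv_block_eq (STRUCT_EQUIV_RERUN, &info);
  if (!nmatch)
    return 0;
  /* Final call: install the changes necessary for the match.  A real
     caller would also keep whatever additional mode bits it used in the
     earlier calls.  */
  return struct_equiv_block_eq (STRUCT_EQUIV_FINAL, &info);
}
#endif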
78 #include "config.h"
79 #include "system.h"
80 #include "coretypes.h"
81 #include "tm.h"
82 #include "rtl.h"
83 #include "regs.h"
84 #include "output.h"
85 #include "insn-config.h"
86 #include "flags.h"
87 #include "recog.h"
88 #include "tm_p.h"
89 #include "target.h"
90 #include "emit-rtl.h"
91 #include "reload.h"
92 #include "df.h"
94 static void merge_memattrs (rtx, rtx);
95 static bool set_dest_equiv_p (rtx x, rtx y, struct equiv_info *info);
96 static bool set_dest_addr_equiv_p (rtx x, rtx y, struct equiv_info *info);
97 static void find_dying_inputs (struct equiv_info *info);
98 static bool resolve_input_conflict (struct equiv_info *info);
100 /* After reload, some moves, as indicated by SECONDARY_RELOAD_CLASS and
101 SECONDARY_MEMORY_NEEDED, cannot be done directly. For our purposes, we
102 consider them impossible to generate after reload (even though some
103 might be synthesized when you throw enough code at them).
104 Since, while processing a cross-jump, we don't know whether a local
105 register that is currently live will eventually remain live and thus
106 be an input, we keep track of potential inputs that would require an
107 impossible move by using a prohibitively high cost for them.
108 This number, multiplied with the larger of STRUCT_EQUIV_MAX_LOCAL and
109 FIRST_PSEUDO_REGISTER, must fit in the input_cost field of
110 struct equiv_info. */
111 #define IMPOSSIBLE_MOVE_FACTOR 20000
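/* Illustrative note, not part of the original file: as a worked example of
   the constraint above, with IMPOSSIBLE_MOVE_FACTOR at 20000 and, say, a
   few hundred hard registers or STRUCT_EQUIV_MAX_LOCAL entries, the
   product is only a few million, which fits comfortably in the int-sized
   input_cost / input_count bookkeeping of struct equiv_info.  The register
   counts used here are assumptions for illustration only.  */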
115 /* Remove the memory attributes of the MEM expressions X and Y
116 where they are not equal. */
118 void
119 merge_memattrs (rtx x, rtx y)
121 int i;
122 int j;
123 enum rtx_code code;
124 const char *fmt;
126 if (x == y)
127 return;
128 if (x == 0 || y == 0)
129 return;
131 code = GET_CODE (x);
133 if (code != GET_CODE (y))
134 return;
136 if (GET_MODE (x) != GET_MODE (y))
137 return;
139 if (code == MEM && MEM_ATTRS (x) != MEM_ATTRS (y))
141 if (! MEM_ATTRS (x))
142 MEM_ATTRS (y) = 0;
143 else if (! MEM_ATTRS (y))
144 MEM_ATTRS (x) = 0;
145 else
147 rtx mem_size;
149 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
151 set_mem_alias_set (x, 0);
152 set_mem_alias_set (y, 0);
155 if (! mem_expr_equal_p (MEM_EXPR (x), MEM_EXPR (y)))
157 set_mem_expr (x, 0);
158 set_mem_expr (y, 0);
159 set_mem_offset (x, 0);
160 set_mem_offset (y, 0);
162 else if (MEM_OFFSET (x) != MEM_OFFSET (y))
164 set_mem_offset (x, 0);
165 set_mem_offset (y, 0);
168 if (!MEM_SIZE (x))
169 mem_size = NULL_RTX;
170 else if (!MEM_SIZE (y))
171 mem_size = NULL_RTX;
172 else
173 mem_size = GEN_INT (MAX (INTVAL (MEM_SIZE (x)),
174 INTVAL (MEM_SIZE (y))));
175 set_mem_size (x, mem_size);
176 set_mem_size (y, mem_size);
178 set_mem_align (x, MIN (MEM_ALIGN (x), MEM_ALIGN (y)));
179 set_mem_align (y, MEM_ALIGN (x));
183 fmt = GET_RTX_FORMAT (code);
184 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
186 switch (fmt[i])
188 case 'E':
189 /* Two vectors must have the same length. */
190 if (XVECLEN (x, i) != XVECLEN (y, i))
191 return;
193 for (j = 0; j < XVECLEN (x, i); j++)
194 merge_memattrs (XVECEXP (x, i, j), XVECEXP (y, i, j));
196 break;
198 case 'e':
199 merge_memattrs (XEXP (x, i), XEXP (y, i));
202 return;
205 /* In SET, assign the value VALUE to the bit for the register number of REG.
206 If REG is a hard register, do so for all its constituent registers.
207 Return the number of registers that have become included (as a positive
208 number) or excluded (as a negative number). */
209 static int
210 assign_reg_reg_set (regset set, rtx reg, int value)
212 unsigned regno = REGNO (reg);
213 int nregs, i, old;
215 if (regno >= FIRST_PSEUDO_REGISTER)
217 gcc_assert (!reload_completed);
218 nregs = 1;
220 else
221 nregs = hard_regno_nregs[regno][GET_MODE (reg)];
222 for (old = 0, i = nregs; --i >= 0; regno++)
224 if ((value != 0) == REGNO_REG_SET_P (set, regno))
225 continue;
226 if (value)
227 old++, SET_REGNO_REG_SET (set, regno);
228 else
229 old--, CLEAR_REGNO_REG_SET (set, regno);
231 return old;
234 /* Record state about current inputs / local registers / liveness
235 in *P. */
236 static inline void
237 struct_equiv_make_checkpoint (struct struct_equiv_checkpoint *p,
238 struct equiv_info *info)
240 *p = info->cur;
243 /* Call struct_equiv_make_checkpoint (P, INFO) if the current partial block
244 is suitable to split off - i.e. there is no dangling cc0 user - and
245 if the current cost of the common instructions, minus the cost for
246 setting up the inputs, is higher than what has been recorded before
247 in CHECKPOINT[N]. Also, if we do so, confirm or cancel any pending
248 changes. */
249 static void
250 struct_equiv_improve_checkpoint (struct struct_equiv_checkpoint *p,
251 struct equiv_info *info)
253 #ifdef HAVE_cc0
254 if (reg_mentioned_p (cc0_rtx, info->cur.x_start)
255 && !sets_cc0_p (info->cur.x_start))
256 return;
257 #endif
258 if (info->cur.input_count >= IMPOSSIBLE_MOVE_FACTOR)
259 return;
260 if (info->input_cost >= 0
261 ? (COSTS_N_INSNS(info->cur.ninsns - p->ninsns)
262 > info->input_cost * (info->cur.input_count - p->input_count))
263 : info->cur.ninsns > p->ninsns && !info->cur.input_count)
265 if (info->check_input_conflict && ! resolve_input_conflict (info))
266 return;
267 /* We have a profitable set of changes. If this is the final pass,
268 commit them now. Otherwise, we don't know yet if we can make any
269 change, so put the old code back for now. */
270 if (info->mode & STRUCT_EQUIV_FINAL)
271 confirm_change_group ();
272 else
273 cancel_changes (0);
274 struct_equiv_make_checkpoint (p, info);
278 /* Restore state about current inputs / local registers / liveness
279 from P. */
280 static void
281 struct_equiv_restore_checkpoint (struct struct_equiv_checkpoint *p,
282 struct equiv_info *info)
284 info->cur.ninsns = p->ninsns;
285 info->cur.x_start = p->x_start;
286 info->cur.y_start = p->y_start;
287 info->cur.input_count = p->input_count;
288 info->cur.input_valid = p->input_valid;
289 while (info->cur.local_count > p->local_count)
291 info->cur.local_count--;
292 info->cur.version--;
293 if (REGNO_REG_SET_P (info->x_local_live,
294 REGNO (info->x_local[info->cur.local_count])))
296 assign_reg_reg_set (info->x_local_live,
297 info->x_local[info->cur.local_count], 0);
298 assign_reg_reg_set (info->y_local_live,
299 info->y_local[info->cur.local_count], 0);
300 info->cur.version--;
303 if (info->cur.version != p->version)
304 info->need_rerun = true;
308 /* Update register liveness to reflect that X is now live (if rvalue is
309 nonzero) or dead (if rvalue is zero) in INFO->x_block, and likewise Y
310 in INFO->y_block. Return the number of registers the liveness of which
311 changed in each block (as a negative number if registers became dead). */
312 static int
313 note_local_live (struct equiv_info *info, rtx x, rtx y, int rvalue)
315 unsigned x_regno = REGNO (x);
316 unsigned y_regno = REGNO (y);
317 int x_nominal_nregs = (x_regno >= FIRST_PSEUDO_REGISTER
318 ? 1 : hard_regno_nregs[x_regno][GET_MODE (x)]);
319 int y_nominal_nregs = (y_regno >= FIRST_PSEUDO_REGISTER
320 ? 1 : hard_regno_nregs[y_regno][GET_MODE (y)]);
321 int x_change = assign_reg_reg_set (info->x_local_live, x, rvalue);
322 int y_change = assign_reg_reg_set (info->y_local_live, y, rvalue);
324 gcc_assert (x_nominal_nregs && y_nominal_nregs);
325 gcc_assert (x_change * y_nominal_nregs == y_change * x_nominal_nregs);
326 if (y_change)
328 if (reload_completed)
330 unsigned x_regno ATTRIBUTE_UNUSED = REGNO (x);
331 unsigned y_regno = REGNO (y);
332 enum machine_mode x_mode = GET_MODE (x);
334 if (secondary_reload_class (0, REGNO_REG_CLASS (y_regno), x_mode, x)
335 != NO_REGS
336 #ifdef SECONDARY_MEMORY_NEEDED
337 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (y_regno),
338 REGNO_REG_CLASS (x_regno), x_mode)
339 #endif
341 y_change *= IMPOSSIBLE_MOVE_FACTOR;
343 info->cur.input_count += y_change;
344 info->cur.version++;
346 return x_change;
349 /* Check if *XP is equivalent to Y. Until an unreconcilable difference is
350 found, use in-group changes with validate_change on *XP to make register
351 assignments agree. It is the (not necessarily direct) caller's
352 responsibility to verify / confirm / cancel these changes, as appropriate.
353 RVALUE indicates whether the processed piece of rtl is used as a destination,
354 in which case we can't have different registers being an input. Return
355 nonzero if *XP and Y have been found equivalent, zero otherwise.
356 RVALUE == 0: destination
357 RVALUE == 1: source
358 RVALUE == -1: source, ignore SET_DEST of SET / clobber. */
359 bool
360 rtx_equiv_p (rtx *xp, rtx y, int rvalue, struct equiv_info *info)
362 rtx x = *xp;
363 enum rtx_code code;
364 int length;
365 const char *format;
366 int i;
368 if (!y || !x)
369 return x == y;
370 code = GET_CODE (y);
371 if (code != REG && x == y)
372 return true;
373 if (GET_CODE (x) != code
374 || GET_MODE (x) != GET_MODE (y))
375 return false;
377 /* ??? could extend to allow CONST_INT inputs. */
378 switch (code)
380 case REG:
382 unsigned x_regno = REGNO (x);
383 unsigned y_regno = REGNO (y);
384 int x_common_live, y_common_live;
386 if (reload_completed
387 && (x_regno >= FIRST_PSEUDO_REGISTER
388 || y_regno >= FIRST_PSEUDO_REGISTER))
390 /* We should only see this in REG_NOTEs. */
391 gcc_assert (!info->live_update);
392 /* Returning false will cause us to remove the notes. */
393 return false;
395 #ifdef STACK_REGS
396 /* After reg-stack, can only accept literal matches of stack regs. */
397 if (info->mode & CLEANUP_POST_REGSTACK
398 && (IN_RANGE (x_regno, FIRST_STACK_REG, LAST_STACK_REG)
399 || IN_RANGE (y_regno, FIRST_STACK_REG, LAST_STACK_REG)))
400 return x_regno == y_regno;
401 #endif
403 /* If the register is a locally live one in one block, the
404 corresponding one must be locally live in the other, too, and
405 match of identical regnos doesn't apply. */
406 if (REGNO_REG_SET_P (info->x_local_live, x_regno))
408 if (!REGNO_REG_SET_P (info->y_local_live, y_regno))
409 return false;
411 else if (REGNO_REG_SET_P (info->y_local_live, y_regno))
412 return false;
413 else if (x_regno == y_regno)
415 if (!rvalue && info->cur.input_valid
416 && (reg_overlap_mentioned_p (x, info->x_input)
417 || reg_overlap_mentioned_p (x, info->y_input)))
418 return false;
420 /* Update liveness information. */
421 if (info->live_update
422 && assign_reg_reg_set (info->common_live, x, rvalue))
423 info->cur.version++;
425 return true;
428 x_common_live = REGNO_REG_SET_P (info->common_live, x_regno);
429 y_common_live = REGNO_REG_SET_P (info->common_live, y_regno);
430 if (x_common_live != y_common_live)
431 return false;
432 else if (x_common_live)
434 if (! rvalue || info->input_cost < 0 || no_new_pseudos)
435 return false;
436 /* If info->live_update is not set, we are processing notes.
437 We then allow a match with x_input / y_input found in a
438 previous pass. */
439 if (info->live_update && !info->cur.input_valid)
441 info->cur.input_valid = true;
442 info->x_input = x;
443 info->y_input = y;
444 info->cur.input_count += optimize_size ? 2 : 1;
445 if (info->input_reg
446 && GET_MODE (info->input_reg) != GET_MODE (info->x_input))
447 info->input_reg = NULL_RTX;
448 if (!info->input_reg)
449 info->input_reg = gen_reg_rtx (GET_MODE (info->x_input));
451 else if ((info->live_update
452 ? ! info->cur.input_valid : ! info->x_input)
453 || ! rtx_equal_p (x, info->x_input)
454 || ! rtx_equal_p (y, info->y_input))
455 return false;
456 validate_change (info->cur.x_start, xp, info->input_reg, 1);
458 else
460 int x_nregs = (x_regno >= FIRST_PSEUDO_REGISTER
461 ? 1 : hard_regno_nregs[x_regno][GET_MODE (x)]);
462 int y_nregs = (y_regno >= FIRST_PSEUDO_REGISTER
463 ? 1 : hard_regno_nregs[y_regno][GET_MODE (y)]);
464 int size = GET_MODE_SIZE (GET_MODE (x));
465 enum machine_mode x_mode = GET_MODE (x);
466 unsigned x_regno_i, y_regno_i;
467 int x_nregs_i, y_nregs_i, size_i;
468 int local_count = info->cur.local_count;
470 /* This might be a register local to each block. See if we have
471 it already registered. */
472 for (i = local_count - 1; i >= 0; i--)
474 x_regno_i = REGNO (info->x_local[i]);
475 x_nregs_i = (x_regno_i >= FIRST_PSEUDO_REGISTER
476 ? 1 : hard_regno_nregs[x_regno_i][GET_MODE (x)]);
477 y_regno_i = REGNO (info->y_local[i]);
478 y_nregs_i = (y_regno_i >= FIRST_PSEUDO_REGISTER
479 ? 1 : hard_regno_nregs[y_regno_i][GET_MODE (y)]);
480 size_i = GET_MODE_SIZE (GET_MODE (info->x_local[i]));
482 /* If we have a new pair of registers that is wider than an
483 old pair and encloses it with matching offsets,
484 remove the old pair. If we find a matching, wider, old
485 pair, use the old one. If the width is the same, use the
486 old one if the modes match, but the new one if they don't.
487 We don't want to get too fancy with subreg_regno_offset
488 here, so we just test two straightforward cases each. */
489 if (info->live_update
490 && (x_mode != GET_MODE (info->x_local[i])
491 ? size >= size_i : size > size_i))
493 /* If the new pair is fully enclosing a matching
494 existing pair, remove the old one. N.B. because
495 we are removing one entry here, the check below
496 if we have space for a new entry will succeed. */
497 if ((x_regno <= x_regno_i
498 && x_regno + x_nregs >= x_regno_i + x_nregs_i
499 && x_nregs == y_nregs && x_nregs_i == y_nregs_i
500 && x_regno - x_regno_i == y_regno - y_regno_i)
501 || (x_regno == x_regno_i && y_regno == y_regno_i
502 && x_nregs >= x_nregs_i && y_nregs >= y_nregs_i))
504 info->cur.local_count = --local_count;
505 info->x_local[i] = info->x_local[local_count];
506 info->y_local[i] = info->y_local[local_count];
507 continue;
510 else
513 /* If the new pair is fully enclosed within a matching
514 existing pair, succeed. */
515 if (x_regno >= x_regno_i
516 && x_regno + x_nregs <= x_regno_i + x_nregs_i
517 && x_nregs == y_nregs && x_nregs_i == y_nregs_i
518 && x_regno - x_regno_i == y_regno - y_regno_i)
519 break;
520 if (x_regno == x_regno_i && y_regno == y_regno_i
521 && x_nregs <= x_nregs_i && y_nregs <= y_nregs_i)
522 break;
525 /* Any other overlap causes a match failure. */
526 if (x_regno + x_nregs > x_regno_i
527 && x_regno_i + x_nregs_i > x_regno)
528 return false;
529 if (y_regno + y_nregs > y_regno_i
530 && y_regno_i + y_nregs_i > y_regno)
531 return false;
533 if (i < 0)
535 /* Not found. Create a new entry if possible. */
536 if (!info->live_update
537 || info->cur.local_count >= STRUCT_EQUIV_MAX_LOCAL)
538 return false;
539 info->x_local[info->cur.local_count] = x;
540 info->y_local[info->cur.local_count] = y;
541 info->cur.local_count++;
542 info->cur.version++;
544 note_local_live (info, x, y, rvalue);
546 return true;
548 case SET:
549 gcc_assert (rvalue < 0);
550 /* Ignore the destination's role as a destination. Still, we have
551 to consider input registers embedded in the addresses of a MEM.
552 N.B., we process the rvalue aspect of STRICT_LOW_PART /
553 ZERO_EXTEND / SIGN_EXTEND along with their lvalue aspect. */
554 if (!set_dest_addr_equiv_p (SET_DEST (x), SET_DEST (y), info))
555 return false;
556 /* Process source. */
557 return rtx_equiv_p (&SET_SRC (x), SET_SRC (y), 1, info);
558 case PRE_MODIFY:
559 /* Process destination. */
560 if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info))
561 return false;
562 /* Process source. */
563 return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info);
564 case POST_MODIFY:
566 rtx x_dest0, x_dest1;
568 /* Process destination. */
569 x_dest0 = XEXP (x, 0);
570 gcc_assert (REG_P (x_dest0));
571 if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info))
572 return false;
573 x_dest1 = XEXP (x, 0);
574 /* validate_change might have changed the destination. Put it back
575 so that we can do a proper match for its role as an input. */
576 XEXP (x, 0) = x_dest0;
577 if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info))
578 return false;
579 gcc_assert (x_dest1 == XEXP (x, 0));
580 /* Process source. */
581 return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info);
583 case CLOBBER:
584 gcc_assert (rvalue < 0);
585 return true;
586 /* Some special forms are also rvalues when they appear in lvalue
587 positions. However, we must not try to match a register after we
588 have already altered it with validate_change, so we consider the
589 rvalue aspect while we process the lvalue. */
590 case STRICT_LOW_PART:
591 case ZERO_EXTEND:
592 case SIGN_EXTEND:
594 rtx x_inner, y_inner;
595 enum rtx_code code;
596 int change;
598 if (rvalue)
599 break;
600 x_inner = XEXP (x, 0);
601 y_inner = XEXP (y, 0);
602 if (GET_MODE (x_inner) != GET_MODE (y_inner))
603 return false;
604 code = GET_CODE (x_inner);
605 if (code != GET_CODE (y_inner))
606 return false;
607 /* The address of a MEM is an input that will be processed during
608 rvalue == -1 processing. */
609 if (code == SUBREG)
611 if (SUBREG_BYTE (x_inner) != SUBREG_BYTE (y_inner))
612 return false;
613 x = x_inner;
614 x_inner = SUBREG_REG (x_inner);
615 y_inner = SUBREG_REG (y_inner);
616 if (GET_MODE (x_inner) != GET_MODE (y_inner))
617 return false;
618 code = GET_CODE (x_inner);
619 if (code != GET_CODE (y_inner))
620 return false;
622 if (code == MEM)
623 return true;
624 gcc_assert (code == REG);
625 if (! rtx_equiv_p (&XEXP (x, 0), y_inner, rvalue, info))
626 return false;
627 if (REGNO (x_inner) == REGNO (y_inner))
629 change = assign_reg_reg_set (info->common_live, x_inner, 1);
630 info->cur.version++;
632 else
633 change = note_local_live (info, x_inner, y_inner, 1);
634 gcc_assert (change);
635 return true;
637 /* The AUTO_INC / POST_MODIFY / PRE_MODIFY sets are modelled to take
638 place during input processing; however, that is benign, since they
639 are paired with reads. */
640 case MEM:
641 return !rvalue || rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info);
642 case POST_INC: case POST_DEC: case PRE_INC: case PRE_DEC:
643 return (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info)
644 && rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info));
645 case PARALLEL:
646 /* If this is a top-level PATTERN PARALLEL, we expect the caller to
647 have handled the SET_DESTs. A complex or vector PARALLEL can be
648 identified by having a mode. */
649 gcc_assert (rvalue < 0 || GET_MODE (x) != VOIDmode);
650 break;
651 case LABEL_REF:
652 /* Check special tablejump match case. */
653 if (XEXP (y, 0) == info->y_label)
654 return (XEXP (x, 0) == info->x_label);
655 /* We can't assume nonlocal labels have their following insns yet. */
656 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
657 return XEXP (x, 0) == XEXP (y, 0);
659 /* Two label-refs are equivalent if they point at labels
660 in the same position in the instruction stream. */
661 return (next_real_insn (XEXP (x, 0))
662 == next_real_insn (XEXP (y, 0)));
663 case SYMBOL_REF:
664 return XSTR (x, 0) == XSTR (y, 0);
665 /* Some rtl is guaranteed to be shared or unique; if we didn't match
666 by pointer (EQ) equality above, they aren't the same. */
667 case CONST_INT:
668 case CODE_LABEL:
669 return false;
670 default:
671 break;
674 /* For commutative operations, the RTXs match if the operands match in any
675 order. */
676 if (targetm.commutative_p (x, UNKNOWN))
677 return ((rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info)
678 && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), rvalue, info))
679 || (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 1), rvalue, info)
680 && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 0), rvalue, info)));
682 /* Process subexpressions - this is similar to rtx_equal_p. */
683 length = GET_RTX_LENGTH (code);
684 format = GET_RTX_FORMAT (code);
686 for (i = 0; i < length; ++i)
688 switch (format[i])
690 case 'w':
691 if (XWINT (x, i) != XWINT (y, i))
692 return false;
693 break;
694 case 'n':
695 case 'i':
696 if (XINT (x, i) != XINT (y, i))
697 return false;
698 break;
699 case 'V':
700 case 'E':
701 if (XVECLEN (x, i) != XVECLEN (y, i))
702 return false;
703 if (XVEC (x, i) != 0)
705 int j;
706 for (j = 0; j < XVECLEN (x, i); ++j)
708 if (! rtx_equiv_p (&XVECEXP (x, i, j), XVECEXP (y, i, j),
709 rvalue, info))
710 return false;
713 break;
714 case 'e':
715 if (! rtx_equiv_p (&XEXP (x, i), XEXP (y, i), rvalue, info))
716 return false;
717 break;
718 case 'S':
719 case 's':
720 if ((XSTR (x, i) || XSTR (y, i))
721 && (! XSTR (x, i) || ! XSTR (y, i)
722 || strcmp (XSTR (x, i), XSTR (y, i))))
723 return false;
724 break;
725 case 'u':
726 /* These are just backpointers, so they don't matter. */
727 break;
728 case '0':
729 case 't':
730 break;
731 /* It is believed that rtx's at this level will never
732 contain anything but integers and other rtx's,
733 except for within LABEL_REFs and SYMBOL_REFs. */
734 default:
735 gcc_unreachable ();
738 return true;
741 /* Do only the rtx_equiv_p SET_DEST processing for SETs and CLOBBERs.
742 Since we are scanning backwards, this is the first step in processing each
743 insn. Return true for success. */
744 static bool
745 set_dest_equiv_p (rtx x, rtx y, struct equiv_info *info)
747 if (!x || !y)
748 return x == y;
749 if (GET_CODE (x) != GET_CODE (y))
750 return false;
751 else if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
752 return rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info);
753 else if (GET_CODE (x) == PARALLEL)
755 int j;
757 if (XVECLEN (x, 0) != XVECLEN (y, 0))
758 return false;
759 for (j = 0; j < XVECLEN (x, 0); ++j)
761 rtx xe = XVECEXP (x, 0, j);
762 rtx ye = XVECEXP (y, 0, j);
764 if (GET_CODE (xe) != GET_CODE (ye))
765 return false;
766 if ((GET_CODE (xe) == SET || GET_CODE (xe) == CLOBBER)
767 && ! rtx_equiv_p (&XEXP (xe, 0), XEXP (ye, 0), 0, info))
768 return false;
771 return true;
774 /* Process MEMs in SET_DEST destinations. We must not process this together
775 with REG SET_DESTs, but must do it separately, lest when we see
776 [(set (reg:SI foo) (bar))
777 (set (mem:SI (reg:SI foo)) (baz))]
778 struct_equiv_block_eq be misled into assuming that (reg:SI foo)
779 is not live before this instruction. */
780 static bool
781 set_dest_addr_equiv_p (rtx x, rtx y, struct equiv_info *info)
783 enum rtx_code code = GET_CODE (x);
784 int length;
785 const char *format;
786 int i;
788 if (code != GET_CODE (y))
789 return false;
790 if (code == MEM)
791 return rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info);
793 /* Process subexpressions. */
794 length = GET_RTX_LENGTH (code);
795 format = GET_RTX_FORMAT (code);
797 for (i = 0; i < length; ++i)
799 switch (format[i])
801 case 'V':
802 case 'E':
803 if (XVECLEN (x, i) != XVECLEN (y, i))
804 return false;
805 if (XVEC (x, i) != 0)
807 int j;
808 for (j = 0; j < XVECLEN (x, i); ++j)
810 if (! set_dest_addr_equiv_p (XVECEXP (x, i, j),
811 XVECEXP (y, i, j), info))
812 return false;
815 break;
816 case 'e':
817 if (! set_dest_addr_equiv_p (XEXP (x, i), XEXP (y, i), info))
818 return false;
819 break;
820 default:
821 break;
824 return true;
827 /* Check if the set of REG_DEAD notes attached to I1 and I2 allows us to
828 go ahead with merging I1 and I2, which otherwise look fine.
829 Inputs / local registers for the inputs of I1 and I2 have already been
830 set up. */
831 static bool
832 death_notes_match_p (rtx i1 ATTRIBUTE_UNUSED, rtx i2 ATTRIBUTE_UNUSED,
833 struct equiv_info *info ATTRIBUTE_UNUSED)
835 #ifdef STACK_REGS
836 /* If cross_jump_death_matters is not 0, the insn's mode
837 indicates whether or not the insn contains any stack-like regs. */
839 if ((info->mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
841 /* If register stack conversion has already been done, then
842 death notes must also be compared before it is certain that
843 the two instruction streams match. */
845 rtx note;
846 HARD_REG_SET i1_regset, i2_regset;
848 CLEAR_HARD_REG_SET (i1_regset);
849 CLEAR_HARD_REG_SET (i2_regset);
851 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
852 if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
853 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
855 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
856 if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
858 unsigned regno = REGNO (XEXP (note, 0));
859 int i;
861 for (i = info->cur.local_count - 1; i >= 0; i--)
862 if (regno == REGNO (info->y_local[i]))
864 regno = REGNO (info->x_local[i]);
865 break;
867 SET_HARD_REG_BIT (i2_regset, regno);
870 if (!hard_reg_set_equal_p (i1_regset, i2_regset))
871 return false;
873 #endif
874 return true;
877 /* Return true if I1 and I2 are equivalent and thus can be crossjumped. */
879 bool
880 insns_match_p (rtx i1, rtx i2, struct equiv_info *info)
882 int rvalue_change_start;
883 struct struct_equiv_checkpoint before_rvalue_change;
885 /* Verify that I1 and I2 are equivalent. */
886 if (GET_CODE (i1) != GET_CODE (i2))
887 return false;
889 info->cur.x_start = i1;
890 info->cur.y_start = i2;
892 /* If this is a CALL_INSN, compare register usage information.
893 If we don't check this on stack register machines, the two
894 CALL_INSNs might be merged leaving reg-stack.c with mismatching
895 numbers of stack registers in the same basic block.
896 If we don't check this on machines with delay slots, a delay slot may
897 be filled that clobbers a parameter expected by the subroutine.
899 ??? We take the simple route for now and assume that if they're
900 equal, they were constructed identically. */
902 if (CALL_P (i1))
904 if (SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2)
905 || ! set_dest_equiv_p (PATTERN (i1), PATTERN (i2), info)
906 || ! set_dest_equiv_p (CALL_INSN_FUNCTION_USAGE (i1),
907 CALL_INSN_FUNCTION_USAGE (i2), info)
908 || ! rtx_equiv_p (&CALL_INSN_FUNCTION_USAGE (i1),
909 CALL_INSN_FUNCTION_USAGE (i2), -1, info))
911 cancel_changes (0);
912 return false;
915 else if (INSN_P (i1))
917 if (! set_dest_equiv_p (PATTERN (i1), PATTERN (i2), info))
919 cancel_changes (0);
920 return false;
923 rvalue_change_start = num_validated_changes ();
924 struct_equiv_make_checkpoint (&before_rvalue_change, info);
925 /* Check death_notes_match_p *after* the inputs have been processed,
926 so that local inputs will already have been set up. */
927 if (! INSN_P (i1)
928 || (!bitmap_bit_p (info->equiv_used, info->cur.ninsns)
929 && rtx_equiv_p (&PATTERN (i1), PATTERN (i2), -1, info)
930 && death_notes_match_p (i1, i2, info)
931 && verify_changes (0)))
932 return true;
934 /* Do not do EQUIV substitution after reload. First, we're undoing the
935 work of reload_cse. Second, we may be undoing the work of the post-
936 reload splitting pass. */
937 /* ??? Possibly add a new phase switch variable that can be used by
938 targets to disallow the troublesome insns after splitting. */
939 if (!reload_completed)
941 rtx equiv1, equiv2;
943 cancel_changes (rvalue_change_start);
944 struct_equiv_restore_checkpoint (&before_rvalue_change, info);
946 /* The following code helps take care of G++ cleanups. */
947 equiv1 = find_reg_equal_equiv_note (i1);
948 equiv2 = find_reg_equal_equiv_note (i2);
949 if (equiv1 && equiv2
950 /* If the equivalences are not to a constant, they may
951 reference pseudos that no longer exist, so we can't
952 use them. */
953 && (! reload_completed
954 || (CONSTANT_P (XEXP (equiv1, 0))
955 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))))
957 rtx s1 = single_set (i1);
958 rtx s2 = single_set (i2);
960 if (s1 != 0 && s2 != 0)
962 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
963 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
964 /* Only inspecting the new SET_SRC is not good enough,
965 because there may also be bare USEs in a single_set
966 PARALLEL. */
967 if (rtx_equiv_p (&PATTERN (i1), PATTERN (i2), -1, info)
968 && death_notes_match_p (i1, i2, info)
969 && verify_changes (0))
971 /* Mark this insn so that we'll use the equivalence in
972 all subsequent passes. */
973 bitmap_set_bit (info->equiv_used, info->cur.ninsns);
974 return true;
980 cancel_changes (0);
981 return false;
984 /* Set up mode and register information in INFO. Return true for success. */
985 bool
986 struct_equiv_init (int mode, struct equiv_info *info)
988 if (!REG_SET_EQUAL_P (DF_LR_OUT (info->x_block),
989 DF_LR_OUT (info->y_block)))
991 #ifdef STACK_REGS
992 unsigned rn;
994 if (!(mode & CLEANUP_POST_REGSTACK))
995 return false;
996 /* After reg-stack. Remove bogus live info about stack regs. N.B.
997 these regs are not necessarily all dead - we swap random bogosity
998 against constant bogosity. However, clearing these bits at
999 least makes the regsets comparable. */
1000 for (rn = FIRST_STACK_REG; rn <= LAST_STACK_REG; rn++)
1002 CLEAR_REGNO_REG_SET (DF_LR_OUT (info->x_block), rn);
1003 CLEAR_REGNO_REG_SET (DF_LR_OUT (info->y_block), rn);
1005 if (!REG_SET_EQUAL_P (DF_LR_OUT (info->x_block),
1006 DF_LR_OUT (info->y_block)))
1007 #endif
1008 return false;
1010 info->mode = mode;
1011 if (mode & STRUCT_EQUIV_START)
1013 info->x_input = info->y_input = info->input_reg = NULL_RTX;
1014 info->equiv_used = ALLOC_REG_SET (&reg_obstack);
1015 info->check_input_conflict = false;
1017 info->had_input_conflict = false;
1018 info->cur.ninsns = info->cur.version = 0;
1019 info->cur.local_count = info->cur.input_count = 0;
1020 info->cur.x_start = info->cur.y_start = NULL_RTX;
1021 info->x_label = info->y_label = NULL_RTX;
1022 info->need_rerun = false;
1023 info->live_update = true;
1024 info->cur.input_valid = false;
1025 info->common_live = ALLOC_REG_SET (&reg_obstack);
1026 info->x_local_live = ALLOC_REG_SET (&reg_obstack);
1027 info->y_local_live = ALLOC_REG_SET (&reg_obstack);
1028 COPY_REG_SET (info->common_live, DF_LR_OUT (info->x_block));
1029 struct_equiv_make_checkpoint (&info->best_match, info);
1030 return true;
1033 /* Insns XI and YI have been matched. Merge memory attributes and reg
1034 notes. */
1035 static void
1036 struct_equiv_merge (rtx xi, rtx yi, struct equiv_info *info)
1038 rtx equiv1, equiv2;
1040 merge_memattrs (xi, yi);
1042 /* If the merged insns have different REG_EQUAL notes, then
1043 remove them. */
1044 info->live_update = false;
1045 equiv1 = find_reg_equal_equiv_note (xi);
1046 equiv2 = find_reg_equal_equiv_note (yi);
1047 if (equiv1 && !equiv2)
1048 remove_note (xi, equiv1);
1049 else if (!equiv1 && equiv2)
1050 remove_note (yi, equiv2);
1051 else if (equiv1 && equiv2
1052 && !rtx_equiv_p (&XEXP (equiv1, 0), XEXP (equiv2, 0),
1053 1, info))
1055 remove_note (xi, equiv1);
1056 remove_note (yi, equiv2);
1058 info->live_update = true;
1061 /* Return number of matched insns.
1062 This function must be called up to three times for a successful cross-jump
1063 match:
1064 first to find out which instructions do match. While trying to match
1065 another instruction that doesn't match, we destroy information in info
1066 about the actual inputs. So if there have been any before the last
1067 match attempt, we need to call this function again to recompute the
1068 actual inputs up to the actual start of the matching sequence.
1069 When we are then satisfied that the cross-jump is worthwhile, we
1070 call this function a third time to make any changes needed to make the
1071 sequences match: apply equivalences, remove non-matching
1072 notes and merge memory attributes. */
1073 int
1074 struct_equiv_block_eq (int mode, struct equiv_info *info)
1076 rtx x_stop, y_stop;
1077 rtx xi, yi;
1078 int i;
1080 if (mode & STRUCT_EQUIV_START)
1082 x_stop = BB_HEAD (info->x_block);
1083 y_stop = BB_HEAD (info->y_block);
1084 if (!x_stop || !y_stop)
1085 return 0;
1087 else
1089 x_stop = info->cur.x_start;
1090 y_stop = info->cur.y_start;
1092 if (!struct_equiv_init (mode, info))
1093 gcc_unreachable ();
1095 /* Skip simple jumps at the end of the blocks. Complex jumps still
1096 need to be compared for equivalence, which we'll do below. */
1098 xi = BB_END (info->x_block);
1099 if (onlyjump_p (xi)
1100 || (returnjump_p (xi) && !side_effects_p (PATTERN (xi))))
1102 info->cur.x_start = xi;
1103 xi = PREV_INSN (xi);
1106 yi = BB_END (info->y_block);
1107 if (onlyjump_p (yi)
1108 || (returnjump_p (yi) && !side_effects_p (PATTERN (yi))))
1110 info->cur.y_start = yi;
1111 /* Count everything except for an unconditional jump as an insn. */
1112 /* ??? Is it right to count unconditional jumps with a clobber?
1113 Should we count conditional returns? */
1114 if (!simplejump_p (yi) && !returnjump_p (yi) && info->cur.x_start)
1115 info->cur.ninsns++;
1116 yi = PREV_INSN (yi);
1119 if (mode & STRUCT_EQUIV_MATCH_JUMPS)
1121 /* The caller is expected to have compared the jumps already, but we
1122 need to match them again to get any local registers and inputs. */
1123 gcc_assert (!info->cur.x_start == !info->cur.y_start);
1124 if (info->cur.x_start)
1126 if (any_condjump_p (info->cur.x_start)
1127 ? !condjump_equiv_p (info, false)
1128 : !insns_match_p (info->cur.x_start, info->cur.y_start, info))
1129 gcc_unreachable ();
1131 else if (any_condjump_p (xi) && any_condjump_p (yi))
1133 info->cur.x_start = xi;
1134 info->cur.y_start = yi;
1135 xi = PREV_INSN (xi);
1136 yi = PREV_INSN (yi);
1137 info->cur.ninsns++;
1138 if (!condjump_equiv_p (info, false))
1139 gcc_unreachable ();
1141 if (info->cur.x_start && info->mode & STRUCT_EQUIV_FINAL)
1142 struct_equiv_merge (info->cur.x_start, info->cur.y_start, info);
1145 struct_equiv_improve_checkpoint (&info->best_match, info);
1146 info->x_end = xi;
1147 info->y_end = yi;
1148 if (info->cur.x_start != x_stop)
1149 for (;;)
1151 /* Ignore notes. */
1152 while (!INSN_P (xi) && xi != x_stop)
1153 xi = PREV_INSN (xi);
1155 while (!INSN_P (yi) && yi != y_stop)
1156 yi = PREV_INSN (yi);
1158 if (!insns_match_p (xi, yi, info))
1159 break;
1160 if (INSN_P (xi))
1162 if (info->mode & STRUCT_EQUIV_FINAL)
1163 struct_equiv_merge (xi, yi, info);
1164 info->cur.ninsns++;
1165 struct_equiv_improve_checkpoint (&info->best_match, info);
1167 if (xi == x_stop || yi == y_stop)
1169 /* If we reached the start of at least one of the blocks, but
1170 best_match hasn't been advanced back to the first valid insn
1171 yet, represent the increased benefit of completing the block
1172 as an increased instruction count. */
1173 if (info->best_match.x_start != info->cur.x_start
1174 && (xi == BB_HEAD (info->x_block)
1175 || yi == BB_HEAD (info->y_block)))
1177 info->cur.ninsns++;
1178 struct_equiv_improve_checkpoint (&info->best_match, info);
1179 info->cur.ninsns--;
1180 if (info->best_match.ninsns > info->cur.ninsns)
1181 info->best_match.ninsns = info->cur.ninsns;
1183 break;
1185 xi = PREV_INSN (xi);
1186 yi = PREV_INSN (yi);
1189 /* If we failed to match an insn, but had some changes registered from
1190 trying to make the insns match, we need to cancel these changes now. */
1191 cancel_changes (0);
1192 /* Restore to best_match to get the sequence with the best known-so-far
1193 cost-benefit difference. */
1194 struct_equiv_restore_checkpoint (&info->best_match, info);
1196 /* Include preceding notes and labels in the cross-jump / if-conversion.
1197 One, this may bring us to the head of the blocks.
1198 Two, it keeps line number notes as closely matched as possible. */
1199 if (info->cur.ninsns)
1201 xi = info->cur.x_start;
1202 yi = info->cur.y_start;
1203 while (xi != x_stop && !INSN_P (PREV_INSN (xi)))
1204 xi = PREV_INSN (xi);
1206 while (yi != y_stop && !INSN_P (PREV_INSN (yi)))
1207 yi = PREV_INSN (yi);
1209 info->cur.x_start = xi;
1210 info->cur.y_start = yi;
1213 if (!info->cur.input_valid)
1214 info->x_input = info->y_input = info->input_reg = NULL_RTX;
1215 if (!info->need_rerun)
1217 find_dying_inputs (info);
1218 if (info->mode & STRUCT_EQUIV_FINAL)
1220 if (info->check_input_conflict && ! resolve_input_conflict (info))
1221 gcc_unreachable ();
1223 else
1225 bool input_conflict = info->had_input_conflict;
1227 if (!input_conflict
1228 && info->dying_inputs > 1
1229 && bitmap_intersect_p (info->x_local_live, info->y_local_live))
1231 regset_head clobbered_regs;
1233 INIT_REG_SET (&clobbered_regs);
1234 for (i = 0; i < info->cur.local_count; i++)
1236 if (assign_reg_reg_set (&clobbered_regs, info->y_local[i], 0))
1238 input_conflict = true;
1239 break;
1241 assign_reg_reg_set (&clobbered_regs, info->x_local[i], 1);
1243 CLEAR_REG_SET (&clobbered_regs);
1245 if (input_conflict && !info->check_input_conflict)
1246 info->need_rerun = true;
1247 info->check_input_conflict = input_conflict;
1251 if (info->mode & STRUCT_EQUIV_NEED_FULL_BLOCK
1252 && (info->cur.x_start != x_stop || info->cur.y_start != y_stop))
1253 return 0;
1254 return info->cur.ninsns;
1257 /* For each local register, set info->local_rvalue to true iff the register
1258 is a dying input. Store the total number of these in info->dying_inputs. */
1259 static void
1260 find_dying_inputs (struct equiv_info *info)
1262 int i;
1264 info->dying_inputs = 0;
1265 for (i = info->cur.local_count-1; i >=0; i--)
1267 rtx x = info->x_local[i];
1268 unsigned regno = REGNO (x);
1269 int nregs = (regno >= FIRST_PSEUDO_REGISTER
1270 ? 1 : hard_regno_nregs[regno][GET_MODE (x)]);
1272 for (info->local_rvalue[i] = false; nregs > 0; regno++, --nregs)
1273 if (REGNO_REG_SET_P (info->x_local_live, regno))
1275 info->dying_inputs++;
1276 info->local_rvalue[i] = true;
1277 break;
1282 /* For each local register that is a dying input, y_local[i] will be
1283 copied to x_local[i]. We'll do this in ascending order. Try to
1284 re-order the locals to avoid conflicts like r3 = r2; r4 = r3.
1285 Return true iff the re-ordering is successful, or not necessary. */
1286 static bool
1287 resolve_input_conflict (struct equiv_info *info)
1289 int i, j, end;
1290 int nswaps = 0;
1291 rtx save_x_local[STRUCT_EQUIV_MAX_LOCAL];
1292 rtx save_y_local[STRUCT_EQUIV_MAX_LOCAL];
1294 find_dying_inputs (info);
1295 if (info->dying_inputs <= 1)
1296 return true;
1297 memcpy (save_x_local, info->x_local, sizeof save_x_local);
1298 memcpy (save_y_local, info->y_local, sizeof save_y_local);
1299 end = info->cur.local_count - 1;
1300 for (i = 0; i <= end; i++)
1302 /* Cycle detection with regsets is expensive, so we just check that
1303 we don't exceed the maximum number of swaps needed in the acyclic
1304 case. */
1305 int max_swaps = end - i;
1307 /* Check if x_local[i] will be clobbered. */
1308 if (!info->local_rvalue[i])
1309 continue;
1310 /* Check if any later value needs to be copied earlier. */
1311 for (j = i + 1; j <= end; j++)
1313 rtx tmp;
1315 if (!info->local_rvalue[j])
1316 continue;
1317 if (!reg_overlap_mentioned_p (info->x_local[i], info->y_local[j]))
1318 continue;
1319 if (--max_swaps < 0)
1321 memcpy (info->x_local, save_x_local, sizeof save_x_local);
1322 memcpy (info->y_local, save_y_local, sizeof save_y_local);
1323 return false;
1325 nswaps++;
1326 tmp = info->x_local[i];
1327 info->x_local[i] = info->x_local[j];
1328 info->x_local[j] = tmp;
1329 tmp = info->y_local[i];
1330 info->y_local[i] = info->y_local[j];
1331 info->y_local[j] = tmp;
1332 j = i;
1335 info->had_input_conflict = true;
1336 if (dump_file && nswaps)
1337 fprintf (dump_file, "Resolved input conflict, %d %s.\n",
1338 nswaps, nswaps == 1 ? "swap" : "swaps");
1339 return true;