gcc/struct-equiv.c
1 /* Control flow optimization code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /* Try to match two basic blocks - or their ends - for structural equivalence.
25 We scan the blocks from their ends backwards, hoping to find a match, i.e.
26 insns are identical, except for certain cases involving registers. A
27 mismatch between register number RX (used in block X) and RY (used in the
28 same way in block Y) can be handled in one of the following cases:
29 1. RX and RY are local to their respective blocks; they are set there and
30 die there. If so, they can effectively be ignored.
31 2. RX and RY die in their blocks, but live at the start. If any path
32 gets redirected through X instead of Y, the caller must emit
33 compensation code to move RY to RX. If there are overlapping inputs,
34 the function resolve_input_conflict ensures that this can be done.
35 Information about these registers is tracked in the X_LOCAL, Y_LOCAL,
36 LOCAL_COUNT and LOCAL_RVALUE fields.
37 3. RX and RY live throughout their blocks, including the start and the end.
38 Either RX and RY must be identical, or we have to replace all uses in
39 block X with a new pseudo, which is stored in the INPUT_REG field. The
40 caller can then use block X instead of block Y by copying RY to the new
41 pseudo.
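For illustration (the register numbers are made up), suppose block X
contains
(set (reg 300) (plus (reg 100) (const_int 4)))
and block Y contains, at the matching position,
(set (reg 300) (plus (reg 101) (const_int 4))) .
If regs 100 and 101 are set earlier in their own blocks and die here,
they form a case 1 pair and can simply be ignored. If they are live at
the start of their blocks and die here, they are case 2 inputs, and
redirecting a path from Y through X requires a compensating copy of
reg 101 into reg 100 on that path. If they stay live throughout, case 3
applies: all uses of reg 100 in block X are replaced with the new pseudo
stored in INPUT_REG, which the caller then initializes from reg 101.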
43 The main entry point to this file is struct_equiv_block_eq. This function
44 uses a struct equiv_info to accept some of its inputs, to keep track of its
45 internal state, to pass down to its helper functions, and to communicate
46 some of the results back to the caller.
48 Most scans will result in a failure to match a sufficient number of insns
49 to make any optimization worthwhile, therefore the process is geared more
50 towards quick scanning rather than the ability to exactly backtrack when
51 we find a mismatch. The information gathered is still meaningful to make
52 a preliminary decision whether we want to do an optimization; we might
53 only slightly overestimate the number of matchable insns, underestimate
54 the number of inputs, and miss an input conflict. Sufficient information
55 is gathered so that when we make another pass, we won't have to backtrack
56 at the same point.
57 Another issue is that information in memory attributes and/or REG_NOTES
58 might have to be merged or discarded to make a valid match. We don't want
59 to discard such information when we are not certain that we want to merge
60 the two (partial) blocks.
61 For these reasons, struct_equiv_block_eq has to be called first with the
62 STRUCT_EQUIV_START bit set in the mode parameter. This will calculate the
63 number of matched insns and the number and types of inputs. If the
64 need_rerun field is set, the results are only tentative, and the caller
65 has to call again with STRUCT_EQUIV_RERUN till need_rerun is false in
66 order to get a reliable match.
67 To install the changes necessary for the match, the function has to be
68 called again with STRUCT_EQUIV_FINAL (see the sketch after this comment).
70 While scanning an insn, we process first all the SET_DESTs, then the
71 SET_SRCs, then the REG_NOTES, in order to keep the register liveness
72 information consistent.
73 If we were to mix up the order for sources / destinations in an insn where
74 a source is also a destination, we'd end up mistakenly thinking that
75 the register is not live in the preceding insn. */
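/* A rough sketch of how a caller might drive a match; this is illustrative
   only.  X_BB and Y_BB stand for the caller's candidate basic blocks,
   WORTH_MERGING_P for whatever profitability test the caller applies,
   INPUT_MOVE_COST for the cost the caller assigns to setting up one input
   (negative to forbid inputs altogether), and further mode bits such as
   STRUCT_EQUIV_NEED_FULL_BLOCK would be ORed into each call as needed:

     struct equiv_info info;
     int nmatch;

     info.x_block = x_bb;
     info.y_block = y_bb;
     info.input_cost = INPUT_MOVE_COST;
     nmatch = struct_equiv_block_eq (STRUCT_EQUIV_START, &info);
     while (nmatch && info.need_rerun)
       nmatch = struct_equiv_block_eq (STRUCT_EQUIV_RERUN, &info);
     if (nmatch && WORTH_MERGING_P (nmatch, &info))
       struct_equiv_block_eq (STRUCT_EQUIV_FINAL, &info);  */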
77 #include "config.h"
78 #include "system.h"
79 #include "coretypes.h"
80 #include "tm.h"
81 #include "rtl.h"
82 #include "regs.h"
83 #include "output.h"
84 #include "insn-config.h"
85 #include "flags.h"
86 #include "recog.h"
87 #include "tm_p.h"
88 #include "target.h"
89 #include "emit-rtl.h"
90 #include "reload.h"
92 static void merge_memattrs (rtx, rtx);
93 static bool set_dest_equiv_p (rtx x, rtx y, struct equiv_info *info);
94 static bool set_dest_addr_equiv_p (rtx x, rtx y, struct equiv_info *info);
95 static void find_dying_inputs (struct equiv_info *info);
96 static bool resolve_input_conflict (struct equiv_info *info);
98 /* After reload, some moves, as indicated by SECONDARY_RELOAD_CLASS and
99 SECONDARY_MEMORY_NEEDED, cannot be done directly. For our purposes, we
100 consider them impossible to generate after reload (even though some
101 might be synthesized when you throw enough code at them).
102 Since we don't know while processing a cross-jump if a local register
103 that is currently live will eventually be live and thus be an input,
104 we keep track of potential inputs that would require an impossible move
105 by using a prohibitively high cost for them.
106 This number, multiplied by the larger of STRUCT_EQUIV_MAX_LOCAL and
107 FIRST_PSEUDO_REGISTER, must fit in the input_cost field of
108 struct equiv_info. */
109 #define IMPOSSIBLE_MOVE_FACTOR 20000
113 /* Remove the memory attributes of corresponding MEMs in X and Y where they
114 are not equal, or weaken them to a common value. */
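/* For example, if corresponding MEMs have different alias sets, both get
   alias set 0; if their MEM_EXPRs differ, the expressions and offsets of
   both are cleared; if both sizes are known, the larger MEM_SIZE is kept,
   otherwise the size is cleared; and the alignment of both is reduced to
   the smaller MEM_ALIGN.  (This just restates the code below.)  */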
116 static void
117 merge_memattrs (rtx x, rtx y)
119 int i;
120 int j;
121 enum rtx_code code;
122 const char *fmt;
124 if (x == y)
125 return;
126 if (x == 0 || y == 0)
127 return;
129 code = GET_CODE (x);
131 if (code != GET_CODE (y))
132 return;
134 if (GET_MODE (x) != GET_MODE (y))
135 return;
137 if (code == MEM && MEM_ATTRS (x) != MEM_ATTRS (y))
139 if (! MEM_ATTRS (x))
140 MEM_ATTRS (y) = 0;
141 else if (! MEM_ATTRS (y))
142 MEM_ATTRS (x) = 0;
143 else
145 rtx mem_size;
147 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
149 set_mem_alias_set (x, 0);
150 set_mem_alias_set (y, 0);
153 if (! mem_expr_equal_p (MEM_EXPR (x), MEM_EXPR (y)))
155 set_mem_expr (x, 0);
156 set_mem_expr (y, 0);
157 set_mem_offset (x, 0);
158 set_mem_offset (y, 0);
160 else if (MEM_OFFSET (x) != MEM_OFFSET (y))
162 set_mem_offset (x, 0);
163 set_mem_offset (y, 0);
166 if (!MEM_SIZE (x))
167 mem_size = NULL_RTX;
168 else if (!MEM_SIZE (y))
169 mem_size = NULL_RTX;
170 else
171 mem_size = GEN_INT (MAX (INTVAL (MEM_SIZE (x)),
172 INTVAL (MEM_SIZE (y))));
173 set_mem_size (x, mem_size);
174 set_mem_size (y, mem_size);
176 set_mem_align (x, MIN (MEM_ALIGN (x), MEM_ALIGN (y)));
177 set_mem_align (y, MEM_ALIGN (x));
181 fmt = GET_RTX_FORMAT (code);
182 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
184 switch (fmt[i])
186 case 'E':
187 /* Two vectors must have the same length. */
188 if (XVECLEN (x, i) != XVECLEN (y, i))
189 return;
191 for (j = 0; j < XVECLEN (x, i); j++)
192 merge_memattrs (XVECEXP (x, i, j), XVECEXP (y, i, j));
194 break;
196 case 'e':
197 merge_memattrs (XEXP (x, i), XEXP (y, i));
200 return;
203 /* In SET, assign the value VALUE to the bit for the register number of REG.
204 If REG is a hard register, do so for all its constituent registers.
205 Return the number of registers that have become included (as a positive
206 number) or excluded (as a negative number). */
207 static int
208 assign_reg_reg_set (regset set, rtx reg, int value)
210 unsigned regno = REGNO (reg);
211 int nregs, i, old;
213 if (regno >= FIRST_PSEUDO_REGISTER)
215 gcc_assert (!reload_completed);
216 nregs = 1;
218 else
219 nregs = hard_regno_nregs[regno][GET_MODE (reg)];
220 for (old = 0, i = nregs; --i >= 0; regno++)
222 if ((value != 0) == REGNO_REG_SET_P (set, regno))
223 continue;
224 if (value)
225 old++, SET_REGNO_REG_SET (set, regno);
226 else
227 old--, CLEAR_REGNO_REG_SET (set, regno);
229 return old;
232 /* Record state about current inputs / local registers / liveness
233 in *P. */
234 static inline void
235 struct_equiv_make_checkpoint (struct struct_equiv_checkpoint *p,
236 struct equiv_info *info)
238 *p = info->cur;
241 /* Call struct_equiv_make_checkpoint (P, INFO) if the current partial block
242 is suitable to split off - i.e. there is no dangling cc0 user - and
243 if the current cost of the common instructions, minus the cost for
244 setting up the inputs, is higher than what has been recorded before
245 in CHECKPOINT[N]. Also, if we do so, confirm or cancel any pending
246 changes. */
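/* For example, with INFO->input_cost equal to COSTS_N_INSNS (1), moving the
   checkpoint past three more matched insns that also add two more inputs
   is an improvement, since COSTS_N_INSNS (3) > 2 * COSTS_N_INSNS (1).
   With a negative input_cost, any increase in matched insns qualifies, but
   only as long as no inputs are required at all.  (Illustrative numbers.)  */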
247 static void
248 struct_equiv_improve_checkpoint (struct struct_equiv_checkpoint *p,
249 struct equiv_info *info)
251 #ifdef HAVE_cc0
252 if (reg_mentioned_p (cc0_rtx, info->x_start) && !sets_cc0_p (info->x_start))
253 return;
254 #endif
255 if (info->cur.input_count >= IMPOSSIBLE_MOVE_FACTOR)
256 return;
257 if (info->input_cost >= 0
258 ? (COSTS_N_INSNS(info->cur.ninsns - p->ninsns)
259 > info->input_cost * (info->cur.input_count - p->input_count))
260 : info->cur.ninsns > p->ninsns && !info->cur.input_count)
262 if (info->check_input_conflict && ! resolve_input_conflict (info))
263 return;
264 /* We have a profitable set of changes. If this is the final pass,
265 commit them now. Otherwise, we don't know yet if we can make any
266 change, so put the old code back for now. */
267 if (info->mode & STRUCT_EQUIV_FINAL)
268 confirm_change_group ();
269 else
270 cancel_changes (0);
271 struct_equiv_make_checkpoint (p, info);
275 /* Restore state about current inputs / local registers / liveness
276 from P. */
277 static void
278 struct_equiv_restore_checkpoint (struct struct_equiv_checkpoint *p,
279 struct equiv_info *info)
281 info->cur.ninsns = p->ninsns;
282 info->cur.x_start = p->x_start;
283 info->cur.y_start = p->y_start;
284 info->cur.input_count = p->input_count;
285 info->cur.input_valid = p->input_valid;
286 while (info->cur.local_count > p->local_count)
288 info->cur.local_count--;
289 info->cur.version--;
290 if (REGNO_REG_SET_P (info->x_local_live,
291 REGNO (info->x_local[info->cur.local_count])))
293 assign_reg_reg_set (info->x_local_live,
294 info->x_local[info->cur.local_count], 0);
295 assign_reg_reg_set (info->y_local_live,
296 info->y_local[info->cur.local_count], 0);
297 info->cur.version--;
300 if (info->cur.version != p->version)
301 info->need_rerun = true;
305 /* Update register liveness to reflect that X is now live (if rvalue is
306 nonzero) or dead (if rvalue is zero) in INFO->x_block, and likewise Y
307 in INFO->y_block. Return the number of registers the liveness of which
308 changed in each block (as a negative number if registers became dead). */
309 static int
310 note_local_live (struct equiv_info *info, rtx x, rtx y, int rvalue)
312 int x_change = assign_reg_reg_set (info->x_local_live, x, rvalue);
313 int y_change = assign_reg_reg_set (info->y_local_live, y, rvalue);
315 gcc_assert (x_change == y_change);
316 if (y_change)
318 if (reload_completed)
320 unsigned x_regno ATTRIBUTE_UNUSED = REGNO (x);
321 unsigned y_regno = REGNO (y);
322 enum machine_mode x_mode = GET_MODE (x);
324 if (secondary_reload_class (0, REGNO_REG_CLASS (y_regno), x_mode, x)
325 != NO_REGS
326 #ifdef SECONDARY_MEMORY_NEEDED
327 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (y_regno),
328 REGNO_REG_CLASS (x_regno), x_mode)
329 #endif
331 y_change *= IMPOSSIBLE_MOVE_FACTOR;
333 info->cur.input_count += y_change;
334 info->cur.version++;
336 return x_change;
339 /* Check if *XP is equivalent to Y. Until an irreconcilable difference is
340 found, use in-group changes with validate_change on *XP to make register
341 assignments agree. It is the (not necessarily direct) caller's
342 responsibility to verify / confirm / cancel these changes, as appropriate.
343 RVALUE indicates how the processed piece of rtl is used (see below); when it
344 is used as a destination, we can't have different registers being an input.
345 Return true if *XP and Y have been identified as equivalent, false otherwise.
346 RVALUE == 0: destination
347 RVALUE == 1: source
348 RVALUE == -1: source, ignore SET_DEST of SET / clobber. */
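/* For example, for the pattern
     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
   (register numbers made up), insns_match_p first matches the SET_DEST
   (reg:SI 100) against its counterpart with RVALUE == 0 via
   set_dest_equiv_p, and then rescans the whole pattern with RVALUE == -1,
   which ignores the SET_DEST except for addresses inside a MEM and matches
   (reg:SI 101) as a source with RVALUE == 1.  */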
349 bool
350 rtx_equiv_p (rtx *xp, rtx y, int rvalue, struct equiv_info *info)
352 rtx x = *xp;
353 enum rtx_code code;
354 int length;
355 const char *format;
356 int i;
358 if (!y || !x)
359 return x == y;
360 code = GET_CODE (y);
361 if (code != REG && x == y)
362 return true;
363 if (GET_CODE (x) != code
364 || GET_MODE (x) != GET_MODE (y))
365 return false;
367 /* ??? could extend to allow CONST_INT inputs. */
368 switch (code)
370 case REG:
372 unsigned x_regno = REGNO (x);
373 unsigned y_regno = REGNO (y);
374 int x_common_live, y_common_live;
376 if (reload_completed
377 && (x_regno >= FIRST_PSEUDO_REGISTER
378 || y_regno >= FIRST_PSEUDO_REGISTER))
380 /* We should only see this in REG_NOTEs. */
381 gcc_assert (!info->live_update);
382 /* Returning false will cause us to remove the notes. */
383 return false;
385 #ifdef STACK_REGS
386 /* After reg-stack, can only accept literal matches of stack regs. */
387 if (info->mode & CLEANUP_POST_REGSTACK
388 && (IN_RANGE (x_regno, FIRST_STACK_REG, LAST_STACK_REG)
389 || IN_RANGE (y_regno, FIRST_STACK_REG, LAST_STACK_REG)))
390 return x_regno == y_regno;
391 #endif
393 /* If the register is a locally live one in one block, the
394 corresponding one must be locally live in the other, too, and
395 match of identical regnos doesn't apply. */
396 if (REGNO_REG_SET_P (info->x_local_live, x_regno))
398 if (!REGNO_REG_SET_P (info->y_local_live, y_regno))
399 return false;
401 else if (REGNO_REG_SET_P (info->y_local_live, y_regno))
402 return false;
403 else if (x_regno == y_regno)
405 if (!rvalue && info->cur.input_valid
406 && (reg_overlap_mentioned_p (x, info->x_input)
407 || reg_overlap_mentioned_p (x, info->y_input)))
408 return false;
410 /* Update liveness information. */
411 if (info->live_update
412 && assign_reg_reg_set (info->common_live, x, rvalue))
413 info->cur.version++;
415 return true;
418 x_common_live = REGNO_REG_SET_P (info->common_live, x_regno);
419 y_common_live = REGNO_REG_SET_P (info->common_live, y_regno);
420 if (x_common_live != y_common_live)
421 return false;
422 else if (x_common_live)
424 if (! rvalue || info->input_cost < 0 || no_new_pseudos)
425 return false;
426 /* If info->live_update is not set, we are processing notes.
427 We then allow a match with x_input / y_input found in a
428 previous pass. */
429 if (info->live_update && !info->cur.input_valid)
431 info->cur.input_valid = true;
432 info->x_input = x;
433 info->y_input = y;
434 info->cur.input_count += optimize_size ? 2 : 1;
435 if (info->input_reg
436 && GET_MODE (info->input_reg) != GET_MODE (info->x_input))
437 info->input_reg = NULL_RTX;
438 if (!info->input_reg)
439 info->input_reg = gen_reg_rtx (GET_MODE (info->x_input));
441 else if ((info->live_update
442 ? ! info->cur.input_valid : ! info->x_input)
443 || ! rtx_equal_p (x, info->x_input)
444 || ! rtx_equal_p (y, info->y_input))
445 return false;
446 validate_change (info->cur.x_start, xp, info->input_reg, 1);
448 else
450 int x_nregs = (x_regno >= FIRST_PSEUDO_REGISTER
451 ? 1 : hard_regno_nregs[x_regno][GET_MODE (x)]);
452 int y_nregs = (y_regno >= FIRST_PSEUDO_REGISTER
453 ? 1 : hard_regno_nregs[y_regno][GET_MODE (y)]);
454 int size = GET_MODE_SIZE (GET_MODE (x));
455 enum machine_mode x_mode = GET_MODE (x);
456 unsigned x_regno_i, y_regno_i;
457 int x_nregs_i, y_nregs_i, size_i;
458 int local_count = info->cur.local_count;
460 /* This might be a register local to each block. See if we have
461 it already registered. */
462 for (i = local_count - 1; i >= 0; i--)
464 x_regno_i = REGNO (info->x_local[i]);
465 x_nregs_i = (x_regno_i >= FIRST_PSEUDO_REGISTER
466 ? 1 : hard_regno_nregs[x_regno_i][GET_MODE (x)]);
467 y_regno_i = REGNO (info->y_local[i]);
468 y_nregs_i = (y_regno_i >= FIRST_PSEUDO_REGISTER
469 ? 1 : hard_regno_nregs[y_regno_i][GET_MODE (y)]);
470 size_i = GET_MODE_SIZE (GET_MODE (info->x_local[i]));
472 /* If we have a new pair of registers that is wider than an
473 old pair and encloses it with matching offsets,
474 remove the old pair. If we find a matching, wider, old
475 pair, use the old one. If the width is the same, use the
476 old one if the modes match, but the new one if they don't.
477 We don't want to get too fancy with subreg_regno_offset
478 here, so we just test two straightforward cases each. */
479 if (info->live_update
480 && (x_mode != GET_MODE (info->x_local[i])
481 ? size >= size_i : size > size_i))
483 /* If the new pair fully encloses a matching
484 existing pair, remove the old one. N.B. because
485 we are removing one entry here, the check below
486 whether we have space for a new entry will succeed. */
487 if ((x_regno <= x_regno_i
488 && x_regno + x_nregs >= x_regno_i + x_nregs_i
489 && x_nregs == y_nregs && x_nregs_i == y_nregs_i
490 && x_regno - x_regno_i == y_regno - y_regno_i)
491 || (x_regno == x_regno_i && y_regno == y_regno_i
492 && x_nregs >= x_nregs_i && y_nregs >= y_nregs_i))
494 info->cur.local_count = --local_count;
495 info->x_local[i] = info->x_local[local_count];
496 info->y_local[i] = info->y_local[local_count];
497 continue;
500 else
503 /* If the new pair is fully enclosed within a matching
504 existing pair, succeed. */
505 if (x_regno >= x_regno_i
506 && x_regno + x_nregs <= x_regno_i + x_nregs_i
507 && x_nregs == y_nregs && x_nregs_i == y_nregs_i
508 && x_regno - x_regno_i == y_regno - y_regno_i)
509 break;
510 if (x_regno == x_regno_i && y_regno == y_regno_i
511 && x_nregs <= x_nregs_i && y_nregs <= y_nregs_i)
512 break;
515 /* Any other overlap causes a match failure. */
516 if (x_regno + x_nregs > x_regno_i
517 && x_regno_i + x_nregs_i > x_regno)
518 return false;
519 if (y_regno + y_nregs > y_regno_i
520 && y_regno_i + y_nregs_i > y_regno)
521 return false;
523 if (i < 0)
525 /* Not found. Create a new entry if possible. */
526 if (!info->live_update
527 || info->cur.local_count >= STRUCT_EQUIV_MAX_LOCAL)
528 return false;
529 info->x_local[info->cur.local_count] = x;
530 info->y_local[info->cur.local_count] = y;
531 info->cur.local_count++;
532 info->cur.version++;
534 note_local_live (info, x, y, rvalue);
536 return true;
538 case SET:
539 gcc_assert (rvalue < 0);
540 /* Ignore the destination's role as a destination. Still, we have
541 to consider input registers embedded in the addresses of a MEM.
542 N.B., we process the rvalue aspect of STRICT_LOW_PART /
543 ZERO_EXTEND / SIGN_EXTEND along with their lvalue aspect. */
544 if (!set_dest_addr_equiv_p (SET_DEST (x), SET_DEST (y), info))
545 return false;
546 /* Process source. */
547 return rtx_equiv_p (&SET_SRC (x), SET_SRC (y), 1, info);
548 case PRE_MODIFY:
549 /* Process destination. */
550 if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info))
551 return false;
552 /* Process source. */
553 return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info);
554 case POST_MODIFY:
556 rtx x_dest0, x_dest1;
558 /* Process destination. */
559 x_dest0 = XEXP (x, 0);
560 gcc_assert (REG_P (x_dest0));
561 if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info))
562 return false;
563 x_dest1 = XEXP (x, 0);
564 /* validate_change might have changed the destination. Put it back
565 so that we can do a valid source match. */
566 XEXP (x, 0) = x_dest0;
567 if (!rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 0, info))
568 return false;
569 gcc_assert (x_dest1 == XEXP (x, 0));
570 /* Process source. */
571 return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info);
572 if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info))
573 return false;
574 /* Process both subexpressions as inputs. */
575 break;
577 case CLOBBER:
578 gcc_assert (rvalue < 0);
579 return true;
580 /* Some special forms are also rvalues when they appear in lvalue
581 positions. However, we must not try to match a register after we
582 have already altered it with validate_change, so consider the rvalue
583 aspect while we process the lvalue. */
584 case STRICT_LOW_PART:
585 case ZERO_EXTEND:
586 case SIGN_EXTEND:
588 rtx x_inner, y_inner;
589 enum rtx_code code;
590 int change;
592 if (rvalue)
593 break;
594 x_inner = XEXP (x, 0);
595 y_inner = XEXP (y, 0);
596 if (GET_MODE (x_inner) != GET_MODE (y_inner))
597 return false;
598 code = GET_CODE (x_inner);
599 if (code != GET_CODE (y_inner))
600 return false;
601 /* The address of a MEM is an input that will be processed during
602 rvalue == -1 processing. */
603 if (code == SUBREG)
605 if (SUBREG_BYTE (x_inner) != SUBREG_BYTE (y_inner))
606 return false;
607 x = x_inner;
608 x_inner = SUBREG_REG (x_inner);
609 y_inner = SUBREG_REG (y_inner);
610 if (GET_MODE (x_inner) != GET_MODE (y_inner))
611 return false;
612 code = GET_CODE (x_inner);
613 if (code != GET_CODE (y_inner))
614 return false;
616 if (code == MEM)
617 return true;
618 gcc_assert (code == REG);
619 if (! rtx_equiv_p (&XEXP (x, 0), y_inner, rvalue, info))
620 return false;
621 if (REGNO (x_inner) == REGNO (y_inner))
623 change = assign_reg_reg_set (info->common_live, x_inner, 1);
624 info->cur.version++;
626 else
627 change = note_local_live (info, x_inner, y_inner, 1);
628 gcc_assert (change);
629 return true;
631 /* The AUTO_INC / POST_MODIFY / PRE_MODIFY sets are modelled to take
632 place during input processing; however, that is benign, since they
633 are paired with reads. */
634 case MEM:
635 return !rvalue || rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info);
636 case POST_INC: case POST_DEC: case PRE_INC: case PRE_DEC:
637 return (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info)
638 && rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info));
639 case PARALLEL:
640 gcc_assert (rvalue < 0);
641 break;
642 case LABEL_REF:
643 /* Check special tablejump match case. */
644 if (XEXP (y, 0) == info->y_label)
645 return (XEXP (x, 0) == info->x_label);
646 /* We can't assume nonlocal labels have their following insns yet. */
647 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
648 return XEXP (x, 0) == XEXP (y, 0);
650 /* Two label-refs are equivalent if they point at labels
651 in the same position in the instruction stream. */
652 return (next_real_insn (XEXP (x, 0))
653 == next_real_insn (XEXP (y, 0)));
654 case SYMBOL_REF:
655 return XSTR (x, 0) == XSTR (y, 0);
656 /* Some rtl is guaranteed to be shared, or unique; if we didn't match
657 by EQ (pointer) equality above, they aren't the same. */
658 case CONST_INT:
659 case CODE_LABEL:
660 return false;
661 default:
662 break;
665 /* For commutative operations, the RTXs match if the operands match in any
666 order. */
667 if (targetm.commutative_p (x, UNKNOWN))
668 return ((rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info)
669 && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), rvalue, info))
670 || (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 1), rvalue, info)
671 && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 0), rvalue, info)));
673 /* Process subexpressions - this is similar to rtx_equal_p. */
674 length = GET_RTX_LENGTH (code);
675 format = GET_RTX_FORMAT (code);
677 for (i = 0; i < length; ++i)
679 switch (format[i])
681 case 'w':
682 if (XWINT (x, i) != XWINT (y, i))
683 return false;
684 break;
685 case 'n':
686 case 'i':
687 if (XINT (x, i) != XINT (y, i))
688 return false;
689 break;
690 case 'V':
691 case 'E':
692 if (XVECLEN (x, i) != XVECLEN (y, i))
693 return false;
694 if (XVEC (x, i) != 0)
696 int j;
697 for (j = 0; j < XVECLEN (x, i); ++j)
699 if (! rtx_equiv_p (&XVECEXP (x, i, j), XVECEXP (y, i, j),
700 rvalue, info))
701 return false;
704 break;
705 case 'e':
706 if (! rtx_equiv_p (&XEXP (x, i), XEXP (y, i), rvalue, info))
707 return false;
708 break;
709 case 'S':
710 case 's':
711 if ((XSTR (x, i) || XSTR (y, i))
712 && (! XSTR (x, i) || ! XSTR (y, i)
713 || strcmp (XSTR (x, i), XSTR (y, i))))
714 return false;
715 break;
716 case 'u':
717 /* These are just backpointers, so they don't matter. */
718 break;
719 case '0':
720 case 't':
721 break;
722 /* It is believed that rtx's at this level will never
723 contain anything but integers and other rtx's,
724 except for within LABEL_REFs and SYMBOL_REFs. */
725 default:
726 gcc_unreachable ();
729 return true;
732 /* Do only the rtx_equiv_p SET_DEST processing for SETs and CLOBBERs.
733 Since we are scanning backwards, this is the first step in processing each
734 insn. Return true for success. */
735 static bool
736 set_dest_equiv_p (rtx x, rtx y, struct equiv_info *info)
738 if (!x || !y)
739 return x == y;
740 if (GET_CODE (x) != GET_CODE (y))
741 return false;
742 else if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
743 return rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info);
744 else if (GET_CODE (x) == PARALLEL)
746 int j;
748 if (XVECLEN (x, 0) != XVECLEN (y, 0))
749 return false;
750 for (j = 0; j < XVECLEN (x, 0); ++j)
752 rtx xe = XVECEXP (x, 0, j);
753 rtx ye = XVECEXP (y, 0, j);
755 if (GET_CODE (xe) != GET_CODE (ye))
756 return false;
757 if ((GET_CODE (xe) == SET || GET_CODE (xe) == CLOBBER)
758 && ! rtx_equiv_p (&XEXP (xe, 0), XEXP (ye, 0), 0, info))
759 return false;
762 return true;
765 /* Process MEMs in SET_DEST destinations. We must not process this together
766 with REG SET_DESTs, but must do it separately, lest when we see
767 [(set (reg:SI foo) (bar))
768 (set (mem:SI (reg:SI foo)) (baz))]
769 struct_equiv_block_eq could be misled into assuming that (reg:SI foo)
770 is not live before this instruction. */
771 static bool
772 set_dest_addr_equiv_p (rtx x, rtx y, struct equiv_info *info)
774 enum rtx_code code = GET_CODE (x);
775 int length;
776 const char *format;
777 int i;
779 if (code != GET_CODE (y))
780 return false;
781 if (code == MEM)
782 return rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info);
784 /* Process subexpressions. */
785 length = GET_RTX_LENGTH (code);
786 format = GET_RTX_FORMAT (code);
788 for (i = 0; i < length; ++i)
790 switch (format[i])
792 case 'V':
793 case 'E':
794 if (XVECLEN (x, i) != XVECLEN (y, i))
795 return false;
796 if (XVEC (x, i) != 0)
798 int j;
799 for (j = 0; j < XVECLEN (x, i); ++j)
801 if (! set_dest_addr_equiv_p (XVECEXP (x, i, j),
802 XVECEXP (y, i, j), info))
803 return false;
806 break;
807 case 'e':
808 if (! set_dest_addr_equiv_p (XEXP (x, i), XEXP (y, i), info))
809 return false;
810 break;
811 default:
812 break;
815 return true;
818 /* Check if the set of REG_DEAD notes attached to I1 and I2 allows us to
819 go ahead with merging I1 and I2, which otherwise look fine.
820 Inputs / local registers for the inputs of I1 and I2 have already been
821 set up. */
822 static bool
823 death_notes_match_p (rtx i1 ATTRIBUTE_UNUSED, rtx i2 ATTRIBUTE_UNUSED,
824 struct equiv_info *info ATTRIBUTE_UNUSED)
826 #ifdef STACK_REGS
827 /* If cross_jump_death_matters is not 0, the insn's mode
828 indicates whether or not the insn contains any stack-like regs. */
830 if ((info->mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
832 /* If register stack conversion has already been done, then
833 death notes must also be compared before it is certain that
834 the two instruction streams match. */
836 rtx note;
837 HARD_REG_SET i1_regset, i2_regset;
839 CLEAR_HARD_REG_SET (i1_regset);
840 CLEAR_HARD_REG_SET (i2_regset);
842 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
843 if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
844 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
846 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
847 if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
849 unsigned regno = REGNO (XEXP (note, 0));
850 int i;
852 for (i = info->cur.local_count - 1; i >= 0; i--)
853 if (regno == REGNO (info->y_local[i]))
855 regno = REGNO (info->x_local[i]);
856 break;
858 SET_HARD_REG_BIT (i2_regset, regno);
861 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
863 return false;
865 done:
868 #endif
869 return true;
872 /* Return true if I1 and I2 are equivalent and thus can be crossjumped. */
874 bool
875 insns_match_p (rtx i1, rtx i2, struct equiv_info *info)
877 int rvalue_change_start;
878 struct struct_equiv_checkpoint before_rvalue_change;
880 /* Verify that I1 and I2 are equivalent. */
881 if (GET_CODE (i1) != GET_CODE (i2))
882 return false;
884 info->cur.x_start = i1;
885 info->cur.y_start = i2;
887 /* If this is a CALL_INSN, compare register usage information.
888 If we don't check this on stack register machines, the two
889 CALL_INSNs might be merged leaving reg-stack.c with mismatching
890 numbers of stack registers in the same basic block.
891 If we don't check this on machines with delay slots, a delay slot may
892 be filled that clobbers a parameter expected by the subroutine.
894 ??? We take the simple route for now and assume that if they're
895 equal, they were constructed identically. */
897 if (CALL_P (i1))
899 if (SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2)
900 || ! set_dest_equiv_p (PATTERN (i1), PATTERN (i2), info)
901 || ! set_dest_equiv_p (CALL_INSN_FUNCTION_USAGE (i1),
902 CALL_INSN_FUNCTION_USAGE (i2), info)
903 || ! rtx_equiv_p (&CALL_INSN_FUNCTION_USAGE (i1),
904 CALL_INSN_FUNCTION_USAGE (i2), -1, info))
906 cancel_changes (0);
907 return false;
910 else if (INSN_P (i1))
912 if (! set_dest_equiv_p (PATTERN (i1), PATTERN (i2), info))
914 cancel_changes (0);
915 return false;
918 rvalue_change_start = num_validated_changes ();
919 struct_equiv_make_checkpoint (&before_rvalue_change, info);
920 /* Check death_notes_match_p *after* the inputs have been processed,
921 so that local inputs will already have been set up. */
922 if (! INSN_P (i1)
923 || (!bitmap_bit_p (info->equiv_used, info->cur.ninsns)
924 && rtx_equiv_p (&PATTERN (i1), PATTERN (i2), -1, info)
925 && death_notes_match_p (i1, i2, info)
926 && verify_changes (0)))
927 return true;
929 /* Do not do EQUIV substitution after reload. First, we're undoing the
930 work of reload_cse. Second, we may be undoing the work of the post-
931 reload splitting pass. */
932 /* ??? Possibly add a new phase switch variable that can be used by
933 targets to disallow the troublesome insns after splitting. */
934 if (!reload_completed)
936 rtx equiv1, equiv2;
938 cancel_changes (rvalue_change_start);
939 struct_equiv_restore_checkpoint (&before_rvalue_change, info);
941 /* The following code helps take care of G++ cleanups. */
942 equiv1 = find_reg_equal_equiv_note (i1);
943 equiv2 = find_reg_equal_equiv_note (i2);
944 if (equiv1 && equiv2
945 /* If the equivalences are not to a constant, they may
946 reference pseudos that no longer exist, so we can't
947 use them. */
948 && (! reload_completed
949 || (CONSTANT_P (XEXP (equiv1, 0))
950 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))))
952 rtx s1 = single_set (i1);
953 rtx s2 = single_set (i2);
955 if (s1 != 0 && s2 != 0)
957 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
958 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
959 /* Only inspecting the new SET_SRC is not good enough,
960 because there may also be bare USEs in a single_set
961 PARALLEL. */
962 if (rtx_equiv_p (&PATTERN (i1), PATTERN (i2), -1, info)
963 && death_notes_match_p (i1, i2, info)
964 && verify_changes (0))
966 /* Mark this insn so that we'll use the equivalence in
967 all subsequent passes. */
968 bitmap_set_bit (info->equiv_used, info->cur.ninsns);
969 return true;
975 cancel_changes (0);
976 return false;
979 /* Set up mode and register information in INFO. Return true for success. */
980 bool
981 struct_equiv_init (int mode, struct equiv_info *info)
983 if ((info->x_block->flags | info->y_block->flags) & BB_DIRTY)
984 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
985 (PROP_DEATH_NOTES
986 | ((mode & CLEANUP_POST_REGSTACK)
987 ? PROP_POST_REGSTACK : 0)));
988 if (!REG_SET_EQUAL_P (info->x_block->il.rtl->global_live_at_end,
989 info->y_block->il.rtl->global_live_at_end))
991 #ifdef STACK_REGS
992 unsigned rn;
994 if (!(mode & CLEANUP_POST_REGSTACK))
995 return false;
996 /* After reg-stack. Remove bogus live info about stack regs. N.B.
997 these regs are not necessarily all dead - we swap random bogosity
998 against constant bogosity. However, clearing these bits at
999 least makes the regsets comparable. */
1000 for (rn = FIRST_STACK_REG; rn <= LAST_STACK_REG; rn++)
1002 CLEAR_REGNO_REG_SET (info->x_block->il.rtl->global_live_at_end, rn);
1003 CLEAR_REGNO_REG_SET (info->y_block->il.rtl->global_live_at_end, rn);
1005 if (!REG_SET_EQUAL_P (info->x_block->il.rtl->global_live_at_end,
1006 info->y_block->il.rtl->global_live_at_end))
1007 #endif
1008 return false;
1010 info->mode = mode;
1011 if (mode & STRUCT_EQUIV_START)
1013 info->x_input = info->y_input = info->input_reg = NULL_RTX;
1014 info->equiv_used = ALLOC_REG_SET (&reg_obstack);
1015 info->check_input_conflict = false;
1017 info->had_input_conflict = false;
1018 info->cur.ninsns = info->cur.version = 0;
1019 info->cur.local_count = info->cur.input_count = 0;
1020 info->cur.x_start = info->cur.y_start = NULL_RTX;
1021 info->x_label = info->y_label = NULL_RTX;
1022 info->need_rerun = false;
1023 info->live_update = true;
1024 info->cur.input_valid = false;
1025 info->common_live = ALLOC_REG_SET (&reg_obstack);
1026 info->x_local_live = ALLOC_REG_SET (&reg_obstack);
1027 info->y_local_live = ALLOC_REG_SET (&reg_obstack);
1028 COPY_REG_SET (info->common_live, info->x_block->il.rtl->global_live_at_end);
1029 struct_equiv_make_checkpoint (&info->best_match, info);
1030 return true;
1033 /* Insns XI and YI have been matched. Merge memory attributes and reg
1034 notes. */
1035 static void
1036 struct_equiv_merge (rtx xi, rtx yi, struct equiv_info *info)
1038 rtx equiv1, equiv2;
1040 merge_memattrs (xi, yi);
1042 /* If the merged insns have different REG_EQUAL notes, then
1043 remove them. */
1044 info->live_update = false;
1045 equiv1 = find_reg_equal_equiv_note (xi);
1046 equiv2 = find_reg_equal_equiv_note (yi);
1047 if (equiv1 && !equiv2)
1048 remove_note (xi, equiv1);
1049 else if (!equiv1 && equiv2)
1050 remove_note (yi, equiv2);
1051 else if (equiv1 && equiv2
1052 && !rtx_equiv_p (&XEXP (equiv1, 0), XEXP (equiv2, 0),
1053 1, info))
1055 remove_note (xi, equiv1);
1056 remove_note (yi, equiv2);
1058 info->live_update = true;
1061 /* Return the number of matched insns.
1062 This function must be called up to three times for a successful cross-jump
1063 match:
1064 first, to find out which instructions do match. While trying to match
1065 another instruction that doesn't match, we destroy information in info
1066 about the actual inputs. So if there have been any before the last
1067 match attempt, we need to call this function a second time to recompute
1068 the actual inputs up to the actual start of the matching sequence.
1069 When we are then satisfied that the cross-jump is worthwhile, we
1070 call this function a third time to make any changes needed to make the
1071 sequences match: apply equivalences, remove non-matching
1072 notes and merge memory attributes. */
1073 int
1074 struct_equiv_block_eq (int mode, struct equiv_info *info)
1076 rtx x_stop, y_stop;
1077 rtx xi, yi;
1078 int i;
1080 if (mode & STRUCT_EQUIV_START)
1082 x_stop = BB_HEAD (info->x_block);
1083 y_stop = BB_HEAD (info->y_block);
1084 if (!x_stop || !y_stop)
1085 return 0;
1087 else
1089 x_stop = info->cur.x_start;
1090 y_stop = info->cur.y_start;
1092 if (!struct_equiv_init (mode, info))
1093 gcc_unreachable ();
1095 /* Skip simple jumps at the end of the blocks. Complex jumps still
1096 need to be compared for equivalence, which we'll do below. */
1098 xi = BB_END (info->x_block);
1099 if (onlyjump_p (xi)
1100 || (returnjump_p (xi) && !side_effects_p (PATTERN (xi))))
1102 info->cur.x_start = xi;
1103 xi = PREV_INSN (xi);
1106 yi = BB_END (info->y_block);
1107 if (onlyjump_p (yi)
1108 || (returnjump_p (yi) && !side_effects_p (PATTERN (yi))))
1110 info->cur.y_start = yi;
1111 /* Count everything except for an unconditional jump as an insn. */
1112 /* ??? Is it right to count unconditional jumps with a clobber?
1113 Should we count conditional returns? */
1114 if (!simplejump_p (yi) && !returnjump_p (yi) && info->cur.x_start)
1115 info->cur.ninsns++;
1116 yi = PREV_INSN (yi);
1119 if (mode & STRUCT_EQUIV_MATCH_JUMPS)
1121 /* The caller is expected to have compared the jumps already, but we
1122 need to match them again to get any local registers and inputs. */
1123 gcc_assert (!info->cur.x_start == !info->cur.y_start);
1124 if (info->cur.x_start)
1126 if (any_condjump_p (info->cur.x_start)
1127 ? !condjump_equiv_p (info, false)
1128 : !insns_match_p (info->cur.x_start, info->cur.y_start, info))
1129 gcc_unreachable ();
1131 else if (any_condjump_p (xi) && any_condjump_p (yi))
1133 info->cur.x_start = xi;
1134 info->cur.y_start = yi;
1135 xi = PREV_INSN (xi);
1136 yi = PREV_INSN (yi);
1137 info->cur.ninsns++;
1138 if (!condjump_equiv_p (info, false))
1139 gcc_unreachable ();
1141 if (info->cur.x_start && info->mode & STRUCT_EQUIV_FINAL)
1142 struct_equiv_merge (info->cur.x_start, info->cur.y_start, info);
1145 struct_equiv_improve_checkpoint (&info->best_match, info);
1146 info->x_end = xi;
1147 info->y_end = yi;
1148 if (info->cur.x_start != x_stop)
1149 for (;;)
1151 /* Ignore notes. */
1152 while (!INSN_P (xi) && xi != x_stop)
1153 xi = PREV_INSN (xi);
1155 while (!INSN_P (yi) && yi != y_stop)
1156 yi = PREV_INSN (yi);
1158 if (!insns_match_p (xi, yi, info))
1159 break;
1160 if (INSN_P (xi))
1162 if (info->mode & STRUCT_EQUIV_FINAL)
1163 struct_equiv_merge (xi, yi, info);
1164 info->cur.ninsns++;
1165 struct_equiv_improve_checkpoint (&info->best_match, info);
1167 if (xi == x_stop || yi == y_stop)
1169 /* If we reached the start of at least one of the blocks, but
1170 best_match hasn't been advanced back to the first valid insn
1171 yet, represent the increased benefit of completing the block
1172 as an increased instruction count. */
1173 if (info->best_match.x_start != info->cur.x_start
1174 && (xi == BB_HEAD (info->x_block)
1175 || yi == BB_HEAD (info->y_block)))
1177 info->cur.ninsns++;
1178 struct_equiv_improve_checkpoint (&info->best_match, info);
1179 info->cur.ninsns--;
1180 if (info->best_match.ninsns > info->cur.ninsns)
1181 info->best_match.ninsns = info->cur.ninsns;
1183 break;
1185 xi = PREV_INSN (xi);
1186 yi = PREV_INSN (yi);
1189 /* If we failed to match an insn, but had some changes registered from
1190 trying to make the insns match, we need to cancel these changes now. */
1191 cancel_changes (0);
1192 /* Restore to best_match to get the sequence with the best known-so-far
1193 cost-benefit difference. */
1194 struct_equiv_restore_checkpoint (&info->best_match, info);
1196 /* Include preceding notes and labels in the cross-jump / if-conversion.
1197 One, this may bring us to the head of the blocks.
1198 Two, it keeps line number notes as matched as may be. */
1199 if (info->cur.ninsns)
1201 xi = info->cur.x_start;
1202 yi = info->cur.y_start;
1203 while (xi != x_stop && !INSN_P (PREV_INSN (xi)))
1204 xi = PREV_INSN (xi);
1206 while (yi != y_stop && !INSN_P (PREV_INSN (yi)))
1207 yi = PREV_INSN (yi);
1209 info->cur.x_start = xi;
1210 info->cur.y_start = yi;
1213 if (!info->cur.input_valid)
1214 info->x_input = info->y_input = info->input_reg = NULL_RTX;
1215 if (!info->need_rerun)
1217 find_dying_inputs (info);
1218 if (info->mode & STRUCT_EQUIV_FINAL)
1220 if (info->check_input_conflict && ! resolve_input_conflict (info))
1221 gcc_unreachable ();
1223 else
1225 bool input_conflict = info->had_input_conflict;
1227 if (!input_conflict
1228 && info->dying_inputs > 1
1229 && bitmap_intersect_p (info->x_local_live, info->y_local_live))
1231 regset_head clobbered_regs;
1233 INIT_REG_SET (&clobbered_regs);
1234 for (i = 0; i < info->cur.local_count; i++)
1236 if (assign_reg_reg_set (&clobbered_regs, info->y_local[i], 0))
1238 input_conflict = true;
1239 break;
1241 assign_reg_reg_set (&clobbered_regs, info->x_local[i], 1);
1243 CLEAR_REG_SET (&clobbered_regs);
1245 if (input_conflict && !info->check_input_conflict)
1246 info->need_rerun = true;
1247 info->check_input_conflict = input_conflict;
1251 if (info->mode & STRUCT_EQUIV_NEED_FULL_BLOCK
1252 && (info->cur.x_start != x_stop || info->cur.y_start != y_stop))
1253 return 0;
1254 return info->cur.ninsns;
1257 /* For each local register, set info->local_rvalue to true iff the register
1258 is a dying input. Store the total number of these in info->dying_inputs. */
1259 static void
1260 find_dying_inputs (struct equiv_info *info)
1262 int i;
1264 info->dying_inputs = 0;
1265 for (i = info->cur.local_count - 1; i >= 0; i--)
1267 rtx x = info->x_local[i];
1268 unsigned regno = REGNO (x);
1269 int nregs = (regno >= FIRST_PSEUDO_REGISTER
1270 ? 1 : hard_regno_nregs[regno][GET_MODE (x)]);
1272 for (info->local_rvalue[i] = false; nregs > 0; regno++, --nregs)
1273 if (REGNO_REG_SET_P (info->x_local_live, regno))
1275 info->dying_inputs++;
1276 info->local_rvalue[i] = true;
1277 break;
1282 /* For each local register that is a dying input, y_local[i] will be
1283 copied to x_local[i]. We'll do this in ascending order. Try to
1284 re-order the locals to avoid conflicts like r3 = r2; r4 = r3; .
1285 Return true iff the re-ordering is successful, or not necessary. */
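/* For example, if the copies to be emitted in ascending index order are
   r3 = r2 followed by r4 = r3, emitting them as-is would clobber r3 before
   it is read; swapping the two entries gives r4 = r3; r3 = r2, which is
   safe.  (Illustrative hard register numbers only.)  */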
1286 static bool
1287 resolve_input_conflict (struct equiv_info *info)
1289 int i, j, end;
1290 int nswaps = 0;
1291 rtx save_x_local[STRUCT_EQUIV_MAX_LOCAL];
1292 rtx save_y_local[STRUCT_EQUIV_MAX_LOCAL];
1294 find_dying_inputs (info);
1295 if (info->dying_inputs <= 1)
1296 return true;
1297 memcpy (save_x_local, info->x_local, sizeof save_x_local);
1298 memcpy (save_y_local, info->y_local, sizeof save_y_local);
1299 end = info->cur.local_count - 1;
1300 for (i = 0; i <= end; i++)
1302 /* Cycle detection with regsets is expensive, so we just check that
1303 we don't exceed the maximum number of swaps needed in the acyclic
1304 case. */
1305 int max_swaps = end - i;
1307 /* Check if x_local[i] will be clobbered. */
1308 if (!info->local_rvalue[i])
1309 continue;
1310 /* Check if any later value needs to be copied earlier. */
1311 for (j = i + 1; j <= end; j++)
1313 rtx tmp;
1315 if (!info->local_rvalue[j])
1316 continue;
1317 if (!reg_overlap_mentioned_p (info->x_local[i], info->y_local[j]))
1318 continue;
1319 if (--max_swaps < 0)
1321 memcpy (info->x_local, save_x_local, sizeof save_x_local);
1322 memcpy (info->y_local, save_y_local, sizeof save_y_local);
1323 return false;
1325 nswaps++;
1326 tmp = info->x_local[i];
1327 info->x_local[i] = info->x_local[j];
1328 info->x_local[j] = tmp;
1329 tmp = info->y_local[i];
1330 info->y_local[i] = info->y_local[j];
1331 info->y_local[j] = tmp;
1332 j = i;
1335 info->had_input_conflict = true;
1336 if (dump_file && nswaps)
1337 fprintf (dump_file, "Resolved input conflict, %d %s.\n",
1338 nswaps, nswaps == 1 ? "swap" : "swaps");
1339 return true;