[official-gcc.git] / gcc / struct-equiv.c
blob e38ae73311361a8b9aab1eb3b72559add4a74125
1 /* Control flow optimization code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /* Try to match two basic blocks - or their ends - for structural equivalence.
23 We scan the blocks from their ends backwards, hoping to find a match, i.e.
24 insns are identical, except for certain cases involving registers. A
27 mismatch between register number RX (used in block X) and RY (used in the
28 same way in block Y) can be handled in one of the following cases:
29 1. RX and RY are local to their respective blocks; they are set there and
30 die there. If so, they can effectively be ignored.
31 2. RX and RY die in their blocks, but live at the start. If any path
32 gets redirected through X instead of Y, the caller must emit
33 compensation code to move RY to RX. If there are overlapping inputs,
34 the function resolve_input_conflict ensures that this can be done.
35 Information about these registers is tracked in the X_LOCAL, Y_LOCAL,
36 LOCAL_COUNT and LOCAL_RVALUE fields.
37 3. RX and RY live throughout their blocks, including the start and the end.
38 Either RX and RY must be identical, or we have to replace all uses in
39 block X with a new pseudo, which is stored in the INPUT_REG field. The
40 caller can then use block X instead of block Y by copying RY to the new
41 pseudo.
43 The main entry point to this file is struct_equiv_block_eq. This function
44 uses a struct equiv_info to accept some of its inputs, to keep track of its
45 internal state, to pass down to its helper functions, and to communicate
46 some of the results back to the caller.
48 Most scans will result in a failure to match a sufficient number of insns
49 to make any optimization worthwhile; therefore the process is geared more
50 towards quick scanning than towards the ability to exactly backtrack when we
51 find a mismatch. The information gathered is still meaningful to make a
52 preliminary decision if we want to do an optimization: we might only
53 slightly overestimate the number of matchable insns, underestimate
54 the number of inputs, and miss an input conflict. Sufficient information
55 is gathered so that when we make another pass, we won't have to backtrack
56 at the same point.
57 Another issue is that information in memory attributes and/or REG_NOTES
58 might have to be merged or discarded to make a valid match. We don't want
59 to discard such information when we are not certain that we want to merge
60 the two (partial) blocks.
61 For these reasons, struct_equiv_block_eq has to be called first with the
62 STRUCT_EQUIV_START bit set in the mode parameter. This will calculate the
63 number of matched insns and the number and types of inputs. If the
64 need_rerun field is set, the results are only tentative, and the caller
65 has to call again with STRUCT_EQUIV_RERUN till need_rerun is false in
66 order to get a reliable match.
67 To install the changes necessary for the match, the function has to be
68 called again with STRUCT_EQUIV_FINAL.
70 While scanning an insn, we process first all the SET_DESTs, then the
71 SET_SRCs, then the REG_NOTES, in order to keep the register liveness
72 information consistent.
73 If we were to mix up the order for sources / destinations in an insn where
74 a source is also a destination, we'd wrongly conclude that
75 the register is not live in the preceding insn. */
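/* As a rough sketch of the calling protocol described above (the caller shown
   here is hypothetical; only struct equiv_info, its x_block / y_block /
   need_rerun fields and the STRUCT_EQUIV_* mode bits are the real interface),
   a cross-jumping caller might proceed like this:

     struct equiv_info info;
     int nmatch;

     info.x_block = bb1;
     info.y_block = bb2;
     nmatch = struct_equiv_block_eq (STRUCT_EQUIV_START, &info);
     while (info.need_rerun)
       nmatch = struct_equiv_block_eq (STRUCT_EQUIV_RERUN, &info);
     if (nmatch >= wanted)
       struct_equiv_block_eq (STRUCT_EQUIV_FINAL, &info);

   where bb1 / bb2 are the two candidate basic blocks and wanted is whatever
   threshold the caller uses to decide that the transformation pays off.
   Other mode bits, e.g. STRUCT_EQUIV_MATCH_JUMPS or
   STRUCT_EQUIV_NEED_FULL_BLOCK, can be or'ed in as appropriate.  */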
77 #include "config.h"
78 #include "system.h"
79 #include "coretypes.h"
80 #include "tm.h"
81 #include "rtl.h"
82 #include "regs.h"
83 #include "output.h"
84 #include "insn-config.h"
85 #include "flags.h"
86 #include "recog.h"
87 #include "tm_p.h"
88 #include "target.h"
89 #include "emit-rtl.h"
90 #include "reload.h"
92 static void merge_memattrs (rtx, rtx);
93 static bool set_dest_equiv_p (rtx x, rtx y, struct equiv_info *info);
94 static bool set_dest_addr_equiv_p (rtx x, rtx y, struct equiv_info *info);
95 static void find_dying_inputs (struct equiv_info *info);
96 static bool resolve_input_conflict (struct equiv_info *info);
98 /* After reload, some moves, as indicated by SECONDARY_RELOAD_CLASS and
99 SECONDARY_MEMORY_NEEDED, cannot be done directly. For our purposes, we
100 consider them impossible to generate after reload (even though some
101 might be synthesized when you throw enough code at them).
102 Since we don't know while processing a cross-jump if a local register
103 that is currently live will eventually be live and thus be an input,
104 we keep track of potential inputs that would require an impossible move
105 by using a prohibitively high cost for them.
106 This number, multiplied with the larger of STRUCT_EQUIV_MAX_LOCAL and
107 FIRST_PSEUDO_REGISTER, must fit in the input_cost field of
108 struct equiv_info. */
109 #define IMPOSSIBLE_MOVE_FACTOR 20000
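/* A small illustration of how this factor is used (the scenario is made up):
   in note_local_live below, a potential input whose copy would need an
   impossible secondary-reload move is accounted with

     y_change *= IMPOSSIBLE_MOVE_FACTOR;
     info->cur.input_count += y_change;

   so cur.input_count immediately reaches at least IMPOSSIBLE_MOVE_FACTOR, and
   struct_equiv_improve_checkpoint then refuses to record any checkpoint that
   would include such an input.  */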
113 /* Remove the memory attributes from the MEM expressions in X and Y
114 wherever they are not equal. */
116 void
117 merge_memattrs (rtx x, rtx y)
119 int i;
120 int j;
121 enum rtx_code code;
122 const char *fmt;
124 if (x == y)
125 return;
126 if (x == 0 || y == 0)
127 return;
129 code = GET_CODE (x);
131 if (code != GET_CODE (y))
132 return;
134 if (GET_MODE (x) != GET_MODE (y))
135 return;
137 if (code == MEM && MEM_ATTRS (x) != MEM_ATTRS (y))
139 if (! MEM_ATTRS (x))
140 MEM_ATTRS (y) = 0;
141 else if (! MEM_ATTRS (y))
142 MEM_ATTRS (x) = 0;
143 else
145 rtx mem_size;
147 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
149 set_mem_alias_set (x, 0);
150 set_mem_alias_set (y, 0);
153 if (! mem_expr_equal_p (MEM_EXPR (x), MEM_EXPR (y)))
155 set_mem_expr (x, 0);
156 set_mem_expr (y, 0);
157 set_mem_offset (x, 0);
158 set_mem_offset (y, 0);
160 else if (MEM_OFFSET (x) != MEM_OFFSET (y))
162 set_mem_offset (x, 0);
163 set_mem_offset (y, 0);
166 if (!MEM_SIZE (x))
167 mem_size = NULL_RTX;
168 else if (!MEM_SIZE (y))
169 mem_size = NULL_RTX;
170 else
171 mem_size = GEN_INT (MAX (INTVAL (MEM_SIZE (x)),
172 INTVAL (MEM_SIZE (y))));
173 set_mem_size (x, mem_size);
174 set_mem_size (y, mem_size);
176 set_mem_align (x, MIN (MEM_ALIGN (x), MEM_ALIGN (y)));
177 set_mem_align (y, MEM_ALIGN (x));
181 fmt = GET_RTX_FORMAT (code);
182 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
184 switch (fmt[i])
186 case 'E':
187 /* Two vectors must have the same length. */
188 if (XVECLEN (x, i) != XVECLEN (y, i))
189 return;
191 for (j = 0; j < XVECLEN (x, i); j++)
192 merge_memattrs (XVECEXP (x, i, j), XVECEXP (y, i, j));
194 break;
196 case 'e':
197 merge_memattrs (XEXP (x, i), XEXP (y, i));
200 return;
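/* For illustration (the MEMs below are hypothetical, shown in RTL dump
   notation, [alias-set expr+offset Ssize Aalign]): merging

     (mem:SI (reg:SI 100) [3 a+0 S4 A32])
   with
     (mem:SI (reg:SI 100) [4 b+0 S4 A16])

   clears the mismatching alias sets, exprs and offsets on both MEMs, keeps
   the size (had the sizes differed, the larger one would be used), and
   lowers both alignments to the smaller value, A16.  */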
203 /* In SET, assign the value VALUE to the bit for the register number of REG.
204 If REG is a hard register, do so for all its constituent registers.
205 Return the number of registers that have become included (as a positive
206 number) or excluded (as a negative number). */
207 static int
208 assign_reg_reg_set (regset set, rtx reg, int value)
210 unsigned regno = REGNO (reg);
211 int nregs, i, old;
213 if (regno >= FIRST_PSEUDO_REGISTER)
215 gcc_assert (!reload_completed);
216 nregs = 1;
218 else
219 nregs = hard_regno_nregs[regno][GET_MODE (reg)];
220 for (old = 0, i = nregs; --i >= 0; regno++)
222 if ((value != 0) == REGNO_REG_SET_P (set, regno))
223 continue;
224 if (value)
225 old++, SET_REGNO_REG_SET (set, regno);
226 else
227 old--, CLEAR_REGNO_REG_SET (set, regno);
229 return old;
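/* For example (the register number and mode are illustrative and target
   dependent): a DImode value in hard register 4 on a typical 32-bit target
   occupies regnos 4 and 5, so

     assign_reg_reg_set (set, gen_rtx_REG (DImode, 4), 1)

   sets both bits and returns +2 if neither bit was set before, while calling
   it again with VALUE == 0 clears both bits and returns -2.  */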
232 /* Record state about current inputs / local registers / liveness
233 in *P. */
234 static inline void
235 struct_equiv_make_checkpoint (struct struct_equiv_checkpoint *p,
236 struct equiv_info *info)
238 *p = info->cur;
241 /* Call struct_equiv_make_checkpoint (P, INFO) if the current partial block
242 is suitable to split off - i.e. there is no dangling cc0 user - and
243 if the current cost of the common instructions, minus the cost for
244 setting up the inputs, is higher than what has been recorded before
245 in CHECKPOINT[N]. Also, if we do so, confirm or cancel any pending
246 changes. */
247 static void
248 struct_equiv_improve_checkpoint (struct struct_equiv_checkpoint *p,
249 struct equiv_info *info)
251 #ifdef HAVE_cc0
252 if (reg_mentioned_p (cc0_rtx, info->cur.x_start)
253 && !sets_cc0_p (info->cur.x_start))
254 return;
255 #endif
256 if (info->cur.input_count >= IMPOSSIBLE_MOVE_FACTOR)
257 return;
258 if (info->input_cost >= 0
259 ? (COSTS_N_INSNS(info->cur.ninsns - p->ninsns)
260 > info->input_cost * (info->cur.input_count - p->input_count))
261 : info->cur.ninsns > p->ninsns && !info->cur.input_count)
263 if (info->check_input_conflict && ! resolve_input_conflict (info))
264 return;
265 /* We have a profitable set of changes. If this is the final pass,
266 commit them now. Otherwise, we don't know yet if we can make any
267 change, so put the old code back for now. */
268 if (info->mode & STRUCT_EQUIV_FINAL)
269 confirm_change_group ();
270 else
271 cancel_changes (0);
272 struct_equiv_make_checkpoint (p, info);
276 /* Restore state about current inputs / local registers / liveness
277 from P. */
278 static void
279 struct_equiv_restore_checkpoint (struct struct_equiv_checkpoint *p,
280 struct equiv_info *info)
282 info->cur.ninsns = p->ninsns;
283 info->cur.x_start = p->x_start;
284 info->cur.y_start = p->y_start;
285 info->cur.input_count = p->input_count;
286 info->cur.input_valid = p->input_valid;
287 while (info->cur.local_count > p->local_count)
289 info->cur.local_count--;
290 info->cur.version--;
291 if (REGNO_REG_SET_P (info->x_local_live,
292 REGNO (info->x_local[info->cur.local_count])))
294 assign_reg_reg_set (info->x_local_live,
295 info->x_local[info->cur.local_count], 0);
296 assign_reg_reg_set (info->y_local_live,
297 info->y_local[info->cur.local_count], 0);
298 info->cur.version--;
301 if (info->cur.version != p->version)
302 info->need_rerun = true;
306 /* Update register liveness to reflect that X is now live (if RVALUE is
307 nonzero) or dead (if RVALUE is zero) in INFO->x_block, and likewise Y
308 in INFO->y_block. Return the number of registers the liveness of which
309 changed in each block (as a negative number if registers became dead). */
310 static int
311 note_local_live (struct equiv_info *info, rtx x, rtx y, int rvalue)
313 unsigned x_regno = REGNO (x);
314 unsigned y_regno = REGNO (y);
315 int x_nominal_nregs = (x_regno >= FIRST_PSEUDO_REGISTER
316 ? 1 : hard_regno_nregs[x_regno][GET_MODE (x)]);
317 int y_nominal_nregs = (y_regno >= FIRST_PSEUDO_REGISTER
318 ? 1 : hard_regno_nregs[y_regno][GET_MODE (y)]);
319 int x_change = assign_reg_reg_set (info->x_local_live, x, rvalue);
320 int y_change = assign_reg_reg_set (info->y_local_live, y, rvalue);
322 gcc_assert (x_nominal_nregs && y_nominal_nregs);
323 gcc_assert (x_change * y_nominal_nregs == y_change * x_nominal_nregs);
324 if (y_change)
326 if (reload_completed)
328 unsigned x_regno ATTRIBUTE_UNUSED = REGNO (x);
329 unsigned y_regno = REGNO (y);
330 enum machine_mode x_mode = GET_MODE (x);
332 if (secondary_reload_class (0, REGNO_REG_CLASS (y_regno), x_mode, x)
333 != NO_REGS
334 #ifdef SECONDARY_MEMORY_NEEDED
335 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (y_regno),
336 REGNO_REG_CLASS (x_regno), x_mode)
337 #endif
339 y_change *= IMPOSSIBLE_MOVE_FACTOR;
341 info->cur.input_count += y_change;
342 info->cur.version++;
344 return x_change;
347 /* Check if *XP is equivalent to Y. Until an unreconcilable difference is
348 found, use in-group changes with validate_change on *XP to make register
349 assignments agree. It is the (not necessarily direct) caller's
350 responsibility to verify / confirm / cancel these changes, as appropriate.
351 RVALUE indicates if the processed piece of rtl is used as a destination, in
352 which case we can't have different registers being an input. Returns
353 nonzero if the two pieces of rtl have been identified as equivalent, zero otherwise.
354 RVALUE == 0: destination
355 RVALUE == 1: source
356 RVALUE == -1: source, ignore SET_DEST of SET / clobber. */
357 bool
358 rtx_equiv_p (rtx *xp, rtx y, int rvalue, struct equiv_info *info)
360 rtx x = *xp;
361 enum rtx_code code;
362 int length;
363 const char *format;
364 int i;
366 if (!y || !x)
367 return x == y;
368 code = GET_CODE (y);
369 if (code != REG && x == y)
370 return true;
371 if (GET_CODE (x) != code
372 || GET_MODE (x) != GET_MODE (y))
373 return false;
375 /* ??? could extend to allow CONST_INT inputs. */
376 switch (code)
378 case REG:
380 unsigned x_regno = REGNO (x);
381 unsigned y_regno = REGNO (y);
382 int x_common_live, y_common_live;
384 if (reload_completed
385 && (x_regno >= FIRST_PSEUDO_REGISTER
386 || y_regno >= FIRST_PSEUDO_REGISTER))
388 /* We should only see this in REG_NOTEs. */
389 gcc_assert (!info->live_update);
390 /* Returning false will cause us to remove the notes. */
391 return false;
393 #ifdef STACK_REGS
394 /* After reg-stack, can only accept literal matches of stack regs. */
395 if (info->mode & CLEANUP_POST_REGSTACK
396 && (IN_RANGE (x_regno, FIRST_STACK_REG, LAST_STACK_REG)
397 || IN_RANGE (y_regno, FIRST_STACK_REG, LAST_STACK_REG)))
398 return x_regno == y_regno;
399 #endif
401 /* If the register is a locally live one in one block, the
402 corresponding one must be locally live in the other, too, and
403 match of identical regnos doesn't apply. */
404 if (REGNO_REG_SET_P (info->x_local_live, x_regno))
406 if (!REGNO_REG_SET_P (info->y_local_live, y_regno))
407 return false;
409 else if (REGNO_REG_SET_P (info->y_local_live, y_regno))
410 return false;
411 else if (x_regno == y_regno)
413 if (!rvalue && info->cur.input_valid
414 && (reg_overlap_mentioned_p (x, info->x_input)
415 || reg_overlap_mentioned_p (x, info->y_input)))
416 return false;
418 /* Update liveness information. */
419 if (info->live_update
420 && assign_reg_reg_set (info->common_live, x, rvalue))
421 info->cur.version++;
423 return true;
426 x_common_live = REGNO_REG_SET_P (info->common_live, x_regno);
427 y_common_live = REGNO_REG_SET_P (info->common_live, y_regno);
428 if (x_common_live != y_common_live)
429 return false;
430 else if (x_common_live)
432 if (! rvalue || info->input_cost < 0 || no_new_pseudos)
433 return false;
434 /* If info->live_update is not set, we are processing notes.
435 We then allow a match with x_input / y_input found in a
436 previous pass. */
437 if (info->live_update && !info->cur.input_valid)
439 info->cur.input_valid = true;
440 info->x_input = x;
441 info->y_input = y;
442 info->cur.input_count += optimize_size ? 2 : 1;
443 if (info->input_reg
444 && GET_MODE (info->input_reg) != GET_MODE (info->x_input))
445 info->input_reg = NULL_RTX;
446 if (!info->input_reg)
447 info->input_reg = gen_reg_rtx (GET_MODE (info->x_input));
449 else if ((info->live_update
450 ? ! info->cur.input_valid : ! info->x_input)
451 || ! rtx_equal_p (x, info->x_input)
452 || ! rtx_equal_p (y, info->y_input))
453 return false;
454 validate_change (info->cur.x_start, xp, info->input_reg, 1);
456 else
458 int x_nregs = (x_regno >= FIRST_PSEUDO_REGISTER
459 ? 1 : hard_regno_nregs[x_regno][GET_MODE (x)]);
460 int y_nregs = (y_regno >= FIRST_PSEUDO_REGISTER
461 ? 1 : hard_regno_nregs[y_regno][GET_MODE (y)]);
462 int size = GET_MODE_SIZE (GET_MODE (x));
463 enum machine_mode x_mode = GET_MODE (x);
464 unsigned x_regno_i, y_regno_i;
465 int x_nregs_i, y_nregs_i, size_i;
466 int local_count = info->cur.local_count;
468 /* This might be a register local to each block. See if we have
469 it already registered. */
470 for (i = local_count - 1; i >= 0; i--)
472 x_regno_i = REGNO (info->x_local[i]);
473 x_nregs_i = (x_regno_i >= FIRST_PSEUDO_REGISTER
474 ? 1 : hard_regno_nregs[x_regno_i][GET_MODE (x)]);
475 y_regno_i = REGNO (info->y_local[i]);
476 y_nregs_i = (y_regno_i >= FIRST_PSEUDO_REGISTER
477 ? 1 : hard_regno_nregs[y_regno_i][GET_MODE (y)]);
478 size_i = GET_MODE_SIZE (GET_MODE (info->x_local[i]));
480 /* If we have a new pair of registers that is wider than an
481 old pair and encloses it with matching offsets,
482 remove the old pair. If we find a matching, wider, old
483 pair, use the old one. If the width is the same, use the
484 old one if the modes match, but the new one if they don't.
485 We don't want to get too fancy with subreg_regno_offset
486 here, so we just test two straightforward cases each. */
487 if (info->live_update
488 && (x_mode != GET_MODE (info->x_local[i])
489 ? size >= size_i : size > size_i))
491 /* If the new pair fully encloses a matching
492 existing pair, remove the old one. N.B. because
493 we are removing one entry here, the check below
494 for whether we have space for a new entry will succeed. */
495 if ((x_regno <= x_regno_i
496 && x_regno + x_nregs >= x_regno_i + x_nregs_i
497 && x_nregs == y_nregs && x_nregs_i == y_nregs_i
498 && x_regno - x_regno_i == y_regno - y_regno_i)
499 || (x_regno == x_regno_i && y_regno == y_regno_i
500 && x_nregs >= x_nregs_i && y_nregs >= y_nregs_i))
502 info->cur.local_count = --local_count;
503 info->x_local[i] = info->x_local[local_count];
504 info->y_local[i] = info->y_local[local_count];
505 continue;
508 else
511 /* If the new pair is fully enclosed within a matching
512 existing pair, succeed. */
513 if (x_regno >= x_regno_i
514 && x_regno + x_nregs <= x_regno_i + x_nregs_i
515 && x_nregs == y_nregs && x_nregs_i == y_nregs_i
516 && x_regno - x_regno_i == y_regno - y_regno_i)
517 break;
518 if (x_regno == x_regno_i && y_regno == y_regno_i
519 && x_nregs <= x_nregs_i && y_nregs <= y_nregs_i)
520 break;
523 /* Any other overlap causes a match failure. */
524 if (x_regno + x_nregs > x_regno_i
525 && x_regno_i + x_nregs_i > x_regno)
526 return false;
527 if (y_regno + y_nregs > y_regno_i
528 && y_regno_i + y_nregs_i > y_regno)
529 return false;
531 if (i < 0)
533 /* Not found. Create a new entry if possible. */
534 if (!info->live_update
535 || info->cur.local_count >= STRUCT_EQUIV_MAX_LOCAL)
536 return false;
537 info->x_local[info->cur.local_count] = x;
538 info->y_local[info->cur.local_count] = y;
539 info->cur.local_count++;
540 info->cur.version++;
542 note_local_live (info, x, y, rvalue);
544 return true;
546 case SET:
547 gcc_assert (rvalue < 0);
548 /* Ignore the destination's role as a destination. Still, we have
549 to consider input registers embedded in the addresses of a MEM.
550 N.B., we process the rvalue aspect of STRICT_LOW_PART /
551 ZERO_EXTEND / SIGN_EXTEND along with their lvalue aspect. */
552 if (!set_dest_addr_equiv_p (SET_DEST (x), SET_DEST (y), info))
553 return false;
554 /* Process source. */
555 return rtx_equiv_p (&SET_SRC (x), SET_SRC (y), 1, info);
556 case PRE_MODIFY:
557 /* Process destination. */
558 if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info))
559 return false;
560 /* Process source. */
561 return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info);
562 case POST_MODIFY:
564 rtx x_dest0, x_dest1;
566 /* Process destination. */
567 x_dest0 = XEXP (x, 0);
568 gcc_assert (REG_P (x_dest0));
569 if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info))
570 return false;
571 x_dest1 = XEXP (x, 0);
572 /* validate_change might have changed the destination. Put it back
573 so that we can do a valid source match. */
574 XEXP (x, 0) = x_dest0;
575 if (!rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 0, info))
576 return false;
577 gcc_assert (x_dest1 == XEXP (x, 0));
578 /* Process source. */
579 return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info);
580 if (!rtx_equiv_p (&XEXP(x, 0), XEXP (y, 0), 0, info))
581 return false;
582 /* Process both subexpressions as inputs. */
583 break;
585 case CLOBBER:
586 gcc_assert (rvalue < 0);
587 return true;
588 /* Some special forms are also rvalues when they appear in lvalue
589 positions. However, we must not try to match a register after we
590 have already altered it with validate_change, so we consider the rvalue
591 aspect while we process the lvalue. */
592 case STRICT_LOW_PART:
593 case ZERO_EXTEND:
594 case SIGN_EXTEND:
596 rtx x_inner, y_inner;
597 enum rtx_code code;
598 int change;
600 if (rvalue)
601 break;
602 x_inner = XEXP (x, 0);
603 y_inner = XEXP (y, 0);
604 if (GET_MODE (x_inner) != GET_MODE (y_inner))
605 return false;
606 code = GET_CODE (x_inner);
607 if (code != GET_CODE (y_inner))
608 return false;
609 /* The address of a MEM is an input that will be processed during
610 rvalue == -1 processing. */
611 if (code == SUBREG)
613 if (SUBREG_BYTE (x_inner) != SUBREG_BYTE (y_inner))
614 return false;
615 x = x_inner;
616 x_inner = SUBREG_REG (x_inner);
617 y_inner = SUBREG_REG (y_inner);
618 if (GET_MODE (x_inner) != GET_MODE (y_inner))
619 return false;
620 code = GET_CODE (x_inner);
621 if (code != GET_CODE (y_inner))
622 return false;
624 if (code == MEM)
625 return true;
626 gcc_assert (code == REG);
627 if (! rtx_equiv_p (&XEXP (x, 0), y_inner, rvalue, info))
628 return false;
629 if (REGNO (x_inner) == REGNO (y_inner))
631 change = assign_reg_reg_set (info->common_live, x_inner, 1);
632 info->cur.version++;
634 else
635 change = note_local_live (info, x_inner, y_inner, 1);
636 gcc_assert (change);
637 return true;
639 /* The AUTO_INC / POST_MODIFY / PRE_MODIFY sets are modeled to take
640 place during input processing; however, that is benign, since they
641 are paired with reads. */
642 case MEM:
643 return !rvalue || rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info);
644 case POST_INC: case POST_DEC: case PRE_INC: case PRE_DEC:
645 return (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info)
646 && rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info));
647 case PARALLEL:
648 /* If this is a top-level PATTERN PARALLEL, we expect the caller to
649 have handled the SET_DESTs. A complex or vector PARALLEL can be
650 identified by having a mode. */
651 gcc_assert (rvalue < 0 || GET_MODE (x) != VOIDmode);
652 break;
653 case LABEL_REF:
654 /* Check special tablejump match case. */
655 if (XEXP (y, 0) == info->y_label)
656 return (XEXP (x, 0) == info->x_label);
657 /* We can't assume nonlocal labels have their following insns yet. */
658 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
659 return XEXP (x, 0) == XEXP (y, 0);
661 /* Two label-refs are equivalent if they point at labels
662 in the same position in the instruction stream. */
663 return (next_real_insn (XEXP (x, 0))
664 == next_real_insn (XEXP (y, 0)));
665 case SYMBOL_REF:
666 return XSTR (x, 0) == XSTR (y, 0);
667 /* Some rtl is guaranteed to be shared, or unique; if we didn't match
668 by pointer (EQ) equality above, they aren't the same. */
669 case CONST_INT:
670 case CODE_LABEL:
671 return false;
672 default:
673 break;
676 /* For commutative operations, the RTXs match if the operands match in any
677 order. */
678 if (targetm.commutative_p (x, UNKNOWN))
679 return ((rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info)
680 && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), rvalue, info))
681 || (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 1), rvalue, info)
682 && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 0), rvalue, info)));
684 /* Process subexpressions - this is similar to rtx_equal_p. */
685 length = GET_RTX_LENGTH (code);
686 format = GET_RTX_FORMAT (code);
688 for (i = 0; i < length; ++i)
690 switch (format[i])
692 case 'w':
693 if (XWINT (x, i) != XWINT (y, i))
694 return false;
695 break;
696 case 'n':
697 case 'i':
698 if (XINT (x, i) != XINT (y, i))
699 return false;
700 break;
701 case 'V':
702 case 'E':
703 if (XVECLEN (x, i) != XVECLEN (y, i))
704 return false;
705 if (XVEC (x, i) != 0)
707 int j;
708 for (j = 0; j < XVECLEN (x, i); ++j)
710 if (! rtx_equiv_p (&XVECEXP (x, i, j), XVECEXP (y, i, j),
711 rvalue, info))
712 return false;
715 break;
716 case 'e':
717 if (! rtx_equiv_p (&XEXP (x, i), XEXP (y, i), rvalue, info))
718 return false;
719 break;
720 case 'S':
721 case 's':
722 if ((XSTR (x, i) || XSTR (y, i))
723 && (! XSTR (x, i) || ! XSTR (y, i)
724 || strcmp (XSTR (x, i), XSTR (y, i))))
725 return false;
726 break;
727 case 'u':
728 /* These are just backpointers, so they don't matter. */
729 break;
730 case '0':
731 case 't':
732 break;
733 /* It is believed that rtx's at this level will never
734 contain anything but integers and other rtx's,
735 except for within LABEL_REFs and SYMBOL_REFs. */
736 default:
737 gcc_unreachable ();
740 return true;
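/* An illustrative use of the REG case above (the pseudo register numbers are
   made up): when comparing

     (set (reg:SI 120) (plus:SI (reg:SI 120) (const_int 4)))
   from block X against
     (set (reg:SI 131) (plus:SI (reg:SI 131) (const_int 4)))

   from block Y, and neither register is live past the end of the code being
   matched, the pair (reg 120, reg 131) is recorded in x_local[] / y_local[]
   and note_local_live updates x_local_live / y_local_live accordingly; from
   then on, a use of reg 120 in X only matches a use of reg 131 at the same
   place in Y.  */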
743 /* Do only the rtx_equiv_p SET_DEST processing for SETs and CLOBBERs.
744 Since we are scanning backwards, this is the first step in processing each
745 insn. Return true for success. */
746 static bool
747 set_dest_equiv_p (rtx x, rtx y, struct equiv_info *info)
749 if (!x || !y)
750 return x == y;
751 if (GET_CODE (x) != GET_CODE (y))
752 return false;
753 else if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
754 return rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info);
755 else if (GET_CODE (x) == PARALLEL)
757 int j;
759 if (XVECLEN (x, 0) != XVECLEN (y, 0))
760 return false;
761 for (j = 0; j < XVECLEN (x, 0); ++j)
763 rtx xe = XVECEXP (x, 0, j);
764 rtx ye = XVECEXP (y, 0, j);
766 if (GET_CODE (xe) != GET_CODE (ye))
767 return false;
768 if ((GET_CODE (xe) == SET || GET_CODE (xe) == CLOBBER)
769 && ! rtx_equiv_p (&XEXP (xe, 0), XEXP (ye, 0), 0, info))
770 return false;
773 return true;
776 /* Process MEMs in SET_DEST destinations. We must not process this together
777 with REG SET_DESTs, but must do it separately, lest when we see
778 [(set (reg:SI foo) (bar))
779 (set (mem:SI (reg:SI foo)) (baz))]
780 struct_equiv_block_eq could be misled into assuming that (reg:SI foo)
781 is not live before this instruction. */
782 static bool
783 set_dest_addr_equiv_p (rtx x, rtx y, struct equiv_info *info)
785 enum rtx_code code = GET_CODE (x);
786 int length;
787 const char *format;
788 int i;
790 if (code != GET_CODE (y))
791 return false;
792 if (code == MEM)
793 return rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info);
795 /* Process subexpressions. */
796 length = GET_RTX_LENGTH (code);
797 format = GET_RTX_FORMAT (code);
799 for (i = 0; i < length; ++i)
801 switch (format[i])
803 case 'V':
804 case 'E':
805 if (XVECLEN (x, i) != XVECLEN (y, i))
806 return false;
807 if (XVEC (x, i) != 0)
809 int j;
810 for (j = 0; j < XVECLEN (x, i); ++j)
812 if (! set_dest_addr_equiv_p (XVECEXP (x, i, j),
813 XVECEXP (y, i, j), info))
814 return false;
817 break;
818 case 'e':
819 if (! set_dest_addr_equiv_p (XEXP (x, i), XEXP (y, i), info))
820 return false;
821 break;
822 default:
823 break;
826 return true;
829 /* Check if the set of REG_DEAD notes attached to I1 and I2 allows us to
830 go ahead with merging I1 and I2, which otherwise look fine.
831 Inputs / local registers for the inputs of I1 and I2 have already been
832 set up. */
833 static bool
834 death_notes_match_p (rtx i1 ATTRIBUTE_UNUSED, rtx i2 ATTRIBUTE_UNUSED,
835 struct equiv_info *info ATTRIBUTE_UNUSED)
837 #ifdef STACK_REGS
838 /* If cross_jump_death_matters is not 0, the insn's mode
839 indicates whether or not the insn contains any stack-like regs. */
841 if ((info->mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
843 /* If register stack conversion has already been done, then
844 death notes must also be compared before it is certain that
845 the two instruction streams match. */
847 rtx note;
848 HARD_REG_SET i1_regset, i2_regset;
850 CLEAR_HARD_REG_SET (i1_regset);
851 CLEAR_HARD_REG_SET (i2_regset);
853 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
854 if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
855 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
857 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
858 if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
860 unsigned regno = REGNO (XEXP (note, 0));
861 int i;
863 for (i = info->cur.local_count - 1; i >= 0; i--)
864 if (regno == REGNO (info->y_local[i]))
866 regno = REGNO (info->x_local[i]);
867 break;
869 SET_HARD_REG_BIT (i2_regset, regno);
872 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
874 return false;
876 done:
879 #endif
880 return true;
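/* For instance (the stack register numbers are illustrative): if I2 carries a
   (REG_DEAD (reg:SF st(1))) note and the matcher has already paired
   y_local[i] = st(1) with x_local[i] = st(2), the note is entered into
   i2_regset as a death of st(2), so the comparison above succeeds only if I1
   carries a REG_DEAD note for st(2) as well.  */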
883 /* Return true if I1 and I2 are equivalent and thus can be crossjumped. */
885 bool
886 insns_match_p (rtx i1, rtx i2, struct equiv_info *info)
888 int rvalue_change_start;
889 struct struct_equiv_checkpoint before_rvalue_change;
891 /* Verify that I1 and I2 are equivalent. */
892 if (GET_CODE (i1) != GET_CODE (i2))
893 return false;
895 info->cur.x_start = i1;
896 info->cur.y_start = i2;
898 /* If this is a CALL_INSN, compare register usage information.
899 If we don't check this on stack register machines, the two
900 CALL_INSNs might be merged leaving reg-stack.c with mismatching
901 numbers of stack registers in the same basic block.
902 If we don't check this on machines with delay slots, a delay slot may
903 be filled that clobbers a parameter expected by the subroutine.
905 ??? We take the simple route for now and assume that if they're
906 equal, they were constructed identically. */
908 if (CALL_P (i1))
910 if (SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2)
911 || ! set_dest_equiv_p (PATTERN (i1), PATTERN (i2), info)
912 || ! set_dest_equiv_p (CALL_INSN_FUNCTION_USAGE (i1),
913 CALL_INSN_FUNCTION_USAGE (i2), info)
914 || ! rtx_equiv_p (&CALL_INSN_FUNCTION_USAGE (i1),
915 CALL_INSN_FUNCTION_USAGE (i2), -1, info))
917 cancel_changes (0);
918 return false;
921 else if (INSN_P (i1))
923 if (! set_dest_equiv_p (PATTERN (i1), PATTERN (i2), info))
925 cancel_changes (0);
926 return false;
929 rvalue_change_start = num_validated_changes ();
930 struct_equiv_make_checkpoint (&before_rvalue_change, info);
931 /* Check death_notes_match_p *after* the inputs have been processed,
932 so that local inputs will already have been set up. */
933 if (! INSN_P (i1)
934 || (!bitmap_bit_p (info->equiv_used, info->cur.ninsns)
935 && rtx_equiv_p (&PATTERN (i1), PATTERN (i2), -1, info)
936 && death_notes_match_p (i1, i2, info)
937 && verify_changes (0)))
938 return true;
940 /* Do not do EQUIV substitution after reload. First, we're undoing the
941 work of reload_cse. Second, we may be undoing the work of the post-
942 reload splitting pass. */
943 /* ??? Possibly add a new phase switch variable that can be used by
944 targets to disallow the troublesome insns after splitting. */
945 if (!reload_completed)
947 rtx equiv1, equiv2;
949 cancel_changes (rvalue_change_start);
950 struct_equiv_restore_checkpoint (&before_rvalue_change, info);
952 /* The following code helps take care of G++ cleanups. */
953 equiv1 = find_reg_equal_equiv_note (i1);
954 equiv2 = find_reg_equal_equiv_note (i2);
955 if (equiv1 && equiv2
956 /* If the equivalences are not to a constant, they may
957 reference pseudos that no longer exist, so we can't
958 use them. */
959 && (! reload_completed
960 || (CONSTANT_P (XEXP (equiv1, 0))
961 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))))
963 rtx s1 = single_set (i1);
964 rtx s2 = single_set (i2);
966 if (s1 != 0 && s2 != 0)
968 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
969 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
970 /* Only inspecting the new SET_SRC is not good enough,
971 because there may also be bare USEs in a single_set
972 PARALLEL. */
973 if (rtx_equiv_p (&PATTERN (i1), PATTERN (i2), -1, info)
974 && death_notes_match_p (i1, i2, info)
975 && verify_changes (0))
977 /* Mark this insn so that we'll use the equivalence in
978 all subsequent passes. */
979 bitmap_set_bit (info->equiv_used, info->cur.ninsns);
980 return true;
986 cancel_changes (0);
987 return false;
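/* A hypothetical example of the REG_EQUAL path above: if I1 is

     (set (reg:SI 60) (reg:SI 70))   with a (REG_EQUAL (const_int 42)) note
   and I2 is
     (set (reg:SI 60) (reg:SI 81))   with an identical REG_EQUAL note,

   the SET_SRCs themselves do not match, but substituting the note value
   (const_int 42) for both sources lets the patterns match; the insn is then
   marked in equiv_used so that subsequent passes keep using the
   equivalence.  */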
990 /* Set up mode and register information in INFO. Return true for success. */
991 bool
992 struct_equiv_init (int mode, struct equiv_info *info)
994 if ((info->x_block->flags | info->y_block->flags) & BB_DIRTY)
995 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
996 (PROP_DEATH_NOTES
997 | ((mode & CLEANUP_POST_REGSTACK)
998 ? PROP_POST_REGSTACK : 0)));
999 if (!REG_SET_EQUAL_P (info->x_block->il.rtl->global_live_at_end,
1000 info->y_block->il.rtl->global_live_at_end))
1002 #ifdef STACK_REGS
1003 unsigned rn;
1005 if (!(mode & CLEANUP_POST_REGSTACK))
1006 return false;
1007 /* After reg-stack. Remove bogus live info about stack regs. N.B.
1008 these regs are not necessarily all dead - we swap random bogosity
1009 against constant bogosity. However, clearing these bits at
1010 least makes the regsets comparable. */
1011 for (rn = FIRST_STACK_REG; rn <= LAST_STACK_REG; rn++)
1013 CLEAR_REGNO_REG_SET (info->x_block->il.rtl->global_live_at_end, rn);
1014 CLEAR_REGNO_REG_SET (info->y_block->il.rtl->global_live_at_end, rn);
1016 if (!REG_SET_EQUAL_P (info->x_block->il.rtl->global_live_at_end,
1017 info->y_block->il.rtl->global_live_at_end))
1018 #endif
1019 return false;
1021 info->mode = mode;
1022 if (mode & STRUCT_EQUIV_START)
1024 info->x_input = info->y_input = info->input_reg = NULL_RTX;
1025 info->equiv_used = ALLOC_REG_SET (&reg_obstack);
1026 info->check_input_conflict = false;
1028 info->had_input_conflict = false;
1029 info->cur.ninsns = info->cur.version = 0;
1030 info->cur.local_count = info->cur.input_count = 0;
1031 info->cur.x_start = info->cur.y_start = NULL_RTX;
1032 info->x_label = info->y_label = NULL_RTX;
1033 info->need_rerun = false;
1034 info->live_update = true;
1035 info->cur.input_valid = false;
1036 info->common_live = ALLOC_REG_SET (&reg_obstack);
1037 info->x_local_live = ALLOC_REG_SET (&reg_obstack);
1038 info->y_local_live = ALLOC_REG_SET (&reg_obstack);
1039 COPY_REG_SET (info->common_live, info->x_block->il.rtl->global_live_at_end);
1040 struct_equiv_make_checkpoint (&info->best_match, info);
1041 return true;
1044 /* Insns XI and YI have been matched. Merge memory attributes and reg
1045 notes. */
1046 static void
1047 struct_equiv_merge (rtx xi, rtx yi, struct equiv_info *info)
1049 rtx equiv1, equiv2;
1051 merge_memattrs (xi, yi);
1053 /* If the merged insns have different REG_EQUAL notes, then
1054 remove them. */
1055 info->live_update = false;
1056 equiv1 = find_reg_equal_equiv_note (xi);
1057 equiv2 = find_reg_equal_equiv_note (yi);
1058 if (equiv1 && !equiv2)
1059 remove_note (xi, equiv1);
1060 else if (!equiv1 && equiv2)
1061 remove_note (yi, equiv2);
1062 else if (equiv1 && equiv2
1063 && !rtx_equiv_p (&XEXP (equiv1, 0), XEXP (equiv2, 0),
1064 1, info))
1066 remove_note (xi, equiv1);
1067 remove_note (yi, equiv2);
1069 info->live_update = true;
1072 /* Return number of matched insns.
1073 This function must be called up to three times for a successful cross-jump
1074 match:
1075 first, to find out which instructions match. While trying to match
1076 another instruction that doesn't match, we destroy information in info
1077 about the actual inputs. So if there have been any before the last
1078 match attempt, we need to call this function a second time to recompute
1079 the actual inputs up to the actual start of the matching sequence.
1080 When we are then satisfied that the cross-jump is worthwhile, we
1081 call this function a third time to make any changes needed to make the
1082 sequences match: apply equivalences, remove non-matching
1083 notes and merge memory attributes. */
1084 int
1085 struct_equiv_block_eq (int mode, struct equiv_info *info)
1087 rtx x_stop, y_stop;
1088 rtx xi, yi;
1089 int i;
1091 if (mode & STRUCT_EQUIV_START)
1093 x_stop = BB_HEAD (info->x_block);
1094 y_stop = BB_HEAD (info->y_block);
1095 if (!x_stop || !y_stop)
1096 return 0;
1098 else
1100 x_stop = info->cur.x_start;
1101 y_stop = info->cur.y_start;
1103 if (!struct_equiv_init (mode, info))
1104 gcc_unreachable ();
1106 /* Skip simple jumps at the end of the blocks. Complex jumps still
1107 need to be compared for equivalence, which we'll do below. */
1109 xi = BB_END (info->x_block);
1110 if (onlyjump_p (xi)
1111 || (returnjump_p (xi) && !side_effects_p (PATTERN (xi))))
1113 info->cur.x_start = xi;
1114 xi = PREV_INSN (xi);
1117 yi = BB_END (info->y_block);
1118 if (onlyjump_p (yi)
1119 || (returnjump_p (yi) && !side_effects_p (PATTERN (yi))))
1121 info->cur.y_start = yi;
1122 /* Count everything except for an unconditional jump as an insn. */
1123 /* ??? Is it right to count unconditional jumps with a clobber?
1124 Should we count conditional returns? */
1125 if (!simplejump_p (yi) && !returnjump_p (yi) && info->cur.x_start)
1126 info->cur.ninsns++;
1127 yi = PREV_INSN (yi);
1130 if (mode & STRUCT_EQUIV_MATCH_JUMPS)
1132 /* The caller is expected to have compared the jumps already, but we
1133 need to match them again to get any local registers and inputs. */
1134 gcc_assert (!info->cur.x_start == !info->cur.y_start);
1135 if (info->cur.x_start)
1137 if (any_condjump_p (info->cur.x_start)
1138 ? !condjump_equiv_p (info, false)
1139 : !insns_match_p (info->cur.x_start, info->cur.y_start, info))
1140 gcc_unreachable ();
1142 else if (any_condjump_p (xi) && any_condjump_p (yi))
1144 info->cur.x_start = xi;
1145 info->cur.y_start = yi;
1146 xi = PREV_INSN (xi);
1147 yi = PREV_INSN (yi);
1148 info->cur.ninsns++;
1149 if (!condjump_equiv_p (info, false))
1150 gcc_unreachable ();
1152 if (info->cur.x_start && info->mode & STRUCT_EQUIV_FINAL)
1153 struct_equiv_merge (info->cur.x_start, info->cur.y_start, info);
1156 struct_equiv_improve_checkpoint (&info->best_match, info);
1157 info->x_end = xi;
1158 info->y_end = yi;
1159 if (info->cur.x_start != x_stop)
1160 for (;;)
1162 /* Ignore notes. */
1163 while (!INSN_P (xi) && xi != x_stop)
1164 xi = PREV_INSN (xi);
1166 while (!INSN_P (yi) && yi != y_stop)
1167 yi = PREV_INSN (yi);
1169 if (!insns_match_p (xi, yi, info))
1170 break;
1171 if (INSN_P (xi))
1173 if (info->mode & STRUCT_EQUIV_FINAL)
1174 struct_equiv_merge (xi, yi, info);
1175 info->cur.ninsns++;
1176 struct_equiv_improve_checkpoint (&info->best_match, info);
1178 if (xi == x_stop || yi == y_stop)
1180 /* If we reached the start of at least one of the blocks, but
1181 best_match hasn't been advanced back to the first valid insn
1182 yet, represent the increased benefit of completing the block
1183 as an increased instruction count. */
1184 if (info->best_match.x_start != info->cur.x_start
1185 && (xi == BB_HEAD (info->x_block)
1186 || yi == BB_HEAD (info->y_block)))
1188 info->cur.ninsns++;
1189 struct_equiv_improve_checkpoint (&info->best_match, info);
1190 info->cur.ninsns--;
1191 if (info->best_match.ninsns > info->cur.ninsns)
1192 info->best_match.ninsns = info->cur.ninsns;
1194 break;
1196 xi = PREV_INSN (xi);
1197 yi = PREV_INSN (yi);
1200 /* If we failed to match an insn, but had some changes registered from
1201 trying to make the insns match, we need to cancel these changes now. */
1202 cancel_changes (0);
1203 /* Restore to best_match to get the sequence with the best known-so-far
1204 cost-benefit difference. */
1205 struct_equiv_restore_checkpoint (&info->best_match, info);
1207 /* Include preceding notes and labels in the cross-jump / if-conversion.
1208 One, this may bring us to the head of the blocks.
1209 Two, it keeps line number notes as well matched as possible. */
1210 if (info->cur.ninsns)
1212 xi = info->cur.x_start;
1213 yi = info->cur.y_start;
1214 while (xi != x_stop && !INSN_P (PREV_INSN (xi)))
1215 xi = PREV_INSN (xi);
1217 while (yi != y_stop && !INSN_P (PREV_INSN (yi)))
1218 yi = PREV_INSN (yi);
1220 info->cur.x_start = xi;
1221 info->cur.y_start = yi;
1224 if (!info->cur.input_valid)
1225 info->x_input = info->y_input = info->input_reg = NULL_RTX;
1226 if (!info->need_rerun)
1228 find_dying_inputs (info);
1229 if (info->mode & STRUCT_EQUIV_FINAL)
1231 if (info->check_input_conflict && ! resolve_input_conflict (info))
1232 gcc_unreachable ();
1234 else
1236 bool input_conflict = info->had_input_conflict;
1238 if (!input_conflict
1239 && info->dying_inputs > 1
1240 && bitmap_intersect_p (info->x_local_live, info->y_local_live))
1242 regset_head clobbered_regs;
1244 INIT_REG_SET (&clobbered_regs);
1245 for (i = 0; i < info->cur.local_count; i++)
1247 if (assign_reg_reg_set (&clobbered_regs, info->y_local[i], 0))
1249 input_conflict = true;
1250 break;
1252 assign_reg_reg_set (&clobbered_regs, info->x_local[i], 1);
1254 CLEAR_REG_SET (&clobbered_regs);
1256 if (input_conflict && !info->check_input_conflict)
1257 info->need_rerun = true;
1258 info->check_input_conflict = input_conflict;
1262 if (info->mode & STRUCT_EQUIV_NEED_FULL_BLOCK
1263 && (info->cur.x_start != x_stop || info->cur.y_start != y_stop))
1264 return 0;
1265 return info->cur.ninsns;
1268 /* For each local register, set info->local_rvalue to true iff the register
1269 is a dying input. Store the total number of these in info->dying_inputs. */
1270 static void
1271 find_dying_inputs (struct equiv_info *info)
1273 int i;
1275 info->dying_inputs = 0;
1276 for (i = info->cur.local_count - 1; i >= 0; i--)
1278 rtx x = info->x_local[i];
1279 unsigned regno = REGNO (x);
1280 int nregs = (regno >= FIRST_PSEUDO_REGISTER
1281 ? 1 : hard_regno_nregs[regno][GET_MODE (x)]);
1283 for (info->local_rvalue[i] = false; nregs >= 0; regno++, --nregs)
1284 if (REGNO_REG_SET_P (info->x_local_live, regno))
1286 info->dying_inputs++;
1287 info->local_rvalue[i] = true;
1288 break;
1293 /* For each local register that is a dying input, y_local[i] will be
1294 copied to x_local[i]. We'll do this in ascending order. Try to
1295 re-order the locals to avoid conflicts like r3 = r2; r4 = r3.
1296 Return true iff the re-ordering is successful, or not necessary. */
1297 static bool
1298 resolve_input_conflict (struct equiv_info *info)
1300 int i, j, end;
1301 int nswaps = 0;
1302 rtx save_x_local[STRUCT_EQUIV_MAX_LOCAL];
1303 rtx save_y_local[STRUCT_EQUIV_MAX_LOCAL];
1305 find_dying_inputs (info);
1306 if (info->dying_inputs <= 1)
1307 return true;
1308 memcpy (save_x_local, info->x_local, sizeof save_x_local);
1309 memcpy (save_y_local, info->y_local, sizeof save_y_local);
1310 end = info->cur.local_count - 1;
1311 for (i = 0; i <= end; i++)
1313 /* Cycle detection with regsets is expensive, so we just check that
1314 we don't exceed the maximum number of swaps needed in the acyclic
1315 case. */
1316 int max_swaps = end - i;
1318 /* Check if x_local[i] will be clobbered. */
1319 if (!info->local_rvalue[i])
1320 continue;
1321 /* Check if any later value needs to be copied earlier. */
1322 for (j = i + 1; j <= end; j++)
1324 rtx tmp;
1326 if (!info->local_rvalue[j])
1327 continue;
1328 if (!reg_overlap_mentioned_p (info->x_local[i], info->y_local[j]))
1329 continue;
1330 if (--max_swaps < 0)
1332 memcpy (info->x_local, save_x_local, sizeof save_x_local);
1333 memcpy (info->y_local, save_y_local, sizeof save_y_local);
1334 return false;
1336 nswaps++;
1337 tmp = info->x_local[i];
1338 info->x_local[i] = info->x_local[j];
1339 info->x_local[j] = tmp;
1340 tmp = info->y_local[i];
1341 info->y_local[i] = info->y_local[j];
1342 info->y_local[j] = tmp;
1343 j = i;
1346 info->had_input_conflict = true;
1347 if (dump_file && nswaps)
1348 fprintf (dump_file, "Resolved input conflict, %d %s.\n",
1349 nswaps, nswaps == 1 ? "swap" : "swaps");
1350 return true;
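/* Worked example (hypothetical hard registers): if the dying inputs are
   ordered so that we would emit the compensation copies in the order

     r3 = r2;
     r4 = r3;

   then the first copy clobbers r3 before the second one reads it.  One swap
   of the two entries gives the safe order

     r4 = r3;
     r3 = r2;

   which is what the loop above produces; it gives up, restoring the original
   order, only if more swaps are needed than an acyclic dependence chain could
   ever require.  */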