2005-12-18 Richard Guenther <rguenther@suse.de>
[official-gcc.git] / gcc / struct-equiv.c
blob 69639d254e45798ce3bbc761d162e119ac7fe941
1 /* Control flow optimization code for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
22 /* Try to match two basic blocks - or their ends - for structural equivalence.
23    We scan the blocks from their ends backwards, hoping to find a match, i.e.
24    insns are identical, except for certain cases involving registers.  A
27 mismatch between register number RX (used in block X) and RY (used in the
28 same way in block Y) can be handled in one of the following cases:
29 1. RX and RY are local to their respective blocks; they are set there and
30 die there. If so, they can effectively be ignored.
31 2. RX and RY die in their blocks, but live at the start. If any path
32 gets redirected through X instead of Y, the caller must emit
33 compensation code to move RY to RX. If there are overlapping inputs,
34 the function resolve_input_conflict ensures that this can be done.
35    Information about these registers is tracked in the X_LOCAL, Y_LOCAL,
36 LOCAL_COUNT and LOCAL_RVALUE fields.
37 3. RX and RY live throughout their blocks, including the start and the end.
38 Either RX and RY must be identical, or we have to replace all uses in
39 block X with a new pseudo, which is stored in the INPUT_REG field. The
40 caller can then use block X instead of block Y by copying RY to the new
41 pseudo.
43 The main entry point to this file is struct_equiv_block_eq. This function
44 uses a struct equiv_info to accept some of its inputs, to keep track of its
45 internal state, to pass down to its helper functions, and to communicate
46 some of the results back to the caller.
48    Most scans will result in a failure to match a sufficient number of insns
49    to make any optimization worthwhile, so the process is geared more
50    toward quick scanning than toward the ability to backtrack exactly when we
51    find a mismatch.  The information gathered is still meaningful for making a
52    preliminary decision on whether we want to do an optimization: we might only
53    slightly overestimate the number of matchable insns, underestimate
54    the number of inputs, and miss an input conflict.  Sufficient information
55 is gathered so that when we make another pass, we won't have to backtrack
56 at the same point.
57 Another issue is that information in memory attributes and/or REG_NOTES
58 might have to be merged or discarded to make a valid match. We don't want
59 to discard such information when we are not certain that we want to merge
60 the two (partial) blocks.
61 For these reasons, struct_equiv_block_eq has to be called first with the
62 STRUCT_EQUIV_START bit set in the mode parameter. This will calculate the
63 number of matched insns and the number and types of inputs. If the
64 need_rerun field is set, the results are only tentative, and the caller
65 has to call again with STRUCT_EQUIV_RERUN till need_rerun is false in
66 order to get a reliable match.
67 To install the changes necessary for the match, the function has to be
68 called again with STRUCT_EQUIV_FINAL.
70 While scanning an insn, we process first all the SET_DESTs, then the
71 SET_SRCes, then the REG_NOTES, in order to keep the register liveness
72 information consistent.
73 If we were to mix up the order for sources / destinations in an insn where
74    a source is also a destination, we'd be mistaken in thinking that
75 the register is not live in the preceding insn. */
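/* Illustrative example of the three register cases above -- added here for
   exposition only; it is not part of the original source.  Suppose block X
   ends with
     (set (reg:SI 100) (plus:SI (reg:SI 200) (const_int 4)))
   and block Y ends with
     (set (reg:SI 100) (plus:SI (reg:SI 300) (const_int 4))).
   If regs 200 and 300 are set earlier in their own blocks and die in these
   insns, this is case 1 and the mismatch is simply ignored.  If they are
   live at the block starts and die here, this is case 2: the insns still
   match, but a caller that redirects a path through X instead of Y must
   first emit the compensation copy (set (reg:SI 200) (reg:SI 300)).  If
   they are live throughout their blocks, case 3 applies: all uses of
   reg 200 in X are replaced with a fresh pseudo (recorded in INPUT_REG),
   and the caller makes X usable in place of Y by copying reg 300 into
   that pseudo.  */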
77 #include "config.h"
78 #include "system.h"
79 #include "coretypes.h"
80 #include "tm.h"
81 #include "rtl.h"
82 #include "regs.h"
83 #include "output.h"
84 #include "insn-config.h"
85 #include "flags.h"
86 #include "recog.h"
87 #include "tm_p.h"
88 #include "target.h"
89 #include "emit-rtl.h"
90 #include "reload.h"
92 static void merge_memattrs (rtx, rtx);
93 static bool set_dest_equiv_p (rtx x, rtx y, struct equiv_info *info);
94 static bool set_dest_addr_equiv_p (rtx x, rtx y, struct equiv_info *info);
95 static void find_dying_inputs (struct equiv_info *info);
96 static bool resolve_input_conflict (struct equiv_info *info);
98 /* After reload, some moves, as indicated by SECONDARY_RELOAD_CLASS and
99 SECONDARY_MEMORY_NEEDED, cannot be done directly. For our purposes, we
100 consider them impossible to generate after reload (even though some
101 might be synthesized when you throw enough code at them).
102 Since we don't know while processing a cross-jump if a local register
103 that is currently live will eventually be live and thus be an input,
104 we keep track of potential inputs that would require an impossible move
105 by using a prohibitively high cost for them.
106 This number, multiplied with the larger of STRUCT_EQUIV_MAX_LOCAL and
107 FIRST_PSEUDO_REGISTER, must fit in the input_cost field of
108 struct equiv_info. */
109 #define IMPOSSIBLE_MOVE_FACTOR 20000
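/* A rough sanity check of the constraint stated above -- added for
   exposition, not part of the original source; the exact figures are
   target dependent.  STRUCT_EQUIV_MAX_LOCAL is a small fixed limit and
   FIRST_PSEUDO_REGISTER is typically at most a few hundred, so the
   accumulated penalty is bounded by roughly 20000 * a few hundred, i.e.
   a few million, which fits comfortably in a 32-bit signed int field.  */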
113 /* Remove the memory attributes of MEM expressions in X and Y
114    if they are not equal.  */
116 void
117 merge_memattrs (rtx x, rtx y)
119 int i;
120 int j;
121 enum rtx_code code;
122 const char *fmt;
124 if (x == y)
125 return;
126 if (x == 0 || y == 0)
127 return;
129 code = GET_CODE (x);
131 if (code != GET_CODE (y))
132 return;
134 if (GET_MODE (x) != GET_MODE (y))
135 return;
137 if (code == MEM && MEM_ATTRS (x) != MEM_ATTRS (y))
139 if (! MEM_ATTRS (x))
140 MEM_ATTRS (y) = 0;
141 else if (! MEM_ATTRS (y))
142 MEM_ATTRS (x) = 0;
143 else
145 rtx mem_size;
147 if (MEM_ALIAS_SET (x) != MEM_ALIAS_SET (y))
149 set_mem_alias_set (x, 0);
150 set_mem_alias_set (y, 0);
153 if (! mem_expr_equal_p (MEM_EXPR (x), MEM_EXPR (y)))
155 set_mem_expr (x, 0);
156 set_mem_expr (y, 0);
157 set_mem_offset (x, 0);
158 set_mem_offset (y, 0);
160 else if (MEM_OFFSET (x) != MEM_OFFSET (y))
162 set_mem_offset (x, 0);
163 set_mem_offset (y, 0);
166 if (!MEM_SIZE (x))
167 mem_size = NULL_RTX;
168 else if (!MEM_SIZE (y))
169 mem_size = NULL_RTX;
170 else
171 mem_size = GEN_INT (MAX (INTVAL (MEM_SIZE (x)),
172 INTVAL (MEM_SIZE (y))));
173 set_mem_size (x, mem_size);
174 set_mem_size (y, mem_size);
176 set_mem_align (x, MIN (MEM_ALIGN (x), MEM_ALIGN (y)));
177 set_mem_align (y, MEM_ALIGN (x));
181 fmt = GET_RTX_FORMAT (code);
182 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
184 switch (fmt[i])
186 case 'E':
187 /* Two vectors must have the same length. */
188 if (XVECLEN (x, i) != XVECLEN (y, i))
189 return;
191 for (j = 0; j < XVECLEN (x, i); j++)
192 merge_memattrs (XVECEXP (x, i, j), XVECEXP (y, i, j));
194 break;
196 case 'e':
197 merge_memattrs (XEXP (x, i), XEXP (y, i));
200 return;
203 /* In SET, assign the bit for the register number of REG the value VALUE.
204 If REG is a hard register, do so for all its constituent registers.
205 Return the number of registers that have become included (as a positive
206 number) or excluded (as a negative number). */
207 static int
208 assign_reg_reg_set (regset set, rtx reg, int value)
210 unsigned regno = REGNO (reg);
211 int nregs, i, old;
213 if (regno >= FIRST_PSEUDO_REGISTER)
215 gcc_assert (!reload_completed);
216 nregs = 1;
218 else
219 nregs = hard_regno_nregs[regno][GET_MODE (reg)];
220 for (old = 0, i = nregs; --i >= 0; regno++)
222 if ((value != 0) == REGNO_REG_SET_P (set, regno))
223 continue;
224 if (value)
225 old++, SET_REGNO_REG_SET (set, regno);
226 else
227 old--, CLEAR_REGNO_REG_SET (set, regno);
229 return old;
232 /* Record state about current inputs / local registers / liveness
233 in *P. */
234 static inline void
235 struct_equiv_make_checkpoint (struct struct_equiv_checkpoint *p,
236 struct equiv_info *info)
238 *p = info->cur;
241 /* Call struct_equiv_make_checkpoint (P, INFO) if the current partial block
242 is suitable to split off - i.e. there is no dangling cc0 user - and
243 if the current cost of the common instructions, minus the cost for
244 setting up the inputs, is higher than what has been recorded before
245 in CHECKPOINT[N]. Also, if we do so, confirm or cancel any pending
246 changes. */
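/* Worked example of the test below -- added for exposition, not part of
   the original source.  With a nonnegative INFO->input_cost, if extending
   the match past *P adds three matched insns but requires two additional
   inputs, the checkpoint only advances when
     COSTS_N_INSNS (3) > 2 * INFO->input_cost,
   i.e. when the insns saved outweigh the moves needed to set up the extra
   inputs.  With a negative INFO->input_cost, an extension is only accepted
   while no inputs are required at all.  */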
247 static void
248 struct_equiv_improve_checkpoint (struct struct_equiv_checkpoint *p,
249 struct equiv_info *info)
251 #ifdef HAVE_cc0
252 if (reg_mentioned_p (cc0_rtx, p->x_start) && !sets_cc0_p (p->x_start))
253 return;
254 #endif
255 if (info->cur.input_count >= IMPOSSIBLE_MOVE_FACTOR)
256 return;
257 if (info->input_cost >= 0
258 ? (COSTS_N_INSNS(info->cur.ninsns - p->ninsns)
259 > info->input_cost * (info->cur.input_count - p->input_count))
260 : info->cur.ninsns > p->ninsns && !info->cur.input_count)
262 if (info->check_input_conflict && ! resolve_input_conflict (info))
263 return;
264 /* We have a profitable set of changes. If this is the final pass,
265 commit them now. Otherwise, we don't know yet if we can make any
266 change, so put the old code back for now. */
267 if (info->mode & STRUCT_EQUIV_FINAL)
268 confirm_change_group ();
269 else
270 cancel_changes (0);
271 struct_equiv_make_checkpoint (p, info);
275 /* Restore state about current inputs / local registers / liveness
276 from P. */
277 static void
278 struct_equiv_restore_checkpoint (struct struct_equiv_checkpoint *p,
279 struct equiv_info *info)
281 info->cur.ninsns = p->ninsns;
282 info->cur.x_start = p->x_start;
283 info->cur.y_start = p->y_start;
284 info->cur.input_count = p->input_count;
285 info->cur.input_valid = p->input_valid;
286 while (info->cur.local_count > p->local_count)
288 info->cur.local_count--;
289 info->cur.version--;
290 if (REGNO_REG_SET_P (info->x_local_live,
291 REGNO (info->x_local[info->cur.local_count])))
293 assign_reg_reg_set (info->x_local_live,
294 info->x_local[info->cur.local_count], 0);
295 assign_reg_reg_set (info->y_local_live,
296 info->y_local[info->cur.local_count], 0);
297 info->cur.version--;
300 if (info->cur.version != p->version)
301 info->need_rerun = true;
305 /* Update register liveness to reflect that X is now live (if rvalue is
306 nonzero) or dead (if rvalue is zero) in INFO->x_block, and likewise Y
307 in INFO->y_block. Return the number of registers the liveness of which
308 changed in each block (as a negative number if registers became dead). */
309 static int
310 note_local_live (struct equiv_info *info, rtx x, rtx y, int rvalue)
312 unsigned x_regno = REGNO (x);
313 unsigned y_regno = REGNO (y);
314 int x_nominal_nregs = (x_regno >= FIRST_PSEUDO_REGISTER
315 ? 1 : hard_regno_nregs[x_regno][GET_MODE (x)]);
316 int y_nominal_nregs = (y_regno >= FIRST_PSEUDO_REGISTER
317 ? 1 : hard_regno_nregs[y_regno][GET_MODE (y)]);
318 int x_change = assign_reg_reg_set (info->x_local_live, x, rvalue);
319 int y_change = assign_reg_reg_set (info->y_local_live, y, rvalue);
321 gcc_assert (x_nominal_nregs && y_nominal_nregs);
322 gcc_assert (x_change * y_nominal_nregs == y_change * x_nominal_nregs);
323 if (y_change)
325 if (reload_completed)
327 unsigned x_regno ATTRIBUTE_UNUSED = REGNO (x);
328 unsigned y_regno = REGNO (y);
329 enum machine_mode x_mode = GET_MODE (x);
331 if (secondary_reload_class (0, REGNO_REG_CLASS (y_regno), x_mode, x)
332 != NO_REGS
333 #ifdef SECONDARY_MEMORY_NEEDED
334 || SECONDARY_MEMORY_NEEDED (REGNO_REG_CLASS (y_regno),
335 REGNO_REG_CLASS (x_regno), x_mode)
336 #endif
338 y_change *= IMPOSSIBLE_MOVE_FACTOR;
340 info->cur.input_count += y_change;
341 info->cur.version++;
343 return x_change;
346 /* Check if *XP is equivalent to Y.  Until an unreconcilable difference is
347 found, use in-group changes with validate_change on *XP to make register
348    assignments agree.  It is the (not necessarily direct) caller's
349 responsibility to verify / confirm / cancel these changes, as appropriate.
350 RVALUE indicates if the processed piece of rtl is used as a destination, in
351 which case we can't have different registers being an input. Returns
352    nonzero if *XP and Y have been identified as equivalent, zero otherwise.
353 RVALUE == 0: destination
354 RVALUE == 1: source
355 RVALUE == -1: source, ignore SET_DEST of SET / clobber. */
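/* Illustration of the RVALUE convention -- added for exposition, not part
   of the original source.  For a store insn whose pattern is
     (set (mem:SI (reg:SI 2)) (reg:SI 1))
   insns_match_p first calls set_dest_equiv_p, which reaches this function
   with RVALUE == 0 for the destination MEM (whose address is not yet
   processed), and then calls rtx_equiv_p on the whole PATTERN with
   RVALUE == -1; the SET case below then processes (reg:SI 2) inside the
   destination address and the source (reg:SI 1) as rvalues (RVALUE == 1).  */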
356 bool
357 rtx_equiv_p (rtx *xp, rtx y, int rvalue, struct equiv_info *info)
359 rtx x = *xp;
360 enum rtx_code code;
361 int length;
362 const char *format;
363 int i;
365 if (!y || !x)
366 return x == y;
367 code = GET_CODE (y);
368 if (code != REG && x == y)
369 return true;
370 if (GET_CODE (x) != code
371 || GET_MODE (x) != GET_MODE (y))
372 return false;
374 /* ??? could extend to allow CONST_INT inputs. */
375 switch (code)
377 case REG:
379 unsigned x_regno = REGNO (x);
380 unsigned y_regno = REGNO (y);
381 int x_common_live, y_common_live;
383 if (reload_completed
384 && (x_regno >= FIRST_PSEUDO_REGISTER
385 || y_regno >= FIRST_PSEUDO_REGISTER))
387 /* We should only see this in REG_NOTEs. */
388 gcc_assert (!info->live_update);
389 /* Returning false will cause us to remove the notes. */
390 return false;
392 #ifdef STACK_REGS
393 /* After reg-stack, can only accept literal matches of stack regs. */
394 if (info->mode & CLEANUP_POST_REGSTACK
395 && (IN_RANGE (x_regno, FIRST_STACK_REG, LAST_STACK_REG)
396 || IN_RANGE (y_regno, FIRST_STACK_REG, LAST_STACK_REG)))
397 return x_regno == y_regno;
398 #endif
400 /* If the register is a locally live one in one block, the
401 corresponding one must be locally live in the other, too, and
402 match of identical regnos doesn't apply. */
403 if (REGNO_REG_SET_P (info->x_local_live, x_regno))
405 if (!REGNO_REG_SET_P (info->y_local_live, y_regno))
406 return false;
408 else if (REGNO_REG_SET_P (info->y_local_live, y_regno))
409 return false;
410 else if (x_regno == y_regno)
412 if (!rvalue && info->cur.input_valid
413 && (reg_overlap_mentioned_p (x, info->x_input)
414 || reg_overlap_mentioned_p (x, info->y_input)))
415 return false;
417 /* Update liveness information. */
418 if (info->live_update
419 && assign_reg_reg_set (info->common_live, x, rvalue))
420 info->cur.version++;
422 return true;
425 x_common_live = REGNO_REG_SET_P (info->common_live, x_regno);
426 y_common_live = REGNO_REG_SET_P (info->common_live, y_regno);
427 if (x_common_live != y_common_live)
428 return false;
429 else if (x_common_live)
431 if (! rvalue || info->input_cost < 0 || no_new_pseudos)
432 return false;
433 /* If info->live_update is not set, we are processing notes.
434 We then allow a match with x_input / y_input found in a
435 previous pass. */
436 if (info->live_update && !info->cur.input_valid)
438 info->cur.input_valid = true;
439 info->x_input = x;
440 info->y_input = y;
441 info->cur.input_count += optimize_size ? 2 : 1;
442 if (info->input_reg
443 && GET_MODE (info->input_reg) != GET_MODE (info->x_input))
444 info->input_reg = NULL_RTX;
445 if (!info->input_reg)
446 info->input_reg = gen_reg_rtx (GET_MODE (info->x_input));
448 else if ((info->live_update
449 ? ! info->cur.input_valid : ! info->x_input)
450 || ! rtx_equal_p (x, info->x_input)
451 || ! rtx_equal_p (y, info->y_input))
452 return false;
453 validate_change (info->cur.x_start, xp, info->input_reg, 1);
455 else
457 int x_nregs = (x_regno >= FIRST_PSEUDO_REGISTER
458 ? 1 : hard_regno_nregs[x_regno][GET_MODE (x)]);
459 int y_nregs = (y_regno >= FIRST_PSEUDO_REGISTER
460 ? 1 : hard_regno_nregs[y_regno][GET_MODE (y)]);
461 int size = GET_MODE_SIZE (GET_MODE (x));
462 enum machine_mode x_mode = GET_MODE (x);
463 unsigned x_regno_i, y_regno_i;
464 int x_nregs_i, y_nregs_i, size_i;
465 int local_count = info->cur.local_count;
467 /* This might be a register local to each block. See if we have
468 it already registered. */
469 for (i = local_count - 1; i >= 0; i--)
471 x_regno_i = REGNO (info->x_local[i]);
472 x_nregs_i = (x_regno_i >= FIRST_PSEUDO_REGISTER
473 ? 1 : hard_regno_nregs[x_regno_i][GET_MODE (x)]);
474 y_regno_i = REGNO (info->y_local[i]);
475 y_nregs_i = (y_regno_i >= FIRST_PSEUDO_REGISTER
476 ? 1 : hard_regno_nregs[y_regno_i][GET_MODE (y)]);
477 size_i = GET_MODE_SIZE (GET_MODE (info->x_local[i]));
479           /* If we have a new pair of registers that is wider than an
480              old pair and encloses it with matching offsets,
481              remove the old pair.  If we find a matching, wider, old
482              pair, use the old one.  If the width is the same, use the
483              old one if the modes match, but the new one if they don't.
484              We don't want to get too fancy with subreg_regno_offset
485              here, so we just test two straightforward cases each.  */
486 if (info->live_update
487 && (x_mode != GET_MODE (info->x_local[i])
488 ? size >= size_i : size > size_i))
490 /* If the new pair is fully enclosing a matching
491 existing pair, remove the old one. N.B. because
492 we are removing one entry here, the check below
493 if we have space for a new entry will succeed. */
494 if ((x_regno <= x_regno_i
495 && x_regno + x_nregs >= x_regno_i + x_nregs_i
496 && x_nregs == y_nregs && x_nregs_i == y_nregs_i
497 && x_regno - x_regno_i == y_regno - y_regno_i)
498 || (x_regno == x_regno_i && y_regno == y_regno_i
499 && x_nregs >= x_nregs_i && y_nregs >= y_nregs_i))
501 info->cur.local_count = --local_count;
502 info->x_local[i] = info->x_local[local_count];
503 info->y_local[i] = info->y_local[local_count];
504 continue;
507 else
510 /* If the new pair is fully enclosed within a matching
511 existing pair, succeed. */
512 if (x_regno >= x_regno_i
513 && x_regno + x_nregs <= x_regno_i + x_nregs_i
514 && x_nregs == y_nregs && x_nregs_i == y_nregs_i
515 && x_regno - x_regno_i == y_regno - y_regno_i)
516 break;
517 if (x_regno == x_regno_i && y_regno == y_regno_i
518 && x_nregs <= x_nregs_i && y_nregs <= y_nregs_i)
519 break;
522 /* Any other overlap causes a match failure. */
523 if (x_regno + x_nregs > x_regno_i
524 && x_regno_i + x_nregs_i > x_regno)
525 return false;
526 if (y_regno + y_nregs > y_regno_i
527 && y_regno_i + y_nregs_i > y_regno)
528 return false;
530 if (i < 0)
532 /* Not found. Create a new entry if possible. */
533 if (!info->live_update
534 || info->cur.local_count >= STRUCT_EQUIV_MAX_LOCAL)
535 return false;
536 info->x_local[info->cur.local_count] = x;
537 info->y_local[info->cur.local_count] = y;
538 info->cur.local_count++;
539 info->cur.version++;
541 note_local_live (info, x, y, rvalue);
543 return true;
545 case SET:
546 gcc_assert (rvalue < 0);
547       /* Ignore the destination's role as a destination.  Still, we have
548          to consider input registers embedded in the addresses of a MEM.
549          N.B., we process the rvalue aspect of STRICT_LOW_PART /
550          ZERO_EXTEND / SIGN_EXTEND along with their lvalue aspect.  */
551       if (!set_dest_addr_equiv_p (SET_DEST (x), SET_DEST (y), info))
552 return false;
553 /* Process source. */
554 return rtx_equiv_p (&SET_SRC (x), SET_SRC (y), 1, info);
555 case PRE_MODIFY:
556 /* Process destination. */
557 if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info))
558 return false;
559 /* Process source. */
560 return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info);
561 case POST_MODIFY:
563 rtx x_dest0, x_dest1;
565 /* Process destination. */
566 x_dest0 = XEXP (x, 0);
567 gcc_assert (REG_P (x_dest0));
568 if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info))
569 return false;
570 x_dest1 = XEXP (x, 0);
571 /* validate_change might have changed the destination. Put it back
572 so that we can do a valid source match. */
573 XEXP (x, 0) = x_dest0;
574 if (!rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 0, info))
575 return false;
576 gcc_assert (x_dest1 == XEXP (x, 0));
577 /* Process source. */
578 return rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), 1, info);
579       if (!rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info))
580 return false;
581 /* Process both subexpressions as inputs. */
582 break;
584 case CLOBBER:
585 gcc_assert (rvalue < 0);
586 return true;
587       /* Some special forms are also rvalues when they appear in lvalue
588          positions.  However, we must not try to match a register after we
589          have already altered it with validate_change, so we consider the
590          rvalue aspect while we process the lvalue.  */
591 case STRICT_LOW_PART:
592 case ZERO_EXTEND:
593 case SIGN_EXTEND:
595 rtx x_inner, y_inner;
596 enum rtx_code code;
597 int change;
599 if (rvalue)
600 break;
601 x_inner = XEXP (x, 0);
602 y_inner = XEXP (y, 0);
603 if (GET_MODE (x_inner) != GET_MODE (y_inner))
604 return false;
605 code = GET_CODE (x_inner);
606 if (code != GET_CODE (y_inner))
607 return false;
608 /* The address of a MEM is an input that will be processed during
609 rvalue == -1 processing. */
610 if (code == SUBREG)
612 if (SUBREG_BYTE (x_inner) != SUBREG_BYTE (y_inner))
613 return false;
614 x = x_inner;
615 x_inner = SUBREG_REG (x_inner);
616 y_inner = SUBREG_REG (y_inner);
617 if (GET_MODE (x_inner) != GET_MODE (y_inner))
618 return false;
619 code = GET_CODE (x_inner);
620 if (code != GET_CODE (y_inner))
621 return false;
623 if (code == MEM)
624 return true;
625 gcc_assert (code == REG);
626 if (! rtx_equiv_p (&XEXP (x, 0), y_inner, rvalue, info))
627 return false;
628 if (REGNO (x_inner) == REGNO (y_inner))
630 change = assign_reg_reg_set (info->common_live, x_inner, 1);
631 info->cur.version++;
633 else
634 change = note_local_live (info, x_inner, y_inner, 1);
635 gcc_assert (change);
636 return true;
638 /* The AUTO_INC / POST_MODIFY / PRE_MODIFY sets are modelled to take
639 place during input processing, however, that is benign, since they
640 are paired with reads. */
641 case MEM:
642 return !rvalue || rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info);
643 case POST_INC: case POST_DEC: case PRE_INC: case PRE_DEC:
644 return (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info)
645 && rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info));
646 case PARALLEL:
647 /* If this is a top-level PATTERN PARALLEL, we expect the caller to
648 have handled the SET_DESTs. A complex or vector PARALLEL can be
649 identified by having a mode. */
650 gcc_assert (rvalue < 0 || GET_MODE (x) != VOIDmode);
651 break;
652 case LABEL_REF:
653 /* Check special tablejump match case. */
654 if (XEXP (y, 0) == info->y_label)
655 return (XEXP (x, 0) == info->x_label);
656 /* We can't assume nonlocal labels have their following insns yet. */
657 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
658 return XEXP (x, 0) == XEXP (y, 0);
660 /* Two label-refs are equivalent if they point at labels
661 in the same position in the instruction stream. */
662 return (next_real_insn (XEXP (x, 0))
663 == next_real_insn (XEXP (y, 0)));
664 case SYMBOL_REF:
665 return XSTR (x, 0) == XSTR (y, 0);
666       /* Some rtl is guaranteed to be shared, or unique; if we didn't match
667          by EQ (pointer) equality above, they aren't the same.  */
668 case CONST_INT:
669 case CODE_LABEL:
670 return false;
671 default:
672 break;
675   /* For commutative operations, the RTXs match if the operands match in any
676 order. */
677 if (targetm.commutative_p (x, UNKNOWN))
678 return ((rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), rvalue, info)
679 && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 1), rvalue, info))
680 || (rtx_equiv_p (&XEXP (x, 0), XEXP (y, 1), rvalue, info)
681 && rtx_equiv_p (&XEXP (x, 1), XEXP (y, 0), rvalue, info)));
683 /* Process subexpressions - this is similar to rtx_equal_p. */
684 length = GET_RTX_LENGTH (code);
685 format = GET_RTX_FORMAT (code);
687 for (i = 0; i < length; ++i)
689 switch (format[i])
691 case 'w':
692 if (XWINT (x, i) != XWINT (y, i))
693 return false;
694 break;
695 case 'n':
696 case 'i':
697 if (XINT (x, i) != XINT (y, i))
698 return false;
699 break;
700 case 'V':
701 case 'E':
702 if (XVECLEN (x, i) != XVECLEN (y, i))
703 return false;
704 if (XVEC (x, i) != 0)
706 int j;
707 for (j = 0; j < XVECLEN (x, i); ++j)
709 if (! rtx_equiv_p (&XVECEXP (x, i, j), XVECEXP (y, i, j),
710 rvalue, info))
711 return false;
714 break;
715 case 'e':
716 if (! rtx_equiv_p (&XEXP (x, i), XEXP (y, i), rvalue, info))
717 return false;
718 break;
719 case 'S':
720 case 's':
721 if ((XSTR (x, i) || XSTR (y, i))
722 && (! XSTR (x, i) || ! XSTR (y, i)
723 || strcmp (XSTR (x, i), XSTR (y, i))))
724 return false;
725 break;
726 case 'u':
727 /* These are just backpointers, so they don't matter. */
728 break;
729 case '0':
730 case 't':
731 break;
732 /* It is believed that rtx's at this level will never
733 contain anything but integers and other rtx's,
734 except for within LABEL_REFs and SYMBOL_REFs. */
735 default:
736 gcc_unreachable ();
739 return true;
742 /* Do only the rtx_equiv_p SET_DEST processing for SETs and CLOBBERs.
743    Since we are scanning backwards, this is the first step in processing each
744 insn. Return true for success. */
745 static bool
746 set_dest_equiv_p (rtx x, rtx y, struct equiv_info *info)
748 if (!x || !y)
749 return x == y;
750 if (GET_CODE (x) != GET_CODE (y))
751 return false;
752 else if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
753 return rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 0, info);
754 else if (GET_CODE (x) == PARALLEL)
756 int j;
758 if (XVECLEN (x, 0) != XVECLEN (y, 0))
759 return false;
760 for (j = 0; j < XVECLEN (x, 0); ++j)
762 rtx xe = XVECEXP (x, 0, j);
763 rtx ye = XVECEXP (y, 0, j);
765 if (GET_CODE (xe) != GET_CODE (ye))
766 return false;
767 if ((GET_CODE (xe) == SET || GET_CODE (xe) == CLOBBER)
768 && ! rtx_equiv_p (&XEXP (xe, 0), XEXP (ye, 0), 0, info))
769 return false;
772 return true;
775 /* Process MEMs in SET_DEST destinations. We must not process this together
776 with REG SET_DESTs, but must do it separately, lest when we see
777 [(set (reg:SI foo) (bar))
778     (set (mem:SI (reg:SI foo)) (baz))]
779    struct_equiv_block_eq could get confused and assume that (reg:SI foo)
780 is not live before this instruction. */
781 static bool
782 set_dest_addr_equiv_p (rtx x, rtx y, struct equiv_info *info)
784 enum rtx_code code = GET_CODE (x);
785 int length;
786 const char *format;
787 int i;
789 if (code != GET_CODE (y))
790 return false;
791 if (code == MEM)
792 return rtx_equiv_p (&XEXP (x, 0), XEXP (y, 0), 1, info);
794 /* Process subexpressions. */
795 length = GET_RTX_LENGTH (code);
796 format = GET_RTX_FORMAT (code);
798 for (i = 0; i < length; ++i)
800 switch (format[i])
802 case 'V':
803 case 'E':
804 if (XVECLEN (x, i) != XVECLEN (y, i))
805 return false;
806 if (XVEC (x, i) != 0)
808 int j;
809 for (j = 0; j < XVECLEN (x, i); ++j)
811 if (! set_dest_addr_equiv_p (XVECEXP (x, i, j),
812 XVECEXP (y, i, j), info))
813 return false;
816 break;
817 case 'e':
818 if (! set_dest_addr_equiv_p (XEXP (x, i), XEXP (y, i), info))
819 return false;
820 break;
821 default:
822 break;
825 return true;
828 /* Check if the set of REG_DEAD notes attached to I1 and I2 allows us to
829 go ahead with merging I1 and I2, which otherwise look fine.
830 Inputs / local registers for the inputs of I1 and I2 have already been
831 set up. */
832 static bool
833 death_notes_match_p (rtx i1 ATTRIBUTE_UNUSED, rtx i2 ATTRIBUTE_UNUSED,
834 struct equiv_info *info ATTRIBUTE_UNUSED)
836 #ifdef STACK_REGS
837 /* If cross_jump_death_matters is not 0, the insn's mode
838 indicates whether or not the insn contains any stack-like regs. */
840 if ((info->mode & CLEANUP_POST_REGSTACK) && stack_regs_mentioned (i1))
842 /* If register stack conversion has already been done, then
843 death notes must also be compared before it is certain that
844 the two instruction streams match. */
846 rtx note;
847 HARD_REG_SET i1_regset, i2_regset;
849 CLEAR_HARD_REG_SET (i1_regset);
850 CLEAR_HARD_REG_SET (i2_regset);
852 for (note = REG_NOTES (i1); note; note = XEXP (note, 1))
853 if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
854 SET_HARD_REG_BIT (i1_regset, REGNO (XEXP (note, 0)));
856 for (note = REG_NOTES (i2); note; note = XEXP (note, 1))
857 if (REG_NOTE_KIND (note) == REG_DEAD && STACK_REG_P (XEXP (note, 0)))
859 unsigned regno = REGNO (XEXP (note, 0));
860 int i;
862 for (i = info->cur.local_count - 1; i >= 0; i--)
863 if (regno == REGNO (info->y_local[i]))
865 regno = REGNO (info->x_local[i]);
866 break;
868 SET_HARD_REG_BIT (i2_regset, regno);
871 GO_IF_HARD_REG_EQUAL (i1_regset, i2_regset, done);
873 return false;
875 done:
878 #endif
879 return true;
882 /* Return true if I1 and I2 are equivalent and thus can be crossjumped. */
884 bool
885 insns_match_p (rtx i1, rtx i2, struct equiv_info *info)
887 int rvalue_change_start;
888 struct struct_equiv_checkpoint before_rvalue_change;
890 /* Verify that I1 and I2 are equivalent. */
891 if (GET_CODE (i1) != GET_CODE (i2))
892 return false;
894 info->cur.x_start = i1;
895 info->cur.y_start = i2;
897 /* If this is a CALL_INSN, compare register usage information.
898 If we don't check this on stack register machines, the two
899 CALL_INSNs might be merged leaving reg-stack.c with mismatching
900 numbers of stack registers in the same basic block.
901 If we don't check this on machines with delay slots, a delay slot may
902 be filled that clobbers a parameter expected by the subroutine.
904 ??? We take the simple route for now and assume that if they're
905 equal, they were constructed identically. */
907 if (CALL_P (i1))
909 if (SIBLING_CALL_P (i1) != SIBLING_CALL_P (i2)
910 || ! set_dest_equiv_p (PATTERN (i1), PATTERN (i2), info)
911 || ! set_dest_equiv_p (CALL_INSN_FUNCTION_USAGE (i1),
912 CALL_INSN_FUNCTION_USAGE (i2), info)
913 || ! rtx_equiv_p (&CALL_INSN_FUNCTION_USAGE (i1),
914 CALL_INSN_FUNCTION_USAGE (i2), -1, info))
916 cancel_changes (0);
917 return false;
920 else if (INSN_P (i1))
922 if (! set_dest_equiv_p (PATTERN (i1), PATTERN (i2), info))
924 cancel_changes (0);
925 return false;
928 rvalue_change_start = num_validated_changes ();
929 struct_equiv_make_checkpoint (&before_rvalue_change, info);
930 /* Check death_notes_match_p *after* the inputs have been processed,
931 so that local inputs will already have been set up. */
932 if (! INSN_P (i1)
933 || (!bitmap_bit_p (info->equiv_used, info->cur.ninsns)
934 && rtx_equiv_p (&PATTERN (i1), PATTERN (i2), -1, info)
935 && death_notes_match_p (i1, i2, info)
936 && verify_changes (0)))
937 return true;
939 /* Do not do EQUIV substitution after reload. First, we're undoing the
940 work of reload_cse. Second, we may be undoing the work of the post-
941 reload splitting pass. */
942 /* ??? Possibly add a new phase switch variable that can be used by
943 targets to disallow the troublesome insns after splitting. */
944 if (!reload_completed)
946 rtx equiv1, equiv2;
948 cancel_changes (rvalue_change_start);
949 struct_equiv_restore_checkpoint (&before_rvalue_change, info);
951 /* The following code helps take care of G++ cleanups. */
952 equiv1 = find_reg_equal_equiv_note (i1);
953 equiv2 = find_reg_equal_equiv_note (i2);
954 if (equiv1 && equiv2
955 /* If the equivalences are not to a constant, they may
956 reference pseudos that no longer exist, so we can't
957 use them. */
958 && (! reload_completed
959 || (CONSTANT_P (XEXP (equiv1, 0))
960 && rtx_equal_p (XEXP (equiv1, 0), XEXP (equiv2, 0)))))
962 rtx s1 = single_set (i1);
963 rtx s2 = single_set (i2);
965 if (s1 != 0 && s2 != 0)
967 validate_change (i1, &SET_SRC (s1), XEXP (equiv1, 0), 1);
968 validate_change (i2, &SET_SRC (s2), XEXP (equiv2, 0), 1);
969 /* Only inspecting the new SET_SRC is not good enough,
970 because there may also be bare USEs in a single_set
971 PARALLEL. */
972 if (rtx_equiv_p (&PATTERN (i1), PATTERN (i2), -1, info)
973 && death_notes_match_p (i1, i2, info)
974 && verify_changes (0))
976 /* Mark this insn so that we'll use the equivalence in
977 all subsequent passes. */
978 bitmap_set_bit (info->equiv_used, info->cur.ninsns);
979 return true;
985 cancel_changes (0);
986 return false;
989 /* Set up mode and register information in INFO. Return true for success. */
990 bool
991 struct_equiv_init (int mode, struct equiv_info *info)
993 if ((info->x_block->flags | info->y_block->flags) & BB_DIRTY)
994 update_life_info_in_dirty_blocks (UPDATE_LIFE_GLOBAL_RM_NOTES,
995 (PROP_DEATH_NOTES
996 | ((mode & CLEANUP_POST_REGSTACK)
997 ? PROP_POST_REGSTACK : 0)));
998 if (!REG_SET_EQUAL_P (info->x_block->il.rtl->global_live_at_end,
999 info->y_block->il.rtl->global_live_at_end))
1001 #ifdef STACK_REGS
1002 unsigned rn;
1004 if (!(mode & CLEANUP_POST_REGSTACK))
1005 return false;
1006 /* After reg-stack. Remove bogus live info about stack regs. N.B.
1007 these regs are not necessarily all dead - we swap random bogosity
1008 against constant bogosity. However, clearing these bits at
1009 least makes the regsets comparable. */
1010 for (rn = FIRST_STACK_REG; rn <= LAST_STACK_REG; rn++)
1012 CLEAR_REGNO_REG_SET (info->x_block->il.rtl->global_live_at_end, rn);
1013 CLEAR_REGNO_REG_SET (info->y_block->il.rtl->global_live_at_end, rn);
1015 if (!REG_SET_EQUAL_P (info->x_block->il.rtl->global_live_at_end,
1016 info->y_block->il.rtl->global_live_at_end))
1017 #endif
1018 return false;
1020 info->mode = mode;
1021 if (mode & STRUCT_EQUIV_START)
1023 info->x_input = info->y_input = info->input_reg = NULL_RTX;
1024 info->equiv_used = ALLOC_REG_SET (&reg_obstack);
1025 info->check_input_conflict = false;
1027 info->had_input_conflict = false;
1028 info->cur.ninsns = info->cur.version = 0;
1029 info->cur.local_count = info->cur.input_count = 0;
1030 info->cur.x_start = info->cur.y_start = NULL_RTX;
1031 info->x_label = info->y_label = NULL_RTX;
1032 info->need_rerun = false;
1033 info->live_update = true;
1034 info->cur.input_valid = false;
1035 info->common_live = ALLOC_REG_SET (&reg_obstack);
1036 info->x_local_live = ALLOC_REG_SET (&reg_obstack);
1037 info->y_local_live = ALLOC_REG_SET (&reg_obstack);
1038 COPY_REG_SET (info->common_live, info->x_block->il.rtl->global_live_at_end);
1039 struct_equiv_make_checkpoint (&info->best_match, info);
1040 return true;
1043 /* Insns XI and YI have been matched. Merge memory attributes and reg
1044 notes. */
1045 static void
1046 struct_equiv_merge (rtx xi, rtx yi, struct equiv_info *info)
1048 rtx equiv1, equiv2;
1050 merge_memattrs (xi, yi);
1052 /* If the merged insns have different REG_EQUAL notes, then
1053 remove them. */
1054 info->live_update = false;
1055 equiv1 = find_reg_equal_equiv_note (xi);
1056 equiv2 = find_reg_equal_equiv_note (yi);
1057 if (equiv1 && !equiv2)
1058 remove_note (xi, equiv1);
1059 else if (!equiv1 && equiv2)
1060 remove_note (yi, equiv2);
1061 else if (equiv1 && equiv2
1062 && !rtx_equiv_p (&XEXP (equiv1, 0), XEXP (equiv2, 0),
1063 1, info))
1065 remove_note (xi, equiv1);
1066 remove_note (yi, equiv2);
1068 info->live_update = true;
1071 /* Return number of matched insns.
1072 This function must be called up to three times for a successful cross-jump
1073 match:
1074 first to find out which instructions do match. While trying to match
1075 another instruction that doesn't match, we destroy information in info
1076 about the actual inputs. So if there have been any before the last
1077 match attempt, we need to call this function again to recompute the
1078 actual inputs up to the actual start of the matching sequence.
1079 When we are then satisfied that the cross-jump is worthwhile, we
1080 call this function a third time to make any changes needed to make the
1081 sequences match: apply equivalences, remove non-matching
1082 notes and merge memory attributes. */
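/* Illustrative calling sequence -- added for exposition, not part of the
   original source; BB1, BB2 and the profitability test are placeholders,
   and extra mode bits such as STRUCT_EQUIV_MATCH_JUMPS or
   STRUCT_EQUIV_NEED_FULL_BLOCK can be ORed in as the caller requires:

     struct equiv_info info;
     int nmatch;

     memset (&info, 0, sizeof info);
     info.x_block = bb1;
     info.y_block = bb2;
     nmatch = struct_equiv_block_eq (STRUCT_EQUIV_START, &info);
     while (nmatch && info.need_rerun)
       nmatch = struct_equiv_block_eq (STRUCT_EQUIV_RERUN, &info);
     if (nmatch && match_is_profitable (nmatch, &info))
       struct_equiv_block_eq (STRUCT_EQUIV_FINAL, &info);  */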
1083 int
1084 struct_equiv_block_eq (int mode, struct equiv_info *info)
1086 rtx x_stop, y_stop;
1087 rtx xi, yi;
1088 int i;
1090 if (mode & STRUCT_EQUIV_START)
1092 x_stop = BB_HEAD (info->x_block);
1093 y_stop = BB_HEAD (info->y_block);
1094 if (!x_stop || !y_stop)
1095 return 0;
1097 else
1099 x_stop = info->cur.x_start;
1100 y_stop = info->cur.y_start;
1102 if (!struct_equiv_init (mode, info))
1103 gcc_unreachable ();
1105 /* Skip simple jumps at the end of the blocks. Complex jumps still
1106 need to be compared for equivalence, which we'll do below. */
1108 xi = BB_END (info->x_block);
1109 if (onlyjump_p (xi)
1110 || (returnjump_p (xi) && !side_effects_p (PATTERN (xi))))
1112 info->cur.x_start = xi;
1113 xi = PREV_INSN (xi);
1116 yi = BB_END (info->y_block);
1117 if (onlyjump_p (yi)
1118 || (returnjump_p (yi) && !side_effects_p (PATTERN (yi))))
1120 info->cur.y_start = yi;
1121 /* Count everything except for unconditional jump as insn. */
1122 /* ??? Is it right to count unconditional jumps with a clobber?
1123 Should we count conditional returns? */
1124 if (!simplejump_p (yi) && !returnjump_p (yi) && info->cur.x_start)
1125 info->cur.ninsns++;
1126 yi = PREV_INSN (yi);
1129 if (mode & STRUCT_EQUIV_MATCH_JUMPS)
1131 /* The caller is expected to have compared the jumps already, but we
1132 need to match them again to get any local registers and inputs. */
1133 gcc_assert (!info->cur.x_start == !info->cur.y_start);
1134 if (info->cur.x_start)
1136 if (any_condjump_p (info->cur.x_start)
1137 ? !condjump_equiv_p (info, false)
1138 : !insns_match_p (info->cur.x_start, info->cur.y_start, info))
1139 gcc_unreachable ();
1141 else if (any_condjump_p (xi) && any_condjump_p (yi))
1143 info->cur.x_start = xi;
1144 info->cur.y_start = yi;
1145 xi = PREV_INSN (xi);
1146 yi = PREV_INSN (yi);
1147 info->cur.ninsns++;
1148 if (!condjump_equiv_p (info, false))
1149 gcc_unreachable ();
1151 if (info->cur.x_start && info->mode & STRUCT_EQUIV_FINAL)
1152 struct_equiv_merge (info->cur.x_start, info->cur.y_start, info);
1155 struct_equiv_improve_checkpoint (&info->best_match, info);
1156 info->x_end = xi;
1157 info->y_end = yi;
1158 if (info->cur.x_start != x_stop)
1159 for (;;)
1161 /* Ignore notes. */
1162 while (!INSN_P (xi) && xi != x_stop)
1163 xi = PREV_INSN (xi);
1165 while (!INSN_P (yi) && yi != y_stop)
1166 yi = PREV_INSN (yi);
1168 if (!insns_match_p (xi, yi, info))
1169 break;
1170 if (INSN_P (xi))
1172 if (info->mode & STRUCT_EQUIV_FINAL)
1173 struct_equiv_merge (xi, yi, info);
1174 info->cur.ninsns++;
1175 struct_equiv_improve_checkpoint (&info->best_match, info);
1177 if (xi == x_stop || yi == y_stop)
1179 /* If we reached the start of at least one of the blocks, but
1180 best_match hasn't been advanced back to the first valid insn
1181 yet, represent the increased benefit of completing the block
1182 as an increased instruction count. */
1183 if (info->best_match.x_start != info->cur.x_start
1184 && (xi == BB_HEAD (info->x_block)
1185 || yi == BB_HEAD (info->y_block)))
1187 info->cur.ninsns++;
1188 struct_equiv_improve_checkpoint (&info->best_match, info);
1189 info->cur.ninsns--;
1190 if (info->best_match.ninsns > info->cur.ninsns)
1191 info->best_match.ninsns = info->cur.ninsns;
1193 break;
1195 xi = PREV_INSN (xi);
1196 yi = PREV_INSN (yi);
1199 /* If we failed to match an insn, but had some changes registered from
1200 trying to make the insns match, we need to cancel these changes now. */
1201 cancel_changes (0);
1202 /* Restore to best_match to get the sequence with the best known-so-far
1203 cost-benefit difference. */
1204 struct_equiv_restore_checkpoint (&info->best_match, info);
1206 /* Include preceding notes and labels in the cross-jump / if-conversion.
1207 One, this may bring us to the head of the blocks.
1208 Two, it keeps line number notes as matched as may be. */
1209 if (info->cur.ninsns)
1211 xi = info->cur.x_start;
1212 yi = info->cur.y_start;
1213 while (xi != x_stop && !INSN_P (PREV_INSN (xi)))
1214 xi = PREV_INSN (xi);
1216 while (yi != y_stop && !INSN_P (PREV_INSN (yi)))
1217 yi = PREV_INSN (yi);
1219 info->cur.x_start = xi;
1220 info->cur.y_start = yi;
1223 if (!info->cur.input_valid)
1224 info->x_input = info->y_input = info->input_reg = NULL_RTX;
1225 if (!info->need_rerun)
1227 find_dying_inputs (info);
1228 if (info->mode & STRUCT_EQUIV_FINAL)
1230 if (info->check_input_conflict && ! resolve_input_conflict (info))
1231 gcc_unreachable ();
1233 else
1235 bool input_conflict = info->had_input_conflict;
1237 if (!input_conflict
1238 && info->dying_inputs > 1
1239 && bitmap_intersect_p (info->x_local_live, info->y_local_live))
1241 regset_head clobbered_regs;
1243 INIT_REG_SET (&clobbered_regs);
1244 for (i = 0; i < info->cur.local_count; i++)
1246 if (assign_reg_reg_set (&clobbered_regs, info->y_local[i], 0))
1248 input_conflict = true;
1249 break;
1251 assign_reg_reg_set (&clobbered_regs, info->x_local[i], 1);
1253 CLEAR_REG_SET (&clobbered_regs);
1255 if (input_conflict && !info->check_input_conflict)
1256 info->need_rerun = true;
1257 info->check_input_conflict = input_conflict;
1261 if (info->mode & STRUCT_EQUIV_NEED_FULL_BLOCK
1262 && (info->cur.x_start != x_stop || info->cur.y_start != y_stop))
1263 return 0;
1264 return info->cur.ninsns;
1267 /* For each local register, set info->local_rvalue to true iff the register
1268 is a dying input. Store the total number of these in info->dying_inputs. */
1269 static void
1270 find_dying_inputs (struct equiv_info *info)
1272 int i;
1274 info->dying_inputs = 0;
1275 for (i = info->cur.local_count-1; i >=0; i--)
1277 rtx x = info->x_local[i];
1278 unsigned regno = REGNO (x);
1279 int nregs = (regno >= FIRST_PSEUDO_REGISTER
1280 ? 1 : hard_regno_nregs[regno][GET_MODE (x)]);
1282 for (info->local_rvalue[i] = false; nregs >= 0; regno++, --nregs)
1283 if (REGNO_REG_SET_P (info->x_local_live, regno))
1285 info->dying_inputs++;
1286 info->local_rvalue[i] = true;
1287 break;
1292 /* For each local register that is a dying input, y_local[i] will be
1293 copied to x_local[i]. We'll do this in ascending order. Try to
1294 re-order the locals to avoid conflicts like r3 = r2; r4 = r3; .
1295 Return true iff the re-ordering is successful, or not necessary. */
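/* Worked example of the conflict described above -- added for exposition,
   not part of the original source.  Suppose x_local = {r3, r4} and
   y_local = {r2, r3}, both entries being dying inputs.  Copying in index
   order would emit
     r3 = r2;  r4 = r3;
   which reads r3 after it has already been overwritten.  Swapping the two
   entries gives
     r4 = r3;  r3 = r2;
   which is the reordering this function computes; if no valid order is
   found within the number of swaps an acyclic dependency would need, the
   arrays are restored and the function fails.  */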
1296 static bool
1297 resolve_input_conflict (struct equiv_info *info)
1299 int i, j, end;
1300 int nswaps = 0;
1301 rtx save_x_local[STRUCT_EQUIV_MAX_LOCAL];
1302 rtx save_y_local[STRUCT_EQUIV_MAX_LOCAL];
1304 find_dying_inputs (info);
1305 if (info->dying_inputs <= 1)
1306 return true;
1307 memcpy (save_x_local, info->x_local, sizeof save_x_local);
1308 memcpy (save_y_local, info->y_local, sizeof save_y_local);
1309 end = info->cur.local_count - 1;
1310 for (i = 0; i <= end; i++)
1312 /* Cycle detection with regsets is expensive, so we just check that
1313 we don't exceed the maximum number of swaps needed in the acyclic
1314 case. */
1315 int max_swaps = end - i;
1317 /* Check if x_local[i] will be clobbered. */
1318 if (!info->local_rvalue[i])
1319 continue;
1320 /* Check if any later value needs to be copied earlier. */
1321 for (j = i + 1; j <= end; j++)
1323 rtx tmp;
1325 if (!info->local_rvalue[j])
1326 continue;
1327 if (!reg_overlap_mentioned_p (info->x_local[i], info->y_local[j]))
1328 continue;
1329 if (--max_swaps < 0)
1331 memcpy (info->x_local, save_x_local, sizeof save_x_local);
1332 memcpy (info->y_local, save_y_local, sizeof save_y_local);
1333 return false;
1335 nswaps++;
1336 tmp = info->x_local[i];
1337 info->x_local[i] = info->x_local[j];
1338 info->x_local[j] = tmp;
1339 tmp = info->y_local[i];
1340 info->y_local[i] = info->y_local[j];
1341 info->y_local[j] = tmp;
1342 j = i;
1345 info->had_input_conflict = true;
1346 if (dump_file && nswaps)
1347 fprintf (dump_file, "Resolved input conflict, %d %s.\n",
1348 nswaps, nswaps == 1 ? "swap" : "swaps");
1349 return true;