PR bootstrap/18033
[official-gcc.git] / gcc / jump.c
blob85c1f6b2d75f77cd41e5fddea22e04622a22d09a
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
24 operate with jumps.
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
34 The subroutines redirect_jump and invert_jump are used
35 from other passes as well. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tm_p.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "regs.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
48 #include "recog.h"
49 #include "function.h"
50 #include "expr.h"
51 #include "real.h"
52 #include "except.h"
53 #include "diagnostic.h"
54 #include "toplev.h"
55 #include "reload.h"
56 #include "predict.h"
57 #include "timevar.h"
59 /* Optimize jump y; x: ... y: jumpif... x?
60 Don't know if it is worth bothering with. */
61 /* Optimize two cases of conditional jump to conditional jump?
62 This can never delete any instruction or make anything dead,
63 or even change what is live at any point.
64 So perhaps let combiner do it. */
66 static void init_label_info (rtx);
67 static void mark_all_labels (rtx);
68 static void delete_computation (rtx);
69 static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
70 static int redirect_exp (rtx, rtx, rtx);
71 static void invert_exp_1 (rtx);
72 static int invert_exp (rtx);
73 static int returnjump_p_1 (rtx *, void *);
74 static void delete_prior_computation (rtx, rtx);
76 /* Alternate entry into the jump optimizer. This entry point only rebuilds
77 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
78 instructions. */
79 void
80 rebuild_jump_labels (rtx f)
82 rtx insn;
84 timevar_push (TV_REBUILD_JUMP);
85 init_label_info (f);
86 mark_all_labels (f);
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
92 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
93 if (LABEL_P (XEXP (insn, 0)))
94 LABEL_NUSES (XEXP (insn, 0))++;
95 timevar_pop (TV_REBUILD_JUMP);
98 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
99 non-fallthru insn. This is not generally true, as multiple barriers
100 may have crept in, or the BARRIER may be separated from the last
101 real insn by one or more NOTEs.
103 This simple pass moves barriers and removes duplicates so that the
104 old code is happy.
106 void
107 cleanup_barriers (void)
109 rtx insn, next, prev;
110 for (insn = get_insns (); insn; insn = next)
112 next = NEXT_INSN (insn);
113 if (BARRIER_P (insn))
115 prev = prev_nonnote_insn (insn);
116 if (BARRIER_P (prev))
117 delete_insn (insn);
118 else if (prev != PREV_INSN (insn))
119 reorder_insns (insn, insn, prev);
124 void
125 purge_line_number_notes (rtx f)
127 rtx last_note = 0;
128 rtx insn;
129 /* Delete extraneous line number notes.
130 Note that two consecutive notes for different lines are not really
131 extraneous. There should be some indication where that line belonged,
132 even if it became empty. */
134 for (insn = f; insn; insn = NEXT_INSN (insn))
135 if (NOTE_P (insn))
137 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
138 /* Any previous line note was for the prologue; gdb wants a new
139 note after the prologue even if it is for the same line. */
140 last_note = NULL_RTX;
141 else if (NOTE_LINE_NUMBER (insn) >= 0)
143 /* Delete this note if it is identical to previous note. */
144 if (last_note
145 #ifdef USE_MAPPED_LOCATION
146 && NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last_note)
147 #else
148 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
149 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note)
150 #endif
153 delete_related_insns (insn);
154 continue;
157 last_note = insn;
162 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
163 notes whose labels don't occur in the insn any more. Returns the
164 largest INSN_UID found. */
165 static void
166 init_label_info (rtx f)
168 rtx insn;
170 for (insn = f; insn; insn = NEXT_INSN (insn))
171 if (LABEL_P (insn))
172 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
173 else if (JUMP_P (insn))
174 JUMP_LABEL (insn) = 0;
175 else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
177 rtx note, next;
179 for (note = REG_NOTES (insn); note; note = next)
181 next = XEXP (note, 1);
182 if (REG_NOTE_KIND (note) == REG_LABEL
183 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
184 remove_note (insn, note);
189 /* Mark the label each jump jumps to.
190 Combine consecutive labels, and count uses of labels. */
192 static void
193 mark_all_labels (rtx f)
195 rtx insn;
197 for (insn = f; insn; insn = NEXT_INSN (insn))
198 if (INSN_P (insn))
200 mark_jump_label (PATTERN (insn), insn, 0);
201 if (! INSN_DELETED_P (insn) && JUMP_P (insn))
203 /* When we know the LABEL_REF contained in a REG used in
204 an indirect jump, we'll have a REG_LABEL note so that
205 flow can tell where it's going. */
206 if (JUMP_LABEL (insn) == 0)
208 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
209 if (label_note)
211 /* But a LABEL_REF around the REG_LABEL note, so
212 that we can canonicalize it. */
213 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
214 XEXP (label_note, 0));
216 mark_jump_label (label_ref, insn, 0);
217 XEXP (label_note, 0) = XEXP (label_ref, 0);
218 JUMP_LABEL (insn) = XEXP (label_note, 0);
225 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
226 notes between START and END out before START. START and END may be such
227 notes. Returns the values of the new starting and ending insns, which
228 may be different if the original ones were such notes.
229 Return true if there were only such notes and no real instructions. */
231 bool
232 squeeze_notes (rtx* startp, rtx* endp)
234 rtx start = *startp;
235 rtx end = *endp;
237 rtx insn;
238 rtx next;
239 rtx last = NULL;
240 rtx past_end = NEXT_INSN (end);
242 for (insn = start; insn != past_end; insn = next)
244 next = NEXT_INSN (insn);
245 if (NOTE_P (insn)
246 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
247 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
248 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
249 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END))
251 /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass. */
252 gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
253 && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);
255 if (insn == start)
256 start = next;
257 else
259 rtx prev = PREV_INSN (insn);
260 PREV_INSN (insn) = PREV_INSN (start);
261 NEXT_INSN (insn) = start;
262 NEXT_INSN (PREV_INSN (insn)) = insn;
263 PREV_INSN (NEXT_INSN (insn)) = insn;
264 NEXT_INSN (prev) = next;
265 PREV_INSN (next) = prev;
268 else
269 last = insn;
272 /* There were no real instructions. */
273 if (start == past_end)
274 return true;
276 end = last;
278 *startp = start;
279 *endp = end;
280 return false;
283 /* Return the label before INSN, or put a new label there. */
286 get_label_before (rtx insn)
288 rtx label;
290 /* Find an existing label at this point
291 or make a new one if there is none. */
292 label = prev_nonnote_insn (insn);
294 if (label == 0 || !LABEL_P (label))
296 rtx prev = PREV_INSN (insn);
298 label = gen_label_rtx ();
299 emit_label_after (label, prev);
300 LABEL_NUSES (label) = 0;
302 return label;
305 /* Return the label after INSN, or put a new label there. */
308 get_label_after (rtx insn)
310 rtx label;
312 /* Find an existing label at this point
313 or make a new one if there is none. */
314 label = next_nonnote_insn (insn);
316 if (label == 0 || !LABEL_P (label))
318 label = gen_label_rtx ();
319 emit_label_after (label, insn);
320 LABEL_NUSES (label) = 0;
322 return label;
325 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
326 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
327 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
328 know whether it's source is floating point or integer comparison. Machine
329 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
330 to help this function avoid overhead in these cases. */
331 enum rtx_code
332 reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
334 enum machine_mode mode;
336 /* If this is not actually a comparison, we can't reverse it. */
337 if (GET_RTX_CLASS (code) != RTX_COMPARE
338 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
339 return UNKNOWN;
341 mode = GET_MODE (arg0);
342 if (mode == VOIDmode)
343 mode = GET_MODE (arg1);
345 /* First see if machine description supplies us way to reverse the
346 comparison. Give it priority over everything else to allow
347 machine description to do tricks. */
348 if (GET_MODE_CLASS (mode) == MODE_CC
349 && REVERSIBLE_CC_MODE (mode))
351 #ifdef REVERSE_CONDITION
352 return REVERSE_CONDITION (code, mode);
353 #endif
354 return reverse_condition (code);
357 /* Try a few special cases based on the comparison code. */
358 switch (code)
360 case GEU:
361 case GTU:
362 case LEU:
363 case LTU:
364 case NE:
365 case EQ:
366 /* It is always safe to reverse EQ and NE, even for the floating
367 point. Similarly the unsigned comparisons are never used for
368 floating point so we can reverse them in the default way. */
369 return reverse_condition (code);
370 case ORDERED:
371 case UNORDERED:
372 case LTGT:
373 case UNEQ:
374 /* In case we already see unordered comparison, we can be sure to
375 be dealing with floating point so we don't need any more tests. */
376 return reverse_condition_maybe_unordered (code);
377 case UNLT:
378 case UNLE:
379 case UNGT:
380 case UNGE:
381 /* We don't have safe way to reverse these yet. */
382 return UNKNOWN;
383 default:
384 break;
387 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
389 rtx prev;
390 /* Try to search for the comparison to determine the real mode.
391 This code is expensive, but with sane machine description it
392 will be never used, since REVERSIBLE_CC_MODE will return true
393 in all cases. */
394 if (! insn)
395 return UNKNOWN;
397 for (prev = prev_nonnote_insn (insn);
398 prev != 0 && !LABEL_P (prev);
399 prev = prev_nonnote_insn (prev))
401 rtx set = set_of (arg0, prev);
402 if (set && GET_CODE (set) == SET
403 && rtx_equal_p (SET_DEST (set), arg0))
405 rtx src = SET_SRC (set);
407 if (GET_CODE (src) == COMPARE)
409 rtx comparison = src;
410 arg0 = XEXP (src, 0);
411 mode = GET_MODE (arg0);
412 if (mode == VOIDmode)
413 mode = GET_MODE (XEXP (comparison, 1));
414 break;
416 /* We can get past reg-reg moves. This may be useful for model
417 of i387 comparisons that first move flag registers around. */
418 if (REG_P (src))
420 arg0 = src;
421 continue;
424 /* If register is clobbered in some ununderstandable way,
425 give up. */
426 if (set)
427 return UNKNOWN;
431 /* Test for an integer condition, or a floating-point comparison
432 in which NaNs can be ignored. */
433 if (GET_CODE (arg0) == CONST_INT
434 || (GET_MODE (arg0) != VOIDmode
435 && GET_MODE_CLASS (mode) != MODE_CC
436 && !HONOR_NANS (mode)))
437 return reverse_condition (code);
439 return UNKNOWN;
442 /* A wrapper around the previous function to take COMPARISON as rtx
443 expression. This simplifies many callers. */
444 enum rtx_code
445 reversed_comparison_code (rtx comparison, rtx insn)
447 if (!COMPARISON_P (comparison))
448 return UNKNOWN;
449 return reversed_comparison_code_parts (GET_CODE (comparison),
450 XEXP (comparison, 0),
451 XEXP (comparison, 1), insn);
454 /* Given an rtx-code for a comparison, return the code for the negated
455 comparison. If no such code exists, return UNKNOWN.
457 WATCH OUT! reverse_condition is not safe to use on a jump that might
458 be acting on the results of an IEEE floating point comparison, because
459 of the special treatment of non-signaling nans in comparisons.
460 Use reversed_comparison_code instead. */
462 enum rtx_code
463 reverse_condition (enum rtx_code code)
465 switch (code)
467 case EQ:
468 return NE;
469 case NE:
470 return EQ;
471 case GT:
472 return LE;
473 case GE:
474 return LT;
475 case LT:
476 return GE;
477 case LE:
478 return GT;
479 case GTU:
480 return LEU;
481 case GEU:
482 return LTU;
483 case LTU:
484 return GEU;
485 case LEU:
486 return GTU;
487 case UNORDERED:
488 return ORDERED;
489 case ORDERED:
490 return UNORDERED;
492 case UNLT:
493 case UNLE:
494 case UNGT:
495 case UNGE:
496 case UNEQ:
497 case LTGT:
498 return UNKNOWN;
500 default:
501 abort ();
505 /* Similar, but we're allowed to generate unordered comparisons, which
506 makes it safe for IEEE floating-point. Of course, we have to recognize
507 that the target will support them too... */
509 enum rtx_code
510 reverse_condition_maybe_unordered (enum rtx_code code)
512 switch (code)
514 case EQ:
515 return NE;
516 case NE:
517 return EQ;
518 case GT:
519 return UNLE;
520 case GE:
521 return UNLT;
522 case LT:
523 return UNGE;
524 case LE:
525 return UNGT;
526 case LTGT:
527 return UNEQ;
528 case UNORDERED:
529 return ORDERED;
530 case ORDERED:
531 return UNORDERED;
532 case UNLT:
533 return GE;
534 case UNLE:
535 return GT;
536 case UNGT:
537 return LE;
538 case UNGE:
539 return LT;
540 case UNEQ:
541 return LTGT;
543 default:
544 abort ();
548 /* Similar, but return the code when two operands of a comparison are swapped.
549 This IS safe for IEEE floating-point. */
551 enum rtx_code
552 swap_condition (enum rtx_code code)
554 switch (code)
556 case EQ:
557 case NE:
558 case UNORDERED:
559 case ORDERED:
560 case UNEQ:
561 case LTGT:
562 return code;
564 case GT:
565 return LT;
566 case GE:
567 return LE;
568 case LT:
569 return GT;
570 case LE:
571 return GE;
572 case GTU:
573 return LTU;
574 case GEU:
575 return LEU;
576 case LTU:
577 return GTU;
578 case LEU:
579 return GEU;
580 case UNLT:
581 return UNGT;
582 case UNLE:
583 return UNGE;
584 case UNGT:
585 return UNLT;
586 case UNGE:
587 return UNLE;
589 default:
590 abort ();
594 /* Given a comparison CODE, return the corresponding unsigned comparison.
595 If CODE is an equality comparison or already an unsigned comparison,
596 CODE is returned. */
598 enum rtx_code
599 unsigned_condition (enum rtx_code code)
601 switch (code)
603 case EQ:
604 case NE:
605 case GTU:
606 case GEU:
607 case LTU:
608 case LEU:
609 return code;
611 case GT:
612 return GTU;
613 case GE:
614 return GEU;
615 case LT:
616 return LTU;
617 case LE:
618 return LEU;
620 default:
621 abort ();
625 /* Similarly, return the signed version of a comparison. */
627 enum rtx_code
628 signed_condition (enum rtx_code code)
630 switch (code)
632 case EQ:
633 case NE:
634 case GT:
635 case GE:
636 case LT:
637 case LE:
638 return code;
640 case GTU:
641 return GT;
642 case GEU:
643 return GE;
644 case LTU:
645 return LT;
646 case LEU:
647 return LE;
649 default:
650 abort ();
654 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
655 truth of CODE1 implies the truth of CODE2. */
658 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
660 /* UNKNOWN comparison codes can happen as a result of trying to revert
661 comparison codes.
662 They can't match anything, so we have to reject them here. */
663 if (code1 == UNKNOWN || code2 == UNKNOWN)
664 return 0;
666 if (code1 == code2)
667 return 1;
669 switch (code1)
671 case UNEQ:
672 if (code2 == UNLE || code2 == UNGE)
673 return 1;
674 break;
676 case EQ:
677 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
678 || code2 == ORDERED)
679 return 1;
680 break;
682 case UNLT:
683 if (code2 == UNLE || code2 == NE)
684 return 1;
685 break;
687 case LT:
688 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
689 return 1;
690 break;
692 case UNGT:
693 if (code2 == UNGE || code2 == NE)
694 return 1;
695 break;
697 case GT:
698 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
699 return 1;
700 break;
702 case GE:
703 case LE:
704 if (code2 == ORDERED)
705 return 1;
706 break;
708 case LTGT:
709 if (code2 == NE || code2 == ORDERED)
710 return 1;
711 break;
713 case LTU:
714 if (code2 == LEU || code2 == NE)
715 return 1;
716 break;
718 case GTU:
719 if (code2 == GEU || code2 == NE)
720 return 1;
721 break;
723 case UNORDERED:
724 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
725 || code2 == UNGE || code2 == UNGT)
726 return 1;
727 break;
729 default:
730 break;
733 return 0;
736 /* Return 1 if INSN is an unconditional jump and nothing else. */
739 simplejump_p (rtx insn)
741 return (JUMP_P (insn)
742 && GET_CODE (PATTERN (insn)) == SET
743 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
744 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
747 /* Return nonzero if INSN is a (possibly) conditional jump
748 and nothing more.
750 Use of this function is deprecated, since we need to support combined
751 branch and compare insns. Use any_condjump_p instead whenever possible. */
754 condjump_p (rtx insn)
756 rtx x = PATTERN (insn);
758 if (GET_CODE (x) != SET
759 || GET_CODE (SET_DEST (x)) != PC)
760 return 0;
762 x = SET_SRC (x);
763 if (GET_CODE (x) == LABEL_REF)
764 return 1;
765 else
766 return (GET_CODE (x) == IF_THEN_ELSE
767 && ((GET_CODE (XEXP (x, 2)) == PC
768 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
769 || GET_CODE (XEXP (x, 1)) == RETURN))
770 || (GET_CODE (XEXP (x, 1)) == PC
771 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
772 || GET_CODE (XEXP (x, 2)) == RETURN))));
775 /* Return nonzero if INSN is a (possibly) conditional jump inside a
776 PARALLEL.
778 Use this function is deprecated, since we need to support combined
779 branch and compare insns. Use any_condjump_p instead whenever possible. */
782 condjump_in_parallel_p (rtx insn)
784 rtx x = PATTERN (insn);
786 if (GET_CODE (x) != PARALLEL)
787 return 0;
788 else
789 x = XVECEXP (x, 0, 0);
791 if (GET_CODE (x) != SET)
792 return 0;
793 if (GET_CODE (SET_DEST (x)) != PC)
794 return 0;
795 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
796 return 1;
797 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
798 return 0;
799 if (XEXP (SET_SRC (x), 2) == pc_rtx
800 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
801 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
802 return 1;
803 if (XEXP (SET_SRC (x), 1) == pc_rtx
804 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
805 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
806 return 1;
807 return 0;
810 /* Return set of PC, otherwise NULL. */
813 pc_set (rtx insn)
815 rtx pat;
816 if (!JUMP_P (insn))
817 return NULL_RTX;
818 pat = PATTERN (insn);
820 /* The set is allowed to appear either as the insn pattern or
821 the first set in a PARALLEL. */
822 if (GET_CODE (pat) == PARALLEL)
823 pat = XVECEXP (pat, 0, 0);
824 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
825 return pat;
827 return NULL_RTX;
830 /* Return true when insn is an unconditional direct jump,
831 possibly bundled inside a PARALLEL. */
834 any_uncondjump_p (rtx insn)
836 rtx x = pc_set (insn);
837 if (!x)
838 return 0;
839 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
840 return 0;
841 if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
842 return 0;
843 return 1;
846 /* Return true when insn is a conditional jump. This function works for
847 instructions containing PC sets in PARALLELs. The instruction may have
848 various other effects so before removing the jump you must verify
849 onlyjump_p.
851 Note that unlike condjump_p it returns false for unconditional jumps. */
854 any_condjump_p (rtx insn)
856 rtx x = pc_set (insn);
857 enum rtx_code a, b;
859 if (!x)
860 return 0;
861 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
862 return 0;
864 a = GET_CODE (XEXP (SET_SRC (x), 1));
865 b = GET_CODE (XEXP (SET_SRC (x), 2));
867 return ((b == PC && (a == LABEL_REF || a == RETURN))
868 || (a == PC && (b == LABEL_REF || b == RETURN)));
871 /* Return the label of a conditional jump. */
874 condjump_label (rtx insn)
876 rtx x = pc_set (insn);
878 if (!x)
879 return NULL_RTX;
880 x = SET_SRC (x);
881 if (GET_CODE (x) == LABEL_REF)
882 return x;
883 if (GET_CODE (x) != IF_THEN_ELSE)
884 return NULL_RTX;
885 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
886 return XEXP (x, 1);
887 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
888 return XEXP (x, 2);
889 return NULL_RTX;
892 /* Return true if INSN is a (possibly conditional) return insn. */
894 static int
895 returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
897 rtx x = *loc;
899 return x && (GET_CODE (x) == RETURN
900 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
904 returnjump_p (rtx insn)
906 if (!JUMP_P (insn))
907 return 0;
908 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
911 /* Return true if INSN is a jump that only transfers control and
912 nothing more. */
915 onlyjump_p (rtx insn)
917 rtx set;
919 if (!JUMP_P (insn))
920 return 0;
922 set = single_set (insn);
923 if (set == NULL)
924 return 0;
925 if (GET_CODE (SET_DEST (set)) != PC)
926 return 0;
927 if (side_effects_p (SET_SRC (set)))
928 return 0;
930 return 1;
933 #ifdef HAVE_cc0
935 /* Return nonzero if X is an RTX that only sets the condition codes
936 and has no side effects. */
939 only_sets_cc0_p (rtx x)
941 if (! x)
942 return 0;
944 if (INSN_P (x))
945 x = PATTERN (x);
947 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
950 /* Return 1 if X is an RTX that does nothing but set the condition codes
951 and CLOBBER or USE registers.
952 Return -1 if X does explicitly set the condition codes,
953 but also does other things. */
956 sets_cc0_p (rtx x)
958 if (! x)
959 return 0;
961 if (INSN_P (x))
962 x = PATTERN (x);
964 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
965 return 1;
966 if (GET_CODE (x) == PARALLEL)
968 int i;
969 int sets_cc0 = 0;
970 int other_things = 0;
971 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
973 if (GET_CODE (XVECEXP (x, 0, i)) == SET
974 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
975 sets_cc0 = 1;
976 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
977 other_things = 1;
979 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
981 return 0;
983 #endif
985 /* Follow any unconditional jump at LABEL;
986 return the ultimate label reached by any such chain of jumps.
987 Return null if the chain ultimately leads to a return instruction.
988 If LABEL is not followed by a jump, return LABEL.
989 If the chain loops or we can't find end, return LABEL,
990 since that tells caller to avoid changing the insn.
992 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
993 a USE or CLOBBER. */
996 follow_jumps (rtx label)
998 rtx insn;
999 rtx next;
1000 rtx value = label;
1001 int depth;
1003 for (depth = 0;
1004 (depth < 10
1005 && (insn = next_active_insn (value)) != 0
1006 && JUMP_P (insn)
1007 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1008 && onlyjump_p (insn))
1009 || GET_CODE (PATTERN (insn)) == RETURN)
1010 && (next = NEXT_INSN (insn))
1011 && BARRIER_P (next));
1012 depth++)
1014 /* Don't chain through the insn that jumps into a loop
1015 from outside the loop,
1016 since that would create multiple loop entry jumps
1017 and prevent loop optimization. */
1018 rtx tem;
1019 if (!reload_completed)
1020 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1021 if (NOTE_P (tem)
1022 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1023 /* ??? Optional. Disables some optimizations, but makes
1024 gcov output more accurate with -O. */
1025 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1026 return value;
1028 /* If we have found a cycle, make the insn jump to itself. */
1029 if (JUMP_LABEL (insn) == label)
1030 return label;
1032 tem = next_active_insn (JUMP_LABEL (insn));
1033 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1034 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1035 break;
1037 value = JUMP_LABEL (insn);
1039 if (depth == 10)
1040 return label;
1041 return value;
1045 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1046 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1047 in INSN, then store one of them in JUMP_LABEL (INSN).
1048 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1049 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1050 Also, when there are consecutive labels, canonicalize on the last of them.
1052 Note that two labels separated by a loop-beginning note
1053 must be kept distinct if we have not yet done loop-optimization,
1054 because the gap between them is where loop-optimize
1055 will want to move invariant code to. CROSS_JUMP tells us
1056 that loop-optimization is done with. */
1058 void
1059 mark_jump_label (rtx x, rtx insn, int in_mem)
1061 RTX_CODE code = GET_CODE (x);
1062 int i;
1063 const char *fmt;
1065 switch (code)
1067 case PC:
1068 case CC0:
1069 case REG:
1070 case CONST_INT:
1071 case CONST_DOUBLE:
1072 case CLOBBER:
1073 case CALL:
1074 return;
1076 case MEM:
1077 in_mem = 1;
1078 break;
1080 case SYMBOL_REF:
1081 if (!in_mem)
1082 return;
1084 /* If this is a constant-pool reference, see if it is a label. */
1085 if (CONSTANT_POOL_ADDRESS_P (x))
1086 mark_jump_label (get_pool_constant (x), insn, in_mem);
1087 break;
1089 case LABEL_REF:
1091 rtx label = XEXP (x, 0);
1093 /* Ignore remaining references to unreachable labels that
1094 have been deleted. */
1095 if (NOTE_P (label)
1096 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1097 break;
1099 if (!LABEL_P (label))
1100 abort ();
1102 /* Ignore references to labels of containing functions. */
1103 if (LABEL_REF_NONLOCAL_P (x))
1104 break;
1106 XEXP (x, 0) = label;
1107 if (! insn || ! INSN_DELETED_P (insn))
1108 ++LABEL_NUSES (label);
1110 if (insn)
1112 if (JUMP_P (insn))
1113 JUMP_LABEL (insn) = label;
1114 else
1116 /* Add a REG_LABEL note for LABEL unless there already
1117 is one. All uses of a label, except for labels
1118 that are the targets of jumps, must have a
1119 REG_LABEL note. */
1120 if (! find_reg_note (insn, REG_LABEL, label))
1121 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1122 REG_NOTES (insn));
1125 return;
1128 /* Do walk the labels in a vector, but not the first operand of an
1129 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1130 case ADDR_VEC:
1131 case ADDR_DIFF_VEC:
1132 if (! INSN_DELETED_P (insn))
1134 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1136 for (i = 0; i < XVECLEN (x, eltnum); i++)
1137 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1139 return;
1141 default:
1142 break;
1145 fmt = GET_RTX_FORMAT (code);
1146 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1148 if (fmt[i] == 'e')
1149 mark_jump_label (XEXP (x, i), insn, in_mem);
1150 else if (fmt[i] == 'E')
1152 int j;
1153 for (j = 0; j < XVECLEN (x, i); j++)
1154 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1159 /* If all INSN does is set the pc, delete it,
1160 and delete the insn that set the condition codes for it
1161 if that's what the previous thing was. */
1163 void
1164 delete_jump (rtx insn)
1166 rtx set = single_set (insn);
1168 if (set && GET_CODE (SET_DEST (set)) == PC)
1169 delete_computation (insn);
1172 /* Recursively delete prior insns that compute the value (used only by INSN
1173 which the caller is deleting) stored in the register mentioned by NOTE
1174 which is a REG_DEAD note associated with INSN. */
1176 static void
1177 delete_prior_computation (rtx note, rtx insn)
1179 rtx our_prev;
1180 rtx reg = XEXP (note, 0);
1182 for (our_prev = prev_nonnote_insn (insn);
1183 our_prev && (NONJUMP_INSN_P (our_prev)
1184 || CALL_P (our_prev));
1185 our_prev = prev_nonnote_insn (our_prev))
1187 rtx pat = PATTERN (our_prev);
1189 /* If we reach a CALL which is not calling a const function
1190 or the callee pops the arguments, then give up. */
1191 if (CALL_P (our_prev)
1192 && (! CONST_OR_PURE_CALL_P (our_prev)
1193 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1194 break;
1196 /* If we reach a SEQUENCE, it is too complex to try to
1197 do anything with it, so give up. We can be run during
1198 and after reorg, so SEQUENCE rtl can legitimately show
1199 up here. */
1200 if (GET_CODE (pat) == SEQUENCE)
1201 break;
1203 if (GET_CODE (pat) == USE
1204 && NONJUMP_INSN_P (XEXP (pat, 0)))
1205 /* reorg creates USEs that look like this. We leave them
1206 alone because reorg needs them for its own purposes. */
1207 break;
1209 if (reg_set_p (reg, pat))
1211 if (side_effects_p (pat) && !CALL_P (our_prev))
1212 break;
1214 if (GET_CODE (pat) == PARALLEL)
1216 /* If we find a SET of something else, we can't
1217 delete the insn. */
1219 int i;
1221 for (i = 0; i < XVECLEN (pat, 0); i++)
1223 rtx part = XVECEXP (pat, 0, i);
1225 if (GET_CODE (part) == SET
1226 && SET_DEST (part) != reg)
1227 break;
1230 if (i == XVECLEN (pat, 0))
1231 delete_computation (our_prev);
1233 else if (GET_CODE (pat) == SET
1234 && REG_P (SET_DEST (pat)))
1236 int dest_regno = REGNO (SET_DEST (pat));
1237 int dest_endregno
1238 = (dest_regno
1239 + (dest_regno < FIRST_PSEUDO_REGISTER
1240 ? hard_regno_nregs[dest_regno]
1241 [GET_MODE (SET_DEST (pat))] : 1));
1242 int regno = REGNO (reg);
1243 int endregno
1244 = (regno
1245 + (regno < FIRST_PSEUDO_REGISTER
1246 ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));
1248 if (dest_regno >= regno
1249 && dest_endregno <= endregno)
1250 delete_computation (our_prev);
1252 /* We may have a multi-word hard register and some, but not
1253 all, of the words of the register are needed in subsequent
1254 insns. Write REG_UNUSED notes for those parts that were not
1255 needed. */
1256 else if (dest_regno <= regno
1257 && dest_endregno >= endregno)
1259 int i;
1261 REG_NOTES (our_prev)
1262 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1263 REG_NOTES (our_prev));
1265 for (i = dest_regno; i < dest_endregno; i++)
1266 if (! find_regno_note (our_prev, REG_UNUSED, i))
1267 break;
1269 if (i == dest_endregno)
1270 delete_computation (our_prev);
1274 break;
1277 /* If PAT references the register that dies here, it is an
1278 additional use. Hence any prior SET isn't dead. However, this
1279 insn becomes the new place for the REG_DEAD note. */
1280 if (reg_overlap_mentioned_p (reg, pat))
1282 XEXP (note, 1) = REG_NOTES (our_prev);
1283 REG_NOTES (our_prev) = note;
1284 break;
1289 /* Delete INSN and recursively delete insns that compute values used only
1290 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1291 If we are running before flow.c, we need do nothing since flow.c will
1292 delete dead code. We also can't know if the registers being used are
1293 dead or not at this point.
1295 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1296 nothing other than set a register that dies in this insn, we can delete
1297 that insn as well.
1299 On machines with CC0, if CC0 is used in this insn, we may be able to
1300 delete the insn that set it. */
1302 static void
1303 delete_computation (rtx insn)
1305 rtx note, next;
1307 #ifdef HAVE_cc0
/* First handle the condition-code register: if INSN reads cc0, try to
   delete (or mark as unused) the insn that set it.  */
1308 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1310 rtx prev = prev_nonnote_insn (insn);
1311 /* We assume that at this stage
1312 CC's are always set explicitly
1313 and always immediately before the jump that
1314 will use them. So if the previous insn
1315 exists to set the CC's, delete it
1316 (unless it performs auto-increments, etc.). */
1317 if (prev && NONJUMP_INSN_P (prev)
1318 && sets_cc0_p (PATTERN (prev)))
/* Only a "pure" cc0 setter (sets_cc0_p > 0, i.e. it sets nothing else)
   with no side effects (no autoincrements etc.) may be deleted outright;
   anything else just gets a REG_UNUSED note for cc0.  */
1320 if (sets_cc0_p (PATTERN (prev)) > 0
1321 && ! side_effects_p (PATTERN (prev)))
1322 delete_computation (prev);
1323 else
1324 /* Otherwise, show that cc0 won't be used. */
1325 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1326 cc0_rtx, REG_NOTES (prev));
1329 #endif
/* Walk INSN's notes.  NEXT is fetched before the body runs because
   delete_prior_computation may modify the note list.  */
1331 for (note = REG_NOTES (insn); note; note = next)
1333 next = XEXP (note, 1);
1335 if (REG_NOTE_KIND (note) != REG_DEAD
1336 /* Verify that the REG_NOTE is legitimate. */
1337 || !REG_P (XEXP (note, 0)))
1338 continue;
1340 delete_prior_computation (note, insn);
/* Finally delete INSN itself along with any label/barrier bookkeeping.  */
1343 delete_related_insns (insn);
1346 /* Delete insn INSN from the chain of insns and update label ref counts
1347 and delete insns now unreachable.
1349 Returns the first insn after INSN that was not deleted.
1351 Usage of this instruction is deprecated. Use delete_insn instead and
1352 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1355 delete_related_insns (rtx insn)
/* Remember whether INSN was a label before it is deleted; the label-specific
   cleanups at the bottom depend on this.  NEXT/PREV are captured up front
   for the same reason.  */
1357 int was_code_label = (LABEL_P (insn));
1358 rtx note;
1359 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1361 while (next && INSN_DELETED_P (next))
1362 next = NEXT_INSN (next);
1364 /* This insn is already deleted => return first following nondeleted. */
1365 if (INSN_DELETED_P (insn))
1366 return next;
1368 delete_insn (insn);
1370 /* If instruction is followed by a barrier,
1371 delete the barrier too. */
1373 if (next != 0 && BARRIER_P (next))
1374 delete_insn (next);
1376 /* If deleting a jump, decrement the count of the label,
1377 and delete the label if it is now unused. */
1379 if (JUMP_P (insn) && JUMP_LABEL (insn))
1381 rtx lab = JUMP_LABEL (insn), lab_next;
1383 if (LABEL_NUSES (lab) == 0)
1385 /* This can delete NEXT or PREV,
1386 either directly if NEXT is JUMP_LABEL (INSN),
1387 or indirectly through more levels of jumps. */
1388 delete_related_insns (lab)
1390 /* I feel a little doubtful about this loop,
1391 but I see no clean and sure alternative way
1392 to find the first insn after INSN that is not now deleted.
1393 I hope this works. */
1394 while (next && INSN_DELETED_P (next))
1395 next = NEXT_INSN (next);
1396 return next;
1398 else if (tablejump_p (insn, NULL, &lab_next))
1400 /* If we're deleting the tablejump, delete the dispatch table.
1401 We may not be able to kill the label immediately preceding
1402 just yet, as it might be referenced in code leading up to
1403 the tablejump. */
1404 delete_related_insns (lab_next);
1408 /* Likewise if we're deleting a dispatch table. */
1410 if (JUMP_P (insn)
1411 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1412 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1414 rtx pat = PATTERN (insn);
/* ADDR_DIFF_VEC stores its labels in operand 1, ADDR_VEC in operand 0.  */
1415 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1416 int len = XVECLEN (pat, diff_vec_p);
1418 for (i = 0; i < len; i++)
1419 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1420 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1421 while (next && INSN_DELETED_P (next))
1422 next = NEXT_INSN (next);
1423 return next;
1426 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1427 if (NONJUMP_INSN_P (insn) || CALL_P (insn))
1428 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1429 if (REG_NOTE_KIND (note) == REG_LABEL
1430 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1431 && LABEL_P (XEXP (note, 0)))
1432 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1433 delete_related_insns (XEXP (note, 0));
/* Back up PREV to the nearest surviving non-NOTE insn; a BARRIER there
   means code after the deleted label cannot be fallen into.  */
1435 while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
1436 prev = PREV_INSN (prev);
1438 /* If INSN was a label and a dispatch table follows it,
1439 delete the dispatch table. The tablejump must have gone already.
1440 It isn't useful to fall through into a table. */
1442 if (was_code_label
1443 && NEXT_INSN (insn) != 0
1444 && JUMP_P (NEXT_INSN (insn))
1445 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1446 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1447 next = delete_related_insns (NEXT_INSN (insn));
1449 /* If INSN was a label, delete insns following it if now unreachable. */
1451 if (was_code_label && prev && BARRIER_P (prev))
1453 enum rtx_code code;
1454 while (next)
1456 code = GET_CODE (next);
/* NOTEs are preserved (except we stop at NOTE_INSN_FUNCTION_END);
   already-deleted labels are stepped over so deletion continues.  */
1457 if (code == NOTE
1458 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1459 next = NEXT_INSN (next);
1460 /* Keep going past other deleted labels to delete what follows. */
1461 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1462 next = NEXT_INSN (next);
1463 else if (code == BARRIER || INSN_P (next))
1464 /* Note: if this deletes a jump, it can cause more
1465 deletion of unreachable code, after a different label.
1466 As long as the value from this recursive call is correct,
1467 this invocation functions correctly. */
1468 next = delete_related_insns (next);
1469 else
1470 break;
1474 return next;
1477 /* Delete a range of insns from FROM to TO, inclusive.
1478 This is for the sake of peephole optimization, so assume
1479 that whatever these insns do will still be done by a new
1480 peephole insn that will replace them. */
1482 void
1483 delete_for_peephole (rtx from, rtx to)
1485 rtx insn = from;
1487 while (1)
1489 rtx next = NEXT_INSN (insn);
1490 rtx prev = PREV_INSN (insn);
1492 if (!NOTE_P (insn))
1494 INSN_DELETED_P (insn) = 1;
1496 /* Patch this insn out of the chain. */
1497 /* We don't do this all at once, because we
1498 must preserve all NOTEs. */
1499 if (prev)
1500 NEXT_INSN (prev) = next;
1502 if (next)
1503 PREV_INSN (next) = prev;
1506 if (insn == to)
1507 break;
1508 insn = next;
1511 /* Note that if TO is an unconditional jump
1512 we *do not* delete the BARRIER that follows,
1513 since the peephole that replaces this sequence
1514 is also an unconditional jump in that case. */
1517 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1518 NLABEL as a return. Accrue modifications into the change group. */
1520 static void
1521 redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
1523 rtx x = *loc;
1524 RTX_CODE code = GET_CODE (x);
1525 int i;
1526 const char *fmt;
1528 if (code == LABEL_REF)
1530 if (XEXP (x, 0) == olabel)
1532 rtx n;
1533 if (nlabel)
1534 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1535 else
/* A null NLABEL means the jump becomes a RETURN.  */
1536 n = gen_rtx_RETURN (VOIDmode);
1538 validate_change (insn, loc, n, 1);
1539 return;
/* Converse direction: replace a RETURN by a jump to NLABEL.  At the top
   level of the pattern the LABEL_REF must be wrapped in a SET of pc.  */
1542 else if (code == RETURN && olabel == 0)
1544 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1545 if (loc == &PATTERN (insn))
1546 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1547 validate_change (insn, loc, x, 1);
1548 return;
/* (set (pc) (label_ref OLABEL)) with no NLABEL collapses to a plain
   RETURN pattern rather than a SET of a RETURN.  */
1551 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1552 && GET_CODE (SET_SRC (x)) == LABEL_REF
1553 && XEXP (SET_SRC (x), 0) == olabel)
1555 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1556 return;
/* Otherwise recurse into every sub-rtx ('e') and rtx vector ('E').
   All replacements are queued in the change group (in-group flag 1);
   the caller applies or cancels them as a unit.  */
1559 fmt = GET_RTX_FORMAT (code);
1560 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1562 if (fmt[i] == 'e')
1563 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1564 else if (fmt[i] == 'E')
1566 int j;
1567 for (j = 0; j < XVECLEN (x, i); j++)
1568 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1573 /* Similar, but apply the change group and report success or failure. */
1575 static int
1576 redirect_exp (rtx olabel, rtx nlabel, rtx insn)
1578 rtx *loc;
1580 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1581 loc = &XVECEXP (PATTERN (insn), 0, 0);
1582 else
1583 loc = &PATTERN (insn);
1585 redirect_exp_1 (loc, olabel, nlabel, insn);
1586 if (num_validated_changes () == 0)
1587 return 0;
1589 return apply_change_group ();
1592 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
1593 the modifications into the change group. Return false if we did
1594 not see how to do that. */
1597 redirect_jump_1 (rtx jump, rtx nlabel)
1599 int ochanges = num_validated_changes ();
1600 rtx *loc;
1602 if (GET_CODE (PATTERN (jump)) == PARALLEL)
1603 loc = &XVECEXP (PATTERN (jump), 0, 0);
1604 else
1605 loc = &PATTERN (jump);
1607 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1608 return num_validated_changes () > ochanges;
1611 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
1612 jump target label is unused as a result, it and the code following
1613 it may be deleted.
1615 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1616 RETURN insn.
1618 The return value will be 1 if the change was made, 0 if it wasn't
1619 (this can only occur for NLABEL == 0). */
1622 redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1624 rtx olabel = JUMP_LABEL (jump);
1625 rtx note;
1627 if (nlabel == olabel)
1628 return 1;
1630 if (! redirect_exp (olabel, nlabel, jump))
1631 return 0;
/* The pattern was rewritten; keep JUMP_LABEL and the new label's
   use count in sync with it.  */
1633 JUMP_LABEL (jump) = nlabel;
1634 if (nlabel)
1635 ++LABEL_NUSES (nlabel);
1637 /* Update labels in any REG_EQUAL note. */
1638 if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
1640 if (nlabel && olabel)
1642 rtx dest = XEXP (note, 0);
/* Patch OLABEL to NLABEL in either arm of an IF_THEN_ELSE note.
   Any other note shape cannot be fixed up and is dropped.  */
1644 if (GET_CODE (dest) == IF_THEN_ELSE)
1646 if (GET_CODE (XEXP (dest, 1)) == LABEL_REF
1647 && XEXP (XEXP (dest, 1), 0) == olabel)
1648 XEXP (XEXP (dest, 1), 0) = nlabel;
1649 if (GET_CODE (XEXP (dest, 2)) == LABEL_REF
1650 && XEXP (XEXP (dest, 2), 0) == olabel)
1651 XEXP (XEXP (dest, 2), 0) = nlabel;
1653 else
1654 remove_note (jump, note);
1656 else
1657 remove_note (jump, note);
1660 /* If we're eliding the jump over exception cleanups at the end of a
1661 function, move the function end note so that -Wreturn-type works. */
1662 if (olabel && nlabel
1663 && NEXT_INSN (olabel)
1664 && NOTE_P (NEXT_INSN (olabel))
1665 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
1666 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
/* Drop the old label's use count; if it hit zero and the caller asked
   for it, delete the label (and any code made unreachable).  */
1668 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
1669 /* Undefined labels will remain outside the insn stream. */
1670 && INSN_UID (olabel))
1671 delete_related_insns (olabel);
1673 return 1;
1676 /* Invert the jump condition of rtx X contained in jump insn, INSN.
1677 Accrue the modifications into the change group. */
1679 static void
1680 invert_exp_1 (rtx insn)
1682 RTX_CODE code;
1683 rtx x = pc_set (insn);
1685 if (!x)
1686 abort ();
1687 x = SET_SRC (x);
1689 code = GET_CODE (x);
1691 if (code == IF_THEN_ELSE)
1693 rtx comp = XEXP (x, 0);
1694 rtx tem;
1695 enum rtx_code reversed_code;
1697 /* We can do this in two ways: The preferable way, which can only
1698 be done if this is not an integer comparison, is to reverse
1699 the comparison code. Otherwise, swap the THEN-part and ELSE-part
1700 of the IF_THEN_ELSE. If we can't do either, fail. */
1702 reversed_code = reversed_comparison_code (comp, insn);
1704 if (reversed_code != UNKNOWN)
1706 validate_change (insn, &XEXP (x, 0),
1707 gen_rtx_fmt_ee (reversed_code,
1708 GET_MODE (comp), XEXP (comp, 0),
1709 XEXP (comp, 1)),
1711 return;
1714 tem = XEXP (x, 1);
1715 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
1716 validate_change (insn, &XEXP (x, 2), tem, 1);
1718 else
1719 abort ();
1722 /* Invert the jump condition of conditional jump insn, INSN.
1724 Return 1 if we can do so, 0 if we cannot find a way to do so that
1725 matches a pattern. */
1727 static int
1728 invert_exp (rtx insn)
1730 invert_exp_1 (insn);
1731 if (num_validated_changes () == 0)
1732 return 0;
1734 return apply_change_group ();
1737 /* Invert the condition of the jump JUMP, and make it jump to label
1738 NLABEL instead of where it jumps now. Accrue changes into the
1739 change group. Return false if we didn't see how to perform the
1740 inversion and redirection. */
1743 invert_jump_1 (rtx jump, rtx nlabel)
1745 int ochanges;
1747 ochanges = num_validated_changes ();
1748 invert_exp_1 (jump);
1749 if (num_validated_changes () == ochanges)
1750 return 0;
1752 return redirect_jump_1 (jump, nlabel);
1755 /* Invert the condition of the jump JUMP, and make it jump to label
1756 NLABEL instead of where it jumps now. Return true if successful. */
1759 invert_jump (rtx jump, rtx nlabel, int delete_unused)
1761 /* We have to either invert the condition and change the label or
1762 do neither. Either operation could fail. We first try to invert
1763 the jump. If that succeeds, we try changing the label. If that fails,
1764 we invert the jump back to what it was. */
1766 if (! invert_exp (jump))
1767 return 0;
1769 if (redirect_jump (jump, nlabel, delete_unused))
1771 /* Remove REG_EQUAL note if we have one. */
1772 rtx note = find_reg_note (jump, REG_EQUAL, NULL_RTX);
1773 if (note)
1774 remove_note (jump, note);
1776 invert_br_probabilities (jump);
1778 return 1;
1781 if (! invert_exp (jump))
1782 /* This should just be putting it back the way it was. */
1783 abort ();
1785 return 0;
1789 /* Like rtx_equal_p except that it considers two REGs as equal
1790 if they renumber to the same value and considers two commutative
1791 operations to be the same if the order of the operands has been
1792 reversed.
1794 ??? Addition is not commutative on the PA due to the weird implicit
1795 space register selection rules for memory addresses. Therefore, we
1796 don't consider a + b == b + a.
1798 We could/should make this test a little tighter. Possibly only
1799 disabling it on the PA via some backend macro or only disabling this
1800 case when the PLUS is inside a MEM. */
1803 rtx_renumbered_equal_p (rtx x, rtx y)
1805 int i;
1806 enum rtx_code code = GET_CODE (x);
1807 const char *fmt;
1809 if (x == y)
1810 return 1;
/* REG/SUBREG-of-REG case: map both sides through reg_renumber down to
   hard register numbers (plus a subreg byte offset) and compare those.  */
1812 if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
1813 && (REG_P (y) || (GET_CODE (y) == SUBREG
1814 && REG_P (SUBREG_REG (y)))))
1816 int reg_x = -1, reg_y = -1;
1817 int byte_x = 0, byte_y = 0;
1819 if (GET_MODE (x) != GET_MODE (y))
1820 return 0;
1822 /* If we haven't done any renumbering, don't
1823 make any assumptions. */
1824 if (reg_renumber == 0)
1825 return rtx_equal_p (x, y);
1827 if (code == SUBREG)
1829 reg_x = REGNO (SUBREG_REG (x));
1830 byte_x = SUBREG_BYTE (x);
/* Once renumbered to a hard register, fold the subreg byte offset
   into the register number itself.  */
1832 if (reg_renumber[reg_x] >= 0)
1834 reg_x = subreg_regno_offset (reg_renumber[reg_x],
1835 GET_MODE (SUBREG_REG (x)),
1836 byte_x,
1837 GET_MODE (x));
1838 byte_x = 0;
1841 else
1843 reg_x = REGNO (x);
1844 if (reg_renumber[reg_x] >= 0)
1845 reg_x = reg_renumber[reg_x];
1848 if (GET_CODE (y) == SUBREG)
1850 reg_y = REGNO (SUBREG_REG (y));
1851 byte_y = SUBREG_BYTE (y);
1853 if (reg_renumber[reg_y] >= 0)
1855 reg_y = subreg_regno_offset (reg_renumber[reg_y],
1856 GET_MODE (SUBREG_REG (y)),
1857 byte_y,
1858 GET_MODE (y));
1859 byte_y = 0;
1862 else
1864 reg_y = REGNO (y);
1865 if (reg_renumber[reg_y] >= 0)
1866 reg_y = reg_renumber[reg_y];
1869 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
1872 /* Now we have disposed of all the cases
1873 in which different rtx codes can match. */
1874 if (code != GET_CODE (y))
1875 return 0;
1877 switch (code)
/* These codes only match on pointer identity, which was already
   checked above (x == y), so different objects never match.  */
1879 case PC:
1880 case CC0:
1881 case ADDR_VEC:
1882 case ADDR_DIFF_VEC:
1883 case CONST_INT:
1884 return 0;
1886 case LABEL_REF:
1887 /* We can't assume nonlocal labels have their following insns yet. */
1888 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
1889 return XEXP (x, 0) == XEXP (y, 0);
1891 /* Two label-refs are equivalent if they point at labels
1892 in the same position in the instruction stream. */
1893 return (next_real_insn (XEXP (x, 0))
1894 == next_real_insn (XEXP (y, 0)));
1896 case SYMBOL_REF:
1897 return XSTR (x, 0) == XSTR (y, 0);
1899 case CODE_LABEL:
1900 /* If we didn't match EQ equality above, they aren't the same. */
1901 return 0;
1903 default:
1904 break;
1907 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1909 if (GET_MODE (x) != GET_MODE (y))
1910 return 0;
1912 /* For commutative operations, the RTX match if the operand match in any
1913 order. Also handle the simple binary and unary cases without a loop.
1915 ??? Don't consider PLUS a commutative operator; see comments above. */
1916 if (COMMUTATIVE_P (x) && code != PLUS)
1917 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1918 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
1919 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
1920 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
1921 else if (NON_COMMUTATIVE_P (x))
1922 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
1923 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
1924 else if (UNARY_P (x))
1925 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
1927 /* Compare the elements. If any pair of corresponding elements
1928 fail to match, return 0 for the whole things. */
1930 fmt = GET_RTX_FORMAT (code);
1931 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1933 int j;
1934 switch (fmt[i])
1936 case 'w':
1937 if (XWINT (x, i) != XWINT (y, i))
1938 return 0;
1939 break;
1941 case 'i':
1942 if (XINT (x, i) != XINT (y, i))
1943 return 0;
1944 break;
1946 case 't':
1947 if (XTREE (x, i) != XTREE (y, i))
1948 return 0;
1949 break;
1951 case 's':
1952 if (strcmp (XSTR (x, i), XSTR (y, i)))
1953 return 0;
1954 break;
1956 case 'e':
1957 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
1958 return 0;
1959 break;
/* 'u' operands (insn/label pointers) compare by identity, then
   deliberately fall through to the no-op '0' case.  */
1961 case 'u':
1962 if (XEXP (x, i) != XEXP (y, i))
1963 return 0;
1964 /* Fall through. */
1965 case '0':
1966 break;
1968 case 'E':
1969 if (XVECLEN (x, i) != XVECLEN (y, i))
1970 return 0;
1971 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1972 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
1973 return 0;
1974 break;
1976 default:
1977 abort ();
1980 return 1;
1983 /* If X is a hard register or equivalent to one or a subregister of one,
1984 return the hard register number. If X is a pseudo register that was not
1985 assigned a hard register, return the pseudo register number. Otherwise,
1986 return -1. Any rtx is valid for X. */
1989 true_regnum (rtx x)
1991 if (REG_P (x))
1993 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
1994 return reg_renumber[REGNO (x)];
1995 return REGNO (x);
1997 if (GET_CODE (x) == SUBREG)
1999 int base = true_regnum (SUBREG_REG (x));
2000 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2001 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2002 GET_MODE (SUBREG_REG (x)),
2003 SUBREG_BYTE (x), GET_MODE (x));
2005 return -1;
2008 /* Return regno of the register REG and handle subregs too. */
2009 unsigned int
2010 reg_or_subregno (rtx reg)
2012 if (REG_P (reg))
2013 return REGNO (reg);
2014 if (GET_CODE (reg) == SUBREG)
2015 return REGNO (SUBREG_REG (reg));
2016 abort ();