/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

/* This is the pathetic reminder of the old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions to
   operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */

#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "diagnostic.h"
#include "tree-pass.h"

/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);

/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */

void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}

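/* Usage sketch (illustrative, not part of the original pass): a pass that
   has rewritten jump targets by hand can refresh the label bookkeeping with

       rebuild_jump_labels (get_insns ());

   after which LABEL_NUSES and JUMP_LABEL are consistent again for the
   whole insn stream.  */
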
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */

static unsigned int
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
        {
          prev = prev_nonnote_insn (insn);
          if (BARRIER_P (prev))
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            reorder_insns (insn, insn, prev);
        }
    }
  return 0;
}

struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};

/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
        rtx note, next;

        for (note = REG_NOTES (insn); note; note = next)
          {
            next = XEXP (note, 1);
            if (REG_NOTE_KIND (note) == REG_LABEL
                && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
              remove_note (insn, note);
          }
      }
}

/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, 0);
        if (! INSN_DELETED_P (insn) && JUMP_P (insn))
          {
            /* When we know the LABEL_REF contained in a REG used in
               an indirect jump, we'll have a REG_LABEL note so that
               flow can tell where it's going.  */
            if (JUMP_LABEL (insn) == 0)
              {
                rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
                if (label_note)
                  {
                    /* Put a LABEL_REF around the REG_LABEL note, so
                       that we can canonicalize it.  */
                    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
                                                       XEXP (label_note, 0));

                    mark_jump_label (label_ref, insn, 0);
                    XEXP (label_note, 0) = XEXP (label_ref, 0);
                    JUMP_LABEL (insn) = XEXP (label_note, 0);
                  }
              }
          }
      }

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;

      FOR_EACH_BB (bb)
        {
          for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }

          for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }
        }
    }
}

/* Move all block-beg, block-end and loop-beg notes between START and END out
   before START.  START and END may be such notes.  Returns the values of the
   new starting and ending insns, which may be different if the original ones
   were such notes.  Return true if there were only such notes and no real
   instructions.  */

bool
squeeze_notes (rtx* startp, rtx* endp)
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
          && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
        {
          /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass.  */
          gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
                      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);

          if (insn == start)
            start = next;
          else
            {
              rtx prev = PREV_INSN (insn);
              PREV_INSN (insn) = PREV_INSN (start);
              NEXT_INSN (insn) = start;
              NEXT_INSN (PREV_INSN (insn)) = insn;
              PREV_INSN (NEXT_INSN (insn)) = insn;
              NEXT_INSN (prev) = next;
              PREV_INSN (next) = prev;
            }
        }
      else
        last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}

/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having a CC_MODE compare and we don't
   know whether its source is a floating point or an integer comparison.  The
   machine description should define the REVERSIBLE_CC_MODE and
   REVERSE_CONDITION macros to help this function avoid overhead in these
   cases.  */

enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow
     the machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
         point.  Similarly the unsigned comparisons are never used for
         floating point so we can reverse them in the default way.  */
      return reverse_condition (code);

    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
         be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;

    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      for (prev = prev_nonnote_insn (insn);
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (prev))
        {
          rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for model
                 of i387 comparisons that first move flag registers around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
              /* If the register is clobbered in some way we do not
                 understand, give up.  */
              return UNKNOWN;
            }
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}

/* A wrapper around the previous function to take COMPARISON as rtx
   expression.  This simplifies many callers.  */

enum rtx_code
reversed_comparison_code (rtx comparison, rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
                                         XEXP (comparison, 0),
                                         XEXP (comparison, 1), insn);
}

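/* Usage sketch (illustrative): given a conditional jump INSN whose
   condition rtx is COND, a caller can test whether the sense of the
   branch may be flipped safely:

       enum rtx_code rev = reversed_comparison_code (cond, insn);
       if (rev != UNKNOWN)
         ... the branch may use the reversed code ...

   UNKNOWN means "don't know how to reverse", typically for IEEE
   comparisons where NaNs matter.  */
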
/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */

rtx
reversed_comparison (rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                  XEXP (exp, 0), XEXP (exp, 1));
}

/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)

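/* For instance (assuming the usual mapping of comparison codes):
   reverse_condition (EQ) == NE, reverse_condition (GT) == LE and
   reverse_condition (LTU) == GEU.  Per the warning above, do not rely on
   this for IEEE floating-point conditions; use reversed_comparison_code
   instead.  */
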
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)

/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)

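/* For instance, swap_condition (GT) == LT and swap_condition (GEU) == LEU,
   corresponding to rewriting (gt x y) as (lt y x); EQ and NE are
   unchanged.  */
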
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)

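/* For instance, unsigned_condition (LT) == LTU and
   signed_condition (GEU) == GE, while equality comparisons pass through
   unchanged.  */
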
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to reverse
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}

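/* For instance, comparison_dominates_p (EQ, LE) is nonzero because
   x == y implies x <= y, while comparison_dominates_p (LE, EQ) is zero.  */
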
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (rtx insn)
{
  return (JUMP_P (insn)
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

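/* A simple jump is one whose entire pattern is

       (set (pc) (label_ref LABEL))

   Anything more complicated (conditional branches, PARALLELs with extra
   side effects, tablejumps) makes simplejump_p return 0.  */
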
/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
            && ((GET_CODE (XEXP (x, 2)) == PC
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
                     || GET_CODE (XEXP (x, 1)) == RETURN))
                || (GET_CODE (XEXP (x, 1)) == PC
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
                        || GET_CODE (XEXP (x, 2)) == RETURN))));
}

/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;

  return 0;
}

/* Return the SET of PC in INSN, otherwise NULL.  */

rtx
pc_set (rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}

/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects, so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
          || (a == PC && (b == LABEL_REF || b == RETURN)));
}

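/* Usage sketch (illustrative): the two predicates split the direct jumps
   handled here:

       if (any_uncondjump_p (insn))
         ... (set (pc) (label_ref ...)), possibly inside a PARALLEL ...
       else if (any_condjump_p (insn))
         ... (set (pc) (if_then_else COND ...)) ...

   Before deleting such a jump you must still check onlyjump_p, since the
   PARALLEL may have other effects.  */
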
/* Return the label of a conditional jump.  */

rtx
condjump_label (rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}

/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
               || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
            sets_cc0 = 1;
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
            other_things = 1;
        }
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }

  return 0;
}

/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (JUMP_P (insn))
              JUMP_LABEL (insn) = label;
            else
              {
                /* Add a REG_LABEL note for LABEL unless there already
                   is one.  All uses of a label, except for labels
                   that are the targets of jumps, must have a
                   REG_LABEL note.  */
                if (! find_reg_note (insn, REG_LABEL, label))
                  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
                                                        REG_NOTES (insn));
              }
          }
        return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
        }
    }
}

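/* Usage sketch (illustrative): after constructing a new jump by hand, a
   pass typically calls

       mark_jump_label (PATTERN (insn), insn, 0);

   so that the referenced CODE_LABEL gets its LABEL_NUSES count bumped and
   JUMP_LABEL (insn) filled in.  rebuild_jump_labels above does this for
   every insn in the function.  */
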
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this function is deprecated.  Use delete_insn instead, and a
   subsequent cleanup_cfg pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        {
          /* This can delete NEXT or PREV,
             either directly if NEXT is JUMP_LABEL (INSN),
             or indirectly through more levels of jumps.  */
          delete_related_insns (lab);

          /* I feel a little doubtful about this loop,
             but I see no clean and sure alternative way
             to find the first insn after INSN that is not now deleted.
             I hope this works.  */
          while (next && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          return next;
        }
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  return next;
}

/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
        {
          INSN_DELETED_P (insn) = 1;

          /* Patch this insn out of the chain.  */
          /* We don't do this all at once, because we
             must preserve all NOTEs.  */
          if (prev)
            NEXT_INSN (prev) = next;

          if (next)
            PREV_INSN (next) = prev;
        }

      if (insn == to)
        break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}

/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          if (nlabel)
            n = gen_rtx_LABEL_REF (Pmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          validate_change (insn, loc, n, 1);
          return;
        }
    }
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
        x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
        x = gen_rtx_RETURN (VOIDmode);
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}

/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}

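/* Usage sketch (illustrative): to retarget a jump at NEW_LABEL and clean
   up the old target if it becomes unused:

       if (redirect_jump (jump, new_label, 1))
         ... JUMP_LABEL (jump) is now NEW_LABEL ...

   A zero return can only happen for the NLABEL == 0 (turn-into-RETURN)
   case.  */
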
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete related insns to OLABEL if its ref
   count has dropped to zero.  */

void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  /* Negative DELETE_UNUSED used to be used to signal behavior on
     moving FUNCTION_END notes.  Just sanity-check that no caller
     still relies on it.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);

  if (invert)
    invert_br_probabilities (jump);
}

/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */

static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
         be done if this is not an integer comparison, is to reverse
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
         of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
        {
          validate_change (insn, &XEXP (x, 0),
                           gen_rtx_fmt_ee (reversed_code,
                                           GET_MODE (comp), XEXP (comp, 0),
                                           XEXP (comp, 1)),
                           1);
          return 1;
        }

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }

  return 0;
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }

  return 0;
}

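/* Usage sketch (illustrative): jump-threading style transformations
   typically do

       if (invert_jump (jump, new_label, 1))
         ... the branch now falls through where it used to jump ...

   which both reverses the condition and retargets the jump in one
   validated change group.  */
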
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (rtx x, rtx y)
{
  int i;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                        && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              reg_x = subreg_regno_offset (reg_renumber[reg_x],
                                           GET_MODE (SUBREG_REG (x)),
                                           byte_x, GET_MODE (x));
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              reg_y = subreg_regno_offset (reg_renumber[reg_y],
                                           GET_MODE (SUBREG_REG (y)),
                                           byte_y, GET_MODE (y));
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;

      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
          && base < FIRST_PSEUDO_REGISTER
          && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
                                            GET_MODE (SUBREG_REG (x)),
                                            SUBREG_BYTE (x), GET_MODE (x)))
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                           GET_MODE (SUBREG_REG (x)),
                                           SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}

/* Return the regno of the register REG, handling subregs too.  */
int
reg_or_subregno (rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}

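/* For instance (illustrative): for (reg:SI 3) true_regnum returns 3; for a
   pseudo that reload assigned to hard register 1 it returns 1; and for
   (subreg:QI (reg:SI 3) 0), reg_or_subregno returns 3.  */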