/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This is the pathetic remnant of the once-famous jump-optimization pass
   of the compiler.  It now contains basically a set of utility functions
   for operating on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
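/* For example, once labels have been marked, a direct branch such as

       (jump_insn 10 ... (set (pc) (label_ref 23)))

   has its JUMP_LABEL pointing at code_label 23, and that label's
   LABEL_NUSES counts insn 10 among its users.  (Illustrative sketch;
   the insn UIDs here are made up.)  */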
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "diagnostic.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
#include "timevar.h"
#include "tree-pass.h"
#include "target.h"
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */
static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;

  timevar_pop (TV_REBUILD_JUMP);
}
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */
static unsigned int
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
        {
          prev = prev_nonnote_insn (insn);
          if (BARRIER_P (prev))
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            reorder_insns (insn, insn, prev);
        }
    }
  return 0;
}
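/* For instance, given the insn stream "jump_insn; note; barrier" this pass
   moves the barrier next to the jump, yielding "jump_insn; barrier; note",
   and in "jump_insn; barrier; barrier" the duplicate barrier is deleted.  */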
struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
/* Initialize LABEL_NUSES and JUMP_LABEL fields, and delete any REG_LABEL
   notes whose labels no longer occur in the insn.  */
static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
        rtx note, next;

        for (note = REG_NOTES (insn); note; note = next)
          {
            next = XEXP (note, 1);
            if (REG_NOTE_KIND (note) == REG_LABEL
                && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
              remove_note (insn, note);
          }
      }
}
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */
static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, 0);
        if (! INSN_DELETED_P (insn) && JUMP_P (insn))
          {
            /* When we know the LABEL_REF contained in a REG used in
               an indirect jump, we'll have a REG_LABEL note so that
               flow can tell where it's going.  */
            if (JUMP_LABEL (insn) == 0)
              {
                rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
                if (label_note)
                  {
                    /* Wrap a LABEL_REF around the label from the REG_LABEL
                       note, so that we can canonicalize it.  */
                    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
                                                       XEXP (label_note, 0));

                    mark_jump_label (label_ref, insn, 0);
                    XEXP (label_note, 0) = XEXP (label_ref, 0);
                    JUMP_LABEL (insn) = XEXP (label_note, 0);
                  }
              }
          }
      }

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      rtx insn;
      FOR_EACH_BB (bb)
        {
          for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }

          for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }
        }
    }
}
/* Given a comparison (CODE ARG0 ARG1), inside an insn INSN, return the code
   of the reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may also be returned when we have a CC_MODE compare and
   don't know whether its source is a floating-point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in
   these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1,
                                rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
         point.  Similarly the unsigned comparisons are never used for
         floating point, so we can reverse them in the default way.  */
      return reverse_condition (code);

    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see an unordered comparison, we can be sure to
         be dealing with floating point, so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;

    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      for (prev = prev_nonnote_insn (insn);
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (prev))
        {
          rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for
                 modeling i387 comparisons that first move flag registers
                 around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
              /* If the register is clobbered in some way we cannot
                 understand, give up.  */
              return UNKNOWN;
            }
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
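/* Examples: reversing (gt:SI (reg:SI 60) (const_int 0)) yields LE, and
   EQ, NE and the unsigned codes always reverse the default way.  But for a
   float comparison (gt:DF x y) in a mode where NaNs must be honored, LE is
   not the negation (both are false on a NaN operand), and since GT is not
   itself one of the unordered codes, the function returns UNKNOWN.  */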
/* A wrapper around the previous function to take COMPARISON as an rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (rtx comparison, rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
                                         XEXP (comparison, 0),
                                         XEXP (comparison, 1), insn);
}
/* Return a comparison with the reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */
rtx
reversed_comparison (rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                  XEXP (exp, 0), XEXP (exp, 1));
}
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating-point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */
enum rtx_code
reverse_condition (enum rtx_code code)
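/* E.g. reverse_condition (EQ) == NE, reverse_condition (LT) == GE and
   reverse_condition (LTU) == GEU.  The LT -> GE mapping is exactly the
   unsafe case for IEEE operands: when either operand is a NaN, LT and GE
   are both false, so GE is not the negation of LT.  */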
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */
enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
/* Similar, but return the code when the two operands of a comparison are
   swapped.  This IS safe for IEEE floating-point.  */
enum rtx_code
swap_condition (enum rtx_code code)
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   we return it unchanged.  */
enum rtx_code
unsigned_condition (enum rtx_code code)
/* Similarly, return the signed version of a comparison.  */
enum rtx_code
signed_condition (enum rtx_code code)
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */
int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to reverse
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}
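/* For instance, comparison_dominates_p (EQ, LE) is nonzero because x == y
   implies x <= y, while comparison_dominates_p (LE, EQ) is zero.  */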
/* Return 1 if INSN is an unconditional jump and nothing else.  */
int
simplejump_p (rtx insn)
{
  return (JUMP_P (insn)
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}
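/* E.g. an insn whose pattern is

       (set (pc) (label_ref 23))

   is a simplejump, whereas a conditional branch

       (set (pc) (if_then_else (eq ...) (label_ref 23) (pc)))

   is not.  */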
/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
int
condjump_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
            && ((GET_CODE (XEXP (x, 2)) == PC
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
                     || GET_CODE (XEXP (x, 1)) == RETURN))
                || (GET_CODE (XEXP (x, 1)) == PC
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
                        || GET_CODE (XEXP (x, 2)) == RETURN))));
}
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
int
condjump_in_parallel_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}
/* Return the SET of PC in INSN, otherwise NULL.  */
rtx
pc_set (rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}
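/* A hypothetical caller can use pc_set to examine the branch condition of
   a conditional jump uniformly, whether or not the jump is bundled in a
   PARALLEL (e.g. a branch-and-decrement pattern):

       rtx set = pc_set (insn);
       if (set && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
         cond = XEXP (SET_SRC (set), 0);
*/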
/* Return true when INSN is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */
int
any_uncondjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}
/* Return true when INSN is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects, so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */
int
any_condjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
          || (a == PC && (b == LABEL_REF || b == RETURN)));
}
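/* E.g. (set (pc) (if_then_else (ne (reg:CC 17) (const_int 0))
   (label_ref 23) (pc))) satisfies any_condjump_p, including when it appears
   as the first element of a PARALLEL, while the unconditional
   (set (pc) (label_ref 23)) does not.  */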
/* Return the label of a conditional jump.  */
rtx
condjump_label (rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}
/* Return true if INSN is a (possibly conditional) return insn.  */
static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
               || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}
/* Return true if INSN is a jump that only transfers control and
   nothing more.  */
int
onlyjump_p (rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}
/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */
int
only_sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */
int
sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
            sets_cc0 = 1;
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
            other_things = 1;
        }
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
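/* Examples: a lone (set (cc0) (compare (reg:SI 60) (const_int 0))) yields 1;
   a PARALLEL that sets cc0 and also sets an ordinary register yields -1;
   a pattern that does not set cc0 at all yields 0.  */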
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */
static void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
        mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
                         XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (JUMP_P (insn))
              JUMP_LABEL (insn) = label;
            else
              {
                /* Add a REG_LABEL note for LABEL unless there already
                   is one.  All uses of a label, except for labels
                   that are the targets of jumps, must have a
                   REG_LABEL note.  */
                if (! find_reg_note (insn, REG_LABEL, label))
                  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
                                                        REG_NOTES (insn));
              }
          }
        return;
      }

      /* Do walk the labels in a vector, but not the first operand of an
         ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
        }
    }
}
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead and
   a subsequent cfg_cleanup pass to delete unreachable code if needed.  */
rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        {
          /* This can delete NEXT or PREV,
             either directly if NEXT is JUMP_LABEL (INSN),
             or indirectly through more levels of jumps.  */
          delete_related_insns (lab);

          /* I feel a little doubtful about this loop,
             but I see no clean and sure alternative way
             to find the first insn after INSN that is not now deleted.
             I hope this works.  */
          while (next && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          return next;
        }
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  return next;
}
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */
void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
        {
          INSN_DELETED_P (insn) = 1;

          /* Patch this insn out of the chain.  */
          /* We don't do this all at once, because we
             must preserve all NOTEs.  */
          if (prev)
            NEXT_INSN (prev) = next;

          if (next)
            PREV_INSN (next) = prev;
        }

      if (insn == to)
        break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */
static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          if (nlabel)
            n = gen_rtx_LABEL_REF (Pmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          validate_change (insn, loc, n, 1);
          return;
        }
    }
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
        x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
        x = gen_rtx_RETURN (VOIDmode);
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */
int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}
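/* A hypothetical caller commits the accrued redirection atomically:

       rtx olabel = JUMP_LABEL (jump);
       if (redirect_jump_1 (jump, nlabel) && apply_change_group ())
         redirect_jump_2 (jump, olabel, nlabel, 0, 0);

   redirect_jump below performs essentially this sequence.  */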
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */
int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete the insns related to OLABEL if its
   ref count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  /* A negative DELETE_UNUSED used to signal behavior regarding moving the
     FUNCTION_END note.  Just sanity-check that no user still relies on
     this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
         be done if this is not an integer comparison, is to reverse
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
         of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
        {
          validate_change (insn, &XEXP (x, 0),
                           gen_rtx_fmt_ee (reversed_code,
                                           GET_MODE (comp), XEXP (comp, 0),
                                           XEXP (comp, 1)),
                           1);
          return 1;
        }

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */
int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */
int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}
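/* E.g. inverting a branch whose condition is (eq (reg:CC 17) (const_int 0))
   and redirecting it to NLABEL turns

       (set (pc) (if_then_else (eq ...) (label_ref OLD) (pc)))
   into
       (set (pc) (if_then_else (ne ...) (label_ref NLABEL) (pc))).  */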
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */
int
rtx_renumbered_equal_p (rtx x, rtx y)
{
  int i;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                        && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              reg_x = subreg_regno_offset (reg_renumber[reg_x],
                                           GET_MODE (SUBREG_REG (x)),
                                           byte_x,
                                           GET_MODE (x));
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              reg_y = subreg_regno_offset (reg_renumber[reg_y],
                                           GET_MODE (SUBREG_REG (y)),
                                           byte_y,
                                           GET_MODE (y));
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j),
                                         XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
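/* E.g. if pseudo 60 was assigned hard register 3, then
   (plus:SI (reg:SI 60) (const_int 4)) and (plus:SI (const_int 4) (reg:SI 3))
   are rtx_renumbered_equal_p, since PLUS is commutative and the registers
   renumber to the same hard register.  */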
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */
int
true_regnum (rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
          && base < FIRST_PSEUDO_REGISTER
          && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
                                            GET_MODE (SUBREG_REG (x)),
                                            SUBREG_BYTE (x), GET_MODE (x)))
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                           GET_MODE (SUBREG_REG (x)),
                                           SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}
/* Return the regno of the register REG; handle subregs too.  */
unsigned int
reg_or_subregno (rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}