1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically a set of utility functions to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines redirect_jump and invert_jump are used
35 from other passes as well. */
39 #include "coretypes.h"
44 #include "hard-reg-set.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
53 #include "diagnostic.h"
58 #include "tree-pass.h"
61 /* Optimize jump y; x: ... y: jumpif... x?
62 Don't know if it is worth bothering with. */
63 /* Optimize two cases of conditional jump to conditional jump?
64 This can never delete any instruction or make anything dead,
65 or even change what is live at any point.
66 So perhaps let combiner do it. */
68 static void init_label_info (rtx
);
69 static void mark_all_labels (rtx
);
70 static void redirect_exp_1 (rtx
*, rtx
, rtx
, rtx
);
71 static int invert_exp_1 (rtx
, rtx
);
72 static int returnjump_p_1 (rtx
*, void *);
74 /* Alternate entry into the jump optimizer. This entry point only rebuilds
75 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
78 rebuild_jump_labels (rtx f
)
82 timevar_push (TV_REBUILD_JUMP
);
86 /* Keep track of labels used from static data; we don't track them
87 closely enough to delete them here, so make sure their reference
88 count doesn't drop to zero. */
90 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
91 if (LABEL_P (XEXP (insn
, 0)))
92 LABEL_NUSES (XEXP (insn
, 0))++;
93 timevar_pop (TV_REBUILD_JUMP
);
96 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
97 non-fallthru insn. This is not generally true, as multiple barriers
98 may have crept in, or the BARRIER may be separated from the last
99 real insn by one or more NOTEs.
101 This simple pass moves barriers and removes duplicates so that the
105 cleanup_barriers (void)
107 rtx insn
, next
, prev
;
108 for (insn
= get_insns (); insn
; insn
= next
)
110 next
= NEXT_INSN (insn
);
111 if (BARRIER_P (insn
))
113 prev
= prev_nonnote_insn (insn
);
114 if (BARRIER_P (prev
))
116 else if (prev
!= PREV_INSN (insn
))
117 reorder_insns (insn
, insn
, prev
);
123 struct tree_opt_pass pass_cleanup_barriers
=
125 "barriers", /* name */
127 cleanup_barriers
, /* execute */
130 0, /* static_pass_number */
132 0, /* properties_required */
133 0, /* properties_provided */
134 0, /* properties_destroyed */
135 0, /* todo_flags_start */
136 TODO_dump_func
, /* todo_flags_finish */
141 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
142 notes whose labels don't occur in the insn any more. Returns the
143 largest INSN_UID found. */
145 init_label_info (rtx f
)
149 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
151 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
152 else if (JUMP_P (insn
))
153 JUMP_LABEL (insn
) = 0;
154 else if (NONJUMP_INSN_P (insn
) || CALL_P (insn
))
158 for (note
= REG_NOTES (insn
); note
; note
= next
)
160 next
= XEXP (note
, 1);
161 if (REG_NOTE_KIND (note
) == REG_LABEL
162 && ! reg_mentioned_p (XEXP (note
, 0), PATTERN (insn
)))
163 remove_note (insn
, note
);
168 /* Mark the label each jump jumps to.
169 Combine consecutive labels, and count uses of labels. */
172 mark_all_labels (rtx f
)
176 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
179 mark_jump_label (PATTERN (insn
), insn
, 0);
180 if (! INSN_DELETED_P (insn
) && JUMP_P (insn
))
182 /* When we know the LABEL_REF contained in a REG used in
183 an indirect jump, we'll have a REG_LABEL note so that
184 flow can tell where it's going. */
185 if (JUMP_LABEL (insn
) == 0)
187 rtx label_note
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
);
190 /* But a LABEL_REF around the REG_LABEL note, so
191 that we can canonicalize it. */
192 rtx label_ref
= gen_rtx_LABEL_REF (Pmode
,
193 XEXP (label_note
, 0));
195 mark_jump_label (label_ref
, insn
, 0);
196 XEXP (label_note
, 0) = XEXP (label_ref
, 0);
197 JUMP_LABEL (insn
) = XEXP (label_note
, 0);
203 /* If we are in cfglayout mode, there may be non-insns between the
204 basic blocks. If those non-insns represent tablejump data, they
205 contain label references that we must record. */
206 if (current_ir_type () == IR_RTL_CFGLAYOUT
)
212 for (insn
= bb
->il
.rtl
->header
; insn
; insn
= NEXT_INSN (insn
))
215 gcc_assert (JUMP_TABLE_DATA_P (insn
));
216 mark_jump_label (PATTERN (insn
), insn
, 0);
219 for (insn
= bb
->il
.rtl
->footer
; insn
; insn
= NEXT_INSN (insn
))
222 gcc_assert (JUMP_TABLE_DATA_P (insn
));
223 mark_jump_label (PATTERN (insn
), insn
, 0);
229 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
230 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
231 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
232 know whether it's source is floating point or integer comparison. Machine
233 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
234 to help this function avoid overhead in these cases. */
236 reversed_comparison_code_parts (enum rtx_code code
, const_rtx arg0
,
237 const_rtx arg1
, const_rtx insn
)
239 enum machine_mode mode
;
241 /* If this is not actually a comparison, we can't reverse it. */
242 if (GET_RTX_CLASS (code
) != RTX_COMPARE
243 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
246 mode
= GET_MODE (arg0
);
247 if (mode
== VOIDmode
)
248 mode
= GET_MODE (arg1
);
250 /* First see if machine description supplies us way to reverse the
251 comparison. Give it priority over everything else to allow
252 machine description to do tricks. */
253 if (GET_MODE_CLASS (mode
) == MODE_CC
254 && REVERSIBLE_CC_MODE (mode
))
256 #ifdef REVERSE_CONDITION
257 return REVERSE_CONDITION (code
, mode
);
259 return reverse_condition (code
);
262 /* Try a few special cases based on the comparison code. */
271 /* It is always safe to reverse EQ and NE, even for the floating
272 point. Similarly the unsigned comparisons are never used for
273 floating point so we can reverse them in the default way. */
274 return reverse_condition (code
);
279 /* In case we already see unordered comparison, we can be sure to
280 be dealing with floating point so we don't need any more tests. */
281 return reverse_condition_maybe_unordered (code
);
286 /* We don't have safe way to reverse these yet. */
292 if (GET_MODE_CLASS (mode
) == MODE_CC
|| CC0_P (arg0
))
295 /* Try to search for the comparison to determine the real mode.
296 This code is expensive, but with sane machine description it
297 will be never used, since REVERSIBLE_CC_MODE will return true
302 /* These CONST_CAST's are okay because prev_nonnote_insn just
303 returns it's argument and we assign it to a const_rtx
305 for (prev
= prev_nonnote_insn (CONST_CAST_RTX(insn
));
306 prev
!= 0 && !LABEL_P (prev
);
307 prev
= prev_nonnote_insn (CONST_CAST_RTX(prev
)))
309 const_rtx set
= set_of (arg0
, prev
);
310 if (set
&& GET_CODE (set
) == SET
311 && rtx_equal_p (SET_DEST (set
), arg0
))
313 rtx src
= SET_SRC (set
);
315 if (GET_CODE (src
) == COMPARE
)
317 rtx comparison
= src
;
318 arg0
= XEXP (src
, 0);
319 mode
= GET_MODE (arg0
);
320 if (mode
== VOIDmode
)
321 mode
= GET_MODE (XEXP (comparison
, 1));
324 /* We can get past reg-reg moves. This may be useful for model
325 of i387 comparisons that first move flag registers around. */
332 /* If register is clobbered in some ununderstandable way,
339 /* Test for an integer condition, or a floating-point comparison
340 in which NaNs can be ignored. */
341 if (GET_CODE (arg0
) == CONST_INT
342 || (GET_MODE (arg0
) != VOIDmode
343 && GET_MODE_CLASS (mode
) != MODE_CC
344 && !HONOR_NANS (mode
)))
345 return reverse_condition (code
);
350 /* A wrapper around the previous function to take COMPARISON as rtx
351 expression. This simplifies many callers. */
353 reversed_comparison_code (const_rtx comparison
, const_rtx insn
)
355 if (!COMPARISON_P (comparison
))
357 return reversed_comparison_code_parts (GET_CODE (comparison
),
358 XEXP (comparison
, 0),
359 XEXP (comparison
, 1), insn
);
362 /* Return comparison with reversed code of EXP.
363 Return NULL_RTX in case we fail to do the reversal. */
365 reversed_comparison (const_rtx exp
, enum machine_mode mode
)
367 enum rtx_code reversed_code
= reversed_comparison_code (exp
, NULL_RTX
);
368 if (reversed_code
== UNKNOWN
)
371 return simplify_gen_relational (reversed_code
, mode
, VOIDmode
,
372 XEXP (exp
, 0), XEXP (exp
, 1));
376 /* Given an rtx-code for a comparison, return the code for the negated
377 comparison. If no such code exists, return UNKNOWN.
379 WATCH OUT! reverse_condition is not safe to use on a jump that might
380 be acting on the results of an IEEE floating point comparison, because
381 of the special treatment of non-signaling nans in comparisons.
382 Use reversed_comparison_code instead. */
385 reverse_condition (enum rtx_code code
)
427 /* Similar, but we're allowed to generate unordered comparisons, which
428 makes it safe for IEEE floating-point. Of course, we have to recognize
429 that the target will support them too... */
432 reverse_condition_maybe_unordered (enum rtx_code code
)
470 /* Similar, but return the code when two operands of a comparison are swapped.
471 This IS safe for IEEE floating-point. */
474 swap_condition (enum rtx_code code
)
516 /* Given a comparison CODE, return the corresponding unsigned comparison.
517 If CODE is an equality comparison or already an unsigned comparison,
521 unsigned_condition (enum rtx_code code
)
547 /* Similarly, return the signed version of a comparison. */
550 signed_condition (enum rtx_code code
)
576 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
577 truth of CODE1 implies the truth of CODE2. */
580 comparison_dominates_p (enum rtx_code code1
, enum rtx_code code2
)
582 /* UNKNOWN comparison codes can happen as a result of trying to revert
584 They can't match anything, so we have to reject them here. */
585 if (code1
== UNKNOWN
|| code2
== UNKNOWN
)
594 if (code2
== UNLE
|| code2
== UNGE
)
599 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
605 if (code2
== UNLE
|| code2
== NE
)
610 if (code2
== LE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
615 if (code2
== UNGE
|| code2
== NE
)
620 if (code2
== GE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
626 if (code2
== ORDERED
)
631 if (code2
== NE
|| code2
== ORDERED
)
636 if (code2
== LEU
|| code2
== NE
)
641 if (code2
== GEU
|| code2
== NE
)
646 if (code2
== NE
|| code2
== UNEQ
|| code2
== UNLE
|| code2
== UNLT
647 || code2
== UNGE
|| code2
== UNGT
)
658 /* Return 1 if INSN is an unconditional jump and nothing else. */
661 simplejump_p (const_rtx insn
)
663 return (JUMP_P (insn
)
664 && GET_CODE (PATTERN (insn
)) == SET
665 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
666 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
669 /* Return nonzero if INSN is a (possibly) conditional jump
672 Use of this function is deprecated, since we need to support combined
673 branch and compare insns. Use any_condjump_p instead whenever possible. */
676 condjump_p (const_rtx insn
)
678 const_rtx x
= PATTERN (insn
);
680 if (GET_CODE (x
) != SET
681 || GET_CODE (SET_DEST (x
)) != PC
)
685 if (GET_CODE (x
) == LABEL_REF
)
688 return (GET_CODE (x
) == IF_THEN_ELSE
689 && ((GET_CODE (XEXP (x
, 2)) == PC
690 && (GET_CODE (XEXP (x
, 1)) == LABEL_REF
691 || GET_CODE (XEXP (x
, 1)) == RETURN
))
692 || (GET_CODE (XEXP (x
, 1)) == PC
693 && (GET_CODE (XEXP (x
, 2)) == LABEL_REF
694 || GET_CODE (XEXP (x
, 2)) == RETURN
))));
697 /* Return nonzero if INSN is a (possibly) conditional jump inside a
700 Use this function is deprecated, since we need to support combined
701 branch and compare insns. Use any_condjump_p instead whenever possible. */
704 condjump_in_parallel_p (const_rtx insn
)
706 const_rtx x
= PATTERN (insn
);
708 if (GET_CODE (x
) != PARALLEL
)
711 x
= XVECEXP (x
, 0, 0);
713 if (GET_CODE (x
) != SET
)
715 if (GET_CODE (SET_DEST (x
)) != PC
)
717 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
719 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
721 if (XEXP (SET_SRC (x
), 2) == pc_rtx
722 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
723 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
725 if (XEXP (SET_SRC (x
), 1) == pc_rtx
726 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
727 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
732 /* Return set of PC, otherwise NULL. */
735 pc_set (const_rtx insn
)
740 pat
= PATTERN (insn
);
742 /* The set is allowed to appear either as the insn pattern or
743 the first set in a PARALLEL. */
744 if (GET_CODE (pat
) == PARALLEL
)
745 pat
= XVECEXP (pat
, 0, 0);
746 if (GET_CODE (pat
) == SET
&& GET_CODE (SET_DEST (pat
)) == PC
)
752 /* Return true when insn is an unconditional direct jump,
753 possibly bundled inside a PARALLEL. */
756 any_uncondjump_p (const_rtx insn
)
758 const_rtx x
= pc_set (insn
);
761 if (GET_CODE (SET_SRC (x
)) != LABEL_REF
)
763 if (find_reg_note (insn
, REG_NON_LOCAL_GOTO
, NULL_RTX
))
768 /* Return true when insn is a conditional jump. This function works for
769 instructions containing PC sets in PARALLELs. The instruction may have
770 various other effects so before removing the jump you must verify
773 Note that unlike condjump_p it returns false for unconditional jumps. */
776 any_condjump_p (const_rtx insn
)
778 const_rtx x
= pc_set (insn
);
783 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
786 a
= GET_CODE (XEXP (SET_SRC (x
), 1));
787 b
= GET_CODE (XEXP (SET_SRC (x
), 2));
789 return ((b
== PC
&& (a
== LABEL_REF
|| a
== RETURN
))
790 || (a
== PC
&& (b
== LABEL_REF
|| b
== RETURN
)));
793 /* Return the label of a conditional jump. */
796 condjump_label (const_rtx insn
)
798 rtx x
= pc_set (insn
);
803 if (GET_CODE (x
) == LABEL_REF
)
805 if (GET_CODE (x
) != IF_THEN_ELSE
)
807 if (XEXP (x
, 2) == pc_rtx
&& GET_CODE (XEXP (x
, 1)) == LABEL_REF
)
809 if (XEXP (x
, 1) == pc_rtx
&& GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
814 /* Return true if INSN is a (possibly conditional) return insn. */
817 returnjump_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
821 return x
&& (GET_CODE (x
) == RETURN
822 || (GET_CODE (x
) == SET
&& SET_IS_RETURN_P (x
)));
826 returnjump_p (rtx insn
)
830 return for_each_rtx (&PATTERN (insn
), returnjump_p_1
, NULL
);
833 /* Return true if INSN is a jump that only transfers control and
837 onlyjump_p (const_rtx insn
)
844 set
= single_set (insn
);
847 if (GET_CODE (SET_DEST (set
)) != PC
)
849 if (side_effects_p (SET_SRC (set
)))
857 /* Return nonzero if X is an RTX that only sets the condition codes
858 and has no side effects. */
861 only_sets_cc0_p (const_rtx x
)
869 return sets_cc0_p (x
) == 1 && ! side_effects_p (x
);
872 /* Return 1 if X is an RTX that does nothing but set the condition codes
873 and CLOBBER or USE registers.
874 Return -1 if X does explicitly set the condition codes,
875 but also does other things. */
878 sets_cc0_p (const_rtx x
)
886 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
888 if (GET_CODE (x
) == PARALLEL
)
892 int other_things
= 0;
893 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
895 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
896 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
898 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
901 return ! sets_cc0
? 0 : other_things
? -1 : 1;
907 /* Find all CODE_LABELs referred to in X, and increment their use counts.
908 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
909 in INSN, then store one of them in JUMP_LABEL (INSN).
910 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
911 referenced in INSN, add a REG_LABEL note containing that label to INSN.
912 Also, when there are consecutive labels, canonicalize on the last of them.
914 Note that two labels separated by a loop-beginning note
915 must be kept distinct if we have not yet done loop-optimization,
916 because the gap between them is where loop-optimize
917 will want to move invariant code to. CROSS_JUMP tells us
918 that loop-optimization is done with. */
921 mark_jump_label (rtx x
, rtx insn
, int in_mem
)
923 RTX_CODE code
= GET_CODE (x
);
943 for (i
= 0; i
< XVECLEN (x
, 0); i
++)
944 mark_jump_label (PATTERN (XVECEXP (x
, 0, i
)),
945 XVECEXP (x
, 0, i
), 0);
952 /* If this is a constant-pool reference, see if it is a label. */
953 if (CONSTANT_POOL_ADDRESS_P (x
))
954 mark_jump_label (get_pool_constant (x
), insn
, in_mem
);
959 rtx label
= XEXP (x
, 0);
961 /* Ignore remaining references to unreachable labels that
962 have been deleted. */
964 && NOTE_KIND (label
) == NOTE_INSN_DELETED_LABEL
)
967 gcc_assert (LABEL_P (label
));
969 /* Ignore references to labels of containing functions. */
970 if (LABEL_REF_NONLOCAL_P (x
))
974 if (! insn
|| ! INSN_DELETED_P (insn
))
975 ++LABEL_NUSES (label
);
980 JUMP_LABEL (insn
) = label
;
983 /* Add a REG_LABEL note for LABEL unless there already
984 is one. All uses of a label, except for labels
985 that are the targets of jumps, must have a
987 if (! find_reg_note (insn
, REG_LABEL
, label
))
988 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_LABEL
, label
,
995 /* Do walk the labels in a vector, but not the first operand of an
996 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
999 if (! INSN_DELETED_P (insn
))
1001 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
1003 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
1004 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
, in_mem
);
1012 fmt
= GET_RTX_FORMAT (code
);
1013 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1016 mark_jump_label (XEXP (x
, i
), insn
, in_mem
);
1017 else if (fmt
[i
] == 'E')
1020 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1021 mark_jump_label (XVECEXP (x
, i
, j
), insn
, in_mem
);
1027 /* Delete insn INSN from the chain of insns and update label ref counts
1028 and delete insns now unreachable.
1030 Returns the first insn after INSN that was not deleted.
1032 Usage of this instruction is deprecated. Use delete_insn instead and
1033 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1036 delete_related_insns (rtx insn
)
1038 int was_code_label
= (LABEL_P (insn
));
1040 rtx next
= NEXT_INSN (insn
), prev
= PREV_INSN (insn
);
1042 while (next
&& INSN_DELETED_P (next
))
1043 next
= NEXT_INSN (next
);
1045 /* This insn is already deleted => return first following nondeleted. */
1046 if (INSN_DELETED_P (insn
))
1051 /* If instruction is followed by a barrier,
1052 delete the barrier too. */
1054 if (next
!= 0 && BARRIER_P (next
))
1057 /* If deleting a jump, decrement the count of the label,
1058 and delete the label if it is now unused. */
1060 if (JUMP_P (insn
) && JUMP_LABEL (insn
))
1062 rtx lab
= JUMP_LABEL (insn
), lab_next
;
1064 if (LABEL_NUSES (lab
) == 0)
1066 /* This can delete NEXT or PREV,
1067 either directly if NEXT is JUMP_LABEL (INSN),
1068 or indirectly through more levels of jumps. */
1069 delete_related_insns (lab
);
1071 /* I feel a little doubtful about this loop,
1072 but I see no clean and sure alternative way
1073 to find the first insn after INSN that is not now deleted.
1074 I hope this works. */
1075 while (next
&& INSN_DELETED_P (next
))
1076 next
= NEXT_INSN (next
);
1079 else if (tablejump_p (insn
, NULL
, &lab_next
))
1081 /* If we're deleting the tablejump, delete the dispatch table.
1082 We may not be able to kill the label immediately preceding
1083 just yet, as it might be referenced in code leading up to
1085 delete_related_insns (lab_next
);
1089 /* Likewise if we're deleting a dispatch table. */
1092 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1093 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
1095 rtx pat
= PATTERN (insn
);
1096 int i
, diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
1097 int len
= XVECLEN (pat
, diff_vec_p
);
1099 for (i
= 0; i
< len
; i
++)
1100 if (LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)) == 0)
1101 delete_related_insns (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0));
1102 while (next
&& INSN_DELETED_P (next
))
1103 next
= NEXT_INSN (next
);
1107 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1108 if (NONJUMP_INSN_P (insn
) || CALL_P (insn
))
1109 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1110 if (REG_NOTE_KIND (note
) == REG_LABEL
1111 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1112 && LABEL_P (XEXP (note
, 0)))
1113 if (LABEL_NUSES (XEXP (note
, 0)) == 0)
1114 delete_related_insns (XEXP (note
, 0));
1116 while (prev
&& (INSN_DELETED_P (prev
) || NOTE_P (prev
)))
1117 prev
= PREV_INSN (prev
);
1119 /* If INSN was a label and a dispatch table follows it,
1120 delete the dispatch table. The tablejump must have gone already.
1121 It isn't useful to fall through into a table. */
1124 && NEXT_INSN (insn
) != 0
1125 && JUMP_P (NEXT_INSN (insn
))
1126 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
1127 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
1128 next
= delete_related_insns (NEXT_INSN (insn
));
1130 /* If INSN was a label, delete insns following it if now unreachable. */
1132 if (was_code_label
&& prev
&& BARRIER_P (prev
))
1137 code
= GET_CODE (next
);
1139 next
= NEXT_INSN (next
);
1140 /* Keep going past other deleted labels to delete what follows. */
1141 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
1142 next
= NEXT_INSN (next
);
1143 else if (code
== BARRIER
|| INSN_P (next
))
1144 /* Note: if this deletes a jump, it can cause more
1145 deletion of unreachable code, after a different label.
1146 As long as the value from this recursive call is correct,
1147 this invocation functions correctly. */
1148 next
= delete_related_insns (next
);
1157 /* Delete a range of insns from FROM to TO, inclusive.
1158 This is for the sake of peephole optimization, so assume
1159 that whatever these insns do will still be done by a new
1160 peephole insn that will replace them. */
1163 delete_for_peephole (rtx from
, rtx to
)
1169 rtx next
= NEXT_INSN (insn
);
1170 rtx prev
= PREV_INSN (insn
);
1174 INSN_DELETED_P (insn
) = 1;
1176 /* Patch this insn out of the chain. */
1177 /* We don't do this all at once, because we
1178 must preserve all NOTEs. */
1180 NEXT_INSN (prev
) = next
;
1183 PREV_INSN (next
) = prev
;
1191 /* Note that if TO is an unconditional jump
1192 we *do not* delete the BARRIER that follows,
1193 since the peephole that replaces this sequence
1194 is also an unconditional jump in that case. */
1197 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1198 NLABEL as a return. Accrue modifications into the change group. */
1201 redirect_exp_1 (rtx
*loc
, rtx olabel
, rtx nlabel
, rtx insn
)
1204 RTX_CODE code
= GET_CODE (x
);
1208 if (code
== LABEL_REF
)
1210 if (XEXP (x
, 0) == olabel
)
1214 n
= gen_rtx_LABEL_REF (Pmode
, nlabel
);
1216 n
= gen_rtx_RETURN (VOIDmode
);
1218 validate_change (insn
, loc
, n
, 1);
1222 else if (code
== RETURN
&& olabel
== 0)
1225 x
= gen_rtx_LABEL_REF (Pmode
, nlabel
);
1227 x
= gen_rtx_RETURN (VOIDmode
);
1228 if (loc
== &PATTERN (insn
))
1229 x
= gen_rtx_SET (VOIDmode
, pc_rtx
, x
);
1230 validate_change (insn
, loc
, x
, 1);
1234 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
1235 && GET_CODE (SET_SRC (x
)) == LABEL_REF
1236 && XEXP (SET_SRC (x
), 0) == olabel
)
1238 validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 1);
1242 fmt
= GET_RTX_FORMAT (code
);
1243 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1246 redirect_exp_1 (&XEXP (x
, i
), olabel
, nlabel
, insn
);
1247 else if (fmt
[i
] == 'E')
1250 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1251 redirect_exp_1 (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
);
1256 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
1257 the modifications into the change group. Return false if we did
1258 not see how to do that. */
1261 redirect_jump_1 (rtx jump
, rtx nlabel
)
1263 int ochanges
= num_validated_changes ();
1266 if (GET_CODE (PATTERN (jump
)) == PARALLEL
)
1267 loc
= &XVECEXP (PATTERN (jump
), 0, 0);
1269 loc
= &PATTERN (jump
);
1271 redirect_exp_1 (loc
, JUMP_LABEL (jump
), nlabel
, jump
);
1272 return num_validated_changes () > ochanges
;
1275 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
1276 jump target label is unused as a result, it and the code following
1279 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1282 The return value will be 1 if the change was made, 0 if it wasn't
1283 (this can only occur for NLABEL == 0). */
1286 redirect_jump (rtx jump
, rtx nlabel
, int delete_unused
)
1288 rtx olabel
= JUMP_LABEL (jump
);
1290 if (nlabel
== olabel
)
1293 if (! redirect_jump_1 (jump
, nlabel
) || ! apply_change_group ())
1296 redirect_jump_2 (jump
, olabel
, nlabel
, delete_unused
, 0);
1300 /* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
1302 If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
1303 count has dropped to zero. */
1305 redirect_jump_2 (rtx jump
, rtx olabel
, rtx nlabel
, int delete_unused
,
1310 /* Negative DELETE_UNUSED used to be used to signalize behavior on
1311 moving FUNCTION_END note. Just sanity check that no user still worry
1313 gcc_assert (delete_unused
>= 0);
1314 JUMP_LABEL (jump
) = nlabel
;
1316 ++LABEL_NUSES (nlabel
);
1318 /* Update labels in any REG_EQUAL note. */
1319 if ((note
= find_reg_note (jump
, REG_EQUAL
, NULL_RTX
)) != NULL_RTX
)
1321 if (!nlabel
|| (invert
&& !invert_exp_1 (XEXP (note
, 0), jump
)))
1322 remove_note (jump
, note
);
1325 redirect_exp_1 (&XEXP (note
, 0), olabel
, nlabel
, jump
);
1326 confirm_change_group ();
1330 if (olabel
&& --LABEL_NUSES (olabel
) == 0 && delete_unused
> 0
1331 /* Undefined labels will remain outside the insn stream. */
1332 && INSN_UID (olabel
))
1333 delete_related_insns (olabel
);
1335 invert_br_probabilities (jump
);
1338 /* Invert the jump condition X contained in jump insn INSN. Accrue the
1339 modifications into the change group. Return nonzero for success. */
1341 invert_exp_1 (rtx x
, rtx insn
)
1343 RTX_CODE code
= GET_CODE (x
);
1345 if (code
== IF_THEN_ELSE
)
1347 rtx comp
= XEXP (x
, 0);
1349 enum rtx_code reversed_code
;
1351 /* We can do this in two ways: The preferable way, which can only
1352 be done if this is not an integer comparison, is to reverse
1353 the comparison code. Otherwise, swap the THEN-part and ELSE-part
1354 of the IF_THEN_ELSE. If we can't do either, fail. */
1356 reversed_code
= reversed_comparison_code (comp
, insn
);
1358 if (reversed_code
!= UNKNOWN
)
1360 validate_change (insn
, &XEXP (x
, 0),
1361 gen_rtx_fmt_ee (reversed_code
,
1362 GET_MODE (comp
), XEXP (comp
, 0),
1369 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
1370 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
1377 /* Invert the condition of the jump JUMP, and make it jump to label
1378 NLABEL instead of where it jumps now. Accrue changes into the
1379 change group. Return false if we didn't see how to perform the
1380 inversion and redirection. */
1383 invert_jump_1 (rtx jump
, rtx nlabel
)
1385 rtx x
= pc_set (jump
);
1389 ochanges
= num_validated_changes ();
1391 ok
= invert_exp_1 (SET_SRC (x
), jump
);
1394 if (num_validated_changes () == ochanges
)
1397 /* redirect_jump_1 will fail of nlabel == olabel, and the current use is
1398 in Pmode, so checking this is not merely an optimization. */
1399 return nlabel
== JUMP_LABEL (jump
) || redirect_jump_1 (jump
, nlabel
);
1402 /* Invert the condition of the jump JUMP, and make it jump to label
1403 NLABEL instead of where it jumps now. Return true if successful. */
1406 invert_jump (rtx jump
, rtx nlabel
, int delete_unused
)
1408 rtx olabel
= JUMP_LABEL (jump
);
1410 if (invert_jump_1 (jump
, nlabel
) && apply_change_group ())
1412 redirect_jump_2 (jump
, olabel
, nlabel
, delete_unused
, 1);
1420 /* Like rtx_equal_p except that it considers two REGs as equal
1421 if they renumber to the same value and considers two commutative
1422 operations to be the same if the order of the operands has been
1426 rtx_renumbered_equal_p (const_rtx x
, const_rtx y
)
1429 const enum rtx_code code
= GET_CODE (x
);
1435 if ((code
== REG
|| (code
== SUBREG
&& REG_P (SUBREG_REG (x
))))
1436 && (REG_P (y
) || (GET_CODE (y
) == SUBREG
1437 && REG_P (SUBREG_REG (y
)))))
1439 int reg_x
= -1, reg_y
= -1;
1440 int byte_x
= 0, byte_y
= 0;
1442 if (GET_MODE (x
) != GET_MODE (y
))
1445 /* If we haven't done any renumbering, don't
1446 make any assumptions. */
1447 if (reg_renumber
== 0)
1448 return rtx_equal_p (x
, y
);
1452 reg_x
= REGNO (SUBREG_REG (x
));
1453 byte_x
= SUBREG_BYTE (x
);
1455 if (reg_renumber
[reg_x
] >= 0)
1457 reg_x
= subreg_regno_offset (reg_renumber
[reg_x
],
1458 GET_MODE (SUBREG_REG (x
)),
1467 if (reg_renumber
[reg_x
] >= 0)
1468 reg_x
= reg_renumber
[reg_x
];
1471 if (GET_CODE (y
) == SUBREG
)
1473 reg_y
= REGNO (SUBREG_REG (y
));
1474 byte_y
= SUBREG_BYTE (y
);
1476 if (reg_renumber
[reg_y
] >= 0)
1478 reg_y
= subreg_regno_offset (reg_renumber
[reg_y
],
1479 GET_MODE (SUBREG_REG (y
)),
1488 if (reg_renumber
[reg_y
] >= 0)
1489 reg_y
= reg_renumber
[reg_y
];
1492 return reg_x
>= 0 && reg_x
== reg_y
&& byte_x
== byte_y
;
1495 /* Now we have disposed of all the cases
1496 in which different rtx codes can match. */
1497 if (code
!= GET_CODE (y
))
1511 /* We can't assume nonlocal labels have their following insns yet. */
1512 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
1513 return XEXP (x
, 0) == XEXP (y
, 0);
1515 /* Two label-refs are equivalent if they point at labels
1516 in the same position in the instruction stream. */
1517 return (next_real_insn (XEXP (x
, 0))
1518 == next_real_insn (XEXP (y
, 0)));
1521 return XSTR (x
, 0) == XSTR (y
, 0);
1524 /* If we didn't match EQ equality above, they aren't the same. */
1531 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1533 if (GET_MODE (x
) != GET_MODE (y
))
1536 /* For commutative operations, the RTX match if the operand match in any
1537 order. Also handle the simple binary and unary cases without a loop. */
1538 if (targetm
.commutative_p (x
, UNKNOWN
))
1539 return ((rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
1540 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)))
1541 || (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 1))
1542 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 0))));
1543 else if (NON_COMMUTATIVE_P (x
))
1544 return (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
1545 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)));
1546 else if (UNARY_P (x
))
1547 return rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0));
1549 /* Compare the elements. If any pair of corresponding elements
1550 fail to match, return 0 for the whole things. */
1552 fmt
= GET_RTX_FORMAT (code
);
1553 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1559 if (XWINT (x
, i
) != XWINT (y
, i
))
1564 if (XINT (x
, i
) != XINT (y
, i
))
1569 if (XTREE (x
, i
) != XTREE (y
, i
))
1574 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
1579 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
1584 if (XEXP (x
, i
) != XEXP (y
, i
))
1591 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
1593 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1594 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
1605 /* If X is a hard register or equivalent to one or a subregister of one,
1606 return the hard register number. If X is a pseudo register that was not
1607 assigned a hard register, return the pseudo register number. Otherwise,
1608 return -1. Any rtx is valid for X. */
1611 true_regnum (const_rtx x
)
1615 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
1616 return reg_renumber
[REGNO (x
)];
1619 if (GET_CODE (x
) == SUBREG
)
1621 int base
= true_regnum (SUBREG_REG (x
));
1623 && base
< FIRST_PSEUDO_REGISTER
1624 && subreg_offset_representable_p (REGNO (SUBREG_REG (x
)),
1625 GET_MODE (SUBREG_REG (x
)),
1626 SUBREG_BYTE (x
), GET_MODE (x
)))
1627 return base
+ subreg_regno_offset (REGNO (SUBREG_REG (x
)),
1628 GET_MODE (SUBREG_REG (x
)),
1629 SUBREG_BYTE (x
), GET_MODE (x
));
1634 /* Return regno of the register REG and handle subregs too. */
1636 reg_or_subregno (const_rtx reg
)
1638 if (GET_CODE (reg
) == SUBREG
)
1639 reg
= SUBREG_REG (reg
);
1640 gcc_assert (REG_P (reg
));