1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines redirect_jump and invert_jump are used
35 from other passes as well. */
39 #include "coretypes.h"
44 #include "hard-reg-set.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
53 #include "diagnostic.h"
59 /* Optimize jump y; x: ... y: jumpif... x?
60 Don't know if it is worth bothering with. */
61 /* Optimize two cases of conditional jump to conditional jump?
62 This can never delete any instruction or make anything dead,
63 or even change what is live at any point.
64 So perhaps let combiner do it. */
66 static void init_label_info (rtx
);
67 static void mark_all_labels (rtx
);
68 static void delete_computation (rtx
);
69 static void redirect_exp_1 (rtx
*, rtx
, rtx
, rtx
);
70 static int redirect_exp (rtx
, rtx
, rtx
);
71 static void invert_exp_1 (rtx
);
72 static int invert_exp (rtx
);
73 static int returnjump_p_1 (rtx
*, void *);
74 static void delete_prior_computation (rtx
, rtx
);
76 /* Alternate entry into the jump optimizer. This entry point only rebuilds
77 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
/* NOTE(review): garbled extraction -- the original line numbers are
   embedded in the text, and several lines (return type, braces, and
   the calls that perform the actual label rebuilding) are missing
   from this fragment.  */
80 rebuild_jump_labels (rtx f
)
/* Bracket the work with the TV_REBUILD_JUMP timevar for -ftime-report.  */
84 timevar_push (TV_REBUILD_JUMP
);
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
/* forced_labels is a list chain walked via XEXP (insn, 1); bump
   LABEL_NUSES on each label it mentions so none is deleted as unused.  */
92 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
93 if (LABEL_P (XEXP (insn
, 0)))
94 LABEL_NUSES (XEXP (insn
, 0))++;
95 timevar_pop (TV_REBUILD_JUMP
);
98 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
99 non-fallthru insn. This is not generally true, as multiple barriers
100 may have crept in, or the BARRIER may be separated from the last
101 real insn by one or more NOTEs.
103 This simple pass moves barriers and removes duplicates so that the
107 cleanup_barriers (void)
109 rtx insn
, next
, prev
;
110 for (insn
= get_insns (); insn
; insn
= next
)
112 next
= NEXT_INSN (insn
);
113 if (BARRIER_P (insn
))
115 prev
= prev_nonnote_insn (insn
);
116 if (BARRIER_P (prev
))
118 else if (prev
!= PREV_INSN (insn
))
119 reorder_insns (insn
, insn
, prev
);
/* Walk the insn chain rooted at F and delete line-number notes that
   duplicate the previous one.
   NOTE(review): garbled extraction -- the return type, braces,
   last_note bookkeeping and the `if (last_note' condition head are
   missing from this fragment.  */
125 purge_line_number_notes (rtx f
)
129 /* Delete extraneous line number notes.
130 Note that two consecutive notes for different lines are not really
131 extraneous. There should be some indication where that line belonged,
132 even if it became empty. */
134 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
137 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_BEG
)
138 /* Any previous line note was for the prologue; gdb wants a new
139 note after the prologue even if it is for the same line. */
140 last_note
= NULL_RTX
;
141 else if (NOTE_LINE_NUMBER (insn
) >= 0)
143 /* Delete this note if it is identical to previous note. */
/* Two location representations: mapped locations compare a single
   encoded value, otherwise file and line are compared separately.  */
145 #ifdef USE_MAPPED_LOCATION
146 && NOTE_SOURCE_LOCATION (insn
) == NOTE_SOURCE_LOCATION (last_note
)
148 && NOTE_SOURCE_FILE (insn
) == NOTE_SOURCE_FILE (last_note
)
149 && NOTE_LINE_NUMBER (insn
) == NOTE_LINE_NUMBER (last_note
)
153 delete_related_insns (insn
);
162 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
163 notes whose labels don't occur in the insn any more. Returns the
164 largest INSN_UID found. */
/* NOTE(review): garbled extraction -- return type, braces and the
   LABEL_P dispatch head are missing from this fragment.  */
166 init_label_info (rtx f
)
170 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
/* Reset a label's use count; preserved labels start at 1 so they
   are never treated as unused.  */
172 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
173 else if (JUMP_P (insn
))
174 JUMP_LABEL (insn
) = 0;
175 else if (NONJUMP_INSN_P (insn
) || CALL_P (insn
))
/* Drop REG_LABEL notes whose label no longer appears in the
   insn's pattern; NEXT is cached because remove_note unlinks.  */
179 for (note
= REG_NOTES (insn
); note
; note
= next
)
181 next
= XEXP (note
, 1);
182 if (REG_NOTE_KIND (note
) == REG_LABEL
183 && ! reg_mentioned_p (XEXP (note
, 0), PATTERN (insn
)))
184 remove_note (insn
, note
);
189 /* Mark the label each jump jumps to.
190 Combine consecutive labels, and count uses of labels. */
/* NOTE(review): garbled extraction -- return type, braces and the
   INSN_P guard around the per-insn work are missing.  */
193 mark_all_labels (rtx f
)
197 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
200 mark_jump_label (PATTERN (insn
), insn
, 0);
201 if (! INSN_DELETED_P (insn
) && JUMP_P (insn
))
203 /* When we know the LABEL_REF contained in a REG used in
204 an indirect jump, we'll have a REG_LABEL note so that
205 flow can tell where it's going. */
206 if (JUMP_LABEL (insn
) == 0)
208 rtx label_note
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
);
211 /* But a LABEL_REF around the REG_LABEL note, so
212 that we can canonicalize it. */
/* Wrap the noted label in a fresh LABEL_REF so mark_jump_label
   can canonicalize it, then record the canonical label both in
   the note and in JUMP_LABEL.  */
213 rtx label_ref
= gen_rtx_LABEL_REF (VOIDmode
,
214 XEXP (label_note
, 0));
216 mark_jump_label (label_ref
, insn
, 0);
217 XEXP (label_note
, 0) = XEXP (label_ref
, 0);
218 JUMP_LABEL (insn
) = XEXP (label_note
, 0);
225 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
226 notes between START and END out before START. START and END may be such
227 notes. Returns the values of the new starting and ending insns, which
228 may be different if the original ones were such notes.
229 Return true if there were only such notes and no real instructions. */
232 squeeze_notes (rtx
* startp
, rtx
* endp
)
240 rtx past_end
= NEXT_INSN (end
);
242 for (insn
= start
; insn
!= past_end
; insn
= next
)
244 next
= NEXT_INSN (insn
);
246 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
247 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
248 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
249 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
))
251 /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass. */
252 gcc_assert (NOTE_LINE_NUMBER (insn
) != NOTE_INSN_BLOCK_BEG
253 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_BLOCK_END
);
259 rtx prev
= PREV_INSN (insn
);
260 PREV_INSN (insn
) = PREV_INSN (start
);
261 NEXT_INSN (insn
) = start
;
262 NEXT_INSN (PREV_INSN (insn
)) = insn
;
263 PREV_INSN (NEXT_INSN (insn
)) = insn
;
264 NEXT_INSN (prev
) = next
;
265 PREV_INSN (next
) = prev
;
272 /* There were no real instructions. */
273 if (start
== past_end
)
283 /* Return the label before INSN, or put a new label there. */
286 get_label_before (rtx insn
)
290 /* Find an existing label at this point
291 or make a new one if there is none. */
292 label
= prev_nonnote_insn (insn
);
294 if (label
== 0 || !LABEL_P (label
))
296 rtx prev
= PREV_INSN (insn
);
298 label
= gen_label_rtx ();
299 emit_label_after (label
, prev
);
300 LABEL_NUSES (label
) = 0;
305 /* Return the label after INSN, or put a new label there. */
308 get_label_after (rtx insn
)
312 /* Find an existing label at this point
313 or make a new one if there is none. */
314 label
= next_nonnote_insn (insn
);
316 if (label
== 0 || !LABEL_P (label
))
318 label
= gen_label_rtx ();
319 emit_label_after (label
, insn
);
320 LABEL_NUSES (label
) = 0;
325 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
326 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
327 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
328 know whether it's source is floating point or integer comparison. Machine
329 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
330 to help this function avoid overhead in these cases. */
332 reversed_comparison_code_parts (enum rtx_code code
, rtx arg0
, rtx arg1
, rtx insn
)
334 enum machine_mode mode
;
336 /* If this is not actually a comparison, we can't reverse it. */
337 if (GET_RTX_CLASS (code
) != RTX_COMPARE
338 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
341 mode
= GET_MODE (arg0
);
342 if (mode
== VOIDmode
)
343 mode
= GET_MODE (arg1
);
345 /* First see if machine description supplies us way to reverse the
346 comparison. Give it priority over everything else to allow
347 machine description to do tricks. */
348 if (GET_MODE_CLASS (mode
) == MODE_CC
349 && REVERSIBLE_CC_MODE (mode
))
351 #ifdef REVERSE_CONDITION
352 return REVERSE_CONDITION (code
, mode
);
354 return reverse_condition (code
);
357 /* Try a few special cases based on the comparison code. */
366 /* It is always safe to reverse EQ and NE, even for the floating
367 point. Similarly the unsigned comparisons are never used for
368 floating point so we can reverse them in the default way. */
369 return reverse_condition (code
);
374 /* In case we already see unordered comparison, we can be sure to
375 be dealing with floating point so we don't need any more tests. */
376 return reverse_condition_maybe_unordered (code
);
381 /* We don't have safe way to reverse these yet. */
387 if (GET_MODE_CLASS (mode
) == MODE_CC
|| CC0_P (arg0
))
390 /* Try to search for the comparison to determine the real mode.
391 This code is expensive, but with sane machine description it
392 will be never used, since REVERSIBLE_CC_MODE will return true
397 for (prev
= prev_nonnote_insn (insn
);
398 prev
!= 0 && !LABEL_P (prev
);
399 prev
= prev_nonnote_insn (prev
))
401 rtx set
= set_of (arg0
, prev
);
402 if (set
&& GET_CODE (set
) == SET
403 && rtx_equal_p (SET_DEST (set
), arg0
))
405 rtx src
= SET_SRC (set
);
407 if (GET_CODE (src
) == COMPARE
)
409 rtx comparison
= src
;
410 arg0
= XEXP (src
, 0);
411 mode
= GET_MODE (arg0
);
412 if (mode
== VOIDmode
)
413 mode
= GET_MODE (XEXP (comparison
, 1));
416 /* We can get past reg-reg moves. This may be useful for model
417 of i387 comparisons that first move flag registers around. */
424 /* If register is clobbered in some ununderstandable way,
431 /* Test for an integer condition, or a floating-point comparison
432 in which NaNs can be ignored. */
433 if (GET_CODE (arg0
) == CONST_INT
434 || (GET_MODE (arg0
) != VOIDmode
435 && GET_MODE_CLASS (mode
) != MODE_CC
436 && !HONOR_NANS (mode
)))
437 return reverse_condition (code
);
442 /* A wrapper around the previous function to take COMPARISON as rtx
443 expression. This simplifies many callers. */
445 reversed_comparison_code (rtx comparison
, rtx insn
)
447 if (!COMPARISON_P (comparison
))
449 return reversed_comparison_code_parts (GET_CODE (comparison
),
450 XEXP (comparison
, 0),
451 XEXP (comparison
, 1), insn
);
454 /* Given an rtx-code for a comparison, return the code for the negated
455 comparison. If no such code exists, return UNKNOWN.
457 WATCH OUT! reverse_condition is not safe to use on a jump that might
458 be acting on the results of an IEEE floating point comparison, because
459 of the special treatment of non-signaling nans in comparisons.
460 Use reversed_comparison_code instead. */
463 reverse_condition (enum rtx_code code
)
505 /* Similar, but we're allowed to generate unordered comparisons, which
506 makes it safe for IEEE floating-point. Of course, we have to recognize
507 that the target will support them too... */
510 reverse_condition_maybe_unordered (enum rtx_code code
)
548 /* Similar, but return the code when two operands of a comparison are swapped.
549 This IS safe for IEEE floating-point. */
552 swap_condition (enum rtx_code code
)
594 /* Given a comparison CODE, return the corresponding unsigned comparison.
595 If CODE is an equality comparison or already an unsigned comparison,
599 unsigned_condition (enum rtx_code code
)
625 /* Similarly, return the signed version of a comparison. */
628 signed_condition (enum rtx_code code
)
654 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
655 truth of CODE1 implies the truth of CODE2. */
658 comparison_dominates_p (enum rtx_code code1
, enum rtx_code code2
)
660 /* UNKNOWN comparison codes can happen as a result of trying to revert
662 They can't match anything, so we have to reject them here. */
663 if (code1
== UNKNOWN
|| code2
== UNKNOWN
)
672 if (code2
== UNLE
|| code2
== UNGE
)
677 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
683 if (code2
== UNLE
|| code2
== NE
)
688 if (code2
== LE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
693 if (code2
== UNGE
|| code2
== NE
)
698 if (code2
== GE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
704 if (code2
== ORDERED
)
709 if (code2
== NE
|| code2
== ORDERED
)
714 if (code2
== LEU
|| code2
== NE
)
719 if (code2
== GEU
|| code2
== NE
)
724 if (code2
== NE
|| code2
== UNEQ
|| code2
== UNLE
|| code2
== UNLT
725 || code2
== UNGE
|| code2
== UNGT
)
736 /* Return 1 if INSN is an unconditional jump and nothing else. */
739 simplejump_p (rtx insn
)
741 return (JUMP_P (insn
)
742 && GET_CODE (PATTERN (insn
)) == SET
743 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
744 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
747 /* Return nonzero if INSN is a (possibly) conditional jump
750 Use of this function is deprecated, since we need to support combined
751 branch and compare insns. Use any_condjump_p instead whenever possible. */
754 condjump_p (rtx insn
)
756 rtx x
= PATTERN (insn
);
758 if (GET_CODE (x
) != SET
759 || GET_CODE (SET_DEST (x
)) != PC
)
763 if (GET_CODE (x
) == LABEL_REF
)
766 return (GET_CODE (x
) == IF_THEN_ELSE
767 && ((GET_CODE (XEXP (x
, 2)) == PC
768 && (GET_CODE (XEXP (x
, 1)) == LABEL_REF
769 || GET_CODE (XEXP (x
, 1)) == RETURN
))
770 || (GET_CODE (XEXP (x
, 1)) == PC
771 && (GET_CODE (XEXP (x
, 2)) == LABEL_REF
772 || GET_CODE (XEXP (x
, 2)) == RETURN
))));
775 /* Return nonzero if INSN is a (possibly) conditional jump inside a
778 Use this function is deprecated, since we need to support combined
779 branch and compare insns. Use any_condjump_p instead whenever possible. */
782 condjump_in_parallel_p (rtx insn
)
784 rtx x
= PATTERN (insn
);
786 if (GET_CODE (x
) != PARALLEL
)
789 x
= XVECEXP (x
, 0, 0);
791 if (GET_CODE (x
) != SET
)
793 if (GET_CODE (SET_DEST (x
)) != PC
)
795 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
797 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
799 if (XEXP (SET_SRC (x
), 2) == pc_rtx
800 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
801 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
803 if (XEXP (SET_SRC (x
), 1) == pc_rtx
804 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
805 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
810 /* Return set of PC, otherwise NULL. */
818 pat
= PATTERN (insn
);
820 /* The set is allowed to appear either as the insn pattern or
821 the first set in a PARALLEL. */
822 if (GET_CODE (pat
) == PARALLEL
)
823 pat
= XVECEXP (pat
, 0, 0);
824 if (GET_CODE (pat
) == SET
&& GET_CODE (SET_DEST (pat
)) == PC
)
830 /* Return true when insn is an unconditional direct jump,
831 possibly bundled inside a PARALLEL. */
834 any_uncondjump_p (rtx insn
)
836 rtx x
= pc_set (insn
);
839 if (GET_CODE (SET_SRC (x
)) != LABEL_REF
)
841 if (find_reg_note (insn
, REG_NON_LOCAL_GOTO
, NULL_RTX
))
846 /* Return true when insn is a conditional jump. This function works for
847 instructions containing PC sets in PARALLELs. The instruction may have
848 various other effects so before removing the jump you must verify
851 Note that unlike condjump_p it returns false for unconditional jumps. */
854 any_condjump_p (rtx insn
)
856 rtx x
= pc_set (insn
);
861 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
864 a
= GET_CODE (XEXP (SET_SRC (x
), 1));
865 b
= GET_CODE (XEXP (SET_SRC (x
), 2));
867 return ((b
== PC
&& (a
== LABEL_REF
|| a
== RETURN
))
868 || (a
== PC
&& (b
== LABEL_REF
|| b
== RETURN
)));
871 /* Return the label of a conditional jump. */
874 condjump_label (rtx insn
)
876 rtx x
= pc_set (insn
);
881 if (GET_CODE (x
) == LABEL_REF
)
883 if (GET_CODE (x
) != IF_THEN_ELSE
)
885 if (XEXP (x
, 2) == pc_rtx
&& GET_CODE (XEXP (x
, 1)) == LABEL_REF
)
887 if (XEXP (x
, 1) == pc_rtx
&& GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
892 /* Return true if INSN is a (possibly conditional) return insn. */
895 returnjump_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
899 return x
&& (GET_CODE (x
) == RETURN
900 || (GET_CODE (x
) == SET
&& SET_IS_RETURN_P (x
)));
904 returnjump_p (rtx insn
)
908 return for_each_rtx (&PATTERN (insn
), returnjump_p_1
, NULL
);
911 /* Return true if INSN is a jump that only transfers control and
915 onlyjump_p (rtx insn
)
922 set
= single_set (insn
);
925 if (GET_CODE (SET_DEST (set
)) != PC
)
927 if (side_effects_p (SET_SRC (set
)))
935 /* Return nonzero if X is an RTX that only sets the condition codes
936 and has no side effects. */
939 only_sets_cc0_p (rtx x
)
947 return sets_cc0_p (x
) == 1 && ! side_effects_p (x
);
950 /* Return 1 if X is an RTX that does nothing but set the condition codes
951 and CLOBBER or USE registers.
952 Return -1 if X does explicitly set the condition codes,
953 but also does other things. */
964 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
966 if (GET_CODE (x
) == PARALLEL
)
970 int other_things
= 0;
971 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
973 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
974 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
976 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
979 return ! sets_cc0
? 0 : other_things
? -1 : 1;
985 /* Follow any unconditional jump at LABEL;
986 return the ultimate label reached by any such chain of jumps.
987 Return null if the chain ultimately leads to a return instruction.
988 If LABEL is not followed by a jump, return LABEL.
989 If the chain loops or we can't find end, return LABEL,
990 since that tells caller to avoid changing the insn.
992 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
996 follow_jumps (rtx label
)
1005 && (insn
= next_active_insn (value
)) != 0
1007 && ((JUMP_LABEL (insn
) != 0 && any_uncondjump_p (insn
)
1008 && onlyjump_p (insn
))
1009 || GET_CODE (PATTERN (insn
)) == RETURN
)
1010 && (next
= NEXT_INSN (insn
))
1011 && BARRIER_P (next
));
1014 /* Don't chain through the insn that jumps into a loop
1015 from outside the loop,
1016 since that would create multiple loop entry jumps
1017 and prevent loop optimization. */
1019 if (!reload_completed
)
1020 for (tem
= value
; tem
!= insn
; tem
= NEXT_INSN (tem
))
1022 && (NOTE_LINE_NUMBER (tem
) == NOTE_INSN_LOOP_BEG
1023 /* ??? Optional. Disables some optimizations, but makes
1024 gcov output more accurate with -O. */
1025 || (flag_test_coverage
&& NOTE_LINE_NUMBER (tem
) > 0)))
1028 /* If we have found a cycle, make the insn jump to itself. */
1029 if (JUMP_LABEL (insn
) == label
)
1032 tem
= next_active_insn (JUMP_LABEL (insn
));
1033 if (tem
&& (GET_CODE (PATTERN (tem
)) == ADDR_VEC
1034 || GET_CODE (PATTERN (tem
)) == ADDR_DIFF_VEC
))
1037 value
= JUMP_LABEL (insn
);
1045 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1046 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1047 in INSN, then store one of them in JUMP_LABEL (INSN).
1048 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1049 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1050 Also, when there are consecutive labels, canonicalize on the last of them.
1052 Note that two labels separated by a loop-beginning note
1053 must be kept distinct if we have not yet done loop-optimization,
1054 because the gap between them is where loop-optimize
1055 will want to move invariant code to. CROSS_JUMP tells us
1056 that loop-optimization is done with. */
1059 mark_jump_label (rtx x
, rtx insn
, int in_mem
)
1061 RTX_CODE code
= GET_CODE (x
);
1084 /* If this is a constant-pool reference, see if it is a label. */
1085 if (CONSTANT_POOL_ADDRESS_P (x
))
1086 mark_jump_label (get_pool_constant (x
), insn
, in_mem
);
1091 rtx label
= XEXP (x
, 0);
1093 /* Ignore remaining references to unreachable labels that
1094 have been deleted. */
1096 && NOTE_LINE_NUMBER (label
) == NOTE_INSN_DELETED_LABEL
)
1099 if (!LABEL_P (label
))
1102 /* Ignore references to labels of containing functions. */
1103 if (LABEL_REF_NONLOCAL_P (x
))
1106 XEXP (x
, 0) = label
;
1107 if (! insn
|| ! INSN_DELETED_P (insn
))
1108 ++LABEL_NUSES (label
);
1113 JUMP_LABEL (insn
) = label
;
1116 /* Add a REG_LABEL note for LABEL unless there already
1117 is one. All uses of a label, except for labels
1118 that are the targets of jumps, must have a
1120 if (! find_reg_note (insn
, REG_LABEL
, label
))
1121 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_LABEL
, label
,
1128 /* Do walk the labels in a vector, but not the first operand of an
1129 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1132 if (! INSN_DELETED_P (insn
))
1134 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
1136 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
1137 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
, in_mem
);
1145 fmt
= GET_RTX_FORMAT (code
);
1146 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1149 mark_jump_label (XEXP (x
, i
), insn
, in_mem
);
1150 else if (fmt
[i
] == 'E')
1153 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1154 mark_jump_label (XVECEXP (x
, i
, j
), insn
, in_mem
);
1159 /* If all INSN does is set the pc, delete it,
1160 and delete the insn that set the condition codes for it
1161 if that's what the previous thing was. */
1164 delete_jump (rtx insn
)
1166 rtx set
= single_set (insn
);
1168 if (set
&& GET_CODE (SET_DEST (set
)) == PC
)
1169 delete_computation (insn
);
1172 /* Recursively delete prior insns that compute the value (used only by INSN
1173 which the caller is deleting) stored in the register mentioned by NOTE
1174 which is a REG_DEAD note associated with INSN. */
1177 delete_prior_computation (rtx note
, rtx insn
)
1180 rtx reg
= XEXP (note
, 0);
1182 for (our_prev
= prev_nonnote_insn (insn
);
1183 our_prev
&& (NONJUMP_INSN_P (our_prev
)
1184 || CALL_P (our_prev
));
1185 our_prev
= prev_nonnote_insn (our_prev
))
1187 rtx pat
= PATTERN (our_prev
);
1189 /* If we reach a CALL which is not calling a const function
1190 or the callee pops the arguments, then give up. */
1191 if (CALL_P (our_prev
)
1192 && (! CONST_OR_PURE_CALL_P (our_prev
)
1193 || GET_CODE (pat
) != SET
|| GET_CODE (SET_SRC (pat
)) != CALL
))
1196 /* If we reach a SEQUENCE, it is too complex to try to
1197 do anything with it, so give up. We can be run during
1198 and after reorg, so SEQUENCE rtl can legitimately show
1200 if (GET_CODE (pat
) == SEQUENCE
)
1203 if (GET_CODE (pat
) == USE
1204 && NONJUMP_INSN_P (XEXP (pat
, 0)))
1205 /* reorg creates USEs that look like this. We leave them
1206 alone because reorg needs them for its own purposes. */
1209 if (reg_set_p (reg
, pat
))
1211 if (side_effects_p (pat
) && !CALL_P (our_prev
))
1214 if (GET_CODE (pat
) == PARALLEL
)
1216 /* If we find a SET of something else, we can't
1221 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1223 rtx part
= XVECEXP (pat
, 0, i
);
1225 if (GET_CODE (part
) == SET
1226 && SET_DEST (part
) != reg
)
1230 if (i
== XVECLEN (pat
, 0))
1231 delete_computation (our_prev
);
1233 else if (GET_CODE (pat
) == SET
1234 && REG_P (SET_DEST (pat
)))
1236 int dest_regno
= REGNO (SET_DEST (pat
));
1239 + (dest_regno
< FIRST_PSEUDO_REGISTER
1240 ? hard_regno_nregs
[dest_regno
]
1241 [GET_MODE (SET_DEST (pat
))] : 1));
1242 int regno
= REGNO (reg
);
1245 + (regno
< FIRST_PSEUDO_REGISTER
1246 ? hard_regno_nregs
[regno
][GET_MODE (reg
)] : 1));
1248 if (dest_regno
>= regno
1249 && dest_endregno
<= endregno
)
1250 delete_computation (our_prev
);
1252 /* We may have a multi-word hard register and some, but not
1253 all, of the words of the register are needed in subsequent
1254 insns. Write REG_UNUSED notes for those parts that were not
1256 else if (dest_regno
<= regno
1257 && dest_endregno
>= endregno
)
1261 REG_NOTES (our_prev
)
1262 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
,
1263 REG_NOTES (our_prev
));
1265 for (i
= dest_regno
; i
< dest_endregno
; i
++)
1266 if (! find_regno_note (our_prev
, REG_UNUSED
, i
))
1269 if (i
== dest_endregno
)
1270 delete_computation (our_prev
);
1277 /* If PAT references the register that dies here, it is an
1278 additional use. Hence any prior SET isn't dead. However, this
1279 insn becomes the new place for the REG_DEAD note. */
1280 if (reg_overlap_mentioned_p (reg
, pat
))
1282 XEXP (note
, 1) = REG_NOTES (our_prev
);
1283 REG_NOTES (our_prev
) = note
;
1289 /* Delete INSN and recursively delete insns that compute values used only
1290 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1291 If we are running before flow.c, we need do nothing since flow.c will
1292 delete dead code. We also can't know if the registers being used are
1293 dead or not at this point.
1295 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1296 nothing other than set a register that dies in this insn, we can delete
1299 On machines with CC0, if CC0 is used in this insn, we may be able to
1300 delete the insn that set it. */
1303 delete_computation (rtx insn
)
1308 if (reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
1310 rtx prev
= prev_nonnote_insn (insn
);
1311 /* We assume that at this stage
1312 CC's are always set explicitly
1313 and always immediately before the jump that
1314 will use them. So if the previous insn
1315 exists to set the CC's, delete it
1316 (unless it performs auto-increments, etc.). */
1317 if (prev
&& NONJUMP_INSN_P (prev
)
1318 && sets_cc0_p (PATTERN (prev
)))
1320 if (sets_cc0_p (PATTERN (prev
)) > 0
1321 && ! side_effects_p (PATTERN (prev
)))
1322 delete_computation (prev
);
1324 /* Otherwise, show that cc0 won't be used. */
1325 REG_NOTES (prev
) = gen_rtx_EXPR_LIST (REG_UNUSED
,
1326 cc0_rtx
, REG_NOTES (prev
));
1331 for (note
= REG_NOTES (insn
); note
; note
= next
)
1333 next
= XEXP (note
, 1);
1335 if (REG_NOTE_KIND (note
) != REG_DEAD
1336 /* Verify that the REG_NOTE is legitimate. */
1337 || !REG_P (XEXP (note
, 0)))
1340 delete_prior_computation (note
, insn
);
1343 delete_related_insns (insn
);
1346 /* Delete insn INSN from the chain of insns and update label ref counts
1347 and delete insns now unreachable.
1349 Returns the first insn after INSN that was not deleted.
1351 Usage of this instruction is deprecated. Use delete_insn instead and
1352 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1355 delete_related_insns (rtx insn
)
1357 int was_code_label
= (LABEL_P (insn
));
1359 rtx next
= NEXT_INSN (insn
), prev
= PREV_INSN (insn
);
1361 while (next
&& INSN_DELETED_P (next
))
1362 next
= NEXT_INSN (next
);
1364 /* This insn is already deleted => return first following nondeleted. */
1365 if (INSN_DELETED_P (insn
))
1370 /* If instruction is followed by a barrier,
1371 delete the barrier too. */
1373 if (next
!= 0 && BARRIER_P (next
))
1376 /* If deleting a jump, decrement the count of the label,
1377 and delete the label if it is now unused. */
1379 if (JUMP_P (insn
) && JUMP_LABEL (insn
))
1381 rtx lab
= JUMP_LABEL (insn
), lab_next
;
1383 if (LABEL_NUSES (lab
) == 0)
1385 /* This can delete NEXT or PREV,
1386 either directly if NEXT is JUMP_LABEL (INSN),
1387 or indirectly through more levels of jumps. */
1388 delete_related_insns (lab
);
1390 /* I feel a little doubtful about this loop,
1391 but I see no clean and sure alternative way
1392 to find the first insn after INSN that is not now deleted.
1393 I hope this works. */
1394 while (next
&& INSN_DELETED_P (next
))
1395 next
= NEXT_INSN (next
);
1398 else if (tablejump_p (insn
, NULL
, &lab_next
))
1400 /* If we're deleting the tablejump, delete the dispatch table.
1401 We may not be able to kill the label immediately preceding
1402 just yet, as it might be referenced in code leading up to
1404 delete_related_insns (lab_next
);
1408 /* Likewise if we're deleting a dispatch table. */
1411 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1412 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
1414 rtx pat
= PATTERN (insn
);
1415 int i
, diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
1416 int len
= XVECLEN (pat
, diff_vec_p
);
1418 for (i
= 0; i
< len
; i
++)
1419 if (LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)) == 0)
1420 delete_related_insns (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0));
1421 while (next
&& INSN_DELETED_P (next
))
1422 next
= NEXT_INSN (next
);
1426 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1427 if (NONJUMP_INSN_P (insn
) || CALL_P (insn
))
1428 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1429 if (REG_NOTE_KIND (note
) == REG_LABEL
1430 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1431 && LABEL_P (XEXP (note
, 0)))
1432 if (LABEL_NUSES (XEXP (note
, 0)) == 0)
1433 delete_related_insns (XEXP (note
, 0));
1435 while (prev
&& (INSN_DELETED_P (prev
) || NOTE_P (prev
)))
1436 prev
= PREV_INSN (prev
);
1438 /* If INSN was a label and a dispatch table follows it,
1439 delete the dispatch table. The tablejump must have gone already.
1440 It isn't useful to fall through into a table. */
1443 && NEXT_INSN (insn
) != 0
1444 && JUMP_P (NEXT_INSN (insn
))
1445 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
1446 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
1447 next
= delete_related_insns (NEXT_INSN (insn
));
1449 /* If INSN was a label, delete insns following it if now unreachable. */
1451 if (was_code_label
&& prev
&& BARRIER_P (prev
))
1456 code
= GET_CODE (next
);
1458 && NOTE_LINE_NUMBER (next
) != NOTE_INSN_FUNCTION_END
)
1459 next
= NEXT_INSN (next
);
1460 /* Keep going past other deleted labels to delete what follows. */
1461 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
1462 next
= NEXT_INSN (next
);
1463 else if (code
== BARRIER
|| INSN_P (next
))
1464 /* Note: if this deletes a jump, it can cause more
1465 deletion of unreachable code, after a different label.
1466 As long as the value from this recursive call is correct,
1467 this invocation functions correctly. */
1468 next
= delete_related_insns (next
);
1477 /* Delete a range of insns from FROM to TO, inclusive.
1478 This is for the sake of peephole optimization, so assume
1479 that whatever these insns do will still be done by a new
1480 peephole insn that will replace them. */
/* NOTE(review): garbled extraction -- the return type, the loop head
   over [FROM, TO], the NOTE_P exclusion and the loop-exit logic are
   missing from this fragment.  */
1483 delete_for_peephole (rtx from
, rtx to
)
1489 rtx next
= NEXT_INSN (insn
);
1490 rtx prev
= PREV_INSN (insn
);
/* Mark deleted rather than unlinking wholesale, then splice the
   insn out of the doubly-linked chain.  */
1494 INSN_DELETED_P (insn
) = 1;
1496 /* Patch this insn out of the chain. */
1497 /* We don't do this all at once, because we
1498 must preserve all NOTEs. */
1500 NEXT_INSN (prev
) = next
;
1503 PREV_INSN (next
) = prev
;
1511 /* Note that if TO is an unconditional jump
1512 we *do not* delete the BARRIER that follows,
1513 since the peephole that replaces this sequence
1514 is also an unconditional jump in that case. */
1517 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1518 NLABEL as a return. Accrue modifications into the change group. */
1521 redirect_exp_1 (rtx
*loc
, rtx olabel
, rtx nlabel
, rtx insn
)
1524 RTX_CODE code
= GET_CODE (x
);
1528 if (code
== LABEL_REF
)
1530 if (XEXP (x
, 0) == olabel
)
1534 n
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
1536 n
= gen_rtx_RETURN (VOIDmode
);
1538 validate_change (insn
, loc
, n
, 1);
1542 else if (code
== RETURN
&& olabel
== 0)
1544 x
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
1545 if (loc
== &PATTERN (insn
))
1546 x
= gen_rtx_SET (VOIDmode
, pc_rtx
, x
);
1547 validate_change (insn
, loc
, x
, 1);
1551 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
1552 && GET_CODE (SET_SRC (x
)) == LABEL_REF
1553 && XEXP (SET_SRC (x
), 0) == olabel
)
1555 validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 1);
1559 fmt
= GET_RTX_FORMAT (code
);
1560 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1563 redirect_exp_1 (&XEXP (x
, i
), olabel
, nlabel
, insn
);
1564 else if (fmt
[i
] == 'E')
1567 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1568 redirect_exp_1 (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
);
1573 /* Similar, but apply the change group and report success or failure. */
1576 redirect_exp (rtx olabel
, rtx nlabel
, rtx insn
)
1580 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
1581 loc
= &XVECEXP (PATTERN (insn
), 0, 0);
1583 loc
= &PATTERN (insn
);
1585 redirect_exp_1 (loc
, olabel
, nlabel
, insn
);
1586 if (num_validated_changes () == 0)
1589 return apply_change_group ();
1592 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
1593 the modifications into the change group. Return false if we did
1594 not see how to do that. */
1597 redirect_jump_1 (rtx jump
, rtx nlabel
)
1599 int ochanges
= num_validated_changes ();
1602 if (GET_CODE (PATTERN (jump
)) == PARALLEL
)
1603 loc
= &XVECEXP (PATTERN (jump
), 0, 0);
1605 loc
= &PATTERN (jump
);
1607 redirect_exp_1 (loc
, JUMP_LABEL (jump
), nlabel
, jump
);
1608 return num_validated_changes () > ochanges
;
1611 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
1612 jump target label is unused as a result, it and the code following
1615 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1618 The return value will be 1 if the change was made, 0 if it wasn't
1619 (this can only occur for NLABEL == 0). */
1622 redirect_jump (rtx jump
, rtx nlabel
, int delete_unused
)
1624 rtx olabel
= JUMP_LABEL (jump
);
1627 if (nlabel
== olabel
)
1630 if (! redirect_exp (olabel
, nlabel
, jump
))
1633 JUMP_LABEL (jump
) = nlabel
;
1635 ++LABEL_NUSES (nlabel
);
1637 /* Update labels in any REG_EQUAL note. */
1638 if ((note
= find_reg_note (jump
, REG_EQUAL
, NULL_RTX
)) != NULL_RTX
)
1640 if (nlabel
&& olabel
)
1642 rtx dest
= XEXP (note
, 0);
1644 if (GET_CODE (dest
) == IF_THEN_ELSE
)
1646 if (GET_CODE (XEXP (dest
, 1)) == LABEL_REF
1647 && XEXP (XEXP (dest
, 1), 0) == olabel
)
1648 XEXP (XEXP (dest
, 1), 0) = nlabel
;
1649 if (GET_CODE (XEXP (dest
, 2)) == LABEL_REF
1650 && XEXP (XEXP (dest
, 2), 0) == olabel
)
1651 XEXP (XEXP (dest
, 2), 0) = nlabel
;
1654 remove_note (jump
, note
);
1657 remove_note (jump
, note
);
1660 /* If we're eliding the jump over exception cleanups at the end of a
1661 function, move the function end note so that -Wreturn-type works. */
1662 if (olabel
&& nlabel
1663 && NEXT_INSN (olabel
)
1664 && NOTE_P (NEXT_INSN (olabel
))
1665 && NOTE_LINE_NUMBER (NEXT_INSN (olabel
)) == NOTE_INSN_FUNCTION_END
)
1666 emit_note_after (NOTE_INSN_FUNCTION_END
, nlabel
);
1668 if (olabel
&& --LABEL_NUSES (olabel
) == 0 && delete_unused
1669 /* Undefined labels will remain outside the insn stream. */
1670 && INSN_UID (olabel
))
1671 delete_related_insns (olabel
);
1676 /* Invert the jump condition of rtx X contained in jump insn, INSN.
1677 Accrue the modifications into the change group. */
1680 invert_exp_1 (rtx insn
)
1683 rtx x
= pc_set (insn
);
1689 code
= GET_CODE (x
);
1691 if (code
== IF_THEN_ELSE
)
1693 rtx comp
= XEXP (x
, 0);
1695 enum rtx_code reversed_code
;
1697 /* We can do this in two ways: The preferable way, which can only
1698 be done if this is not an integer comparison, is to reverse
1699 the comparison code. Otherwise, swap the THEN-part and ELSE-part
1700 of the IF_THEN_ELSE. If we can't do either, fail. */
1702 reversed_code
= reversed_comparison_code (comp
, insn
);
1704 if (reversed_code
!= UNKNOWN
)
1706 validate_change (insn
, &XEXP (x
, 0),
1707 gen_rtx_fmt_ee (reversed_code
,
1708 GET_MODE (comp
), XEXP (comp
, 0),
1715 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
1716 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
1722 /* Invert the jump condition of conditional jump insn, INSN.
1724 Return 1 if we can do so, 0 if we cannot find a way to do so that
1725 matches a pattern. */
1728 invert_exp (rtx insn
)
1730 invert_exp_1 (insn
);
1731 if (num_validated_changes () == 0)
1734 return apply_change_group ();
1737 /* Invert the condition of the jump JUMP, and make it jump to label
1738 NLABEL instead of where it jumps now. Accrue changes into the
1739 change group. Return false if we didn't see how to perform the
1740 inversion and redirection. */
1743 invert_jump_1 (rtx jump
, rtx nlabel
)
1747 ochanges
= num_validated_changes ();
1748 invert_exp_1 (jump
);
1749 if (num_validated_changes () == ochanges
)
1752 return redirect_jump_1 (jump
, nlabel
);
1755 /* Invert the condition of the jump JUMP, and make it jump to label
1756 NLABEL instead of where it jumps now. Return true if successful. */
1759 invert_jump (rtx jump
, rtx nlabel
, int delete_unused
)
1761 /* We have to either invert the condition and change the label or
1762 do neither. Either operation could fail. We first try to invert
1763 the jump. If that succeeds, we try changing the label. If that fails,
1764 we invert the jump back to what it was. */
1766 if (! invert_exp (jump
))
1769 if (redirect_jump (jump
, nlabel
, delete_unused
))
1771 /* Remove REG_EQUAL note if we have one. */
1772 rtx note
= find_reg_note (jump
, REG_EQUAL
, NULL_RTX
);
1774 remove_note (jump
, note
);
1776 invert_br_probabilities (jump
);
1781 if (! invert_exp (jump
))
1782 /* This should just be putting it back the way it was. */
1789 /* Like rtx_equal_p except that it considers two REGs as equal
1790 if they renumber to the same value and considers two commutative
1791 operations to be the same if the order of the operands has been
1794 ??? Addition is not commutative on the PA due to the weird implicit
1795 space register selection rules for memory addresses. Therefore, we
1796 don't consider a + b == b + a.
1798 We could/should make this test a little tighter. Possibly only
1799 disabling it on the PA via some backend macro or only disabling this
1800 case when the PLUS is inside a MEM. */
1803 rtx_renumbered_equal_p (rtx x
, rtx y
)
1806 enum rtx_code code
= GET_CODE (x
);
1812 if ((code
== REG
|| (code
== SUBREG
&& REG_P (SUBREG_REG (x
))))
1813 && (REG_P (y
) || (GET_CODE (y
) == SUBREG
1814 && REG_P (SUBREG_REG (y
)))))
1816 int reg_x
= -1, reg_y
= -1;
1817 int byte_x
= 0, byte_y
= 0;
1819 if (GET_MODE (x
) != GET_MODE (y
))
1822 /* If we haven't done any renumbering, don't
1823 make any assumptions. */
1824 if (reg_renumber
== 0)
1825 return rtx_equal_p (x
, y
);
1829 reg_x
= REGNO (SUBREG_REG (x
));
1830 byte_x
= SUBREG_BYTE (x
);
1832 if (reg_renumber
[reg_x
] >= 0)
1834 reg_x
= subreg_regno_offset (reg_renumber
[reg_x
],
1835 GET_MODE (SUBREG_REG (x
)),
1844 if (reg_renumber
[reg_x
] >= 0)
1845 reg_x
= reg_renumber
[reg_x
];
1848 if (GET_CODE (y
) == SUBREG
)
1850 reg_y
= REGNO (SUBREG_REG (y
));
1851 byte_y
= SUBREG_BYTE (y
);
1853 if (reg_renumber
[reg_y
] >= 0)
1855 reg_y
= subreg_regno_offset (reg_renumber
[reg_y
],
1856 GET_MODE (SUBREG_REG (y
)),
1865 if (reg_renumber
[reg_y
] >= 0)
1866 reg_y
= reg_renumber
[reg_y
];
1869 return reg_x
>= 0 && reg_x
== reg_y
&& byte_x
== byte_y
;
1872 /* Now we have disposed of all the cases
1873 in which different rtx codes can match. */
1874 if (code
!= GET_CODE (y
))
1887 /* We can't assume nonlocal labels have their following insns yet. */
1888 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
1889 return XEXP (x
, 0) == XEXP (y
, 0);
1891 /* Two label-refs are equivalent if they point at labels
1892 in the same position in the instruction stream. */
1893 return (next_real_insn (XEXP (x
, 0))
1894 == next_real_insn (XEXP (y
, 0)));
1897 return XSTR (x
, 0) == XSTR (y
, 0);
1900 /* If we didn't match EQ equality above, they aren't the same. */
1907 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1909 if (GET_MODE (x
) != GET_MODE (y
))
1912 /* For commutative operations, the RTX match if the operand match in any
1913 order. Also handle the simple binary and unary cases without a loop.
1915 ??? Don't consider PLUS a commutative operator; see comments above. */
1916 if (COMMUTATIVE_P (x
) && code
!= PLUS
)
1917 return ((rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
1918 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)))
1919 || (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 1))
1920 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 0))));
1921 else if (NON_COMMUTATIVE_P (x
))
1922 return (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
1923 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)));
1924 else if (UNARY_P (x
))
1925 return rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0));
1927 /* Compare the elements. If any pair of corresponding elements
1928 fail to match, return 0 for the whole things. */
1930 fmt
= GET_RTX_FORMAT (code
);
1931 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1937 if (XWINT (x
, i
) != XWINT (y
, i
))
1942 if (XINT (x
, i
) != XINT (y
, i
))
1947 if (XTREE (x
, i
) != XTREE (y
, i
))
1952 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
1957 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
1962 if (XEXP (x
, i
) != XEXP (y
, i
))
1969 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
1971 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1972 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
1983 /* If X is a hard register or equivalent to one or a subregister of one,
1984 return the hard register number. If X is a pseudo register that was not
1985 assigned a hard register, return the pseudo register number. Otherwise,
1986 return -1. Any rtx is valid for X. */
1993 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
1994 return reg_renumber
[REGNO (x
)];
1997 if (GET_CODE (x
) == SUBREG
)
1999 int base
= true_regnum (SUBREG_REG (x
));
2000 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
2001 return base
+ subreg_regno_offset (REGNO (SUBREG_REG (x
)),
2002 GET_MODE (SUBREG_REG (x
)),
2003 SUBREG_BYTE (x
), GET_MODE (x
));
2008 /* Return regno of the register REG and handle subregs too. */
2010 reg_or_subregno (rtx reg
)
2014 if (GET_CODE (reg
) == SUBREG
)
2015 return REGNO (SUBREG_REG (reg
));