/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
/* This is the pathetic reminder of old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions to
   operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
40 #include "coretypes.h"
45 #include "hard-reg-set.h"
47 #include "insn-config.h"
48 #include "insn-attr.h"
54 #include "diagnostic.h"
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */
67 static void init_label_info (rtx
);
68 static void mark_all_labels (rtx
);
69 static void delete_computation (rtx
);
70 static void redirect_exp_1 (rtx
*, rtx
, rtx
, rtx
);
71 static int invert_exp_1 (rtx
, rtx
);
72 static int returnjump_p_1 (rtx
*, void *);
73 static void delete_prior_computation (rtx
, rtx
);
75 /* Alternate entry into the jump optimizer. This entry point only rebuilds
76 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
79 rebuild_jump_labels (rtx f
)
83 timevar_push (TV_REBUILD_JUMP
);
87 /* Keep track of labels used from static data; we don't track them
88 closely enough to delete them here, so make sure their reference
89 count doesn't drop to zero. */
91 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
92 if (LABEL_P (XEXP (insn
, 0)))
93 LABEL_NUSES (XEXP (insn
, 0))++;
94 timevar_pop (TV_REBUILD_JUMP
);
97 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
98 non-fallthru insn. This is not generally true, as multiple barriers
99 may have crept in, or the BARRIER may be separated from the last
100 real insn by one or more NOTEs.
102 This simple pass moves barriers and removes duplicates so that the
106 cleanup_barriers (void)
108 rtx insn
, next
, prev
;
109 for (insn
= get_insns (); insn
; insn
= next
)
111 next
= NEXT_INSN (insn
);
112 if (BARRIER_P (insn
))
114 prev
= prev_nonnote_insn (insn
);
115 if (BARRIER_P (prev
))
117 else if (prev
!= PREV_INSN (insn
))
118 reorder_insns (insn
, insn
, prev
);
124 purge_line_number_notes (rtx f
)
128 /* Delete extraneous line number notes.
129 Note that two consecutive notes for different lines are not really
130 extraneous. There should be some indication where that line belonged,
131 even if it became empty. */
133 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
136 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_BEG
)
137 /* Any previous line note was for the prologue; gdb wants a new
138 note after the prologue even if it is for the same line. */
139 last_note
= NULL_RTX
;
140 else if (NOTE_LINE_NUMBER (insn
) >= 0)
142 /* Delete this note if it is identical to previous note. */
144 #ifdef USE_MAPPED_LOCATION
145 && NOTE_SOURCE_LOCATION (insn
) == NOTE_SOURCE_LOCATION (last_note
)
147 && NOTE_SOURCE_FILE (insn
) == NOTE_SOURCE_FILE (last_note
)
148 && NOTE_LINE_NUMBER (insn
) == NOTE_LINE_NUMBER (last_note
)
152 delete_related_insns (insn
);
161 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
162 notes whose labels don't occur in the insn any more. Returns the
163 largest INSN_UID found. */
165 init_label_info (rtx f
)
169 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
171 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
172 else if (JUMP_P (insn
))
173 JUMP_LABEL (insn
) = 0;
174 else if (NONJUMP_INSN_P (insn
) || CALL_P (insn
))
178 for (note
= REG_NOTES (insn
); note
; note
= next
)
180 next
= XEXP (note
, 1);
181 if (REG_NOTE_KIND (note
) == REG_LABEL
182 && ! reg_mentioned_p (XEXP (note
, 0), PATTERN (insn
)))
183 remove_note (insn
, note
);
188 /* Mark the label each jump jumps to.
189 Combine consecutive labels, and count uses of labels. */
192 mark_all_labels (rtx f
)
196 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
199 mark_jump_label (PATTERN (insn
), insn
, 0);
200 if (! INSN_DELETED_P (insn
) && JUMP_P (insn
))
202 /* When we know the LABEL_REF contained in a REG used in
203 an indirect jump, we'll have a REG_LABEL note so that
204 flow can tell where it's going. */
205 if (JUMP_LABEL (insn
) == 0)
207 rtx label_note
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
);
210 /* But a LABEL_REF around the REG_LABEL note, so
211 that we can canonicalize it. */
212 rtx label_ref
= gen_rtx_LABEL_REF (VOIDmode
,
213 XEXP (label_note
, 0));
215 mark_jump_label (label_ref
, insn
, 0);
216 XEXP (label_note
, 0) = XEXP (label_ref
, 0);
217 JUMP_LABEL (insn
) = XEXP (label_note
, 0);
224 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
225 notes between START and END out before START. START and END may be such
226 notes. Returns the values of the new starting and ending insns, which
227 may be different if the original ones were such notes.
228 Return true if there were only such notes and no real instructions. */
231 squeeze_notes (rtx
* startp
, rtx
* endp
)
239 rtx past_end
= NEXT_INSN (end
);
241 for (insn
= start
; insn
!= past_end
; insn
= next
)
243 next
= NEXT_INSN (insn
);
245 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
246 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
247 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
248 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
))
250 /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass. */
251 gcc_assert (NOTE_LINE_NUMBER (insn
) != NOTE_INSN_BLOCK_BEG
252 && NOTE_LINE_NUMBER (insn
) != NOTE_INSN_BLOCK_END
);
258 rtx prev
= PREV_INSN (insn
);
259 PREV_INSN (insn
) = PREV_INSN (start
);
260 NEXT_INSN (insn
) = start
;
261 NEXT_INSN (PREV_INSN (insn
)) = insn
;
262 PREV_INSN (NEXT_INSN (insn
)) = insn
;
263 NEXT_INSN (prev
) = next
;
264 PREV_INSN (next
) = prev
;
271 /* There were no real instructions. */
272 if (start
== past_end
)
282 /* Return the label before INSN, or put a new label there. */
285 get_label_before (rtx insn
)
289 /* Find an existing label at this point
290 or make a new one if there is none. */
291 label
= prev_nonnote_insn (insn
);
293 if (label
== 0 || !LABEL_P (label
))
295 rtx prev
= PREV_INSN (insn
);
297 label
= gen_label_rtx ();
298 emit_label_after (label
, prev
);
299 LABEL_NUSES (label
) = 0;
304 /* Return the label after INSN, or put a new label there. */
307 get_label_after (rtx insn
)
311 /* Find an existing label at this point
312 or make a new one if there is none. */
313 label
= next_nonnote_insn (insn
);
315 if (label
== 0 || !LABEL_P (label
))
317 label
= gen_label_rtx ();
318 emit_label_after (label
, insn
);
319 LABEL_NUSES (label
) = 0;
324 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
325 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
326 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
327 know whether it's source is floating point or integer comparison. Machine
328 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
329 to help this function avoid overhead in these cases. */
331 reversed_comparison_code_parts (enum rtx_code code
, rtx arg0
, rtx arg1
, rtx insn
)
333 enum machine_mode mode
;
335 /* If this is not actually a comparison, we can't reverse it. */
336 if (GET_RTX_CLASS (code
) != RTX_COMPARE
337 && GET_RTX_CLASS (code
) != RTX_COMM_COMPARE
)
340 mode
= GET_MODE (arg0
);
341 if (mode
== VOIDmode
)
342 mode
= GET_MODE (arg1
);
344 /* First see if machine description supplies us way to reverse the
345 comparison. Give it priority over everything else to allow
346 machine description to do tricks. */
347 if (GET_MODE_CLASS (mode
) == MODE_CC
348 && REVERSIBLE_CC_MODE (mode
))
350 #ifdef REVERSE_CONDITION
351 return REVERSE_CONDITION (code
, mode
);
353 return reverse_condition (code
);
356 /* Try a few special cases based on the comparison code. */
365 /* It is always safe to reverse EQ and NE, even for the floating
366 point. Similarly the unsigned comparisons are never used for
367 floating point so we can reverse them in the default way. */
368 return reverse_condition (code
);
373 /* In case we already see unordered comparison, we can be sure to
374 be dealing with floating point so we don't need any more tests. */
375 return reverse_condition_maybe_unordered (code
);
380 /* We don't have safe way to reverse these yet. */
386 if (GET_MODE_CLASS (mode
) == MODE_CC
|| CC0_P (arg0
))
389 /* Try to search for the comparison to determine the real mode.
390 This code is expensive, but with sane machine description it
391 will be never used, since REVERSIBLE_CC_MODE will return true
396 for (prev
= prev_nonnote_insn (insn
);
397 prev
!= 0 && !LABEL_P (prev
);
398 prev
= prev_nonnote_insn (prev
))
400 rtx set
= set_of (arg0
, prev
);
401 if (set
&& GET_CODE (set
) == SET
402 && rtx_equal_p (SET_DEST (set
), arg0
))
404 rtx src
= SET_SRC (set
);
406 if (GET_CODE (src
) == COMPARE
)
408 rtx comparison
= src
;
409 arg0
= XEXP (src
, 0);
410 mode
= GET_MODE (arg0
);
411 if (mode
== VOIDmode
)
412 mode
= GET_MODE (XEXP (comparison
, 1));
415 /* We can get past reg-reg moves. This may be useful for model
416 of i387 comparisons that first move flag registers around. */
423 /* If register is clobbered in some ununderstandable way,
430 /* Test for an integer condition, or a floating-point comparison
431 in which NaNs can be ignored. */
432 if (GET_CODE (arg0
) == CONST_INT
433 || (GET_MODE (arg0
) != VOIDmode
434 && GET_MODE_CLASS (mode
) != MODE_CC
435 && !HONOR_NANS (mode
)))
436 return reverse_condition (code
);
441 /* A wrapper around the previous function to take COMPARISON as rtx
442 expression. This simplifies many callers. */
444 reversed_comparison_code (rtx comparison
, rtx insn
)
446 if (!COMPARISON_P (comparison
))
448 return reversed_comparison_code_parts (GET_CODE (comparison
),
449 XEXP (comparison
, 0),
450 XEXP (comparison
, 1), insn
);
453 /* Given an rtx-code for a comparison, return the code for the negated
454 comparison. If no such code exists, return UNKNOWN.
456 WATCH OUT! reverse_condition is not safe to use on a jump that might
457 be acting on the results of an IEEE floating point comparison, because
458 of the special treatment of non-signaling nans in comparisons.
459 Use reversed_comparison_code instead. */
462 reverse_condition (enum rtx_code code
)
504 /* Similar, but we're allowed to generate unordered comparisons, which
505 makes it safe for IEEE floating-point. Of course, we have to recognize
506 that the target will support them too... */
509 reverse_condition_maybe_unordered (enum rtx_code code
)
547 /* Similar, but return the code when two operands of a comparison are swapped.
548 This IS safe for IEEE floating-point. */
551 swap_condition (enum rtx_code code
)
593 /* Given a comparison CODE, return the corresponding unsigned comparison.
594 If CODE is an equality comparison or already an unsigned comparison,
598 unsigned_condition (enum rtx_code code
)
624 /* Similarly, return the signed version of a comparison. */
627 signed_condition (enum rtx_code code
)
653 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
654 truth of CODE1 implies the truth of CODE2. */
657 comparison_dominates_p (enum rtx_code code1
, enum rtx_code code2
)
659 /* UNKNOWN comparison codes can happen as a result of trying to revert
661 They can't match anything, so we have to reject them here. */
662 if (code1
== UNKNOWN
|| code2
== UNKNOWN
)
671 if (code2
== UNLE
|| code2
== UNGE
)
676 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
682 if (code2
== UNLE
|| code2
== NE
)
687 if (code2
== LE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
692 if (code2
== UNGE
|| code2
== NE
)
697 if (code2
== GE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
703 if (code2
== ORDERED
)
708 if (code2
== NE
|| code2
== ORDERED
)
713 if (code2
== LEU
|| code2
== NE
)
718 if (code2
== GEU
|| code2
== NE
)
723 if (code2
== NE
|| code2
== UNEQ
|| code2
== UNLE
|| code2
== UNLT
724 || code2
== UNGE
|| code2
== UNGT
)
735 /* Return 1 if INSN is an unconditional jump and nothing else. */
738 simplejump_p (rtx insn
)
740 return (JUMP_P (insn
)
741 && GET_CODE (PATTERN (insn
)) == SET
742 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
743 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
746 /* Return nonzero if INSN is a (possibly) conditional jump
749 Use of this function is deprecated, since we need to support combined
750 branch and compare insns. Use any_condjump_p instead whenever possible. */
753 condjump_p (rtx insn
)
755 rtx x
= PATTERN (insn
);
757 if (GET_CODE (x
) != SET
758 || GET_CODE (SET_DEST (x
)) != PC
)
762 if (GET_CODE (x
) == LABEL_REF
)
765 return (GET_CODE (x
) == IF_THEN_ELSE
766 && ((GET_CODE (XEXP (x
, 2)) == PC
767 && (GET_CODE (XEXP (x
, 1)) == LABEL_REF
768 || GET_CODE (XEXP (x
, 1)) == RETURN
))
769 || (GET_CODE (XEXP (x
, 1)) == PC
770 && (GET_CODE (XEXP (x
, 2)) == LABEL_REF
771 || GET_CODE (XEXP (x
, 2)) == RETURN
))));
774 /* Return nonzero if INSN is a (possibly) conditional jump inside a
777 Use this function is deprecated, since we need to support combined
778 branch and compare insns. Use any_condjump_p instead whenever possible. */
781 condjump_in_parallel_p (rtx insn
)
783 rtx x
= PATTERN (insn
);
785 if (GET_CODE (x
) != PARALLEL
)
788 x
= XVECEXP (x
, 0, 0);
790 if (GET_CODE (x
) != SET
)
792 if (GET_CODE (SET_DEST (x
)) != PC
)
794 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
796 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
798 if (XEXP (SET_SRC (x
), 2) == pc_rtx
799 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
800 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
802 if (XEXP (SET_SRC (x
), 1) == pc_rtx
803 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
804 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
809 /* Return set of PC, otherwise NULL. */
817 pat
= PATTERN (insn
);
819 /* The set is allowed to appear either as the insn pattern or
820 the first set in a PARALLEL. */
821 if (GET_CODE (pat
) == PARALLEL
)
822 pat
= XVECEXP (pat
, 0, 0);
823 if (GET_CODE (pat
) == SET
&& GET_CODE (SET_DEST (pat
)) == PC
)
829 /* Return true when insn is an unconditional direct jump,
830 possibly bundled inside a PARALLEL. */
833 any_uncondjump_p (rtx insn
)
835 rtx x
= pc_set (insn
);
838 if (GET_CODE (SET_SRC (x
)) != LABEL_REF
)
840 if (find_reg_note (insn
, REG_NON_LOCAL_GOTO
, NULL_RTX
))
845 /* Return true when insn is a conditional jump. This function works for
846 instructions containing PC sets in PARALLELs. The instruction may have
847 various other effects so before removing the jump you must verify
850 Note that unlike condjump_p it returns false for unconditional jumps. */
853 any_condjump_p (rtx insn
)
855 rtx x
= pc_set (insn
);
860 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
863 a
= GET_CODE (XEXP (SET_SRC (x
), 1));
864 b
= GET_CODE (XEXP (SET_SRC (x
), 2));
866 return ((b
== PC
&& (a
== LABEL_REF
|| a
== RETURN
))
867 || (a
== PC
&& (b
== LABEL_REF
|| b
== RETURN
)));
870 /* Return the label of a conditional jump. */
873 condjump_label (rtx insn
)
875 rtx x
= pc_set (insn
);
880 if (GET_CODE (x
) == LABEL_REF
)
882 if (GET_CODE (x
) != IF_THEN_ELSE
)
884 if (XEXP (x
, 2) == pc_rtx
&& GET_CODE (XEXP (x
, 1)) == LABEL_REF
)
886 if (XEXP (x
, 1) == pc_rtx
&& GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
891 /* Return true if INSN is a (possibly conditional) return insn. */
894 returnjump_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
898 return x
&& (GET_CODE (x
) == RETURN
899 || (GET_CODE (x
) == SET
&& SET_IS_RETURN_P (x
)));
903 returnjump_p (rtx insn
)
907 return for_each_rtx (&PATTERN (insn
), returnjump_p_1
, NULL
);
910 /* Return true if INSN is a jump that only transfers control and
914 onlyjump_p (rtx insn
)
921 set
= single_set (insn
);
924 if (GET_CODE (SET_DEST (set
)) != PC
)
926 if (side_effects_p (SET_SRC (set
)))
934 /* Return nonzero if X is an RTX that only sets the condition codes
935 and has no side effects. */
938 only_sets_cc0_p (rtx x
)
946 return sets_cc0_p (x
) == 1 && ! side_effects_p (x
);
949 /* Return 1 if X is an RTX that does nothing but set the condition codes
950 and CLOBBER or USE registers.
951 Return -1 if X does explicitly set the condition codes,
952 but also does other things. */
963 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
965 if (GET_CODE (x
) == PARALLEL
)
969 int other_things
= 0;
970 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
972 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
973 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
975 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
978 return ! sets_cc0
? 0 : other_things
? -1 : 1;
984 /* Follow any unconditional jump at LABEL;
985 return the ultimate label reached by any such chain of jumps.
986 Return null if the chain ultimately leads to a return instruction.
987 If LABEL is not followed by a jump, return LABEL.
988 If the chain loops or we can't find end, return LABEL,
989 since that tells caller to avoid changing the insn.
991 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
995 follow_jumps (rtx label
)
1004 && (insn
= next_active_insn (value
)) != 0
1006 && ((JUMP_LABEL (insn
) != 0 && any_uncondjump_p (insn
)
1007 && onlyjump_p (insn
))
1008 || GET_CODE (PATTERN (insn
)) == RETURN
)
1009 && (next
= NEXT_INSN (insn
))
1010 && BARRIER_P (next
));
1013 /* Don't chain through the insn that jumps into a loop
1014 from outside the loop,
1015 since that would create multiple loop entry jumps
1016 and prevent loop optimization. */
1018 if (!reload_completed
)
1019 for (tem
= value
; tem
!= insn
; tem
= NEXT_INSN (tem
))
1021 && (NOTE_LINE_NUMBER (tem
) == NOTE_INSN_LOOP_BEG
1022 /* ??? Optional. Disables some optimizations, but makes
1023 gcov output more accurate with -O. */
1024 || (flag_test_coverage
&& NOTE_LINE_NUMBER (tem
) > 0)))
1027 /* If we have found a cycle, make the insn jump to itself. */
1028 if (JUMP_LABEL (insn
) == label
)
1031 tem
= next_active_insn (JUMP_LABEL (insn
));
1032 if (tem
&& (GET_CODE (PATTERN (tem
)) == ADDR_VEC
1033 || GET_CODE (PATTERN (tem
)) == ADDR_DIFF_VEC
))
1036 value
= JUMP_LABEL (insn
);
1044 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1045 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1046 in INSN, then store one of them in JUMP_LABEL (INSN).
1047 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1048 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1049 Also, when there are consecutive labels, canonicalize on the last of them.
1051 Note that two labels separated by a loop-beginning note
1052 must be kept distinct if we have not yet done loop-optimization,
1053 because the gap between them is where loop-optimize
1054 will want to move invariant code to. CROSS_JUMP tells us
1055 that loop-optimization is done with. */
1058 mark_jump_label (rtx x
, rtx insn
, int in_mem
)
1060 RTX_CODE code
= GET_CODE (x
);
1083 /* If this is a constant-pool reference, see if it is a label. */
1084 if (CONSTANT_POOL_ADDRESS_P (x
))
1085 mark_jump_label (get_pool_constant (x
), insn
, in_mem
);
1090 rtx label
= XEXP (x
, 0);
1092 /* Ignore remaining references to unreachable labels that
1093 have been deleted. */
1095 && NOTE_LINE_NUMBER (label
) == NOTE_INSN_DELETED_LABEL
)
1098 if (!LABEL_P (label
))
1101 /* Ignore references to labels of containing functions. */
1102 if (LABEL_REF_NONLOCAL_P (x
))
1105 XEXP (x
, 0) = label
;
1106 if (! insn
|| ! INSN_DELETED_P (insn
))
1107 ++LABEL_NUSES (label
);
1112 JUMP_LABEL (insn
) = label
;
1115 /* Add a REG_LABEL note for LABEL unless there already
1116 is one. All uses of a label, except for labels
1117 that are the targets of jumps, must have a
1119 if (! find_reg_note (insn
, REG_LABEL
, label
))
1120 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_LABEL
, label
,
1127 /* Do walk the labels in a vector, but not the first operand of an
1128 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1131 if (! INSN_DELETED_P (insn
))
1133 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
1135 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
1136 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
, in_mem
);
1144 fmt
= GET_RTX_FORMAT (code
);
1145 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1148 mark_jump_label (XEXP (x
, i
), insn
, in_mem
);
1149 else if (fmt
[i
] == 'E')
1152 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1153 mark_jump_label (XVECEXP (x
, i
, j
), insn
, in_mem
);
1158 /* If all INSN does is set the pc, delete it,
1159 and delete the insn that set the condition codes for it
1160 if that's what the previous thing was. */
1163 delete_jump (rtx insn
)
1165 rtx set
= single_set (insn
);
1167 if (set
&& GET_CODE (SET_DEST (set
)) == PC
)
1168 delete_computation (insn
);
1171 /* Recursively delete prior insns that compute the value (used only by INSN
1172 which the caller is deleting) stored in the register mentioned by NOTE
1173 which is a REG_DEAD note associated with INSN. */
1176 delete_prior_computation (rtx note
, rtx insn
)
1179 rtx reg
= XEXP (note
, 0);
1181 for (our_prev
= prev_nonnote_insn (insn
);
1182 our_prev
&& (NONJUMP_INSN_P (our_prev
)
1183 || CALL_P (our_prev
));
1184 our_prev
= prev_nonnote_insn (our_prev
))
1186 rtx pat
= PATTERN (our_prev
);
1188 /* If we reach a CALL which is not calling a const function
1189 or the callee pops the arguments, then give up. */
1190 if (CALL_P (our_prev
)
1191 && (! CONST_OR_PURE_CALL_P (our_prev
)
1192 || GET_CODE (pat
) != SET
|| GET_CODE (SET_SRC (pat
)) != CALL
))
1195 /* If we reach a SEQUENCE, it is too complex to try to
1196 do anything with it, so give up. We can be run during
1197 and after reorg, so SEQUENCE rtl can legitimately show
1199 if (GET_CODE (pat
) == SEQUENCE
)
1202 if (GET_CODE (pat
) == USE
1203 && NONJUMP_INSN_P (XEXP (pat
, 0)))
1204 /* reorg creates USEs that look like this. We leave them
1205 alone because reorg needs them for its own purposes. */
1208 if (reg_set_p (reg
, pat
))
1210 if (side_effects_p (pat
) && !CALL_P (our_prev
))
1213 if (GET_CODE (pat
) == PARALLEL
)
1215 /* If we find a SET of something else, we can't
1220 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1222 rtx part
= XVECEXP (pat
, 0, i
);
1224 if (GET_CODE (part
) == SET
1225 && SET_DEST (part
) != reg
)
1229 if (i
== XVECLEN (pat
, 0))
1230 delete_computation (our_prev
);
1232 else if (GET_CODE (pat
) == SET
1233 && REG_P (SET_DEST (pat
)))
1235 int dest_regno
= REGNO (SET_DEST (pat
));
1238 + (dest_regno
< FIRST_PSEUDO_REGISTER
1239 ? hard_regno_nregs
[dest_regno
]
1240 [GET_MODE (SET_DEST (pat
))] : 1));
1241 int regno
= REGNO (reg
);
1244 + (regno
< FIRST_PSEUDO_REGISTER
1245 ? hard_regno_nregs
[regno
][GET_MODE (reg
)] : 1));
1247 if (dest_regno
>= regno
1248 && dest_endregno
<= endregno
)
1249 delete_computation (our_prev
);
1251 /* We may have a multi-word hard register and some, but not
1252 all, of the words of the register are needed in subsequent
1253 insns. Write REG_UNUSED notes for those parts that were not
1255 else if (dest_regno
<= regno
1256 && dest_endregno
>= endregno
)
1260 REG_NOTES (our_prev
)
1261 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
,
1262 REG_NOTES (our_prev
));
1264 for (i
= dest_regno
; i
< dest_endregno
; i
++)
1265 if (! find_regno_note (our_prev
, REG_UNUSED
, i
))
1268 if (i
== dest_endregno
)
1269 delete_computation (our_prev
);
1276 /* If PAT references the register that dies here, it is an
1277 additional use. Hence any prior SET isn't dead. However, this
1278 insn becomes the new place for the REG_DEAD note. */
1279 if (reg_overlap_mentioned_p (reg
, pat
))
1281 XEXP (note
, 1) = REG_NOTES (our_prev
);
1282 REG_NOTES (our_prev
) = note
;
1288 /* Delete INSN and recursively delete insns that compute values used only
1289 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1290 If we are running before flow.c, we need do nothing since flow.c will
1291 delete dead code. We also can't know if the registers being used are
1292 dead or not at this point.
1294 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1295 nothing other than set a register that dies in this insn, we can delete
1298 On machines with CC0, if CC0 is used in this insn, we may be able to
1299 delete the insn that set it. */
1302 delete_computation (rtx insn
)
1307 if (reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
1309 rtx prev
= prev_nonnote_insn (insn
);
1310 /* We assume that at this stage
1311 CC's are always set explicitly
1312 and always immediately before the jump that
1313 will use them. So if the previous insn
1314 exists to set the CC's, delete it
1315 (unless it performs auto-increments, etc.). */
1316 if (prev
&& NONJUMP_INSN_P (prev
)
1317 && sets_cc0_p (PATTERN (prev
)))
1319 if (sets_cc0_p (PATTERN (prev
)) > 0
1320 && ! side_effects_p (PATTERN (prev
)))
1321 delete_computation (prev
);
1323 /* Otherwise, show that cc0 won't be used. */
1324 REG_NOTES (prev
) = gen_rtx_EXPR_LIST (REG_UNUSED
,
1325 cc0_rtx
, REG_NOTES (prev
));
1330 for (note
= REG_NOTES (insn
); note
; note
= next
)
1332 next
= XEXP (note
, 1);
1334 if (REG_NOTE_KIND (note
) != REG_DEAD
1335 /* Verify that the REG_NOTE is legitimate. */
1336 || !REG_P (XEXP (note
, 0)))
1339 delete_prior_computation (note
, insn
);
1342 delete_related_insns (insn
);
1345 /* Delete insn INSN from the chain of insns and update label ref counts
1346 and delete insns now unreachable.
1348 Returns the first insn after INSN that was not deleted.
1350 Usage of this instruction is deprecated. Use delete_insn instead and
1351 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1354 delete_related_insns (rtx insn
)
1356 int was_code_label
= (LABEL_P (insn
));
1358 rtx next
= NEXT_INSN (insn
), prev
= PREV_INSN (insn
);
1360 while (next
&& INSN_DELETED_P (next
))
1361 next
= NEXT_INSN (next
);
1363 /* This insn is already deleted => return first following nondeleted. */
1364 if (INSN_DELETED_P (insn
))
1369 /* If instruction is followed by a barrier,
1370 delete the barrier too. */
1372 if (next
!= 0 && BARRIER_P (next
))
1375 /* If deleting a jump, decrement the count of the label,
1376 and delete the label if it is now unused. */
1378 if (JUMP_P (insn
) && JUMP_LABEL (insn
))
1380 rtx lab
= JUMP_LABEL (insn
), lab_next
;
1382 if (LABEL_NUSES (lab
) == 0)
1384 /* This can delete NEXT or PREV,
1385 either directly if NEXT is JUMP_LABEL (INSN),
1386 or indirectly through more levels of jumps. */
1387 delete_related_insns (lab
);
1389 /* I feel a little doubtful about this loop,
1390 but I see no clean and sure alternative way
1391 to find the first insn after INSN that is not now deleted.
1392 I hope this works. */
1393 while (next
&& INSN_DELETED_P (next
))
1394 next
= NEXT_INSN (next
);
1397 else if (tablejump_p (insn
, NULL
, &lab_next
))
1399 /* If we're deleting the tablejump, delete the dispatch table.
1400 We may not be able to kill the label immediately preceding
1401 just yet, as it might be referenced in code leading up to
1403 delete_related_insns (lab_next
);
1407 /* Likewise if we're deleting a dispatch table. */
1410 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1411 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
1413 rtx pat
= PATTERN (insn
);
1414 int i
, diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
1415 int len
= XVECLEN (pat
, diff_vec_p
);
1417 for (i
= 0; i
< len
; i
++)
1418 if (LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)) == 0)
1419 delete_related_insns (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0));
1420 while (next
&& INSN_DELETED_P (next
))
1421 next
= NEXT_INSN (next
);
1425 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1426 if (NONJUMP_INSN_P (insn
) || CALL_P (insn
))
1427 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1428 if (REG_NOTE_KIND (note
) == REG_LABEL
1429 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1430 && LABEL_P (XEXP (note
, 0)))
1431 if (LABEL_NUSES (XEXP (note
, 0)) == 0)
1432 delete_related_insns (XEXP (note
, 0));
1434 while (prev
&& (INSN_DELETED_P (prev
) || NOTE_P (prev
)))
1435 prev
= PREV_INSN (prev
);
1437 /* If INSN was a label and a dispatch table follows it,
1438 delete the dispatch table. The tablejump must have gone already.
1439 It isn't useful to fall through into a table. */
1442 && NEXT_INSN (insn
) != 0
1443 && JUMP_P (NEXT_INSN (insn
))
1444 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
1445 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
1446 next
= delete_related_insns (NEXT_INSN (insn
));
1448 /* If INSN was a label, delete insns following it if now unreachable. */
1450 if (was_code_label
&& prev
&& BARRIER_P (prev
))
1455 code
= GET_CODE (next
);
1457 && NOTE_LINE_NUMBER (next
) != NOTE_INSN_FUNCTION_END
)
1458 next
= NEXT_INSN (next
);
1459 /* Keep going past other deleted labels to delete what follows. */
1460 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
1461 next
= NEXT_INSN (next
);
1462 else if (code
== BARRIER
|| INSN_P (next
))
1463 /* Note: if this deletes a jump, it can cause more
1464 deletion of unreachable code, after a different label.
1465 As long as the value from this recursive call is correct,
1466 this invocation functions correctly. */
1467 next
= delete_related_insns (next
);
1476 /* Delete a range of insns from FROM to TO, inclusive.
1477 This is for the sake of peephole optimization, so assume
1478 that whatever these insns do will still be done by a new
1479 peephole insn that will replace them. */
1482 delete_for_peephole (rtx from
, rtx to
)
1488 rtx next
= NEXT_INSN (insn
);
1489 rtx prev
= PREV_INSN (insn
);
1493 INSN_DELETED_P (insn
) = 1;
1495 /* Patch this insn out of the chain. */
1496 /* We don't do this all at once, because we
1497 must preserve all NOTEs. */
1499 NEXT_INSN (prev
) = next
;
1502 PREV_INSN (next
) = prev
;
1510 /* Note that if TO is an unconditional jump
1511 we *do not* delete the BARRIER that follows,
1512 since the peephole that replaces this sequence
1513 is also an unconditional jump in that case. */
1516 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1517 NLABEL as a return. Accrue modifications into the change group. */
1520 redirect_exp_1 (rtx
*loc
, rtx olabel
, rtx nlabel
, rtx insn
)
1523 RTX_CODE code
= GET_CODE (x
);
1527 if (code
== LABEL_REF
)
1529 if (XEXP (x
, 0) == olabel
)
1533 n
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
1535 n
= gen_rtx_RETURN (VOIDmode
);
1537 validate_change (insn
, loc
, n
, 1);
1541 else if (code
== RETURN
&& olabel
== 0)
1544 x
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
1546 x
= gen_rtx_RETURN (VOIDmode
);
1547 if (loc
== &PATTERN (insn
))
1548 x
= gen_rtx_SET (VOIDmode
, pc_rtx
, x
);
1549 validate_change (insn
, loc
, x
, 1);
1553 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
1554 && GET_CODE (SET_SRC (x
)) == LABEL_REF
1555 && XEXP (SET_SRC (x
), 0) == olabel
)
1557 validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 1);
1561 fmt
= GET_RTX_FORMAT (code
);
1562 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1565 redirect_exp_1 (&XEXP (x
, i
), olabel
, nlabel
, insn
);
1566 else if (fmt
[i
] == 'E')
1569 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1570 redirect_exp_1 (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
);
1575 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
1576 the modifications into the change group. Return false if we did
1577 not see how to do that. */
1580 redirect_jump_1 (rtx jump
, rtx nlabel
)
1582 int ochanges
= num_validated_changes ();
1585 if (GET_CODE (PATTERN (jump
)) == PARALLEL
)
1586 loc
= &XVECEXP (PATTERN (jump
), 0, 0);
1588 loc
= &PATTERN (jump
);
1590 redirect_exp_1 (loc
, JUMP_LABEL (jump
), nlabel
, jump
);
1591 return num_validated_changes () > ochanges
;
1594 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
1595 jump target label is unused as a result, it and the code following
1598 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1601 The return value will be 1 if the change was made, 0 if it wasn't
1602 (this can only occur for NLABEL == 0). */
1605 redirect_jump (rtx jump
, rtx nlabel
, int delete_unused
)
1607 rtx olabel
= JUMP_LABEL (jump
);
1609 if (nlabel
== olabel
)
1612 if (! redirect_jump_1 (jump
, nlabel
) || ! apply_change_group ())
1615 redirect_jump_2 (jump
, olabel
, nlabel
, delete_unused
, 0);
1619 /* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
1620 NLABEL in JUMP. If DELETE_UNUSED is non-negative, copy a
1621 NOTE_INSN_FUNCTION_END found after OLABEL to the place after NLABEL.
1622 If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
1623 count has dropped to zero. */
1625 redirect_jump_2 (rtx jump
, rtx olabel
, rtx nlabel
, int delete_unused
,
1630 JUMP_LABEL (jump
) = nlabel
;
1632 ++LABEL_NUSES (nlabel
);
1634 /* Update labels in any REG_EQUAL note. */
1635 if ((note
= find_reg_note (jump
, REG_EQUAL
, NULL_RTX
)) != NULL_RTX
)
1637 if (!nlabel
|| (invert
&& !invert_exp_1 (XEXP (note
, 0), jump
)))
1638 remove_note (jump
, note
);
1641 redirect_exp_1 (&XEXP (note
, 0), olabel
, nlabel
, jump
);
1642 confirm_change_group ();
1646 /* If we're eliding the jump over exception cleanups at the end of a
1647 function, move the function end note so that -Wreturn-type works. */
1648 if (olabel
&& nlabel
1649 && NEXT_INSN (olabel
)
1650 && NOTE_P (NEXT_INSN (olabel
))
1651 && NOTE_LINE_NUMBER (NEXT_INSN (olabel
)) == NOTE_INSN_FUNCTION_END
1652 && delete_unused
>= 0)
1653 emit_note_after (NOTE_INSN_FUNCTION_END
, nlabel
);
1655 if (olabel
&& --LABEL_NUSES (olabel
) == 0 && delete_unused
> 0
1656 /* Undefined labels will remain outside the insn stream. */
1657 && INSN_UID (olabel
))
1658 delete_related_insns (olabel
);
1660 invert_br_probabilities (jump
);
1663 /* Invert the jump condition X contained in jump insn INSN. Accrue the
1664 modifications into the change group. Return nonzero for success. */
1666 invert_exp_1 (rtx x
, rtx insn
)
1668 RTX_CODE code
= GET_CODE (x
);
1670 if (code
== IF_THEN_ELSE
)
1672 rtx comp
= XEXP (x
, 0);
1674 enum rtx_code reversed_code
;
1676 /* We can do this in two ways: The preferable way, which can only
1677 be done if this is not an integer comparison, is to reverse
1678 the comparison code. Otherwise, swap the THEN-part and ELSE-part
1679 of the IF_THEN_ELSE. If we can't do either, fail. */
1681 reversed_code
= reversed_comparison_code (comp
, insn
);
1683 if (reversed_code
!= UNKNOWN
)
1685 validate_change (insn
, &XEXP (x
, 0),
1686 gen_rtx_fmt_ee (reversed_code
,
1687 GET_MODE (comp
), XEXP (comp
, 0),
1694 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
1695 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
1702 /* Invert the condition of the jump JUMP, and make it jump to label
1703 NLABEL instead of where it jumps now. Accrue changes into the
1704 change group. Return false if we didn't see how to perform the
1705 inversion and redirection. */
1708 invert_jump_1 (rtx jump
, rtx nlabel
)
1710 rtx x
= pc_set (jump
);
1713 ochanges
= num_validated_changes ();
1714 if (!x
|| !invert_exp_1 (SET_SRC (x
), jump
))
1716 if (num_validated_changes () == ochanges
)
1719 return redirect_jump_1 (jump
, nlabel
);
1722 /* Invert the condition of the jump JUMP, and make it jump to label
1723 NLABEL instead of where it jumps now. Return true if successful. */
1726 invert_jump (rtx jump
, rtx nlabel
, int delete_unused
)
1728 rtx olabel
= JUMP_LABEL (jump
);
1730 if (invert_jump_1 (jump
, nlabel
) && apply_change_group ())
1732 redirect_jump_2 (jump
, olabel
, nlabel
, delete_unused
, 1);
1740 /* Like rtx_equal_p except that it considers two REGs as equal
1741 if they renumber to the same value and considers two commutative
1742 operations to be the same if the order of the operands has been
1745 ??? Addition is not commutative on the PA due to the weird implicit
1746 space register selection rules for memory addresses. Therefore, we
1747 don't consider a + b == b + a.
1749 We could/should make this test a little tighter. Possibly only
1750 disabling it on the PA via some backend macro or only disabling this
1751 case when the PLUS is inside a MEM. */
1754 rtx_renumbered_equal_p (rtx x
, rtx y
)
1757 enum rtx_code code
= GET_CODE (x
);
1763 if ((code
== REG
|| (code
== SUBREG
&& REG_P (SUBREG_REG (x
))))
1764 && (REG_P (y
) || (GET_CODE (y
) == SUBREG
1765 && REG_P (SUBREG_REG (y
)))))
1767 int reg_x
= -1, reg_y
= -1;
1768 int byte_x
= 0, byte_y
= 0;
1770 if (GET_MODE (x
) != GET_MODE (y
))
1773 /* If we haven't done any renumbering, don't
1774 make any assumptions. */
1775 if (reg_renumber
== 0)
1776 return rtx_equal_p (x
, y
);
1780 reg_x
= REGNO (SUBREG_REG (x
));
1781 byte_x
= SUBREG_BYTE (x
);
1783 if (reg_renumber
[reg_x
] >= 0)
1785 reg_x
= subreg_regno_offset (reg_renumber
[reg_x
],
1786 GET_MODE (SUBREG_REG (x
)),
1795 if (reg_renumber
[reg_x
] >= 0)
1796 reg_x
= reg_renumber
[reg_x
];
1799 if (GET_CODE (y
) == SUBREG
)
1801 reg_y
= REGNO (SUBREG_REG (y
));
1802 byte_y
= SUBREG_BYTE (y
);
1804 if (reg_renumber
[reg_y
] >= 0)
1806 reg_y
= subreg_regno_offset (reg_renumber
[reg_y
],
1807 GET_MODE (SUBREG_REG (y
)),
1816 if (reg_renumber
[reg_y
] >= 0)
1817 reg_y
= reg_renumber
[reg_y
];
1820 return reg_x
>= 0 && reg_x
== reg_y
&& byte_x
== byte_y
;
1823 /* Now we have disposed of all the cases
1824 in which different rtx codes can match. */
1825 if (code
!= GET_CODE (y
))
1838 /* We can't assume nonlocal labels have their following insns yet. */
1839 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
1840 return XEXP (x
, 0) == XEXP (y
, 0);
1842 /* Two label-refs are equivalent if they point at labels
1843 in the same position in the instruction stream. */
1844 return (next_real_insn (XEXP (x
, 0))
1845 == next_real_insn (XEXP (y
, 0)));
1848 return XSTR (x
, 0) == XSTR (y
, 0);
1851 /* If we didn't match EQ equality above, they aren't the same. */
1858 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
1860 if (GET_MODE (x
) != GET_MODE (y
))
1863 /* For commutative operations, the RTX match if the operand match in any
1864 order. Also handle the simple binary and unary cases without a loop.
1866 ??? Don't consider PLUS a commutative operator; see comments above. */
1867 if (COMMUTATIVE_P (x
) && code
!= PLUS
)
1868 return ((rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
1869 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)))
1870 || (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 1))
1871 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 0))));
1872 else if (NON_COMMUTATIVE_P (x
))
1873 return (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
1874 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)));
1875 else if (UNARY_P (x
))
1876 return rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0));
1878 /* Compare the elements. If any pair of corresponding elements
1879 fail to match, return 0 for the whole things. */
1881 fmt
= GET_RTX_FORMAT (code
);
1882 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1888 if (XWINT (x
, i
) != XWINT (y
, i
))
1893 if (XINT (x
, i
) != XINT (y
, i
))
1898 if (XTREE (x
, i
) != XTREE (y
, i
))
1903 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
1908 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
1913 if (XEXP (x
, i
) != XEXP (y
, i
))
1920 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
1922 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1923 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
1934 /* If X is a hard register or equivalent to one or a subregister of one,
1935 return the hard register number. If X is a pseudo register that was not
1936 assigned a hard register, return the pseudo register number. Otherwise,
1937 return -1. Any rtx is valid for X. */
1944 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
1945 return reg_renumber
[REGNO (x
)];
1948 if (GET_CODE (x
) == SUBREG
)
1950 int base
= true_regnum (SUBREG_REG (x
));
1951 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
1952 return base
+ subreg_regno_offset (REGNO (SUBREG_REG (x
)),
1953 GET_MODE (SUBREG_REG (x
)),
1954 SUBREG_BYTE (x
), GET_MODE (x
));
1959 /* Return regno of the register REG and handle subregs too. */
1961 reg_or_subregno (rtx reg
)
1965 if (GET_CODE (reg
) == SUBREG
)
1966 return REGNO (SUBREG_REG (reg
));