/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This is the pathetic reminder of old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions
   to operate on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
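
/* For illustration: a direct unconditional jump is represented as

     (jump_insn (set (pc) (label_ref L)))

   where the JUMP_LABEL of the jump_insn points at the CODE_LABEL L,
   and LABEL_NUSES of L counts references such as this one.  */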
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-pass.h"
#include "target.h"
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */
static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void delete_computation (rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);
static void delete_prior_computation (rtx, rtx);
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */

static void
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
	{
	  prev = prev_nonnote_insn (insn);
	  if (BARRIER_P (prev))
	    delete_insn (insn);
	  else if (prev != PREV_INSN (insn))
	    reorder_insns (insn, insn, prev);
	}
    }
}
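
/* For example, a stream of the form

     insn  NOTE  BARRIER  BARRIER

   becomes "insn BARRIER NOTE": the first barrier is moved up next to
   the non-fallthru insn, and the duplicate barrier that then follows
   another barrier is simply deleted.  */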
struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
static void
purge_line_number_notes (void)
{
  rtx last_note = NULL_RTX;
  rtx insn;

  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (NOTE_P (insn))
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	  /* Any previous line note was for the prologue; gdb wants a new
	     note after the prologue even if it is for the same line.  */
	  last_note = NULL_RTX;
	else if (NOTE_LINE_NUMBER (insn) >= 0)
	  {
	    /* Delete this note if it is identical to previous note.  */
	    if (last_note
#ifdef USE_MAPPED_LOCATION
		&& NOTE_SOURCE_LOCATION (insn) == NOTE_SOURCE_LOCATION (last_note)
#else
		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note)
#endif
		)
	      {
		delete_related_insns (insn);
		continue;
	      }

	    last_note = insn;
	  }
      }
}
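
/* E.g. two adjacent notes for line 42 of the same file, with no real
   insns between them, collapse to one; a note for line 43 in between
   keeps all three, since even an emptied line deserves its marker.  */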
struct tree_opt_pass pass_purge_lineno_notes =
{
  "elnotes",                            /* name */
  NULL,                                 /* gate */
  purge_line_number_notes,              /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  */
static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
	rtx note, next;

	for (note = REG_NOTES (insn); note; note = next)
	  {
	    next = XEXP (note, 1);
	    if (REG_NOTE_KIND (note) == REG_LABEL
		&& ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
	      remove_note (insn, note);
	  }
      }
}
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	mark_jump_label (PATTERN (insn), insn, 0);
	if (! INSN_DELETED_P (insn) && JUMP_P (insn))
	  {
	    /* When we know the LABEL_REF contained in a REG used in
	       an indirect jump, we'll have a REG_LABEL note so that
	       flow can tell where it's going.  */
	    if (JUMP_LABEL (insn) == 0)
	      {
		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
		if (label_note)
		  {
		    /* Put a LABEL_REF around the REG_LABEL note, so
		       that we can canonicalize it.  */
		    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
						       XEXP (label_note, 0));

		    mark_jump_label (label_ref, insn, 0);
		    XEXP (label_note, 0) = XEXP (label_ref, 0);
		    JUMP_LABEL (insn) = XEXP (label_note, 0);
		  }
	      }
	  }
      }
}
/* Move all block-beg, block-end and loop-beg notes between START and END out
   before START.  START and END may be such notes.  Returns the values of the
   new starting and ending insns, which may be different if the original ones
   were such notes.  Return true if there were only such notes and no real
   instructions.  */

bool
squeeze_notes (rtx *startp, rtx *endp)
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
	{
	  /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass.  */
	  gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
		      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);

	  if (insn == start)
	    start = next;
	  else
	    {
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
      else
	last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}
/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || !LABEL_P (label))
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || !LABEL_P (label))
    {
      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may also be returned in case we have a CC_MODE compare
   and don't know whether its source is a floating point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in
   these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point so we can reverse them in the default way.  */
      return reverse_condition (code);

    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see an unordered comparison, we can be sure to
	 be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;

    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
	 This code is expensive, but with a sane machine description it
	 will never be used, since REVERSIBLE_CC_MODE will return true
	 in all cases.  */
      if (! insn)
	return UNKNOWN;

      for (prev = prev_nonnote_insn (insn);
	   prev != 0 && !LABEL_P (prev);
	   prev = prev_nonnote_insn (prev))
	{
	  rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for
		 a model of i387 comparisons that first move flag
		 registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If the register is clobbered in some way we do not
	     understand, give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
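
/* Example: reversing (gt a b) on integers simply yields LE.  On IEEE
   floats LE would be wrong -- both GT and LE are false when an operand
   is a NaN -- so unless NaNs can be ignored this function answers
   UNKNOWN rather than guess.  */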
/* A wrapper around the previous function to take COMPARISON as rtx
   expression.  This simplifies many callers.  */

enum rtx_code
reversed_comparison_code (rtx comparison, rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}
/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */

rtx
reversed_comparison (rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  return simplify_gen_relational (reversed_code, mode, VOIDmode,
				  XEXP (exp, 0), XEXP (exp, 1));
}
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:  return NE;
    case NE:  return EQ;
    case GT:  return LE;
    case GE:  return LT;
    case LT:  return GE;
    case LE:  return GT;
    case GTU: return LEU;
    case GEU: return LTU;
    case LTU: return GEU;
    case LEU: return GTU;
    case UNORDERED: return ORDERED;
    case ORDERED:   return UNORDERED;

    case UNLT: case UNLE: case UNGT: case UNGE: case UNEQ: case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:   return NE;
    case NE:   return EQ;
    case GT:   return UNLE;
    case GE:   return UNLT;
    case LT:   return UNGE;
    case LE:   return UNGT;
    case LTGT: return UNEQ;
    case UNORDERED: return ORDERED;
    case ORDERED:   return UNORDERED;
    case UNLT: return GE;
    case UNLE: return GT;
    case UNGT: return LE;
    case UNGE: return LT;
    case UNEQ: return LTGT;

    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ: case NE: case UNORDERED: case ORDERED:
    case UNEQ: case LTGT:
      return code;

    case GT:   return LT;
    case GE:   return LE;
    case LT:   return GT;
    case LE:   return GE;
    case GTU:  return LTU;
    case GEU:  return LEU;
    case LTU:  return GTU;
    case LEU:  return GEU;
    case UNLT: return UNGT;
    case UNLE: return UNGE;
    case UNGT: return UNLT;
    case UNGE: return UNLE;

    default:
      gcc_unreachable ();
    }
}
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   return CODE.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ: case NE: case GTU: case GEU: case LTU: case LEU:
      return code;

    case GT: return GTU;
    case GE: return GEU;
    case LT: return LTU;
    case LE: return LEU;

    default:
      gcc_unreachable ();
    }
}
/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ: case NE: case GT: case GE: case LT: case LE:
      return code;

    case GTU: return GT;
    case GEU: return GE;
    case LTU: return LT;
    case LEU: return LE;

    default:
      gcc_unreachable ();
    }
}
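
/* The distinction matters: reverse_condition negates the predicate
   (reverse_condition (GT) == LE, since !(a > b) <=> a <= b for
   integers), while swap_condition exchanges the operands
   (swap_condition (GT) == LT, since a > b <=> b < a), and
   unsigned_condition (GT) == GTU merely drops signedness.  */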
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to reverse
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}
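
/* For instance, comparison_dominates_p (EQ, GE) is 1: a == b implies
   a >= b.  The converse call returns 0, and with IEEE semantics EQ
   also dominates ORDERED, since a == b can hold only when neither
   operand is a NaN.  */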
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (rtx insn)
{
  return (JUMP_P (insn)
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}
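
/* That is, simplejump_p accepts exactly (set (pc) (label_ref L)) and
   nothing else -- not a RETURN, not an IF_THEN_ELSE, and not a jump
   wrapped in a PARALLEL.  */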
/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || GET_CODE (XEXP (x, 1)) == RETURN))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| GET_CODE (XEXP (x, 2)) == RETURN))));
}
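
/* A conditional jump recognized here looks like

     (set (pc) (if_then_else (condition) (label_ref L) (pc)))

   or the mirror image with the label in the else arm; either arm may
   also be a RETURN.  */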
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}
/* Return set of PC, otherwise NULL.  */

rtx
pc_set (rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}
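
/* Typical use (sketch):

     rtx set = pc_set (insn);
     if (set && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
       ... operate on the branch condition ...

   which works whether the SET stands alone or leads a PARALLEL.  */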
/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}
/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
	  || (a == PC && (b == LABEL_REF || b == RETURN)));
}
/* Return the label of a conditional jump.  */

rtx
condjump_label (rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}
/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
	       || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}
/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}
/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   Return null if the chain ultimately leads to a return instruction.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a USE or CLOBBER.  */

rtx
follow_jumps (rtx label)
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& JUMP_P (insn)
	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
	     && onlyjump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& BARRIER_P (next));
       depth++)
    {
      rtx tem;
      if (!reload_completed && flag_test_coverage)
	{
	  /* ??? Optional.  Disables some optimizations, but makes
	     gcov output more accurate with -O.  */
	  for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	    if (NOTE_P (tem) && NOTE_LINE_NUMBER (tem) > 0)
	      return value;
	}

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  if (depth == 10)
    return label;
  return value;
}
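
/* E.g. given "L1: jump L2 ... L2: jump L3", follow_jumps (L1) returns
   L3.  If the chain led back to L1 the original label would be
   returned instead, and chains longer than 10 hops are left alone.  */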
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (NOTE_P (label)
	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	gcc_assert (LABEL_P (label));

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (JUMP_P (insn))
	      JUMP_LABEL (insn) = label;
	    else
	      {
		/* Add a REG_LABEL note for LABEL unless there already
		   is one.  All uses of a label, except for labels
		   that are the targets of jumps, must have a
		   REG_LABEL note.  */
		if (! find_reg_note (insn, REG_LABEL, label))
		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
							REG_NOTES (insn));
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
	}
    }
}
/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

void
delete_jump (rtx insn)
{
  rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == PC)
    delete_computation (insn);
}
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (NONJUMP_INSN_P (our_prev)
		    || CALL_P (our_prev));
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (CALL_P (our_prev)
	  && (! CONST_OR_PURE_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  We can be run during
	 and after reorg, so SEQUENCE rtl can legitimately show
	 up here.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && NONJUMP_INSN_P (XEXP (pat, 0)))
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	continue;

      if (reg_set_p (reg, pat))
	{
	  if (side_effects_p (pat) && !CALL_P (our_prev))
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && REG_P (SET_DEST (pat)))
	    {
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= (dest_regno
		   + (dest_regno < FIRST_PSEUDO_REGISTER
		      ? hard_regno_nregs[dest_regno]
					[GET_MODE (SET_DEST (pat))] : 1));
	      int regno = REGNO (reg);
	      int endregno
		= (regno
		   + (regno < FIRST_PSEUDO_REGISTER
		      ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));

	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));

		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (rtx insn)
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && NONJUMP_INSN_P (prev)
	  && sets_cc0_p (PATTERN (prev)))
	{
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || !REG_P (XEXP (note, 0)))
	continue;

      delete_prior_computation (note, insn);
    }

  delete_related_insns (insn);
}
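
/* An illustrative (hypothetical) sequence:

     r1 = r2 + r3      ; REG_DEAD notes for r2 and r3
     pc = r1 ? L : pc  ; REG_DEAD note for r1

   Deleting the jump also deletes the addition, and potentially the
   insns that computed r2 and r3, as long as each value was used only
   on this dying path.  */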
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead and
   a subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_related_insns (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if (tablejump_p (insn, NULL, &lab_next))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && LABEL_P (XEXP (note, 0)))
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
	{
	  code = GET_CODE (next);
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else if (code == BARRIER || INSN_P (next))
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	  else
	    break;
	}
    }

  return next;
}
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  if (nlabel)
	    n = gen_rtx_LABEL_REF (Pmode, nlabel);
	  else
	    n = gen_rtx_RETURN (VOIDmode);

	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
	x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
	x = gen_rtx_RETURN (VOIDmode);
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}
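
/* Typical use (sketch):

     if (redirect_jump (jump, new_label, 1))
       ... the jump now targets NEW_LABEL, and the old target has been
	   deleted if the redirection left it unused ...

   Pass 0 for DELETE_UNUSED to keep the old label around.  */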
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.  If DELETE_UNUSED is non-negative, copy a
   NOTE_INSN_FUNCTION_END found after OLABEL to the place after NLABEL.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && NOTE_P (NEXT_INSN (olabel))
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END
      && delete_unused >= 0)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return 1;
	}

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}
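
/* Sketch of a caller that flips a branch around a fallthrough block:

     if (any_condjump_p (jump) && onlyjump_p (jump)
	 && invert_jump (jump, new_target, 0))
       ... the condition is now reversed and the jump retargeted ...

   On failure all queued changes have been cancelled, so the insn is
   left exactly as it was.  */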
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (rtx x, rtx y)
{
  int i;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
			&& REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
					   GET_MODE (SUBREG_REG (x)),
					   byte_x,
					   GET_MODE (x));
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
					   GET_MODE (SUBREG_REG (y)),
					   byte_y,
					   GET_MODE (y));
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
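
/* For example, (plus:SI (reg 65) (reg 66)) matches
   (plus:SI (reg 66) (reg 65)) because PLUS is commutative, and a
   pseudo matches the hard register it was renumbered to.  */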
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
	  && base < FIRST_PSEUDO_REGISTER
	  && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
					    GET_MODE (SUBREG_REG (x)),
					    SUBREG_BYTE (x), GET_MODE (x)))
	return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
					   GET_MODE (SUBREG_REG (x)),
					   SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}
/* Return regno of the register REG and handle subregs too.  */
unsigned int
reg_or_subregno (rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}