/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This is the pathetic remnant of the once-glorious jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions to
   operate on jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes as well.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
40 #include "coretypes.h"
45 #include "hard-reg-set.h"
47 #include "insn-config.h"
48 #include "insn-attr.h"
54 #include "diagnostic.h"
59 #include "tree-pass.h"
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */
static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void delete_computation (rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);
static void delete_prior_computation (rtx, rtx);
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */

void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */

static void
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
        {
          prev = prev_nonnote_insn (insn);
          if (BARRIER_P (prev))
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            reorder_insns (insn, insn, prev);
        }
    }
}

struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
        rtx note, next;

        for (note = REG_NOTES (insn); note; note = next)
          {
            next = XEXP (note, 1);
            if (REG_NOTE_KIND (note) == REG_LABEL
                && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
              remove_note (insn, note);
          }
      }
}
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, 0);
        if (! INSN_DELETED_P (insn) && JUMP_P (insn))
          {
            /* When we know the LABEL_REF contained in a REG used in
               an indirect jump, we'll have a REG_LABEL note so that
               flow can tell where it's going.  */
            if (JUMP_LABEL (insn) == 0)
              {
                rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
                if (label_note)
                  {
                    /* Wrap a LABEL_REF around the REG_LABEL note, so
                       that we can canonicalize it.  */
                    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
                                                       XEXP (label_note, 0));

                    mark_jump_label (label_ref, insn, 0);
                    XEXP (label_note, 0) = XEXP (label_ref, 0);
                    JUMP_LABEL (insn) = XEXP (label_note, 0);
                  }
              }
          }
      }
}
/* Move all block-beg, block-end and loop-beg notes between START and END out
   before START.  START and END may be such notes.  Returns the values of the
   new starting and ending insns, which may be different if the original ones
   were such notes.  Return true if there were only such notes and no real
   instructions.  */

int
squeeze_notes (rtx* startp, rtx* endp)
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
          && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
        {
          /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass.  */
          gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
                      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);

          if (insn == start)
            start = next;
          else
            {
              rtx prev = PREV_INSN (insn);
              PREV_INSN (insn) = PREV_INSN (start);
              NEXT_INSN (insn) = start;
              NEXT_INSN (PREV_INSN (insn)) = insn;
              PREV_INSN (NEXT_INSN (insn)) = insn;
              NEXT_INSN (prev) = next;
              PREV_INSN (next) = prev;
              start = insn;
            }
        }
      else
        last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return 1;

  end = last;

  *startp = start;
  *endp = end;
  return 0;
}
/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || !LABEL_P (label))
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || !LABEL_P (label))
    {
      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
/* Given a comparison (CODE ARG0 ARG1), inside an insn INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return UNKNOWN.
   UNKNOWN may be returned in case we are having a CC_MODE compare and we don't
   know whether its source is a floating point or an integer comparison.  The
   machine description should define the REVERSIBLE_CC_MODE and
   REVERSE_CONDITION macros to help this function avoid overhead in these
   cases.  */
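/* A few illustrative results of the logic below (the operand names are
   hypothetical):

     - an integer (GT a b) reverses to LE via reverse_condition;
     - (ORDERED a b) reverses to UNORDERED, and (UNEQ a b) to LTGT, via
       reverse_condition_maybe_unordered, since an unordered code implies
       a floating-point comparison;
     - a floating-point (LT a b) where NaNs must be honored, or a MODE_CC
       comparison whose originating COMPARE cannot be found, yields
       UNKNOWN.  */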
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1, rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
         point.  Similarly the unsigned comparisons are never used for
         floating point so we can reverse them in the default way.  */
      return reverse_condition (code);

    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see an unordered comparison, we can be sure to
         be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;

    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      for (prev = prev_nonnote_insn (insn);
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (prev))
        {
          rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for the
                 model of i387 comparisons that first move flag registers
                 around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
              /* If the register is clobbered in some incomprehensible way,
                 give up.  */
              return UNKNOWN;
            }
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
/* A wrapper around the previous function to take COMPARISON as rtx
   expression.  This simplifies many callers.  */

enum rtx_code
reversed_comparison_code (rtx comparison, rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
                                         XEXP (comparison, 0),
                                         XEXP (comparison, 1), insn);
}
/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */

rtx
reversed_comparison (rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                  XEXP (exp, 0), XEXP (exp, 1));
}
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */
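/* Worked example of the hazard (purely illustrative): if X is a NaN, then
   both (LT X Y) and (GE X Y) are false under IEEE semantics, so rewriting a
   branch on (LT X Y) into a branch on (GE X Y) with its arms swapped changes
   behavior for unordered operands.  The logically correct reversal would be
   UNGE, which is true exactly when (LT X Y) is false; reversed_comparison_code
   detects this case and refuses (returns UNKNOWN) rather than hand back an
   unsafe code.  */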
enum rtx_code
reverse_condition (enum rtx_code code)
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   return CODE itself.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */
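/* For instance, comparison_dominates_p (EQ, LE) and
   comparison_dominates_p (LT, NE) are both nonzero, because x == y implies
   x <= y and x < y implies x != y, while comparison_dominates_p (LE, LT) is
   zero, since x <= y does not imply x < y.  */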
int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to reverse
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (rtx insn)
{
  return (JUMP_P (insn)
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}
/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */
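/* The canonical form recognized below is a single SET of the PC, e.g.

     (set (pc) (if_then_else (lt (reg) (const_int 0))
                             (label_ref L)
                             (pc)))

   (the condition and the label L are just illustrative); either arm may be
   the LABEL_REF or a RETURN, with the other arm being (pc).  */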
int
condjump_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
            && ((GET_CODE (XEXP (x, 2)) == PC
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
                     || GET_CODE (XEXP (x, 1)) == RETURN))
                || (GET_CODE (XEXP (x, 1)) == PC
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
                        || GET_CODE (XEXP (x, 2)) == RETURN))));
}
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;

  return 0;
}
/* Return set of PC, otherwise NULL.  */

rtx
pc_set (rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}
/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}
/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */
int
any_condjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
          || (a == PC && (b == LABEL_REF || b == RETURN)));
}
/* Return the label of a conditional jump.  */

rtx
condjump_label (rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}
/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
               || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}
/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}
/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
            sets_cc0 = 1;
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
            other_things = 1;
        }
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   Return null if the chain ultimately leads to a return instruction.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find the end, return LABEL,
   since that tells the caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a USE or CLOBBER.  */
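/* Illustrative chain (label names hypothetical): if the first active insn
   after L1 is an unconditional `jump L2' followed by a BARRIER, and the
   first active insn after L2 is `jump L3', then follow_jumps (L1) returns
   L3, provided no cycle is found, the chain is not too deep, and the
   target is not a dispatch table.  */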
rtx
follow_jumps (rtx label)
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  for (depth = 0;
       (depth < 10
        && (insn = next_active_insn (value)) != 0
        && JUMP_P (insn)
        && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
             && onlyjump_p (insn))
            || GET_CODE (PATTERN (insn)) == RETURN)
        && (next = NEXT_INSN (insn))
        && BARRIER_P (next));
       depth++)
    {
      rtx tem;
      if (!reload_completed && flag_test_coverage)
        {
          /* ??? Optional.  Disables some optimizations, but makes
             gcov output more accurate with -O.  */
          for (tem = value; tem != insn; tem = NEXT_INSN (tem))
            if (NOTE_P (tem) && NOTE_LINE_NUMBER (tem) > 0)
              return value;
        }

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
        return label;

      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
                  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
        break;

      value = JUMP_LABEL (insn);
    }
  if (depth == 10)
    return label;
  return value;
}
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */
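/* For example (illustrative), given a non-jump insn whose pattern is
   (set (reg R) (label_ref L)), this routine increments LABEL_NUSES (L)
   and attaches a REG_LABEL note for L to the insn; for a JUMP_INSN whose
   pattern refers to L, it records the label in JUMP_LABEL instead.  */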
void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (JUMP_P (insn))
              JUMP_LABEL (insn) = label;
            else
              {
                /* Add a REG_LABEL note for LABEL unless there already
                   is one.  All uses of a label, except for labels
                   that are the targets of jumps, must have a
                   REG_LABEL note.  */
                if (! find_reg_note (insn, REG_LABEL, label))
                  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
                                                        REG_NOTES (insn));
              }
          }
        return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
        }
    }
}
/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

void
delete_jump (rtx insn)
{
  rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == PC)
    delete_computation (insn);
}
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (NONJUMP_INSN_P (our_prev)
                    || CALL_P (our_prev));
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
         or the callee pops the arguments, then give up.  */
      if (CALL_P (our_prev)
          && (! CONST_OR_PURE_CALL_P (our_prev)
              || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
        break;

      /* If we reach a SEQUENCE, it is too complex to try to
         do anything with it, so give up.  We can be run during
         and after reorg, so SEQUENCE rtl can legitimately show
         up here.  */
      if (GET_CODE (pat) == SEQUENCE)
        break;

      if (GET_CODE (pat) == USE
          && NONJUMP_INSN_P (XEXP (pat, 0)))
        /* reorg creates USEs that look like this.  We leave them
           alone because reorg needs them for its own purposes.  */
        continue;

      if (reg_set_p (reg, pat))
        {
          if (side_effects_p (pat) && !CALL_P (our_prev))
            break;

          if (GET_CODE (pat) == PARALLEL)
            {
              /* If we find a SET of something else, we can't
                 delete the insn.  */

              int i;

              for (i = 0; i < XVECLEN (pat, 0); i++)
                {
                  rtx part = XVECEXP (pat, 0, i);

                  if (GET_CODE (part) == SET
                      && SET_DEST (part) != reg)
                    break;
                }

              if (i == XVECLEN (pat, 0))
                delete_computation (our_prev);
            }
          else if (GET_CODE (pat) == SET
                   && REG_P (SET_DEST (pat)))
            {
              int dest_regno = REGNO (SET_DEST (pat));
              int dest_endregno
                = (dest_regno
                   + (dest_regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[dest_regno]
                                        [GET_MODE (SET_DEST (pat))] : 1));
              int regno = REGNO (reg);
              int endregno
                = (regno
                   + (regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));

              if (dest_regno >= regno
                  && dest_endregno <= endregno)
                delete_computation (our_prev);

              /* We may have a multi-word hard register and some, but not
                 all, of the words of the register are needed in subsequent
                 insns.  Write REG_UNUSED notes for those parts that were not
                 needed.  */
              else if (dest_regno <= regno
                       && dest_endregno >= endregno)
                {
                  int i;

                  REG_NOTES (our_prev)
                    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
                                         REG_NOTES (our_prev));

                  for (i = dest_regno; i < dest_endregno; i++)
                    if (! find_regno_note (our_prev, REG_UNUSED, i))
                      break;

                  if (i == dest_endregno)
                    delete_computation (our_prev);
                }
            }

          break;
        }

      /* If PAT references the register that dies here, it is an
         additional use.  Hence any prior SET isn't dead.  However, this
         insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
        {
          XEXP (note, 1) = REG_NOTES (our_prev);
          REG_NOTES (our_prev) = note;
          break;
        }
    }
}
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */
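/* Illustrative scenario (register names hypothetical): if INSN is a
   conditional jump whose comparison result lives in a register that has a
   REG_DEAD note on INSN, and the only prior insn setting that register is
   the compare itself, then deleting INSN through this routine also deletes
   the now-dead compare, by way of delete_prior_computation.  */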
static void
delete_computation (rtx insn)
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
         CC's are always set explicitly
         and always immediately before the jump that
         will use them.  So if the previous insn
         exists to set the CC's, delete it
         (unless it performs auto-increments, etc.).  */
      if (prev && NONJUMP_INSN_P (prev)
          && sets_cc0_p (PATTERN (prev)))
        {
          if (sets_cc0_p (PATTERN (prev)) > 0
              && ! side_effects_p (PATTERN (prev)))
            delete_computation (prev);
          else
            /* Otherwise, show that cc0 won't be used.  */
            REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
                                                  cc0_rtx, REG_NOTES (prev));
        }
    }
#endif

  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
          /* Verify that the REG_NOTE is legitimate.  */
          || !REG_P (XEXP (note, 0)))
        continue;

      delete_prior_computation (note, insn);
    }

  delete_related_insns (insn);
}
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this function is deprecated.  Use delete_insn instead and
   a subsequent cfg_cleanup pass to delete unreachable code if needed.  */
rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        {
          /* This can delete NEXT or PREV,
             either directly if NEXT is JUMP_LABEL (INSN),
             or indirectly through more levels of jumps.  */
          delete_related_insns (lab);

          /* I feel a little doubtful about this loop,
             but I see no clean and sure alternative way
             to find the first insn after INSN that is not now deleted.
             I hope this works.  */
          while (next && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          return next;
        }
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE
              && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  return next;
}
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
        {
          INSN_DELETED_P (insn) = 1;

          /* Patch this insn out of the chain.  */
          /* We don't do this all at once, because we
             must preserve all NOTEs.  */
          if (prev)
            NEXT_INSN (prev) = next;

          if (next)
            PREV_INSN (next) = prev;
        }

      if (insn == to)
        break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */
static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          if (nlabel)
            n = gen_rtx_LABEL_REF (Pmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          validate_change (insn, loc, n, 1);
          return;
        }
    }
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
        x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
        x = gen_rtx_RETURN (VOIDmode);
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */
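/* Typical use (sketch): to retarget a jump at a new label and clean up the
   old target when it becomes unused, a caller might do

     if (redirect_jump (jump, new_label, 1))
       ...

   while passing NLABEL == 0 asks for the jump to be turned into a
   (possibly conditional) RETURN instead.  */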
int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.  If DELETE_UNUSED is non-negative, copy a
   NOTE_INSN_FUNCTION_END found after OLABEL to the place after NLABEL.
   If DELETE_UNUSED is positive, delete related insns to OLABEL if its ref
   count has dropped to zero.  */

void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && NOTE_P (NEXT_INSN (olabel))
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END
      && delete_unused >= 0)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);

  if (invert)
    invert_br_probabilities (jump);
}
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */

static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
         be done if this is not an integer comparison, is to reverse
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
         of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
        {
          validate_change (insn, &XEXP (x, 0),
                           gen_rtx_fmt_ee (reversed_code,
                                           GET_MODE (comp), XEXP (comp, 0),
                                           XEXP (comp, 1)),
                           1);
          return 1;
        }

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }

  return 0;
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (rtx x, rtx y)
{
  int i;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                        && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              reg_x = subreg_regno_offset (reg_renumber[reg_x],
                                           GET_MODE (SUBREG_REG (x)),
                                           byte_x,
                                           GET_MODE (x));
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              reg_y = subreg_regno_offset (reg_renumber[reg_y],
                                           GET_MODE (SUBREG_REG (y)),
                                           byte_y,
                                           GET_MODE (y));
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTX's match if the operands match in
     any order.  Also handle the simple binary and unary cases without a
     loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                           GET_MODE (SUBREG_REG (x)),
                                           SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}
/* Return regno of the register REG and handle subregs too.  */
unsigned int
reg_or_subregno (rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}