/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
/* This is the pathetic remainder of the old fame of the jump-optimization
   pass of the compiler.  Now it contains basically a set of utility
   functions to operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
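/* A sketch of the bookkeeping described above (the helper below is
   hypothetical and not part of this file; it assumes the fields have
   been kept current, e.g. by rebuild_jump_labels):  */
#if 0
static void
show_jump_targets (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn) && JUMP_LABEL (insn))
      /* JUMP_LABEL is the CODE_LABEL this jump refers to; LABEL_NUSES
         on that label counts such references.  */
      fprintf (stderr, "jump %d -> label %d (%d uses)\n",
               INSN_UID (insn), INSN_UID (JUMP_LABEL (insn)),
               LABEL_NUSES (JUMP_LABEL (insn)));
}
#endif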
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "diagnostic.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
#include "timevar.h"
#include "tree-pass.h"
#include "target.h"
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */
static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void delete_computation (rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);
static void delete_prior_computation (rtx, rtx);
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */

static unsigned int
cleanup_barriers (void)
{
  rtx insn, next, prev;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
        {
          prev = prev_nonnote_insn (insn);
          if (BARRIER_P (prev))
            /* Two barriers in a row: the second is redundant.  */
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            /* Move the barrier so it directly follows the last real insn.  */
            reorder_insns (insn, insn, prev);
        }
    }
  return 0;
}
struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
/* Initialize LABEL_NUSES and JUMP_LABEL fields, and delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
        rtx note, next;

        for (note = REG_NOTES (insn); note; note = next)
          {
            next = XEXP (note, 1);
            if (REG_NOTE_KIND (note) == REG_LABEL
                && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
              remove_note (insn, note);
          }
      }
}
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, 0);
        if (! INSN_DELETED_P (insn) && JUMP_P (insn))
          {
            /* When we know the LABEL_REF contained in a REG used in
               an indirect jump, we'll have a REG_LABEL note so that
               flow can tell where it's going.  */
            if (JUMP_LABEL (insn) == 0)
              {
                rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
                if (label_note)
                  {
                    /* Put a LABEL_REF around the REG_LABEL note's label,
                       so that we can canonicalize it.  */
                    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
                                                       XEXP (label_note, 0));

                    mark_jump_label (label_ref, insn, 0);
                    XEXP (label_note, 0) = XEXP (label_ref, 0);
                    JUMP_LABEL (insn) = XEXP (label_note, 0);
                  }
              }
          }
      }

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;

      FOR_EACH_BB (bb)
        {
          for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
            if (JUMP_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }

          for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
            if (JUMP_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }
        }
    }
}
/* Move all block-beg, block-end and loop-beg notes between START and END out
   before START.  START and END may be such notes.  Returns the values of the
   new starting and ending insns, which may be different if the original ones
   were such notes.  Return true if there were only such notes and no real
   instructions.  */

bool
squeeze_notes (rtx *startp, rtx *endp)
{
  rtx start = *startp;
  rtx end = *endp;

  rtx insn;
  rtx next;
  rtx last = NULL;
  rtx past_end = NEXT_INSN (end);

  for (insn = start; insn != past_end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (NOTE_P (insn)
          && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
              || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG))
        {
          /* BLOCK_BEG or BLOCK_END notes only exist in the `final' pass.  */
          gcc_assert (NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_BEG
                      && NOTE_LINE_NUMBER (insn) != NOTE_INSN_BLOCK_END);

          if (insn == start)
            start = next;
          else
            {
              rtx prev = PREV_INSN (insn);
              PREV_INSN (insn) = PREV_INSN (start);
              NEXT_INSN (insn) = start;
              NEXT_INSN (PREV_INSN (insn)) = insn;
              PREV_INSN (NEXT_INSN (insn)) = insn;
              NEXT_INSN (prev) = next;
              PREV_INSN (next) = prev;
            }
        }
      else
        last = insn;
    }

  /* There were no real instructions.  */
  if (start == past_end)
    return true;

  end = last;

  *startp = start;
  *endp = end;
  return false;
}
/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || !LABEL_P (label))
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (rtx insn)
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || !LABEL_P (label))
    {
      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
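/* Usage sketch (hypothetical caller; not part of this file): a pass
   that wants to branch around INSN can materialize the target with
   get_label_after and must then maintain the jump fields itself.  */
#if 0
static void
example_branch_over (rtx insn)
{
  rtx after = get_label_after (insn);
  rtx jump = emit_jump_insn_before (gen_jump (after), insn);

  /* The emit functions do not update the jump bookkeeping.  */
  JUMP_LABEL (jump) = after;
  LABEL_NUSES (after)++;
}
#endif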
/* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
   of reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may be returned in case we are having CC_MODE compare
   and we don't know whether its source is a floating point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in
   these cases.  */

enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, rtx arg0, rtx arg1,
                                rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for the floating
         point.  Similarly the unsigned comparisons are never used for
         floating point so we can reverse them in the default way.  */
      return reverse_condition (code);

    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see unordered comparison, we can be sure to
         be dealing with floating point so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have safe way to reverse these yet.  */
      return UNKNOWN;

    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      rtx prev;

      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      for (prev = prev_nonnote_insn (insn);
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (prev))
        {
          rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for model
                 of i387 comparisons that first move flag registers around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
              /* If the register is clobbered in some way we don't
                 understand, give up.  */
              return UNKNOWN;
            }
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
/* A wrapper around the previous function to take COMPARISON as rtx
   expression.  This simplifies many callers.  */

enum rtx_code
reversed_comparison_code (rtx comparison, rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
                                         XEXP (comparison, 0),
                                         XEXP (comparison, 1), insn);
}
/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */

rtx
reversed_comparison (rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}
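/* For example (illustrative): reversing (gt:SI (reg) (const_int 0))
   gives LE, but reversing an IEEE (lt:DF ...) must give UNGE rather
   than GE, since !(a < b) also holds when a and b are unordered.  A
   caller working on a conditional jump might use it as follows
   (sketch; the variable names are hypothetical):  */
#if 0
  rtx cond = XEXP (SET_SRC (pc_set (jump)), 0);
  /* Passing the jump lets the function search backward for the
     COMPARE feeding a MODE_CC flags register.  */
  enum rtx_code rev = reversed_comparison_code (cond, jump);
  if (rev == UNKNOWN)
    /* No safe reversal exists; leave the jump alone.  */;
#endif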
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned unchanged.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to revert
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}
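/* E.g. (illustrative): comparison_dominates_p (EQ, LEU) is 1, since
   a == b implies a <=u b; comparison_dominates_p (LE, LT) is 0, since
   a <= b does not determine a < b.  */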
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (rtx insn)
{
  return (JUMP_P (insn)
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}
/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
            && ((GET_CODE (XEXP (x, 2)) == PC
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
                     || GET_CODE (XEXP (x, 1)) == RETURN))
                || (GET_CODE (XEXP (x, 1)) == PC
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
                        || GET_CODE (XEXP (x, 2)) == RETURN))));
}
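/* The accepted patterns look like (illustrative RTL):

     (set (pc) (label_ref L))                          ; simple jump

     (set (pc) (if_then_else (eq (reg) (const_int 0))  ; conditional
                             (label_ref L)
                             (pc)))

   possibly with the IF_THEN_ELSE arms swapped, or with RETURN in
   place of the LABEL_REF.  */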
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (rtx insn)
{
  rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}
/* Return the SET of the PC in INSN, otherwise NULL.  */

rtx
pc_set (rtx insn)
{
  rtx pat;

  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}
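/* Usage sketch (hypothetical): the condition of a conditional jump
   can be extracted through pc_set without worrying about PARALLELs.  */
#if 0
static rtx
example_jump_condition (rtx jump)
{
  rtx set = pc_set (jump);

  if (set && GET_CODE (SET_SRC (set)) == IF_THEN_ELSE)
    /* Operand 0 of the IF_THEN_ELSE is the comparison itself.  */
    return XEXP (SET_SRC (set), 0);
  return NULL_RTX;
}
#endif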
/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}
/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects, so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (rtx insn)
{
  rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
          || (a == PC && (b == LABEL_REF || b == RETURN)));
}
/* Return the label of a conditional jump.  */

rtx
condjump_label (rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}
/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
               || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}
/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}
#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
            sets_cc0 = 1;
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
            other_things = 1;
        }
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }

  return 0;
}
#endif /* HAVE_cc0 */
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   Return null if the chain ultimately leads to a return instruction.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find the end, return LABEL,
   since that tells the caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a USE or CLOBBER.  */

rtx
follow_jumps (rtx label)
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  for (depth = 0;
       (depth < 10
        && (insn = next_active_insn (value)) != 0
        && JUMP_P (insn)
        && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
             && onlyjump_p (insn))
            || GET_CODE (PATTERN (insn)) == RETURN)
        && (next = NEXT_INSN (insn))
        && BARRIER_P (next));
       depth++)
    {
      rtx tem;

      if (!reload_completed && flag_test_coverage)
        {
          /* ??? Optional.  Disables some optimizations, but makes
             gcov output more accurate with -O.  */
          for (tem = value; tem != insn; tem = NEXT_INSN (tem))
            if (NOTE_P (tem) && NOTE_LINE_NUMBER (tem) > 0)
              return value;
        }

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
        return label;

      /* If we reach a jump table, stop following the chain.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
                  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
        break;

      value = JUMP_LABEL (insn);
    }
  if (depth == 10)
    return label;
  return value;
}
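/* For instance (hypothetical stream): given "L1: jump L2" and
   "L2: jump L3", follow_jumps (L1) returns L3; see the sketch after
   redirect_jump below for how a caller retargets a branch there.  */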
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (JUMP_P (insn))
              JUMP_LABEL (insn) = label;
            else
              {
                /* Add a REG_LABEL note for LABEL unless there already
                   is one.  All uses of a label, except for labels
                   that are the targets of jumps, must have a
                   REG_LABEL note.  */
                if (! find_reg_note (insn, REG_LABEL, label))
                  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
                                                        REG_NOTES (insn));
              }
          }
        return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
        }
    }
}
/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

void
delete_jump (rtx insn)
{
  rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == PC)
    delete_computation (insn);
}
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (rtx note, rtx insn)
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (NONJUMP_INSN_P (our_prev)
                    || CALL_P (our_prev));
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
         or the callee pops the arguments, then give up.  */
      if (CALL_P (our_prev)
          && (! CONST_OR_PURE_CALL_P (our_prev)
              || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
        break;

      /* If we reach a SEQUENCE, it is too complex to try to
         do anything with it, so give up.  We can be run during
         and after reorg, so SEQUENCE rtl can legitimately show
         up here.  */
      if (GET_CODE (pat) == SEQUENCE)
        break;

      if (GET_CODE (pat) == USE
          && NONJUMP_INSN_P (XEXP (pat, 0)))
        /* reorg creates USEs that look like this.  We leave them
           alone because reorg needs them for its own purposes.  */
        continue;

      if (reg_set_p (reg, pat))
        {
          if (side_effects_p (pat) && !CALL_P (our_prev))
            break;

          if (GET_CODE (pat) == PARALLEL)
            {
              /* If we find a SET of something else, we can't
                 delete the insn.  */

              int i;

              for (i = 0; i < XVECLEN (pat, 0); i++)
                {
                  rtx part = XVECEXP (pat, 0, i);

                  if (GET_CODE (part) == SET
                      && SET_DEST (part) != reg)
                    break;
                }

              if (i == XVECLEN (pat, 0))
                delete_computation (our_prev);
            }
          else if (GET_CODE (pat) == SET
                   && REG_P (SET_DEST (pat)))
            {
              int dest_regno = REGNO (SET_DEST (pat));
              int dest_endregno
                = (dest_regno
                   + (dest_regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[dest_regno]
                                        [GET_MODE (SET_DEST (pat))] : 1));
              int regno = REGNO (reg);
              int endregno
                = (regno
                   + (regno < FIRST_PSEUDO_REGISTER
                      ? hard_regno_nregs[regno][GET_MODE (reg)] : 1));

              if (dest_regno >= regno
                  && dest_endregno <= endregno)
                delete_computation (our_prev);

              /* We may have a multi-word hard register and some, but not
                 all, of the words of the register are needed in subsequent
                 insns.  Write REG_UNUSED notes for those parts that were
                 not needed.  */
              else if (dest_regno <= regno
                       && dest_endregno >= endregno)
                {
                  int i;

                  REG_NOTES (our_prev)
                    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
                                         REG_NOTES (our_prev));

                  for (i = dest_regno; i < dest_endregno; i++)
                    if (! find_regno_note (our_prev, REG_UNUSED, i))
                      break;

                  if (i == dest_endregno)
                    delete_computation (our_prev);
                }
            }

          break;
        }

      /* If PAT references the register that dies here, it is an
         additional use.  Hence any prior SET isn't dead.  However, this
         insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
        {
          XEXP (note, 1) = REG_NOTES (our_prev);
          REG_NOTES (our_prev) = note;
          break;
        }
    }
}
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (rtx insn)
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
         CC's are always set explicitly
         and always immediately before the jump that
         will use them.  So if the previous insn
         exists to set the CC's, delete it
         (unless it performs auto-increments, etc.).  */
      if (prev && NONJUMP_INSN_P (prev)
          && sets_cc0_p (PATTERN (prev)))
        {
          if (sets_cc0_p (PATTERN (prev)) > 0
              && ! side_effects_p (PATTERN (prev)))
            delete_computation (prev);
          else
            /* Otherwise, show that cc0 won't be used.  */
            REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
                                                  cc0_rtx, REG_NOTES (prev));
        }
    }
#endif

  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
          /* Verify that the REG_NOTE is legitimate.  */
          || !REG_P (XEXP (note, 0)))
        continue;

      delete_prior_computation (note, insn);
    }

  delete_related_insns (insn);
}
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this function is deprecated.  Use delete_insn instead and
   a subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        {
          /* This can delete NEXT or PREV,
             either directly if NEXT is JUMP_LABEL (INSN),
             or indirectly through more levels of jumps.  */
          delete_related_insns (lab);

          /* I feel a little doubtful about this loop,
             but I see no clean and sure alternative way
             to find the first insn after INSN that is not now deleted.
             I hope this works.  */
          while (next && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          return next;
        }
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;

      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  return next;
}
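/* Usage sketch (hypothetical): unlike plain delete_insn, this also
   removes the trailing BARRIER, the target label when this jump was
   its last use, and any code that thereby became unreachable.  */
#if 0
  next = delete_related_insns (dead_jump);  /* dead_jump: hypothetical  */
#endif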
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
        {
          INSN_DELETED_P (insn) = 1;

          /* Patch this insn out of the chain.  */
          /* We don't do this all at once, because we
             must preserve all NOTEs.  */
          if (prev)
            NEXT_INSN (prev) = next;

          if (next)
            PREV_INSN (next) = prev;
        }

      if (insn == to)
        break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          if (nlabel)
            n = gen_rtx_LABEL_REF (Pmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          validate_change (insn, loc, n, 1);
          return;
        }
    }
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
        x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
        x = gen_rtx_RETURN (VOIDmode);
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}
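/* Usage sketch (hypothetical caller): retargeting a branch at the
   ultimate destination of a jump chain.  */
#if 0
static void
example_shorten_chain (rtx jump)
{
  rtx target = follow_jumps (JUMP_LABEL (jump));

  /* A positive DELETE_UNUSED lets the old label and any code hanging
     off it be deleted when this was its last reference.  */
  if (target && target != JUMP_LABEL (jump))
    redirect_jump (jump, target, 1);
}
#endif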
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete the insns related to OLABEL if its
   ref count has dropped to zero.  */

void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  /* Negative DELETE_UNUSED used to be used to signal behaviour on
     moving the FUNCTION_END note.  Just sanity check that no user
     still worries about this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);

  if (invert)
    invert_br_probabilities (jump);
}
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */

static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
         be done if this is not an integer comparison, is to reverse
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
         of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
        {
          validate_change (insn, &XEXP (x, 0),
                           gen_rtx_fmt_ee (reversed_code,
                                           GET_MODE (comp), XEXP (comp, 0),
                                           XEXP (comp, 1)), 1);
          return 1;
        }

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}
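/* Usage sketch (hypothetical): when the fallthrough code is being
   moved away, "if (cond) goto A" can become "if (!cond) goto B".  */
#if 0
  if (invert_jump (jump, new_target, 1))
    /* JUMP now branches to NEW_TARGET under the opposite condition,
       and its branch probabilities have been inverted.  */
    cleanup ();  /* cleanup, new_target: hypothetical  */
#endif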
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (rtx x, rtx y)
{
  int i;
  enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                        && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              reg_x = subreg_regno_offset (reg_renumber[reg_x],
                                           GET_MODE (SUBREG_REG (x)),
                                           byte_x, GET_MODE (x));
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              reg_y = subreg_regno_offset (reg_renumber[reg_y],
                                           GET_MODE (SUBREG_REG (y)),
                                           byte_y, GET_MODE (y));
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
   a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j),
                                         XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
          && base < FIRST_PSEUDO_REGISTER
          && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
                                            GET_MODE (SUBREG_REG (x)),
                                            SUBREG_BYTE (x), GET_MODE (x)))
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                           GET_MODE (SUBREG_REG (x)),
                                           SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}
/* Return the regno of the register REG, handling subregs too.  */

unsigned int
reg_or_subregno (rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}