1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This is the pathetic remnant of the old fame of the jump-optimization pass
23 of the compiler. Now it contains basically a set of utility functions to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
42 #include "hard-reg-set.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info
PARAMS ((rtx
));
63 static void mark_all_labels
PARAMS ((rtx
));
64 static int duplicate_loop_exit_test
PARAMS ((rtx
));
65 static void delete_computation
PARAMS ((rtx
));
66 static void redirect_exp_1
PARAMS ((rtx
*, rtx
, rtx
, rtx
));
67 static int redirect_exp
PARAMS ((rtx
, rtx
, rtx
));
68 static void invert_exp_1
PARAMS ((rtx
));
69 static int invert_exp
PARAMS ((rtx
));
70 static int returnjump_p_1
PARAMS ((rtx
*, void *));
71 static void delete_prior_computation
PARAMS ((rtx
, rtx
));
72 static void mark_modified_reg
PARAMS ((rtx
, rtx
, void *));
74 /* Alternate entry into the jump optimizer. This entry point only rebuilds
75 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
78 rebuild_jump_labels (f
)
84 max_uid
= init_label_info (f
) + 1;
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
92 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
93 if (GET_CODE (XEXP (insn
, 0)) == CODE_LABEL
)
94 LABEL_NUSES (XEXP (insn
, 0))++;
96 /* Keep track of labels used for marking handlers for exception
97 regions; they cannot usually be deleted. */
99 for (insn
= exception_handler_labels
; insn
; insn
= XEXP (insn
, 1))
100 if (GET_CODE (XEXP (insn
, 0)) == CODE_LABEL
)
101 LABEL_NUSES (XEXP (insn
, 0))++;
104 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
105 non-fallthru insn. This is not generally true, as multiple barriers
106 may have crept in, or the BARRIER may be separated from the last
107 real insn by one or more NOTEs.
109 This simple pass moves barriers and removes duplicates so that the
115 rtx insn
, next
, prev
;
116 for (insn
= get_insns (); insn
; insn
= next
)
118 next
= NEXT_INSN (insn
);
119 if (GET_CODE (insn
) == BARRIER
)
121 prev
= prev_nonnote_insn (insn
);
122 if (GET_CODE (prev
) == BARRIER
)
123 delete_barrier (insn
);
124 else if (prev
!= PREV_INSN (insn
))
125 reorder_insns (insn
, insn
, prev
);
131 copy_loop_headers (f
)
135 /* Now iterate optimizing jumps until nothing changes over one pass. */
136 for (insn
= f
; insn
; insn
= next
)
140 next
= NEXT_INSN (insn
);
142 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
143 jump. Try to optimize by duplicating the loop exit test if so.
144 This is only safe immediately after regscan, because it uses
145 the values of regno_first_uid and regno_last_uid. */
146 if (GET_CODE (insn
) == NOTE
147 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
148 && (temp1
= next_nonnote_insn (insn
)) != 0
149 && any_uncondjump_p (temp1
) && onlyjump_p (temp1
))
151 temp
= PREV_INSN (insn
);
152 if (duplicate_loop_exit_test (insn
))
154 next
= NEXT_INSN (temp
);
161 purge_line_number_notes (f
)
166 /* Delete extraneous line number notes.
167 Note that two consecutive notes for different lines are not really
168 extraneous. There should be some indication where that line belonged,
169 even if it became empty. */
171 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
172 if (GET_CODE (insn
) == NOTE
)
174 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_BEG
)
175 /* Any previous line note was for the prologue; gdb wants a new
176 note after the prologue even if it is for the same line. */
177 last_note
= NULL_RTX
;
178 else if (NOTE_LINE_NUMBER (insn
) >= 0)
180 /* Delete this note if it is identical to previous note. */
182 && NOTE_SOURCE_FILE (insn
) == NOTE_SOURCE_FILE (last_note
)
183 && NOTE_LINE_NUMBER (insn
) == NOTE_LINE_NUMBER (last_note
))
185 delete_related_insns (insn
);
194 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
195 notes whose labels don't occur in the insn any more. Returns the
196 largest INSN_UID found. */
204 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
206 if (GET_CODE (insn
) == CODE_LABEL
)
207 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
208 else if (GET_CODE (insn
) == JUMP_INSN
)
209 JUMP_LABEL (insn
) = 0;
210 else if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
214 for (note
= REG_NOTES (insn
); note
; note
= next
)
216 next
= XEXP (note
, 1);
217 if (REG_NOTE_KIND (note
) == REG_LABEL
218 && ! reg_mentioned_p (XEXP (note
, 0), PATTERN (insn
)))
219 remove_note (insn
, note
);
222 if (INSN_UID (insn
) > largest_uid
)
223 largest_uid
= INSN_UID (insn
);
229 /* Mark the label each jump jumps to.
230 Combine consecutive labels, and count uses of labels. */
238 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
241 if (GET_CODE (insn
) == CALL_INSN
242 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
244 mark_all_labels (XEXP (PATTERN (insn
), 0));
245 mark_all_labels (XEXP (PATTERN (insn
), 1));
246 mark_all_labels (XEXP (PATTERN (insn
), 2));
248 /* Canonicalize the tail recursion label attached to the
249 CALL_PLACEHOLDER insn. */
250 if (XEXP (PATTERN (insn
), 3))
252 rtx label_ref
= gen_rtx_LABEL_REF (VOIDmode
,
253 XEXP (PATTERN (insn
), 3));
254 mark_jump_label (label_ref
, insn
, 0);
255 XEXP (PATTERN (insn
), 3) = XEXP (label_ref
, 0);
261 mark_jump_label (PATTERN (insn
), insn
, 0);
262 if (! INSN_DELETED_P (insn
) && GET_CODE (insn
) == JUMP_INSN
)
264 /* When we know the LABEL_REF contained in a REG used in
265 an indirect jump, we'll have a REG_LABEL note so that
266 flow can tell where it's going. */
267 if (JUMP_LABEL (insn
) == 0)
269 rtx label_note
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
);
272 /* But a LABEL_REF around the REG_LABEL note, so
273 that we can canonicalize it. */
274 rtx label_ref
= gen_rtx_LABEL_REF (VOIDmode
,
275 XEXP (label_note
, 0));
277 mark_jump_label (label_ref
, insn
, 0);
278 XEXP (label_note
, 0) = XEXP (label_ref
, 0);
279 JUMP_LABEL (insn
) = XEXP (label_note
, 0);
286 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
287 jump. Assume that this unconditional jump is to the exit test code. If
288 the code is sufficiently simple, make a copy of it before INSN,
289 followed by a jump to the exit of the loop. Then delete the unconditional
292 Return 1 if we made the change, else 0.
294 This is only safe immediately after a regscan pass because it uses the
295 values of regno_first_uid and regno_last_uid. */
298 duplicate_loop_exit_test (loop_start
)
301 rtx insn
, set
, reg
, p
, link
;
302 rtx copy
= 0, first_copy
= 0;
304 rtx exitcode
= NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start
)));
306 int max_reg
= max_reg_num ();
308 rtx loop_pre_header_label
;
310 /* Scan the exit code. We do not perform this optimization if any insn:
314 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
315 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
316 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
319 We also do not do this if we find an insn with ASM_OPERANDS. While
320 this restriction should not be necessary, copying an insn with
321 ASM_OPERANDS can confuse asm_noperands in some cases.
323 Also, don't do this if the exit code is more than 20 insns. */
325 for (insn
= exitcode
;
327 && ! (GET_CODE (insn
) == NOTE
328 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
);
329 insn
= NEXT_INSN (insn
))
331 switch (GET_CODE (insn
))
337 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
338 a jump immediately after the loop start that branches outside
339 the loop but within an outer loop, near the exit test.
340 If we copied this exit test and created a phony
341 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
342 before the exit test look like these could be safely moved
343 out of the loop even if they actually may be never executed.
344 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
346 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
347 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
)
351 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
352 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
))
353 /* If we were to duplicate this code, we would not move
354 the BLOCK notes, and so debugging the moved code would
355 be difficult. Thus, we only move the code with -O2 or
362 /* The code below would grossly mishandle REG_WAS_0 notes,
363 so get rid of them here. */
364 while ((p
= find_reg_note (insn
, REG_WAS_0
, NULL_RTX
)) != 0)
365 remove_note (insn
, p
);
367 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
368 || find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
))
376 /* Unless INSN is zero, we can do the optimization. */
382 /* See if any insn sets a register only used in the loop exit code and
383 not a user variable. If so, replace it with a new register. */
384 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
385 if (GET_CODE (insn
) == INSN
386 && (set
= single_set (insn
)) != 0
387 && ((reg
= SET_DEST (set
), GET_CODE (reg
) == REG
)
388 || (GET_CODE (reg
) == SUBREG
389 && (reg
= SUBREG_REG (reg
), GET_CODE (reg
) == REG
)))
390 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
391 && REGNO_FIRST_UID (REGNO (reg
)) == INSN_UID (insn
))
393 for (p
= NEXT_INSN (insn
); p
!= lastexit
; p
= NEXT_INSN (p
))
394 if (REGNO_LAST_UID (REGNO (reg
)) == INSN_UID (p
))
399 /* We can do the replacement. Allocate reg_map if this is the
400 first replacement we found. */
402 reg_map
= (rtx
*) xcalloc (max_reg
, sizeof (rtx
));
404 REG_LOOP_TEST_P (reg
) = 1;
406 reg_map
[REGNO (reg
)] = gen_reg_rtx (GET_MODE (reg
));
409 loop_pre_header_label
= gen_label_rtx ();
411 /* Now copy each insn. */
412 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
414 switch (GET_CODE (insn
))
417 copy
= emit_barrier_before (loop_start
);
420 /* Only copy line-number notes. */
421 if (NOTE_LINE_NUMBER (insn
) >= 0)
423 copy
= emit_note_before (NOTE_LINE_NUMBER (insn
), loop_start
);
424 NOTE_SOURCE_FILE (copy
) = NOTE_SOURCE_FILE (insn
);
429 copy
= emit_insn_before (copy_insn (PATTERN (insn
)), loop_start
);
431 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
433 mark_jump_label (PATTERN (copy
), copy
, 0);
435 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
437 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
438 if (REG_NOTE_KIND (link
) != REG_LABEL
)
440 if (GET_CODE (link
) == EXPR_LIST
)
442 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link
),
447 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link
),
452 if (reg_map
&& REG_NOTES (copy
))
453 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
457 copy
= emit_jump_insn_before (copy_insn (PATTERN (insn
)),
460 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
461 mark_jump_label (PATTERN (copy
), copy
, 0);
462 if (REG_NOTES (insn
))
464 REG_NOTES (copy
) = copy_insn_1 (REG_NOTES (insn
));
466 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
469 /* Predict a conditional jump that makes the loop iterate again as taken.
470 Other jumps are probably exit conditions, so predict
472 if (any_condjump_p (copy
))
474 rtx label
= JUMP_LABEL (copy
);
477 /* The jump_insn after loop_start should be followed
478 by barrier and loopback label. */
479 if (prev_nonnote_insn (label
)
480 && (prev_nonnote_insn (prev_nonnote_insn (label
))
481 == next_nonnote_insn (loop_start
)))
483 predict_insn_def (copy
, PRED_LOOP_HEADER
, TAKEN
);
484 /* To keep pre-header, we need to redirect all loop
485 entrances before the LOOP_BEG note. */
486 redirect_jump (copy
, loop_pre_header_label
, 0);
489 predict_insn_def (copy
, PRED_LOOP_HEADER
, NOT_TAKEN
);
498 /* Record the first insn we copied. We need it so that we can
499 scan the copied insns for new pseudo registers. */
504 /* Now clean up by emitting a jump to the end label and deleting the jump
505 at the start of the loop. */
506 if (! copy
|| GET_CODE (copy
) != BARRIER
)
508 copy
= emit_jump_insn_before (gen_jump (get_label_after (insn
)),
511 /* Record the first insn we copied. We need it so that we can
512 scan the copied insns for new pseudo registers. This may not
513 be strictly necessary since we should have copied at least one
514 insn above. But I am going to be safe. */
518 mark_jump_label (PATTERN (copy
), copy
, 0);
519 emit_barrier_before (loop_start
);
522 emit_label_before (loop_pre_header_label
, loop_start
);
524 /* Now scan from the first insn we copied to the last insn we copied
525 (copy) for new pseudo registers. Do this after the code to jump to
526 the end label since that might create a new pseudo too. */
527 reg_scan_update (first_copy
, copy
, max_reg
);
529 /* Mark the exit code as the virtual top of the converted loop. */
530 emit_note_before (NOTE_INSN_LOOP_VTOP
, exitcode
);
532 delete_related_insns (next_nonnote_insn (loop_start
));
541 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
542 notes between START and END out before START. START and END may be such
543 notes. Returns the values of the new starting and ending insns, which
544 may be different if the original ones were such notes. */
547 squeeze_notes (startp
, endp
)
557 rtx past_end
= NEXT_INSN (end
);
559 for (insn
= start
; insn
!= past_end
; insn
= next
)
561 next
= NEXT_INSN (insn
);
562 if (GET_CODE (insn
) == NOTE
563 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
564 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
565 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
566 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
567 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
568 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
))
574 rtx prev
= PREV_INSN (insn
);
575 PREV_INSN (insn
) = PREV_INSN (start
);
576 NEXT_INSN (insn
) = start
;
577 NEXT_INSN (PREV_INSN (insn
)) = insn
;
578 PREV_INSN (NEXT_INSN (insn
)) = insn
;
579 NEXT_INSN (prev
) = next
;
580 PREV_INSN (next
) = prev
;
587 /* There were no real instructions, and we can't represent an empty
589 if (start
== past_end
)
598 /* Return the label before INSN, or put a new label there. */
601 get_label_before (insn
)
606 /* Find an existing label at this point
607 or make a new one if there is none. */
608 label
= prev_nonnote_insn (insn
);
610 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
612 rtx prev
= PREV_INSN (insn
);
614 label
= gen_label_rtx ();
615 emit_label_after (label
, prev
);
616 LABEL_NUSES (label
) = 0;
621 /* Return the label after INSN, or put a new label there. */
624 get_label_after (insn
)
629 /* Find an existing label at this point
630 or make a new one if there is none. */
631 label
= next_nonnote_insn (insn
);
633 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
635 label
= gen_label_rtx ();
636 emit_label_after (label
, insn
);
637 LABEL_NUSES (label
) = 0;
642 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
643 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
644 UNKNOWN may be returned in case we have a CC_MODE compare and we don't
645 know whether its source is a floating point or integer comparison. Machine
646 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
647 to help this function avoid overhead in these cases. */
649 reversed_comparison_code_parts (code
, arg0
, arg1
, insn
)
650 rtx insn
, arg0
, arg1
;
653 enum machine_mode mode
;
655 /* If this is not actually a comparison, we can't reverse it. */
656 if (GET_RTX_CLASS (code
) != '<')
659 mode
= GET_MODE (arg0
);
660 if (mode
== VOIDmode
)
661 mode
= GET_MODE (arg1
);
663 /* First see if the machine description supplies us a way to reverse the
664 comparison. Give it priority over everything else to allow the machine description to do
666 #ifdef REVERSIBLE_CC_MODE
667 if (GET_MODE_CLASS (mode
) == MODE_CC
668 && REVERSIBLE_CC_MODE (mode
))
670 #ifdef REVERSE_CONDITION
671 return REVERSE_CONDITION (code
, mode
);
673 return reverse_condition (code
);
677 /* Try a few special cases based on the comparison code. */
686 /* It is always safe to reverse EQ and NE, even for the floating
687 point. Similarly the unsigned comparisons are never used for
688 floating point so we can reverse them in the default way. */
689 return reverse_condition (code
);
694 /* In case we already see unordered comparison, we can be sure to
695 be dealing with floating point so we don't need any more tests. */
696 return reverse_condition_maybe_unordered (code
);
701 /* We don't have a safe way to reverse these yet.
707 /* In case we give up IEEE compatibility, all comparisons are reversible. */
708 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
709 || flag_unsafe_math_optimizations
)
710 return reverse_condition (code
);
712 if (GET_MODE_CLASS (mode
) == MODE_CC
719 /* Try to search for the comparison to determine the real mode.
720 This code is expensive, but with sane machine description it
721 will be never used, since REVERSIBLE_CC_MODE will return true
726 for (prev
= prev_nonnote_insn (insn
);
727 prev
!= 0 && GET_CODE (prev
) != CODE_LABEL
;
728 prev
= prev_nonnote_insn (prev
))
730 rtx set
= set_of (arg0
, prev
);
731 if (set
&& GET_CODE (set
) == SET
732 && rtx_equal_p (SET_DEST (set
), arg0
))
734 rtx src
= SET_SRC (set
);
736 if (GET_CODE (src
) == COMPARE
)
738 rtx comparison
= src
;
739 arg0
= XEXP (src
, 0);
740 mode
= GET_MODE (arg0
);
741 if (mode
== VOIDmode
)
742 mode
= GET_MODE (XEXP (comparison
, 1));
745 /* We can get past reg-reg moves. This may be useful for model
746 of i387 comparisons that first move flag registers around. */
753 /* If the register is clobbered in some way we do not understand,
760 /* An integer condition. */
761 if (GET_CODE (arg0
) == CONST_INT
762 || (GET_MODE (arg0
) != VOIDmode
763 && GET_MODE_CLASS (mode
) != MODE_CC
764 && ! FLOAT_MODE_P (mode
)))
765 return reverse_condition (code
);
770 /* A wrapper around the previous function to take COMPARISON as an rtx
771 expression. This simplifies many callers. */
773 reversed_comparison_code (comparison
, insn
)
774 rtx comparison
, insn
;
776 if (GET_RTX_CLASS (GET_CODE (comparison
)) != '<')
778 return reversed_comparison_code_parts (GET_CODE (comparison
),
779 XEXP (comparison
, 0),
780 XEXP (comparison
, 1), insn
);
783 /* Given an rtx-code for a comparison, return the code for the negated
784 comparison. If no such code exists, return UNKNOWN.
786 WATCH OUT! reverse_condition is not safe to use on a jump that might
787 be acting on the results of an IEEE floating point comparison, because
788 of the special treatment of non-signaling nans in comparisons.
789 Use reversed_comparison_code instead. */
792 reverse_condition (code
)
835 /* Similar, but we're allowed to generate unordered comparisons, which
836 makes it safe for IEEE floating-point. Of course, we have to recognize
837 that the target will support them too... */
840 reverse_condition_maybe_unordered (code
)
843 /* Non-IEEE formats don't have unordered conditions. */
844 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
845 return reverse_condition (code
);
883 /* Similar, but return the code when two operands of a comparison are swapped.
884 This IS safe for IEEE floating-point. */
887 swap_condition (code
)
930 /* Given a comparison CODE, return the corresponding unsigned comparison.
931 If CODE is an equality comparison or already an unsigned comparison,
935 unsigned_condition (code
)
962 /* Similarly, return the signed version of a comparison. */
965 signed_condition (code
)
992 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
993 truth of CODE1 implies the truth of CODE2. */
996 comparison_dominates_p (code1
, code2
)
997 enum rtx_code code1
, code2
;
999 /* UNKNOWN comparison codes can happen as a result of trying to revert
1001 They can't match anything, so we have to reject them here. */
1002 if (code1
== UNKNOWN
|| code2
== UNKNOWN
)
1011 if (code2
== UNLE
|| code2
== UNGE
)
1016 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
1017 || code2
== ORDERED
)
1022 if (code2
== UNLE
|| code2
== NE
)
1027 if (code2
== LE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
1032 if (code2
== UNGE
|| code2
== NE
)
1037 if (code2
== GE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
1043 if (code2
== ORDERED
)
1048 if (code2
== NE
|| code2
== ORDERED
)
1053 if (code2
== LEU
|| code2
== NE
)
1058 if (code2
== GEU
|| code2
== NE
)
1063 if (code2
== NE
|| code2
== UNEQ
|| code2
== UNLE
|| code2
== UNLT
1064 || code2
== UNGE
|| code2
== UNGT
)
1075 /* Return 1 if INSN is an unconditional jump and nothing else. */
1081 return (GET_CODE (insn
) == JUMP_INSN
1082 && GET_CODE (PATTERN (insn
)) == SET
1083 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
1084 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
1087 /* Return nonzero if INSN is a (possibly) conditional jump
1090 Use of this function is deprecated, since we need to support combined
1091 branch and compare insns. Use any_condjump_p instead whenever possible. */
1097 rtx x
= PATTERN (insn
);
1099 if (GET_CODE (x
) != SET
1100 || GET_CODE (SET_DEST (x
)) != PC
)
1104 if (GET_CODE (x
) == LABEL_REF
)
1107 return (GET_CODE (x
) == IF_THEN_ELSE
1108 && ((GET_CODE (XEXP (x
, 2)) == PC
1109 && (GET_CODE (XEXP (x
, 1)) == LABEL_REF
1110 || GET_CODE (XEXP (x
, 1)) == RETURN
))
1111 || (GET_CODE (XEXP (x
, 1)) == PC
1112 && (GET_CODE (XEXP (x
, 2)) == LABEL_REF
1113 || GET_CODE (XEXP (x
, 2)) == RETURN
))));
1118 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1121 Use of this function is deprecated, since we need to support combined
1122 branch and compare insns. Use any_condjump_p instead whenever possible. */
1125 condjump_in_parallel_p (insn
)
1128 rtx x
= PATTERN (insn
);
1130 if (GET_CODE (x
) != PARALLEL
)
1133 x
= XVECEXP (x
, 0, 0);
1135 if (GET_CODE (x
) != SET
)
1137 if (GET_CODE (SET_DEST (x
)) != PC
)
1139 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
1141 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
1143 if (XEXP (SET_SRC (x
), 2) == pc_rtx
1144 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
1145 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
1147 if (XEXP (SET_SRC (x
), 1) == pc_rtx
1148 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
1149 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
1154 /* Return set of PC, otherwise NULL. */
1161 if (GET_CODE (insn
) != JUMP_INSN
)
1163 pat
= PATTERN (insn
);
1165 /* The set is allowed to appear either as the insn pattern or
1166 the first set in a PARALLEL. */
1167 if (GET_CODE (pat
) == PARALLEL
)
1168 pat
= XVECEXP (pat
, 0, 0);
1169 if (GET_CODE (pat
) == SET
&& GET_CODE (SET_DEST (pat
)) == PC
)
1175 /* Return true when insn is an unconditional direct jump,
1176 possibly bundled inside a PARALLEL. */
1179 any_uncondjump_p (insn
)
1182 rtx x
= pc_set (insn
);
1185 if (GET_CODE (SET_SRC (x
)) != LABEL_REF
)
1190 /* Return true when insn is a conditional jump. This function works for
1191 instructions containing PC sets in PARALLELs. The instruction may have
1192 various other effects so before removing the jump you must verify
1195 Note that unlike condjump_p it returns false for unconditional jumps. */
1198 any_condjump_p (insn
)
1201 rtx x
= pc_set (insn
);
1206 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
1209 a
= GET_CODE (XEXP (SET_SRC (x
), 1));
1210 b
= GET_CODE (XEXP (SET_SRC (x
), 2));
1212 return ((b
== PC
&& (a
== LABEL_REF
|| a
== RETURN
))
1213 || (a
== PC
&& (b
== LABEL_REF
|| b
== RETURN
)));
1216 /* Return the label of a conditional jump. */
1219 condjump_label (insn
)
1222 rtx x
= pc_set (insn
);
1227 if (GET_CODE (x
) == LABEL_REF
)
1229 if (GET_CODE (x
) != IF_THEN_ELSE
)
1231 if (XEXP (x
, 2) == pc_rtx
&& GET_CODE (XEXP (x
, 1)) == LABEL_REF
)
1233 if (XEXP (x
, 1) == pc_rtx
&& GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
1238 /* Return true if INSN is a (possibly conditional) return insn. */
1241 returnjump_p_1 (loc
, data
)
1243 void *data ATTRIBUTE_UNUSED
;
1246 return x
&& GET_CODE (x
) == RETURN
;
1253 if (GET_CODE (insn
) != JUMP_INSN
)
1255 return for_each_rtx (&PATTERN (insn
), returnjump_p_1
, NULL
);
1258 /* Return true if INSN is a jump that only transfers control and
1267 if (GET_CODE (insn
) != JUMP_INSN
)
1270 set
= single_set (insn
);
1273 if (GET_CODE (SET_DEST (set
)) != PC
)
1275 if (side_effects_p (SET_SRC (set
)))
1283 /* Return non-zero if X is an RTX that only sets the condition codes
1284 and has no side effects. */
1297 return sets_cc0_p (x
) == 1 && ! side_effects_p (x
);
1300 /* Return 1 if X is an RTX that does nothing but set the condition codes
1301 and CLOBBER or USE registers.
1302 Return -1 if X does explicitly set the condition codes,
1303 but also does other things. */
1316 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
1318 if (GET_CODE (x
) == PARALLEL
)
1322 int other_things
= 0;
1323 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1325 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
1326 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
1328 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
1331 return ! sets_cc0
? 0 : other_things
? -1 : 1;
1337 /* Follow any unconditional jump at LABEL;
1338 return the ultimate label reached by any such chain of jumps.
1339 If LABEL is not followed by a jump, return LABEL.
1340 If the chain loops or we can't find end, return LABEL,
1341 since that tells caller to avoid changing the insn.
1343 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1344 a USE or CLOBBER. */
1347 follow_jumps (label
)
1357 && (insn
= next_active_insn (value
)) != 0
1358 && GET_CODE (insn
) == JUMP_INSN
1359 && ((JUMP_LABEL (insn
) != 0 && any_uncondjump_p (insn
)
1360 && onlyjump_p (insn
))
1361 || GET_CODE (PATTERN (insn
)) == RETURN
)
1362 && (next
= NEXT_INSN (insn
))
1363 && GET_CODE (next
) == BARRIER
);
1366 /* Don't chain through the insn that jumps into a loop
1367 from outside the loop,
1368 since that would create multiple loop entry jumps
1369 and prevent loop optimization. */
1371 if (!reload_completed
)
1372 for (tem
= value
; tem
!= insn
; tem
= NEXT_INSN (tem
))
1373 if (GET_CODE (tem
) == NOTE
1374 && (NOTE_LINE_NUMBER (tem
) == NOTE_INSN_LOOP_BEG
1375 /* ??? Optional. Disables some optimizations, but makes
1376 gcov output more accurate with -O. */
1377 || (flag_test_coverage
&& NOTE_LINE_NUMBER (tem
) > 0)))
1380 /* If we have found a cycle, make the insn jump to itself. */
1381 if (JUMP_LABEL (insn
) == label
)
1384 tem
= next_active_insn (JUMP_LABEL (insn
));
1385 if (tem
&& (GET_CODE (PATTERN (tem
)) == ADDR_VEC
1386 || GET_CODE (PATTERN (tem
)) == ADDR_DIFF_VEC
))
1389 value
= JUMP_LABEL (insn
);
1397 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1398 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1399 in INSN, then store one of them in JUMP_LABEL (INSN).
1400 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1401 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1402 Also, when there are consecutive labels, canonicalize on the last of them.
1404 Note that two labels separated by a loop-beginning note
1405 must be kept distinct if we have not yet done loop-optimization,
1406 because the gap between them is where loop-optimize
1407 will want to move invariant code to. CROSS_JUMP tells us
1408 that loop-optimization is done with. */
1411 mark_jump_label (x
, insn
, in_mem
)
1416 RTX_CODE code
= GET_CODE (x
);
1440 /* If this is a constant-pool reference, see if it is a label. */
1441 if (CONSTANT_POOL_ADDRESS_P (x
))
1442 mark_jump_label (get_pool_constant (x
), insn
, in_mem
);
1447 rtx label
= XEXP (x
, 0);
1449 /* Ignore remaining references to unreachable labels that
1450 have been deleted. */
1451 if (GET_CODE (label
) == NOTE
1452 && NOTE_LINE_NUMBER (label
) == NOTE_INSN_DELETED_LABEL
)
1455 if (GET_CODE (label
) != CODE_LABEL
)
1458 /* Ignore references to labels of containing functions. */
1459 if (LABEL_REF_NONLOCAL_P (x
))
1462 XEXP (x
, 0) = label
;
1463 if (! insn
|| ! INSN_DELETED_P (insn
))
1464 ++LABEL_NUSES (label
);
1468 if (GET_CODE (insn
) == JUMP_INSN
)
1469 JUMP_LABEL (insn
) = label
;
1472 /* Add a REG_LABEL note for LABEL unless there already
1473 is one. All uses of a label, except for labels
1474 that are the targets of jumps, must have a
1476 if (! find_reg_note (insn
, REG_LABEL
, label
))
1477 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_LABEL
, label
,
1484 /* Do walk the labels in a vector, but not the first operand of an
1485 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1488 if (! INSN_DELETED_P (insn
))
1490 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
1492 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
1493 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
, in_mem
);
1501 fmt
= GET_RTX_FORMAT (code
);
1502 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1505 mark_jump_label (XEXP (x
, i
), insn
, in_mem
);
1506 else if (fmt
[i
] == 'E')
1509 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1510 mark_jump_label (XVECEXP (x
, i
, j
), insn
, in_mem
);
1515 /* If all INSN does is set the pc, delete it,
1516 and delete the insn that set the condition codes for it
1517 if that's what the previous thing was. */
1523 rtx set
= single_set (insn
);
1525 if (set
&& GET_CODE (SET_DEST (set
)) == PC
)
1526 delete_computation (insn
);
1529 /* Verify INSN is a BARRIER and delete it. */
1532 delete_barrier (insn
)
1535 if (GET_CODE (insn
) != BARRIER
)
1541 /* Recursively delete prior insns that compute the value (used only by INSN
1542 which the caller is deleting) stored in the register mentioned by NOTE
1543 which is a REG_DEAD note associated with INSN. */
1546 delete_prior_computation (note
, insn
)
1551 rtx reg
= XEXP (note
, 0);
1553 for (our_prev
= prev_nonnote_insn (insn
);
1554 our_prev
&& (GET_CODE (our_prev
) == INSN
1555 || GET_CODE (our_prev
) == CALL_INSN
);
1556 our_prev
= prev_nonnote_insn (our_prev
))
1558 rtx pat
= PATTERN (our_prev
);
1560 /* If we reach a CALL which is not calling a const function
1561 or the callee pops the arguments, then give up. */
1562 if (GET_CODE (our_prev
) == CALL_INSN
1563 && (! CONST_OR_PURE_CALL_P (our_prev
)
1564 || GET_CODE (pat
) != SET
|| GET_CODE (SET_SRC (pat
)) != CALL
))
1567 /* If we reach a SEQUENCE, it is too complex to try to
1568 do anything with it, so give up. */
1569 if (GET_CODE (pat
) == SEQUENCE
)
1572 if (GET_CODE (pat
) == USE
1573 && GET_CODE (XEXP (pat
, 0)) == INSN
)
1574 /* reorg creates USEs that look like this. We leave them
1575 alone because reorg needs them for its own purposes. */
1578 if (reg_set_p (reg
, pat
))
1580 if (side_effects_p (pat
) && GET_CODE (our_prev
) != CALL_INSN
)
1583 if (GET_CODE (pat
) == PARALLEL
)
1585 /* If we find a SET of something else, we can't
1590 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1592 rtx part
= XVECEXP (pat
, 0, i
);
1594 if (GET_CODE (part
) == SET
1595 && SET_DEST (part
) != reg
)
1599 if (i
== XVECLEN (pat
, 0))
1600 delete_computation (our_prev
);
1602 else if (GET_CODE (pat
) == SET
1603 && GET_CODE (SET_DEST (pat
)) == REG
)
1605 int dest_regno
= REGNO (SET_DEST (pat
));
1608 + (dest_regno
< FIRST_PSEUDO_REGISTER
1609 ? HARD_REGNO_NREGS (dest_regno
,
1610 GET_MODE (SET_DEST (pat
))) : 1));
1611 int regno
= REGNO (reg
);
1614 + (regno
< FIRST_PSEUDO_REGISTER
1615 ? HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) : 1));
1617 if (dest_regno
>= regno
1618 && dest_endregno
<= endregno
)
1619 delete_computation (our_prev
);
1621 /* We may have a multi-word hard register and some, but not
1622 all, of the words of the register are needed in subsequent
1623 insns. Write REG_UNUSED notes for those parts that were not
1625 else if (dest_regno
<= regno
1626 && dest_endregno
>= endregno
)
1630 REG_NOTES (our_prev
)
1631 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
,
1632 REG_NOTES (our_prev
));
1634 for (i
= dest_regno
; i
< dest_endregno
; i
++)
1635 if (! find_regno_note (our_prev
, REG_UNUSED
, i
))
1638 if (i
== dest_endregno
)
1639 delete_computation (our_prev
);
1646 /* If PAT references the register that dies here, it is an
1647 additional use. Hence any prior SET isn't dead. However, this
1648 insn becomes the new place for the REG_DEAD note. */
1649 if (reg_overlap_mentioned_p (reg
, pat
))
1651 XEXP (note
, 1) = REG_NOTES (our_prev
);
1652 REG_NOTES (our_prev
) = note
;
1658 /* Delete INSN and recursively delete insns that compute values used only
1659 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1660 If we are running before flow.c, we need do nothing since flow.c will
1661 delete dead code. We also can't know if the registers being used are
1662 dead or not at this point.
1664 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1665 nothing other than set a register that dies in this insn, we can delete
1668 On machines with CC0, if CC0 is used in this insn, we may be able to
1669 delete the insn that set it. */
1672 delete_computation (insn
)
1678 if (reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
1680 rtx prev
= prev_nonnote_insn (insn
);
1681 /* We assume that at this stage
1682 CC's are always set explicitly
1683 and always immediately before the jump that
1684 will use them. So if the previous insn
1685 exists to set the CC's, delete it
1686 (unless it performs auto-increments, etc.). */
1687 if (prev
&& GET_CODE (prev
) == INSN
1688 && sets_cc0_p (PATTERN (prev
)))
1690 if (sets_cc0_p (PATTERN (prev
)) > 0
1691 && ! side_effects_p (PATTERN (prev
)))
1692 delete_computation (prev
);
1694 /* Otherwise, show that cc0 won't be used. */
1695 REG_NOTES (prev
) = gen_rtx_EXPR_LIST (REG_UNUSED
,
1696 cc0_rtx
, REG_NOTES (prev
));
1701 for (note
= REG_NOTES (insn
); note
; note
= next
)
1703 next
= XEXP (note
, 1);
1705 if (REG_NOTE_KIND (note
) != REG_DEAD
1706 /* Verify that the REG_NOTE is legitimate. */
1707 || GET_CODE (XEXP (note
, 0)) != REG
)
1710 delete_prior_computation (note
, insn
);
1713 delete_related_insns (insn
);
1716 /* Delete insn INSN from the chain of insns and update label ref counts
1717 and delete insns now unreachable.
1719 Returns the first insn after INSN that was not deleted.
1721 Usage of this instruction is deprecated. Use delete_insn instead and
1722 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1725 delete_related_insns (insn
)
1728 int was_code_label
= (GET_CODE (insn
) == CODE_LABEL
);
1730 rtx next
= NEXT_INSN (insn
), prev
= PREV_INSN (insn
);
1732 while (next
&& INSN_DELETED_P (next
))
1733 next
= NEXT_INSN (next
);
1735 /* This insn is already deleted => return first following nondeleted. */
1736 if (INSN_DELETED_P (insn
))
1741 /* If instruction is followed by a barrier,
1742 delete the barrier too. */
1744 if (next
!= 0 && GET_CODE (next
) == BARRIER
)
1747 /* If deleting a jump, decrement the count of the label,
1748 and delete the label if it is now unused. */
1750 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
1752 rtx lab
= JUMP_LABEL (insn
), lab_next
;
1754 if (LABEL_NUSES (lab
) == 0)
1756 /* This can delete NEXT or PREV,
1757 either directly if NEXT is JUMP_LABEL (INSN),
1758 or indirectly through more levels of jumps. */
1759 delete_related_insns (lab
);
1761 /* I feel a little doubtful about this loop,
1762 but I see no clean and sure alternative way
1763 to find the first insn after INSN that is not now deleted.
1764 I hope this works. */
1765 while (next
&& INSN_DELETED_P (next
))
1766 next
= NEXT_INSN (next
);
1769 else if ((lab_next
= next_nonnote_insn (lab
)) != NULL
1770 && GET_CODE (lab_next
) == JUMP_INSN
1771 && (GET_CODE (PATTERN (lab_next
)) == ADDR_VEC
1772 || GET_CODE (PATTERN (lab_next
)) == ADDR_DIFF_VEC
))
1774 /* If we're deleting the tablejump, delete the dispatch table.
1775 We may not be able to kill the label immediately preceeding
1776 just yet, as it might be referenced in code leading up to
1778 delete_related_insns (lab_next
);
1782 /* Likewise if we're deleting a dispatch table. */
1784 if (GET_CODE (insn
) == JUMP_INSN
1785 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1786 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
1788 rtx pat
= PATTERN (insn
);
1789 int i
, diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
1790 int len
= XVECLEN (pat
, diff_vec_p
);
1792 for (i
= 0; i
< len
; i
++)
1793 if (LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)) == 0)
1794 delete_related_insns (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0));
1795 while (next
&& INSN_DELETED_P (next
))
1796 next
= NEXT_INSN (next
);
1800 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1801 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
1802 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1803 if (REG_NOTE_KIND (note
) == REG_LABEL
1804 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1805 && GET_CODE (XEXP (note
, 0)) == CODE_LABEL
)
1806 if (LABEL_NUSES (XEXP (note
, 0)) == 0)
1807 delete_related_insns (XEXP (note
, 0));
1809 while (prev
&& (INSN_DELETED_P (prev
) || GET_CODE (prev
) == NOTE
))
1810 prev
= PREV_INSN (prev
);
1812 /* If INSN was a label and a dispatch table follows it,
1813 delete the dispatch table. The tablejump must have gone already.
1814 It isn't useful to fall through into a table. */
1817 && NEXT_INSN (insn
) != 0
1818 && GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
1819 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
1820 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
1821 next
= delete_related_insns (NEXT_INSN (insn
));
1823 /* If INSN was a label, delete insns following it if now unreachable. */
1825 if (was_code_label
&& prev
&& GET_CODE (prev
) == BARRIER
)
1829 && (GET_RTX_CLASS (code
= GET_CODE (next
)) == 'i'
1830 || code
== NOTE
|| code
== BARRIER
1831 || (code
== CODE_LABEL
&& INSN_DELETED_P (next
))))
1834 && NOTE_LINE_NUMBER (next
) != NOTE_INSN_FUNCTION_END
)
1835 next
= NEXT_INSN (next
);
1836 /* Keep going past other deleted labels to delete what follows. */
1837 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
1838 next
= NEXT_INSN (next
);
1840 /* Note: if this deletes a jump, it can cause more
1841 deletion of unreachable code, after a different label.
1842 As long as the value from this recursive call is correct,
1843 this invocation functions correctly. */
1844 next
= delete_related_insns (next
);
1851 /* Advance from INSN till reaching something not deleted
1852 then return that. May return INSN itself. */
1855 next_nondeleted_insn (insn
)
1858 while (INSN_DELETED_P (insn
))
1859 insn
= NEXT_INSN (insn
);
1863 /* Delete a range of insns from FROM to TO, inclusive.
1864 This is for the sake of peephole optimization, so assume
1865 that whatever these insns do will still be done by a new
1866 peephole insn that will replace them. */
1869 delete_for_peephole (from
, to
)
1876 rtx next
= NEXT_INSN (insn
);
1877 rtx prev
= PREV_INSN (insn
);
1879 if (GET_CODE (insn
) != NOTE
)
1881 INSN_DELETED_P (insn
) = 1;
1883 /* Patch this insn out of the chain. */
1884 /* We don't do this all at once, because we
1885 must preserve all NOTEs. */
1887 NEXT_INSN (prev
) = next
;
1890 PREV_INSN (next
) = prev
;
1898 /* Note that if TO is an unconditional jump
1899 we *do not* delete the BARRIER that follows,
1900 since the peephole that replaces this sequence
1901 is also an unconditional jump in that case. */
1904 /* We have determined that INSN is never reached, and are about to
1905 delete it. Print a warning if the user asked for one.
1907 To try to make this warning more useful, this should only be called
1908 once per basic block not reached, and it only warns when the basic
1909 block contains more than one line from the current function, and
1910 contains at least one operation. CSE and inlining can duplicate insns,
1911 so it's possible to get spurious warnings from this. */
1914 never_reached_warning (avoided_insn
)
1918 rtx a_line_note
= NULL
;
1919 int two_avoided_lines
= 0;
1920 int contains_insn
= 0;
1922 if (! warn_notreached
)
1925 /* Scan forwards, looking at LINE_NUMBER notes, until
1926 we hit a LABEL or we run out of insns. */
1928 for (insn
= avoided_insn
; insn
!= NULL
; insn
= NEXT_INSN (insn
))
1930 if (GET_CODE (insn
) == CODE_LABEL
)
1932 else if (GET_CODE (insn
) == NOTE
/* A line number note? */
1933 && NOTE_LINE_NUMBER (insn
) >= 0)
1935 if (a_line_note
== NULL
)
1938 two_avoided_lines
|= (NOTE_LINE_NUMBER (a_line_note
)
1939 != NOTE_LINE_NUMBER (insn
));
1941 else if (INSN_P (insn
))
1944 if (two_avoided_lines
&& contains_insn
)
1945 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note
),
1946 NOTE_LINE_NUMBER (a_line_note
),
1947 "will never be executed");
1950 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1951 NLABEL as a return. Accrue modifications into the change group. */
1954 redirect_exp_1 (loc
, olabel
, nlabel
, insn
)
1960 RTX_CODE code
= GET_CODE (x
);
1964 if (code
== LABEL_REF
)
1966 if (XEXP (x
, 0) == olabel
)
1970 n
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
1972 n
= gen_rtx_RETURN (VOIDmode
);
1974 validate_change (insn
, loc
, n
, 1);
1978 else if (code
== RETURN
&& olabel
== 0)
1980 x
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
1981 if (loc
== &PATTERN (insn
))
1982 x
= gen_rtx_SET (VOIDmode
, pc_rtx
, x
);
1983 validate_change (insn
, loc
, x
, 1);
1987 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
1988 && GET_CODE (SET_SRC (x
)) == LABEL_REF
1989 && XEXP (SET_SRC (x
), 0) == olabel
)
1991 validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 1);
1995 fmt
= GET_RTX_FORMAT (code
);
1996 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1999 redirect_exp_1 (&XEXP (x
, i
), olabel
, nlabel
, insn
);
2000 else if (fmt
[i
] == 'E')
2003 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2004 redirect_exp_1 (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
);
2009 /* Similar, but apply the change group and report success or failure. */
2012 redirect_exp (olabel
, nlabel
, insn
)
2018 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
2019 loc
= &XVECEXP (PATTERN (insn
), 0, 0);
2021 loc
= &PATTERN (insn
);
2023 redirect_exp_1 (loc
, olabel
, nlabel
, insn
);
2024 if (num_validated_changes () == 0)
2027 return apply_change_group ();
2030 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2031 the modifications into the change group. Return false if we did
2032 not see how to do that. */
2035 redirect_jump_1 (jump
, nlabel
)
2038 int ochanges
= num_validated_changes ();
2041 if (GET_CODE (PATTERN (jump
)) == PARALLEL
)
2042 loc
= &XVECEXP (PATTERN (jump
), 0, 0);
2044 loc
= &PATTERN (jump
);
2046 redirect_exp_1 (loc
, JUMP_LABEL (jump
), nlabel
, jump
);
2047 return num_validated_changes () > ochanges
;
2050 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2051 jump target label is unused as a result, it and the code following
2054 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2057 The return value will be 1 if the change was made, 0 if it wasn't
2058 (this can only occur for NLABEL == 0). */
2061 redirect_jump (jump
, nlabel
, delete_unused
)
2065 rtx olabel
= JUMP_LABEL (jump
);
2067 if (nlabel
== olabel
)
2070 if (! redirect_exp (olabel
, nlabel
, jump
))
2073 JUMP_LABEL (jump
) = nlabel
;
2075 ++LABEL_NUSES (nlabel
);
2077 /* If we're eliding the jump over exception cleanups at the end of a
2078 function, move the function end note so that -Wreturn-type works. */
2079 if (olabel
&& nlabel
2080 && NEXT_INSN (olabel
)
2081 && GET_CODE (NEXT_INSN (olabel
)) == NOTE
2082 && NOTE_LINE_NUMBER (NEXT_INSN (olabel
)) == NOTE_INSN_FUNCTION_END
)
2083 emit_note_after (NOTE_INSN_FUNCTION_END
, nlabel
);
2085 if (olabel
&& --LABEL_NUSES (olabel
) == 0 && delete_unused
)
2086 delete_related_insns (olabel
);
2091 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2092 Accrue the modifications into the change group. */
2099 rtx x
= pc_set (insn
);
2105 code
= GET_CODE (x
);
2107 if (code
== IF_THEN_ELSE
)
2109 rtx comp
= XEXP (x
, 0);
2111 enum rtx_code reversed_code
;
2113 /* We can do this in two ways: The preferable way, which can only
2114 be done if this is not an integer comparison, is to reverse
2115 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2116 of the IF_THEN_ELSE. If we can't do either, fail. */
2118 reversed_code
= reversed_comparison_code (comp
, insn
);
2120 if (reversed_code
!= UNKNOWN
)
2122 validate_change (insn
, &XEXP (x
, 0),
2123 gen_rtx_fmt_ee (reversed_code
,
2124 GET_MODE (comp
), XEXP (comp
, 0),
2131 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
2132 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
2138 /* Invert the jump condition of conditional jump insn, INSN.
2140 Return 1 if we can do so, 0 if we cannot find a way to do so that
2141 matches a pattern. */
2147 invert_exp_1 (insn
);
2148 if (num_validated_changes () == 0)
2151 return apply_change_group ();
2154 /* Invert the condition of the jump JUMP, and make it jump to label
2155 NLABEL instead of where it jumps now. Accrue changes into the
2156 change group. Return false if we didn't see how to perform the
2157 inversion and redirection. */
2160 invert_jump_1 (jump
, nlabel
)
2165 ochanges
= num_validated_changes ();
2166 invert_exp_1 (jump
);
2167 if (num_validated_changes () == ochanges
)
2170 return redirect_jump_1 (jump
, nlabel
);
2173 /* Invert the condition of the jump JUMP, and make it jump to label
2174 NLABEL instead of where it jumps now. Return true if successful. */
2177 invert_jump (jump
, nlabel
, delete_unused
)
2181 /* We have to either invert the condition and change the label or
2182 do neither. Either operation could fail. We first try to invert
2183 the jump. If that succeeds, we try changing the label. If that fails,
2184 we invert the jump back to what it was. */
2186 if (! invert_exp (jump
))
2189 if (redirect_jump (jump
, nlabel
, delete_unused
))
2191 invert_br_probabilities (jump
);
2196 if (! invert_exp (jump
))
2197 /* This should just be putting it back the way it was. */
2204 /* Like rtx_equal_p except that it considers two REGs as equal
2205 if they renumber to the same value and considers two commutative
2206 operations to be the same if the order of the operands has been
2209 ??? Addition is not commutative on the PA due to the weird implicit
2210 space register selection rules for memory addresses. Therefore, we
2211 don't consider a + b == b + a.
2213 We could/should make this test a little tighter. Possibly only
2214 disabling it on the PA via some backend macro or only disabling this
2215 case when the PLUS is inside a MEM. */
2218 rtx_renumbered_equal_p (x
, y
)
2222 RTX_CODE code
= GET_CODE (x
);
2228 if ((code
== REG
|| (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == REG
))
2229 && (GET_CODE (y
) == REG
|| (GET_CODE (y
) == SUBREG
2230 && GET_CODE (SUBREG_REG (y
)) == REG
)))
2232 int reg_x
= -1, reg_y
= -1;
2233 int byte_x
= 0, byte_y
= 0;
2235 if (GET_MODE (x
) != GET_MODE (y
))
2238 /* If we haven't done any renumbering, don't
2239 make any assumptions. */
2240 if (reg_renumber
== 0)
2241 return rtx_equal_p (x
, y
);
2245 reg_x
= REGNO (SUBREG_REG (x
));
2246 byte_x
= SUBREG_BYTE (x
);
2248 if (reg_renumber
[reg_x
] >= 0)
2250 reg_x
= subreg_regno_offset (reg_renumber
[reg_x
],
2251 GET_MODE (SUBREG_REG (x
)),
2260 if (reg_renumber
[reg_x
] >= 0)
2261 reg_x
= reg_renumber
[reg_x
];
2264 if (GET_CODE (y
) == SUBREG
)
2266 reg_y
= REGNO (SUBREG_REG (y
));
2267 byte_y
= SUBREG_BYTE (y
);
2269 if (reg_renumber
[reg_y
] >= 0)
2271 reg_y
= subreg_regno_offset (reg_renumber
[reg_y
],
2272 GET_MODE (SUBREG_REG (y
)),
2281 if (reg_renumber
[reg_y
] >= 0)
2282 reg_y
= reg_renumber
[reg_y
];
2285 return reg_x
>= 0 && reg_x
== reg_y
&& byte_x
== byte_y
;
2288 /* Now we have disposed of all the cases
2289 in which different rtx codes can match. */
2290 if (code
!= GET_CODE (y
))
2302 return INTVAL (x
) == INTVAL (y
);
2305 /* We can't assume nonlocal labels have their following insns yet. */
2306 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
2307 return XEXP (x
, 0) == XEXP (y
, 0);
2309 /* Two label-refs are equivalent if they point at labels
2310 in the same position in the instruction stream. */
2311 return (next_real_insn (XEXP (x
, 0))
2312 == next_real_insn (XEXP (y
, 0)));
2315 return XSTR (x
, 0) == XSTR (y
, 0);
2318 /* If we didn't match EQ equality above, they aren't the same. */
2325 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2327 if (GET_MODE (x
) != GET_MODE (y
))
2330 /* For commutative operations, the RTX match if the operand match in any
2331 order. Also handle the simple binary and unary cases without a loop.
2333 ??? Don't consider PLUS a commutative operator; see comments above. */
2334 if ((code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
2336 return ((rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
2337 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)))
2338 || (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 1))
2339 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 0))));
2340 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
2341 return (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
2342 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)));
2343 else if (GET_RTX_CLASS (code
) == '1')
2344 return rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0));
2346 /* Compare the elements. If any pair of corresponding elements
2347 fail to match, return 0 for the whole things. */
2349 fmt
= GET_RTX_FORMAT (code
);
2350 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2356 if (XWINT (x
, i
) != XWINT (y
, i
))
2361 if (XINT (x
, i
) != XINT (y
, i
))
2366 if (XTREE (x
, i
) != XTREE (y
, i
))
2371 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
2376 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
2381 if (XEXP (x
, i
) != XEXP (y
, i
))
2388 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
2390 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2391 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
2402 /* If X is a hard register or equivalent to one or a subregister of one,
2403 return the hard register number. If X is a pseudo register that was not
2404 assigned a hard register, return the pseudo register number. Otherwise,
2405 return -1. Any rtx is valid for X. */
2411 if (GET_CODE (x
) == REG
)
2413 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
2414 return reg_renumber
[REGNO (x
)];
2417 if (GET_CODE (x
) == SUBREG
)
2419 int base
= true_regnum (SUBREG_REG (x
));
2420 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
2421 return base
+ subreg_regno_offset (REGNO (SUBREG_REG (x
)),
2422 GET_MODE (SUBREG_REG (x
)),
2423 SUBREG_BYTE (x
), GET_MODE (x
));
/* Optimize code of the form:
	for (x = a[i]; x; ...)
	   ...
	for (x = a[i]; x; ...)
	   ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

static int *same_regs;

static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
2471 /* Called via note_stores on each insn between the target of the first
2472 branch and the second branch. It marks any changed registers. */
2475 mark_modified_reg (dest
, x
, data
)
2478 void *data ATTRIBUTE_UNUSED
;
2483 if (GET_CODE (dest
) == SUBREG
)
2484 dest
= SUBREG_REG (dest
);
2486 if (GET_CODE (dest
) == MEM
)
2489 if (GET_CODE (dest
) != REG
)
2492 regno
= REGNO (dest
);
2493 if (regno
>= FIRST_PSEUDO_REGISTER
)
2494 modified_regs
[regno
] = 1;
2495 /* Don't consider a hard condition code register as modified,
2496 if it is only being set. thread_jumps will check if it is set
2497 to the same value. */
2498 else if (GET_MODE_CLASS (GET_MODE (dest
)) != MODE_CC
2499 || GET_CODE (x
) != SET
2500 || ! rtx_equal_p (dest
, SET_DEST (x
))
2501 || HARD_REGNO_NREGS (regno
, GET_MODE (dest
)) != 1)
2502 for (i
= 0; i
< HARD_REGNO_NREGS (regno
, GET_MODE (dest
)); i
++)
2503 modified_regs
[regno
+ i
] = 1;
2506 /* F is the first insn in the chain of insns. */
2509 thread_jumps (f
, max_reg
, flag_before_loop
)
2512 int flag_before_loop
;
2514 /* Basic algorithm is to find a conditional branch,
2515 the label it may branch to, and the branch after
2516 that label. If the two branches test the same condition,
2517 walk back from both branch paths until the insn patterns
2518 differ, or code labels are hit. If we make it back to
2519 the target of the first branch, then we know that the first branch
2520 will either always succeed or always fail depending on the relative
2521 senses of the two branches. So adjust the first branch accordingly
2524 rtx label
, b1
, b2
, t1
, t2
;
2525 enum rtx_code code1
, code2
;
2526 rtx b1op0
, b1op1
, b2op0
, b2op1
;
2530 enum rtx_code reversed_code1
, reversed_code2
;
2532 /* Allocate register tables and quick-reset table. */
2533 modified_regs
= (char *) xmalloc (max_reg
* sizeof (char));
2534 same_regs
= (int *) xmalloc (max_reg
* sizeof (int));
2535 all_reset
= (int *) xmalloc (max_reg
* sizeof (int));
2536 for (i
= 0; i
< max_reg
; i
++)
2543 for (b1
= f
; b1
; b1
= NEXT_INSN (b1
))
2548 /* Get to a candidate branch insn. */
2549 if (GET_CODE (b1
) != JUMP_INSN
2550 || ! any_condjump_p (b1
) || JUMP_LABEL (b1
) == 0)
2553 memset (modified_regs
, 0, max_reg
* sizeof (char));
2556 memcpy (same_regs
, all_reset
, max_reg
* sizeof (int));
2559 label
= JUMP_LABEL (b1
);
2561 /* Look for a branch after the target. Record any registers and
2562 memory modified between the target and the branch. Stop when we
2563 get to a label since we can't know what was changed there. */
2564 for (b2
= NEXT_INSN (label
); b2
; b2
= NEXT_INSN (b2
))
2566 if (GET_CODE (b2
) == CODE_LABEL
)
2569 else if (GET_CODE (b2
) == JUMP_INSN
)
2571 /* If this is an unconditional jump and is the only use of
2572 its target label, we can follow it. */
2573 if (any_uncondjump_p (b2
)
2575 && JUMP_LABEL (b2
) != 0
2576 && LABEL_NUSES (JUMP_LABEL (b2
)) == 1)
2578 b2
= JUMP_LABEL (b2
);
2585 if (GET_CODE (b2
) != CALL_INSN
&& GET_CODE (b2
) != INSN
)
2588 if (GET_CODE (b2
) == CALL_INSN
)
2591 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2592 if (call_used_regs
[i
] && ! fixed_regs
[i
]
2593 && i
!= STACK_POINTER_REGNUM
2594 && i
!= FRAME_POINTER_REGNUM
2595 && i
!= HARD_FRAME_POINTER_REGNUM
2596 && i
!= ARG_POINTER_REGNUM
)
2597 modified_regs
[i
] = 1;
2600 note_stores (PATTERN (b2
), mark_modified_reg
, NULL
);
2603 /* Check the next candidate branch insn from the label
2606 || GET_CODE (b2
) != JUMP_INSN
2608 || !any_condjump_p (b2
)
2609 || !onlyjump_p (b2
))
2614 /* Get the comparison codes and operands, reversing the
2615 codes if appropriate. If we don't have comparison codes,
2616 we can't do anything. */
2617 b1op0
= XEXP (XEXP (SET_SRC (set
), 0), 0);
2618 b1op1
= XEXP (XEXP (SET_SRC (set
), 0), 1);
2619 code1
= GET_CODE (XEXP (SET_SRC (set
), 0));
2620 reversed_code1
= code1
;
2621 if (XEXP (SET_SRC (set
), 1) == pc_rtx
)
2622 code1
= reversed_comparison_code (XEXP (SET_SRC (set
), 0), b1
);
2624 reversed_code1
= reversed_comparison_code (XEXP (SET_SRC (set
), 0), b1
);
2626 b2op0
= XEXP (XEXP (SET_SRC (set2
), 0), 0);
2627 b2op1
= XEXP (XEXP (SET_SRC (set2
), 0), 1);
2628 code2
= GET_CODE (XEXP (SET_SRC (set2
), 0));
2629 reversed_code2
= code2
;
2630 if (XEXP (SET_SRC (set2
), 1) == pc_rtx
)
2631 code2
= reversed_comparison_code (XEXP (SET_SRC (set2
), 0), b2
);
2633 reversed_code2
= reversed_comparison_code (XEXP (SET_SRC (set2
), 0), b2
);
2635 /* If they test the same things and knowing that B1 branches
2636 tells us whether or not B2 branches, check if we
2637 can thread the branch. */
2638 if (rtx_equal_for_thread_p (b1op0
, b2op0
, b2
)
2639 && rtx_equal_for_thread_p (b1op1
, b2op1
, b2
)
2640 && (comparison_dominates_p (code1
, code2
)
2641 || comparison_dominates_p (code1
, reversed_code2
)))
2644 t1
= prev_nonnote_insn (b1
);
2645 t2
= prev_nonnote_insn (b2
);
2647 while (t1
!= 0 && t2
!= 0)
2651 /* We have reached the target of the first branch.
2652 If there are no pending register equivalents,
2653 we know that this branch will either always
2654 succeed (if the senses of the two branches are
2655 the same) or always fail (if not). */
2658 if (num_same_regs
!= 0)
2661 if (comparison_dominates_p (code1
, code2
))
2662 new_label
= JUMP_LABEL (b2
);
2664 new_label
= get_label_after (b2
);
2666 if (JUMP_LABEL (b1
) != new_label
)
2668 rtx prev
= PREV_INSN (new_label
);
2670 if (flag_before_loop
2671 && GET_CODE (prev
) == NOTE
2672 && NOTE_LINE_NUMBER (prev
) == NOTE_INSN_LOOP_BEG
)
2674 /* Don't thread to the loop label. If a loop
2675 label is reused, loop optimization will
2676 be disabled for that loop. */
2677 new_label
= gen_label_rtx ();
2678 emit_label_after (new_label
, PREV_INSN (prev
));
2680 changed
|= redirect_jump (b1
, new_label
, 1);
2685 /* If either of these is not a normal insn (it might be
2686 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
2687 have already been skipped above.) Similarly, fail
2688 if the insns are different. */
2689 if (GET_CODE (t1
) != INSN
|| GET_CODE (t2
) != INSN
2690 || recog_memoized (t1
) != recog_memoized (t2
)
2691 || ! rtx_equal_for_thread_p (PATTERN (t1
),
2695 t1
= prev_nonnote_insn (t1
);
2696 t2
= prev_nonnote_insn (t2
);
2703 free (modified_regs
);
2708 /* This is like RTX_EQUAL_P except that it knows about our handling of
2709 possibly equivalent registers and knows to consider volatile and
2710 modified objects as not equal.
2712 YINSN is the insn containing Y. */
2715 rtx_equal_for_thread_p (x
, y
, yinsn
)
2724 code
= GET_CODE (x
);
2725 /* Rtx's of different codes cannot be equal. */
2726 if (code
!= GET_CODE (y
))
2729 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
2730 (REG:SI x) and (REG:HI x) are NOT equivalent. */
2732 if (GET_MODE (x
) != GET_MODE (y
))
2735 /* For floating-point, consider everything unequal. This is a bit
2736 pessimistic, but this pass would only rarely do anything for FP
2738 if (TARGET_FLOAT_FORMAT
== IEEE_FLOAT_FORMAT
2739 && FLOAT_MODE_P (GET_MODE (x
)) && ! flag_unsafe_math_optimizations
)
2742 /* For commutative operations, the RTX match if the operand match in any
2743 order. Also handle the simple binary and unary cases without a loop. */
2744 if (code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
2745 return ((rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
2746 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
))
2747 || (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 1), yinsn
)
2748 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 0), yinsn
)));
2749 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
2750 return (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
2751 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
));
2752 else if (GET_RTX_CLASS (code
) == '1')
2753 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
2755 /* Handle special-cases first. */
2759 if (REGNO (x
) == REGNO (y
) && ! modified_regs
[REGNO (x
)])
2762 /* If neither is user variable or hard register, check for possible
2764 if (REG_USERVAR_P (x
) || REG_USERVAR_P (y
)
2765 || REGNO (x
) < FIRST_PSEUDO_REGISTER
2766 || REGNO (y
) < FIRST_PSEUDO_REGISTER
)
2769 if (same_regs
[REGNO (x
)] == -1)
2771 same_regs
[REGNO (x
)] = REGNO (y
);
2774 /* If this is the first time we are seeing a register on the `Y'
2775 side, see if it is the last use. If not, we can't thread the
2776 jump, so mark it as not equivalent. */
2777 if (REGNO_LAST_UID (REGNO (y
)) != INSN_UID (yinsn
))
2783 return (same_regs
[REGNO (x
)] == (int) REGNO (y
));
2788 /* If memory modified or either volatile, not equivalent.
2789 Else, check address. */
2790 if (modified_mem
|| MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
2793 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
2796 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
2802 /* Cancel a pending `same_regs' if setting equivalenced registers.
2803 Then process source. */
2804 if (GET_CODE (SET_DEST (x
)) == REG
2805 && GET_CODE (SET_DEST (y
)) == REG
)
2807 if (same_regs
[REGNO (SET_DEST (x
))] == (int) REGNO (SET_DEST (y
)))
2809 same_regs
[REGNO (SET_DEST (x
))] = -1;
2812 else if (REGNO (SET_DEST (x
)) != REGNO (SET_DEST (y
)))
2817 if (rtx_equal_for_thread_p (SET_DEST (x
), SET_DEST (y
), yinsn
) == 0)
2821 return rtx_equal_for_thread_p (SET_SRC (x
), SET_SRC (y
), yinsn
);
2824 return XEXP (x
, 0) == XEXP (y
, 0);
2827 return XSTR (x
, 0) == XSTR (y
, 0);
2836 fmt
= GET_RTX_FORMAT (code
);
2837 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2842 if (XWINT (x
, i
) != XWINT (y
, i
))
2848 if (XINT (x
, i
) != XINT (y
, i
))
2854 /* Two vectors must have the same length. */
2855 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
2858 /* And the corresponding elements must match. */
2859 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2860 if (rtx_equal_for_thread_p (XVECEXP (x
, i
, j
),
2861 XVECEXP (y
, i
, j
), yinsn
) == 0)
2866 if (rtx_equal_for_thread_p (XEXP (x
, i
), XEXP (y
, i
), yinsn
) == 0)
2872 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
2877 /* These are just backpointers, so they don't matter. */
2884 /* It is believed that rtx's at this level will never
2885 contain anything but integers and other rtx's,
2886 except for within LABEL_REFs and SYMBOL_REFs. */