1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically a set of utility functions to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
42 #include "hard-reg-set.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info
PARAMS ((rtx
));
63 static void mark_all_labels
PARAMS ((rtx
));
64 static int duplicate_loop_exit_test
PARAMS ((rtx
));
65 static void delete_computation
PARAMS ((rtx
));
66 static void redirect_exp_1
PARAMS ((rtx
*, rtx
, rtx
, rtx
));
67 static int redirect_exp
PARAMS ((rtx
, rtx
, rtx
));
68 static void invert_exp_1
PARAMS ((rtx
));
69 static int invert_exp
PARAMS ((rtx
));
70 static int returnjump_p_1
PARAMS ((rtx
*, void *));
71 static void delete_prior_computation
PARAMS ((rtx
, rtx
));
72 static void mark_modified_reg
PARAMS ((rtx
, rtx
, void *));
74 /* Alternate entry into the jump optimizer. This entry point only rebuilds
75 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
78 rebuild_jump_labels (f
)
84 max_uid
= init_label_info (f
) + 1;
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
92 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
93 if (GET_CODE (XEXP (insn
, 0)) == CODE_LABEL
)
94 LABEL_NUSES (XEXP (insn
, 0))++;
96 /* Keep track of labels used for marking handlers for exception
97 regions; they cannot usually be deleted. */
99 for (insn
= exception_handler_labels
; insn
; insn
= XEXP (insn
, 1))
100 if (GET_CODE (XEXP (insn
, 0)) == CODE_LABEL
)
101 LABEL_NUSES (XEXP (insn
, 0))++;
104 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
105 non-fallthru insn. This is not generally true, as multiple barriers
106 may have crept in, or the BARRIER may be separated from the last
107 real insn by one or more NOTEs.
109 This simple pass moves barriers and removes duplicates so that the
115 rtx insn
, next
, prev
;
116 for (insn
= get_insns (); insn
; insn
= next
)
118 next
= NEXT_INSN (insn
);
119 if (GET_CODE (insn
) == BARRIER
)
121 prev
= prev_nonnote_insn (insn
);
122 if (GET_CODE (prev
) == BARRIER
)
123 delete_barrier (insn
);
124 else if (prev
!= PREV_INSN (insn
))
125 reorder_insns (insn
, insn
, prev
);
131 copy_loop_headers (f
)
135 /* Now iterate optimizing jumps until nothing changes over one pass. */
136 for (insn
= f
; insn
; insn
= next
)
140 next
= NEXT_INSN (insn
);
142 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
143 jump. Try to optimize by duplicating the loop exit test if so.
144 This is only safe immediately after regscan, because it uses
145 the values of regno_first_uid and regno_last_uid. */
146 if (GET_CODE (insn
) == NOTE
147 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
148 && (temp1
= next_nonnote_insn (insn
)) != 0
149 && any_uncondjump_p (temp1
) && onlyjump_p (temp1
))
151 temp
= PREV_INSN (insn
);
152 if (duplicate_loop_exit_test (insn
))
154 next
= NEXT_INSN (temp
);
161 purge_line_number_notes (f
)
166 /* Delete extraneous line number notes.
167 Note that two consecutive notes for different lines are not really
168 extraneous. There should be some indication where that line belonged,
169 even if it became empty. */
171 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
172 if (GET_CODE (insn
) == NOTE
)
174 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_BEG
)
175 /* Any previous line note was for the prologue; gdb wants a new
176 note after the prologue even if it is for the same line. */
177 last_note
= NULL_RTX
;
178 else if (NOTE_LINE_NUMBER (insn
) >= 0)
180 /* Delete this note if it is identical to previous note. */
182 && NOTE_SOURCE_FILE (insn
) == NOTE_SOURCE_FILE (last_note
)
183 && NOTE_LINE_NUMBER (insn
) == NOTE_LINE_NUMBER (last_note
))
185 delete_related_insns (insn
);
194 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
195 notes whose labels don't occur in the insn any more. Returns the
196 largest INSN_UID found. */
204 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
206 if (GET_CODE (insn
) == CODE_LABEL
)
207 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
208 else if (GET_CODE (insn
) == JUMP_INSN
)
209 JUMP_LABEL (insn
) = 0;
210 else if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
214 for (note
= REG_NOTES (insn
); note
; note
= next
)
216 next
= XEXP (note
, 1);
217 if (REG_NOTE_KIND (note
) == REG_LABEL
218 && ! reg_mentioned_p (XEXP (note
, 0), PATTERN (insn
)))
219 remove_note (insn
, note
);
222 if (INSN_UID (insn
) > largest_uid
)
223 largest_uid
= INSN_UID (insn
);
229 /* Mark the label each jump jumps to.
230 Combine consecutive labels, and count uses of labels. */
238 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
241 if (GET_CODE (insn
) == CALL_INSN
242 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
244 mark_all_labels (XEXP (PATTERN (insn
), 0));
245 mark_all_labels (XEXP (PATTERN (insn
), 1));
246 mark_all_labels (XEXP (PATTERN (insn
), 2));
248 /* Canonicalize the tail recursion label attached to the
249 CALL_PLACEHOLDER insn. */
250 if (XEXP (PATTERN (insn
), 3))
252 rtx label_ref
= gen_rtx_LABEL_REF (VOIDmode
,
253 XEXP (PATTERN (insn
), 3));
254 mark_jump_label (label_ref
, insn
, 0);
255 XEXP (PATTERN (insn
), 3) = XEXP (label_ref
, 0);
261 mark_jump_label (PATTERN (insn
), insn
, 0);
262 if (! INSN_DELETED_P (insn
) && GET_CODE (insn
) == JUMP_INSN
)
264 /* When we know the LABEL_REF contained in a REG used in
265 an indirect jump, we'll have a REG_LABEL note so that
266 flow can tell where it's going. */
267 if (JUMP_LABEL (insn
) == 0)
269 rtx label_note
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
);
272 /* But a LABEL_REF around the REG_LABEL note, so
273 that we can canonicalize it. */
274 rtx label_ref
= gen_rtx_LABEL_REF (VOIDmode
,
275 XEXP (label_note
, 0));
277 mark_jump_label (label_ref
, insn
, 0);
278 XEXP (label_note
, 0) = XEXP (label_ref
, 0);
279 JUMP_LABEL (insn
) = XEXP (label_note
, 0);
286 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
287 jump. Assume that this unconditional jump is to the exit test code. If
288 the code is sufficiently simple, make a copy of it before INSN,
289 followed by a jump to the exit of the loop. Then delete the unconditional
292 Return 1 if we made the change, else 0.
294 This is only safe immediately after a regscan pass because it uses the
295 values of regno_first_uid and regno_last_uid. */
298 duplicate_loop_exit_test (loop_start
)
301 rtx insn
, set
, reg
, p
, link
;
302 rtx copy
= 0, first_copy
= 0;
304 rtx exitcode
= NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start
)));
306 int max_reg
= max_reg_num ();
308 rtx loop_pre_header_label
;
310 /* Scan the exit code. We do not perform this optimization if any insn:
314 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
315 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
316 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
319 We also do not do this if we find an insn with ASM_OPERANDS. While
320 this restriction should not be necessary, copying an insn with
321 ASM_OPERANDS can confuse asm_noperands in some cases.
323 Also, don't do this if the exit code is more than 20 insns. */
325 for (insn
= exitcode
;
327 && ! (GET_CODE (insn
) == NOTE
328 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
);
329 insn
= NEXT_INSN (insn
))
331 switch (GET_CODE (insn
))
337 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
338 a jump immediately after the loop start that branches outside
339 the loop but within an outer loop, near the exit test.
340 If we copied this exit test and created a phony
341 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
342 before the exit test look like these could be safely moved
343 out of the loop even if they actually may be never executed.
344 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
346 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
347 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
)
351 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
352 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
))
353 /* If we were to duplicate this code, we would not move
354 the BLOCK notes, and so debugging the moved code would
355 be difficult. Thus, we only move the code with -O2 or
362 /* The code below would grossly mishandle REG_WAS_0 notes,
363 so get rid of them here. */
364 while ((p
= find_reg_note (insn
, REG_WAS_0
, NULL_RTX
)) != 0)
365 remove_note (insn
, p
);
367 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
368 || find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
))
376 /* Unless INSN is zero, we can do the optimization. */
382 /* See if any insn sets a register only used in the loop exit code and
383 not a user variable. If so, replace it with a new register. */
384 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
385 if (GET_CODE (insn
) == INSN
386 && (set
= single_set (insn
)) != 0
387 && ((reg
= SET_DEST (set
), GET_CODE (reg
) == REG
)
388 || (GET_CODE (reg
) == SUBREG
389 && (reg
= SUBREG_REG (reg
), GET_CODE (reg
) == REG
)))
390 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
391 && REGNO_FIRST_UID (REGNO (reg
)) == INSN_UID (insn
))
393 for (p
= NEXT_INSN (insn
); p
!= lastexit
; p
= NEXT_INSN (p
))
394 if (REGNO_LAST_UID (REGNO (reg
)) == INSN_UID (p
))
399 /* We can do the replacement. Allocate reg_map if this is the
400 first replacement we found. */
402 reg_map
= (rtx
*) xcalloc (max_reg
, sizeof (rtx
));
404 REG_LOOP_TEST_P (reg
) = 1;
406 reg_map
[REGNO (reg
)] = gen_reg_rtx (GET_MODE (reg
));
409 loop_pre_header_label
= gen_label_rtx ();
411 /* Now copy each insn. */
412 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
414 switch (GET_CODE (insn
))
417 copy
= emit_barrier_before (loop_start
);
420 /* Only copy line-number notes. */
421 if (NOTE_LINE_NUMBER (insn
) >= 0)
423 copy
= emit_note_before (NOTE_LINE_NUMBER (insn
), loop_start
);
424 NOTE_SOURCE_FILE (copy
) = NOTE_SOURCE_FILE (insn
);
429 copy
= emit_insn_before (copy_insn (PATTERN (insn
)), loop_start
);
431 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
433 mark_jump_label (PATTERN (copy
), copy
, 0);
435 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
437 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
438 if (REG_NOTE_KIND (link
) != REG_LABEL
)
440 if (GET_CODE (link
) == EXPR_LIST
)
442 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link
),
447 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link
),
452 if (reg_map
&& REG_NOTES (copy
))
453 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
457 copy
= emit_jump_insn_before (copy_insn (PATTERN (insn
)),
460 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
461 mark_jump_label (PATTERN (copy
), copy
, 0);
462 if (REG_NOTES (insn
))
464 REG_NOTES (copy
) = copy_insn_1 (REG_NOTES (insn
));
466 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
469 /* Predict a conditional jump that makes the loop loop back as taken.
470 Other jumps are probably exit conditions, so predict
472 if (any_condjump_p (copy
))
474 rtx label
= JUMP_LABEL (copy
);
477 /* The jump_insn after loop_start should be followed
478 by barrier and loopback label. */
479 if (prev_nonnote_insn (label
)
480 && (prev_nonnote_insn (prev_nonnote_insn (label
))
481 == next_nonnote_insn (loop_start
)))
483 predict_insn_def (copy
, PRED_LOOP_HEADER
, TAKEN
);
484 /* To keep pre-header, we need to redirect all loop
485 entrances before the LOOP_BEG note. */
486 redirect_jump (copy
, loop_pre_header_label
, 0);
489 predict_insn_def (copy
, PRED_LOOP_HEADER
, NOT_TAKEN
);
498 /* Record the first insn we copied. We need it so that we can
499 scan the copied insns for new pseudo registers. */
504 /* Now clean up by emitting a jump to the end label and deleting the jump
505 at the start of the loop. */
506 if (! copy
|| GET_CODE (copy
) != BARRIER
)
508 copy
= emit_jump_insn_before (gen_jump (get_label_after (insn
)),
511 /* Record the first insn we copied. We need it so that we can
512 scan the copied insns for new pseudo registers. This may not
513 be strictly necessary since we should have copied at least one
514 insn above. But I am going to be safe. */
518 mark_jump_label (PATTERN (copy
), copy
, 0);
519 emit_barrier_before (loop_start
);
522 emit_label_before (loop_pre_header_label
, loop_start
);
524 /* Now scan from the first insn we copied to the last insn we copied
525 (copy) for new pseudo registers. Do this after the code to jump to
526 the end label since that might create a new pseudo too. */
527 reg_scan_update (first_copy
, copy
, max_reg
);
529 /* Mark the exit code as the virtual top of the converted loop. */
530 emit_note_before (NOTE_INSN_LOOP_VTOP
, exitcode
);
532 delete_related_insns (next_nonnote_insn (loop_start
));
541 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
542 notes between START and END out before START. START and END may be such
543 notes. Returns the values of the new starting and ending insns, which
544 may be different if the original ones were such notes.
545 Return true if there were only such notes and no real instructions. */
548 squeeze_notes (startp
, endp
)
558 rtx past_end
= NEXT_INSN (end
);
560 for (insn
= start
; insn
!= past_end
; insn
= next
)
562 next
= NEXT_INSN (insn
);
563 if (GET_CODE (insn
) == NOTE
564 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
565 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
566 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
567 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
568 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
569 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
))
575 rtx prev
= PREV_INSN (insn
);
576 PREV_INSN (insn
) = PREV_INSN (start
);
577 NEXT_INSN (insn
) = start
;
578 NEXT_INSN (PREV_INSN (insn
)) = insn
;
579 PREV_INSN (NEXT_INSN (insn
)) = insn
;
580 NEXT_INSN (prev
) = next
;
581 PREV_INSN (next
) = prev
;
588 /* There were no real instructions. */
589 if (start
== past_end
)
599 /* Return the label before INSN, or put a new label there. */
602 get_label_before (insn
)
607 /* Find an existing label at this point
608 or make a new one if there is none. */
609 label
= prev_nonnote_insn (insn
);
611 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
613 rtx prev
= PREV_INSN (insn
);
615 label
= gen_label_rtx ();
616 emit_label_after (label
, prev
);
617 LABEL_NUSES (label
) = 0;
622 /* Return the label after INSN, or put a new label there. */
625 get_label_after (insn
)
630 /* Find an existing label at this point
631 or make a new one if there is none. */
632 label
= next_nonnote_insn (insn
);
634 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
636 label
= gen_label_rtx ();
637 emit_label_after (label
, insn
);
638 LABEL_NUSES (label
) = 0;
643 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
644 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
645 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
646 know whether its source is a floating point or integer comparison. The machine
647 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
648 to help this function avoid overhead in these cases. */
650 reversed_comparison_code_parts (code
, arg0
, arg1
, insn
)
651 rtx insn
, arg0
, arg1
;
654 enum machine_mode mode
;
656 /* If this is not actually a comparison, we can't reverse it. */
657 if (GET_RTX_CLASS (code
) != '<')
660 mode
= GET_MODE (arg0
);
661 if (mode
== VOIDmode
)
662 mode
= GET_MODE (arg1
);
664 /* First see if the machine description supplies us a way to reverse the comparison.
665 Give it priority over everything else to allow machine description to do
667 #ifdef REVERSIBLE_CC_MODE
668 if (GET_MODE_CLASS (mode
) == MODE_CC
669 && REVERSIBLE_CC_MODE (mode
))
671 #ifdef REVERSE_CONDITION
672 return REVERSE_CONDITION (code
, mode
);
674 return reverse_condition (code
);
678 /* Try a few special cases based on the comparison code. */
687 /* It is always safe to reverse EQ and NE, even for the floating
688 point. Similarly the unsigned comparisons are never used for
689 floating point so we can reverse them in the default way. */
690 return reverse_condition (code
);
695 /* In case we already see unordered comparison, we can be sure to
696 be dealing with floating point so we don't need any more tests. */
697 return reverse_condition_maybe_unordered (code
);
702 /* We don't have safe way to reverse these yet. */
708 /* In case we give up IEEE compatibility, all comparisons are reversible. */
709 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
710 || flag_unsafe_math_optimizations
)
711 return reverse_condition (code
);
713 if (GET_MODE_CLASS (mode
) == MODE_CC
720 /* Try to search for the comparison to determine the real mode.
721 This code is expensive, but with sane machine description it
722 will be never used, since REVERSIBLE_CC_MODE will return true
727 for (prev
= prev_nonnote_insn (insn
);
728 prev
!= 0 && GET_CODE (prev
) != CODE_LABEL
;
729 prev
= prev_nonnote_insn (prev
))
731 rtx set
= set_of (arg0
, prev
);
732 if (set
&& GET_CODE (set
) == SET
733 && rtx_equal_p (SET_DEST (set
), arg0
))
735 rtx src
= SET_SRC (set
);
737 if (GET_CODE (src
) == COMPARE
)
739 rtx comparison
= src
;
740 arg0
= XEXP (src
, 0);
741 mode
= GET_MODE (arg0
);
742 if (mode
== VOIDmode
)
743 mode
= GET_MODE (XEXP (comparison
, 1));
746 /* We can get past reg-reg moves. This may be useful for model
747 of i387 comparisons that first move flag registers around. */
754 /* If the register is clobbered in some way we cannot understand,
761 /* An integer condition. */
762 if (GET_CODE (arg0
) == CONST_INT
763 || (GET_MODE (arg0
) != VOIDmode
764 && GET_MODE_CLASS (mode
) != MODE_CC
765 && ! FLOAT_MODE_P (mode
)))
766 return reverse_condition (code
);
771 /* A wrapper around the previous function to take COMPARISON as rtx
772 expression. This simplifies many callers. */
774 reversed_comparison_code (comparison
, insn
)
775 rtx comparison
, insn
;
777 if (GET_RTX_CLASS (GET_CODE (comparison
)) != '<')
779 return reversed_comparison_code_parts (GET_CODE (comparison
),
780 XEXP (comparison
, 0),
781 XEXP (comparison
, 1), insn
);
784 /* Given an rtx-code for a comparison, return the code for the negated
785 comparison. If no such code exists, return UNKNOWN.
787 WATCH OUT! reverse_condition is not safe to use on a jump that might
788 be acting on the results of an IEEE floating point comparison, because
789 of the special treatment of non-signaling nans in comparisons.
790 Use reversed_comparison_code instead. */
793 reverse_condition (code
)
836 /* Similar, but we're allowed to generate unordered comparisons, which
837 makes it safe for IEEE floating-point. Of course, we have to recognize
838 that the target will support them too... */
841 reverse_condition_maybe_unordered (code
)
844 /* Non-IEEE formats don't have unordered conditions. */
845 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
846 return reverse_condition (code
);
884 /* Similar, but return the code when two operands of a comparison are swapped.
885 This IS safe for IEEE floating-point. */
888 swap_condition (code
)
931 /* Given a comparison CODE, return the corresponding unsigned comparison.
932 If CODE is an equality comparison or already an unsigned comparison,
936 unsigned_condition (code
)
963 /* Similarly, return the signed version of a comparison. */
966 signed_condition (code
)
993 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
994 truth of CODE1 implies the truth of CODE2. */
997 comparison_dominates_p (code1
, code2
)
998 enum rtx_code code1
, code2
;
1000 /* UNKNOWN comparison codes can happen as a result of trying to revert
1002 They can't match anything, so we have to reject them here. */
1003 if (code1
== UNKNOWN
|| code2
== UNKNOWN
)
1012 if (code2
== UNLE
|| code2
== UNGE
)
1017 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
1018 || code2
== ORDERED
)
1023 if (code2
== UNLE
|| code2
== NE
)
1028 if (code2
== LE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
1033 if (code2
== UNGE
|| code2
== NE
)
1038 if (code2
== GE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
1044 if (code2
== ORDERED
)
1049 if (code2
== NE
|| code2
== ORDERED
)
1054 if (code2
== LEU
|| code2
== NE
)
1059 if (code2
== GEU
|| code2
== NE
)
1064 if (code2
== NE
|| code2
== UNEQ
|| code2
== UNLE
|| code2
== UNLT
1065 || code2
== UNGE
|| code2
== UNGT
)
1076 /* Return 1 if INSN is an unconditional jump and nothing else. */
1082 return (GET_CODE (insn
) == JUMP_INSN
1083 && GET_CODE (PATTERN (insn
)) == SET
1084 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
1085 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
1088 /* Return nonzero if INSN is a (possibly) conditional jump
1091 Use of this function is deprecated, since we need to support combined
1092 branch and compare insns. Use any_condjump_p instead whenever possible. */
1098 rtx x
= PATTERN (insn
);
1100 if (GET_CODE (x
) != SET
1101 || GET_CODE (SET_DEST (x
)) != PC
)
1105 if (GET_CODE (x
) == LABEL_REF
)
1108 return (GET_CODE (x
) == IF_THEN_ELSE
1109 && ((GET_CODE (XEXP (x
, 2)) == PC
1110 && (GET_CODE (XEXP (x
, 1)) == LABEL_REF
1111 || GET_CODE (XEXP (x
, 1)) == RETURN
))
1112 || (GET_CODE (XEXP (x
, 1)) == PC
1113 && (GET_CODE (XEXP (x
, 2)) == LABEL_REF
1114 || GET_CODE (XEXP (x
, 2)) == RETURN
))));
1119 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1122 Use of this function is deprecated, since we need to support combined
1123 branch and compare insns. Use any_condjump_p instead whenever possible. */
1126 condjump_in_parallel_p (insn
)
1129 rtx x
= PATTERN (insn
);
1131 if (GET_CODE (x
) != PARALLEL
)
1134 x
= XVECEXP (x
, 0, 0);
1136 if (GET_CODE (x
) != SET
)
1138 if (GET_CODE (SET_DEST (x
)) != PC
)
1140 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
1142 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
1144 if (XEXP (SET_SRC (x
), 2) == pc_rtx
1145 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
1146 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
1148 if (XEXP (SET_SRC (x
), 1) == pc_rtx
1149 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
1150 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
1155 /* Return set of PC, otherwise NULL. */
1162 if (GET_CODE (insn
) != JUMP_INSN
)
1164 pat
= PATTERN (insn
);
1166 /* The set is allowed to appear either as the insn pattern or
1167 the first set in a PARALLEL. */
1168 if (GET_CODE (pat
) == PARALLEL
)
1169 pat
= XVECEXP (pat
, 0, 0);
1170 if (GET_CODE (pat
) == SET
&& GET_CODE (SET_DEST (pat
)) == PC
)
1176 /* Return true when insn is an unconditional direct jump,
1177 possibly bundled inside a PARALLEL. */
1180 any_uncondjump_p (insn
)
1183 rtx x
= pc_set (insn
);
1186 if (GET_CODE (SET_SRC (x
)) != LABEL_REF
)
1191 /* Return true when insn is a conditional jump. This function works for
1192 instructions containing PC sets in PARALLELs. The instruction may have
1193 various other effects so before removing the jump you must verify
1196 Note that unlike condjump_p it returns false for unconditional jumps. */
1199 any_condjump_p (insn
)
1202 rtx x
= pc_set (insn
);
1207 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
1210 a
= GET_CODE (XEXP (SET_SRC (x
), 1));
1211 b
= GET_CODE (XEXP (SET_SRC (x
), 2));
1213 return ((b
== PC
&& (a
== LABEL_REF
|| a
== RETURN
))
1214 || (a
== PC
&& (b
== LABEL_REF
|| b
== RETURN
)));
1217 /* Return the label of a conditional jump. */
1220 condjump_label (insn
)
1223 rtx x
= pc_set (insn
);
1228 if (GET_CODE (x
) == LABEL_REF
)
1230 if (GET_CODE (x
) != IF_THEN_ELSE
)
1232 if (XEXP (x
, 2) == pc_rtx
&& GET_CODE (XEXP (x
, 1)) == LABEL_REF
)
1234 if (XEXP (x
, 1) == pc_rtx
&& GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
1239 /* Return true if INSN is a (possibly conditional) return insn. */
1242 returnjump_p_1 (loc
, data
)
1244 void *data ATTRIBUTE_UNUSED
;
1248 return x
&& (GET_CODE (x
) == RETURN
1249 || (GET_CODE (x
) == SET
&& SET_IS_RETURN_P (x
)));
1256 if (GET_CODE (insn
) != JUMP_INSN
)
1258 return for_each_rtx (&PATTERN (insn
), returnjump_p_1
, NULL
);
1261 /* Return true if INSN is a jump that only transfers control and
1270 if (GET_CODE (insn
) != JUMP_INSN
)
1273 set
= single_set (insn
);
1276 if (GET_CODE (SET_DEST (set
)) != PC
)
1278 if (side_effects_p (SET_SRC (set
)))
1286 /* Return non-zero if X is an RTX that only sets the condition codes
1287 and has no side effects. */
1300 return sets_cc0_p (x
) == 1 && ! side_effects_p (x
);
1303 /* Return 1 if X is an RTX that does nothing but set the condition codes
1304 and CLOBBER or USE registers.
1305 Return -1 if X does explicitly set the condition codes,
1306 but also does other things. */
1319 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
1321 if (GET_CODE (x
) == PARALLEL
)
1325 int other_things
= 0;
1326 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1328 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
1329 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
1331 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
1334 return ! sets_cc0
? 0 : other_things
? -1 : 1;
1340 /* Follow any unconditional jump at LABEL;
1341 return the ultimate label reached by any such chain of jumps.
1342 If LABEL is not followed by a jump, return LABEL.
1343 If the chain loops or we can't find end, return LABEL,
1344 since that tells caller to avoid changing the insn.
1346 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1347 a USE or CLOBBER. */
1350 follow_jumps (label
)
1360 && (insn
= next_active_insn (value
)) != 0
1361 && GET_CODE (insn
) == JUMP_INSN
1362 && ((JUMP_LABEL (insn
) != 0 && any_uncondjump_p (insn
)
1363 && onlyjump_p (insn
))
1364 || GET_CODE (PATTERN (insn
)) == RETURN
)
1365 && (next
= NEXT_INSN (insn
))
1366 && GET_CODE (next
) == BARRIER
);
1369 /* Don't chain through the insn that jumps into a loop
1370 from outside the loop,
1371 since that would create multiple loop entry jumps
1372 and prevent loop optimization. */
1374 if (!reload_completed
)
1375 for (tem
= value
; tem
!= insn
; tem
= NEXT_INSN (tem
))
1376 if (GET_CODE (tem
) == NOTE
1377 && (NOTE_LINE_NUMBER (tem
) == NOTE_INSN_LOOP_BEG
1378 /* ??? Optional. Disables some optimizations, but makes
1379 gcov output more accurate with -O. */
1380 || (flag_test_coverage
&& NOTE_LINE_NUMBER (tem
) > 0)))
1383 /* If we have found a cycle, make the insn jump to itself. */
1384 if (JUMP_LABEL (insn
) == label
)
1387 tem
= next_active_insn (JUMP_LABEL (insn
));
1388 if (tem
&& (GET_CODE (PATTERN (tem
)) == ADDR_VEC
1389 || GET_CODE (PATTERN (tem
)) == ADDR_DIFF_VEC
))
1392 value
= JUMP_LABEL (insn
);
1400 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1401 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1402 in INSN, then store one of them in JUMP_LABEL (INSN).
1403 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1404 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1405 Also, when there are consecutive labels, canonicalize on the last of them.
1407 Note that two labels separated by a loop-beginning note
1408 must be kept distinct if we have not yet done loop-optimization,
1409 because the gap between them is where loop-optimize
1410 will want to move invariant code to. CROSS_JUMP tells us
1411 that loop-optimization is done with. */
1414 mark_jump_label (x
, insn
, in_mem
)
1419 RTX_CODE code
= GET_CODE (x
);
1443 /* If this is a constant-pool reference, see if it is a label. */
1444 if (CONSTANT_POOL_ADDRESS_P (x
))
1445 mark_jump_label (get_pool_constant (x
), insn
, in_mem
);
1450 rtx label
= XEXP (x
, 0);
1452 /* Ignore remaining references to unreachable labels that
1453 have been deleted. */
1454 if (GET_CODE (label
) == NOTE
1455 && NOTE_LINE_NUMBER (label
) == NOTE_INSN_DELETED_LABEL
)
1458 if (GET_CODE (label
) != CODE_LABEL
)
1461 /* Ignore references to labels of containing functions. */
1462 if (LABEL_REF_NONLOCAL_P (x
))
1465 XEXP (x
, 0) = label
;
1466 if (! insn
|| ! INSN_DELETED_P (insn
))
1467 ++LABEL_NUSES (label
);
1471 if (GET_CODE (insn
) == JUMP_INSN
)
1472 JUMP_LABEL (insn
) = label
;
1475 /* Add a REG_LABEL note for LABEL unless there already
1476 is one. All uses of a label, except for labels
1477 that are the targets of jumps, must have a
1479 if (! find_reg_note (insn
, REG_LABEL
, label
))
1480 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_LABEL
, label
,
1487 /* Do walk the labels in a vector, but not the first operand of an
1488 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1491 if (! INSN_DELETED_P (insn
))
1493 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
1495 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
1496 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
, in_mem
);
1504 fmt
= GET_RTX_FORMAT (code
);
1505 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1508 mark_jump_label (XEXP (x
, i
), insn
, in_mem
);
1509 else if (fmt
[i
] == 'E')
1512 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1513 mark_jump_label (XVECEXP (x
, i
, j
), insn
, in_mem
);
1518 /* If all INSN does is set the pc, delete it,
1519 and delete the insn that set the condition codes for it
1520 if that's what the previous thing was. */
1526 rtx set
= single_set (insn
);
1528 if (set
&& GET_CODE (SET_DEST (set
)) == PC
)
1529 delete_computation (insn
);
1532 /* Verify INSN is a BARRIER and delete it. */
1535 delete_barrier (insn
)
1538 if (GET_CODE (insn
) != BARRIER
)
1544 /* Recursively delete prior insns that compute the value (used only by INSN
1545 which the caller is deleting) stored in the register mentioned by NOTE
1546 which is a REG_DEAD note associated with INSN. */
1549 delete_prior_computation (note
, insn
)
1554 rtx reg
= XEXP (note
, 0);
1556 for (our_prev
= prev_nonnote_insn (insn
);
1557 our_prev
&& (GET_CODE (our_prev
) == INSN
1558 || GET_CODE (our_prev
) == CALL_INSN
);
1559 our_prev
= prev_nonnote_insn (our_prev
))
1561 rtx pat
= PATTERN (our_prev
);
1563 /* If we reach a CALL which is not calling a const function
1564 or the callee pops the arguments, then give up. */
1565 if (GET_CODE (our_prev
) == CALL_INSN
1566 && (! CONST_OR_PURE_CALL_P (our_prev
)
1567 || GET_CODE (pat
) != SET
|| GET_CODE (SET_SRC (pat
)) != CALL
))
1570 /* If we reach a SEQUENCE, it is too complex to try to
1571 do anything with it, so give up. */
1572 if (GET_CODE (pat
) == SEQUENCE
)
1575 if (GET_CODE (pat
) == USE
1576 && GET_CODE (XEXP (pat
, 0)) == INSN
)
1577 /* reorg creates USEs that look like this. We leave them
1578 alone because reorg needs them for its own purposes. */
1581 if (reg_set_p (reg
, pat
))
1583 if (side_effects_p (pat
) && GET_CODE (our_prev
) != CALL_INSN
)
1586 if (GET_CODE (pat
) == PARALLEL
)
1588 /* If we find a SET of something else, we can't
1593 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1595 rtx part
= XVECEXP (pat
, 0, i
);
1597 if (GET_CODE (part
) == SET
1598 && SET_DEST (part
) != reg
)
1602 if (i
== XVECLEN (pat
, 0))
1603 delete_computation (our_prev
);
1605 else if (GET_CODE (pat
) == SET
1606 && GET_CODE (SET_DEST (pat
)) == REG
)
1608 int dest_regno
= REGNO (SET_DEST (pat
));
1611 + (dest_regno
< FIRST_PSEUDO_REGISTER
1612 ? HARD_REGNO_NREGS (dest_regno
,
1613 GET_MODE (SET_DEST (pat
))) : 1));
1614 int regno
= REGNO (reg
);
1617 + (regno
< FIRST_PSEUDO_REGISTER
1618 ? HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) : 1));
1620 if (dest_regno
>= regno
1621 && dest_endregno
<= endregno
)
1622 delete_computation (our_prev
);
1624 /* We may have a multi-word hard register and some, but not
1625 all, of the words of the register are needed in subsequent
1626 insns. Write REG_UNUSED notes for those parts that were not
1628 else if (dest_regno
<= regno
1629 && dest_endregno
>= endregno
)
1633 REG_NOTES (our_prev
)
1634 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
,
1635 REG_NOTES (our_prev
));
1637 for (i
= dest_regno
; i
< dest_endregno
; i
++)
1638 if (! find_regno_note (our_prev
, REG_UNUSED
, i
))
1641 if (i
== dest_endregno
)
1642 delete_computation (our_prev
);
1649 /* If PAT references the register that dies here, it is an
1650 additional use. Hence any prior SET isn't dead. However, this
1651 insn becomes the new place for the REG_DEAD note. */
1652 if (reg_overlap_mentioned_p (reg
, pat
))
1654 XEXP (note
, 1) = REG_NOTES (our_prev
);
1655 REG_NOTES (our_prev
) = note
;
1661 /* Delete INSN and recursively delete insns that compute values used only
1662 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1663 If we are running before flow.c, we need do nothing since flow.c will
1664 delete dead code. We also can't know if the registers being used are
1665 dead or not at this point.
1667 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1668 nothing other than set a register that dies in this insn, we can delete
1671 On machines with CC0, if CC0 is used in this insn, we may be able to
1672 delete the insn that set it. */
1675 delete_computation (insn
)
1681 if (reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
1683 rtx prev
= prev_nonnote_insn (insn
);
1684 /* We assume that at this stage
1685 CC's are always set explicitly
1686 and always immediately before the jump that
1687 will use them. So if the previous insn
1688 exists to set the CC's, delete it
1689 (unless it performs auto-increments, etc.). */
1690 if (prev
&& GET_CODE (prev
) == INSN
1691 && sets_cc0_p (PATTERN (prev
)))
1693 if (sets_cc0_p (PATTERN (prev
)) > 0
1694 && ! side_effects_p (PATTERN (prev
)))
1695 delete_computation (prev
);
1697 /* Otherwise, show that cc0 won't be used. */
1698 REG_NOTES (prev
) = gen_rtx_EXPR_LIST (REG_UNUSED
,
1699 cc0_rtx
, REG_NOTES (prev
));
1704 for (note
= REG_NOTES (insn
); note
; note
= next
)
1706 next
= XEXP (note
, 1);
1708 if (REG_NOTE_KIND (note
) != REG_DEAD
1709 /* Verify that the REG_NOTE is legitimate. */
1710 || GET_CODE (XEXP (note
, 0)) != REG
)
1713 delete_prior_computation (note
, insn
);
1716 delete_related_insns (insn
);
1719 /* Delete insn INSN from the chain of insns and update label ref counts
1720 and delete insns now unreachable.
1722 Returns the first insn after INSN that was not deleted.
1724 Usage of this instruction is deprecated. Use delete_insn instead and
1725 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1728 delete_related_insns (insn
)
1731 int was_code_label
= (GET_CODE (insn
) == CODE_LABEL
);
1733 rtx next
= NEXT_INSN (insn
), prev
= PREV_INSN (insn
);
1735 while (next
&& INSN_DELETED_P (next
))
1736 next
= NEXT_INSN (next
);
1738 /* This insn is already deleted => return first following nondeleted. */
1739 if (INSN_DELETED_P (insn
))
1744 /* If instruction is followed by a barrier,
1745 delete the barrier too. */
1747 if (next
!= 0 && GET_CODE (next
) == BARRIER
)
1750 /* If deleting a jump, decrement the count of the label,
1751 and delete the label if it is now unused. */
1753 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
1755 rtx lab
= JUMP_LABEL (insn
), lab_next
;
1757 if (LABEL_NUSES (lab
) == 0)
1759 /* This can delete NEXT or PREV,
1760 either directly if NEXT is JUMP_LABEL (INSN),
1761 or indirectly through more levels of jumps. */
1762 delete_related_insns (lab
);
1764 /* I feel a little doubtful about this loop,
1765 but I see no clean and sure alternative way
1766 to find the first insn after INSN that is not now deleted.
1767 I hope this works. */
1768 while (next
&& INSN_DELETED_P (next
))
1769 next
= NEXT_INSN (next
);
1772 else if ((lab_next
= next_nonnote_insn (lab
)) != NULL
1773 && GET_CODE (lab_next
) == JUMP_INSN
1774 && (GET_CODE (PATTERN (lab_next
)) == ADDR_VEC
1775 || GET_CODE (PATTERN (lab_next
)) == ADDR_DIFF_VEC
))
1777 /* If we're deleting the tablejump, delete the dispatch table.
1778 We may not be able to kill the label immediately preceding
1779 just yet, as it might be referenced in code leading up to
1781 delete_related_insns (lab_next
);
1785 /* Likewise if we're deleting a dispatch table. */
1787 if (GET_CODE (insn
) == JUMP_INSN
1788 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1789 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
1791 rtx pat
= PATTERN (insn
);
1792 int i
, diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
1793 int len
= XVECLEN (pat
, diff_vec_p
);
1795 for (i
= 0; i
< len
; i
++)
1796 if (LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)) == 0)
1797 delete_related_insns (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0));
1798 while (next
&& INSN_DELETED_P (next
))
1799 next
= NEXT_INSN (next
);
1803 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1804 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
1805 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1806 if (REG_NOTE_KIND (note
) == REG_LABEL
1807 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1808 && GET_CODE (XEXP (note
, 0)) == CODE_LABEL
)
1809 if (LABEL_NUSES (XEXP (note
, 0)) == 0)
1810 delete_related_insns (XEXP (note
, 0));
1812 while (prev
&& (INSN_DELETED_P (prev
) || GET_CODE (prev
) == NOTE
))
1813 prev
= PREV_INSN (prev
);
1815 /* If INSN was a label and a dispatch table follows it,
1816 delete the dispatch table. The tablejump must have gone already.
1817 It isn't useful to fall through into a table. */
1820 && NEXT_INSN (insn
) != 0
1821 && GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
1822 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
1823 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
1824 next
= delete_related_insns (NEXT_INSN (insn
));
1826 /* If INSN was a label, delete insns following it if now unreachable. */
1828 if (was_code_label
&& prev
&& GET_CODE (prev
) == BARRIER
)
1832 && (GET_RTX_CLASS (code
= GET_CODE (next
)) == 'i'
1833 || code
== NOTE
|| code
== BARRIER
1834 || (code
== CODE_LABEL
&& INSN_DELETED_P (next
))))
1837 && NOTE_LINE_NUMBER (next
) != NOTE_INSN_FUNCTION_END
)
1838 next
= NEXT_INSN (next
);
1839 /* Keep going past other deleted labels to delete what follows. */
1840 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
1841 next
= NEXT_INSN (next
);
1843 /* Note: if this deletes a jump, it can cause more
1844 deletion of unreachable code, after a different label.
1845 As long as the value from this recursive call is correct,
1846 this invocation functions correctly. */
1847 next
= delete_related_insns (next
);
1854 /* Advance from INSN till reaching something not deleted
1855 then return that. May return INSN itself. */
1858 next_nondeleted_insn (insn
)
1861 while (INSN_DELETED_P (insn
))
1862 insn
= NEXT_INSN (insn
);
1866 /* Delete a range of insns from FROM to TO, inclusive.
1867 This is for the sake of peephole optimization, so assume
1868 that whatever these insns do will still be done by a new
1869 peephole insn that will replace them. */
1872 delete_for_peephole (from
, to
)
1879 rtx next
= NEXT_INSN (insn
);
1880 rtx prev
= PREV_INSN (insn
);
1882 if (GET_CODE (insn
) != NOTE
)
1884 INSN_DELETED_P (insn
) = 1;
1886 /* Patch this insn out of the chain. */
1887 /* We don't do this all at once, because we
1888 must preserve all NOTEs. */
1890 NEXT_INSN (prev
) = next
;
1893 PREV_INSN (next
) = prev
;
1901 /* Note that if TO is an unconditional jump
1902 we *do not* delete the BARRIER that follows,
1903 since the peephole that replaces this sequence
1904 is also an unconditional jump in that case. */
1907 /* We have determined that INSN is never reached, and are about to
1908 delete it. Print a warning if the user asked for one.
1910 To try to make this warning more useful, this should only be called
1911 once per basic block not reached, and it only warns when the basic
1912 block contains more than one line from the current function, and
1913 contains at least one operation. CSE and inlining can duplicate insns,
1914 so it's possible to get spurious warnings from this. */
1917 never_reached_warning (avoided_insn
)
1921 rtx a_line_note
= NULL
;
1922 int two_avoided_lines
= 0;
1923 int contains_insn
= 0;
1925 if (! warn_notreached
)
1928 /* Scan forwards, looking at LINE_NUMBER notes, until
1929 we hit a LABEL or we run out of insns. */
1931 for (insn
= avoided_insn
; insn
!= NULL
; insn
= NEXT_INSN (insn
))
1933 if (GET_CODE (insn
) == CODE_LABEL
)
1935 else if (GET_CODE (insn
) == NOTE
/* A line number note? */
1936 && NOTE_LINE_NUMBER (insn
) >= 0)
1938 if (a_line_note
== NULL
)
1941 two_avoided_lines
|= (NOTE_LINE_NUMBER (a_line_note
)
1942 != NOTE_LINE_NUMBER (insn
));
1944 else if (INSN_P (insn
))
1947 if (two_avoided_lines
&& contains_insn
)
1948 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note
),
1949 NOTE_LINE_NUMBER (a_line_note
),
1950 "will never be executed");
1953 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1954 NLABEL as a return. Accrue modifications into the change group. */
1957 redirect_exp_1 (loc
, olabel
, nlabel
, insn
)
1963 RTX_CODE code
= GET_CODE (x
);
1967 if (code
== LABEL_REF
)
1969 if (XEXP (x
, 0) == olabel
)
1973 n
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
1975 n
= gen_rtx_RETURN (VOIDmode
);
1977 validate_change (insn
, loc
, n
, 1);
1981 else if (code
== RETURN
&& olabel
== 0)
1983 x
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
1984 if (loc
== &PATTERN (insn
))
1985 x
= gen_rtx_SET (VOIDmode
, pc_rtx
, x
);
1986 validate_change (insn
, loc
, x
, 1);
1990 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
1991 && GET_CODE (SET_SRC (x
)) == LABEL_REF
1992 && XEXP (SET_SRC (x
), 0) == olabel
)
1994 validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 1);
1998 fmt
= GET_RTX_FORMAT (code
);
1999 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2002 redirect_exp_1 (&XEXP (x
, i
), olabel
, nlabel
, insn
);
2003 else if (fmt
[i
] == 'E')
2006 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2007 redirect_exp_1 (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
);
2012 /* Similar, but apply the change group and report success or failure. */
2015 redirect_exp (olabel
, nlabel
, insn
)
2021 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
2022 loc
= &XVECEXP (PATTERN (insn
), 0, 0);
2024 loc
= &PATTERN (insn
);
2026 redirect_exp_1 (loc
, olabel
, nlabel
, insn
);
2027 if (num_validated_changes () == 0)
2030 return apply_change_group ();
2033 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2034 the modifications into the change group. Return false if we did
2035 not see how to do that. */
2038 redirect_jump_1 (jump
, nlabel
)
2041 int ochanges
= num_validated_changes ();
2044 if (GET_CODE (PATTERN (jump
)) == PARALLEL
)
2045 loc
= &XVECEXP (PATTERN (jump
), 0, 0);
2047 loc
= &PATTERN (jump
);
2049 redirect_exp_1 (loc
, JUMP_LABEL (jump
), nlabel
, jump
);
2050 return num_validated_changes () > ochanges
;
2053 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2054 jump target label is unused as a result, it and the code following
2057 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2060 The return value will be 1 if the change was made, 0 if it wasn't
2061 (this can only occur for NLABEL == 0). */
2064 redirect_jump (jump
, nlabel
, delete_unused
)
2068 rtx olabel
= JUMP_LABEL (jump
);
2070 if (nlabel
== olabel
)
2073 if (! redirect_exp (olabel
, nlabel
, jump
))
2076 JUMP_LABEL (jump
) = nlabel
;
2078 ++LABEL_NUSES (nlabel
);
2080 /* If we're eliding the jump over exception cleanups at the end of a
2081 function, move the function end note so that -Wreturn-type works. */
2082 if (olabel
&& nlabel
2083 && NEXT_INSN (olabel
)
2084 && GET_CODE (NEXT_INSN (olabel
)) == NOTE
2085 && NOTE_LINE_NUMBER (NEXT_INSN (olabel
)) == NOTE_INSN_FUNCTION_END
)
2086 emit_note_after (NOTE_INSN_FUNCTION_END
, nlabel
);
2088 if (olabel
&& --LABEL_NUSES (olabel
) == 0 && delete_unused
)
2089 delete_related_insns (olabel
);
2094 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2095 Accrue the modifications into the change group. */
2102 rtx x
= pc_set (insn
);
2108 code
= GET_CODE (x
);
2110 if (code
== IF_THEN_ELSE
)
2112 rtx comp
= XEXP (x
, 0);
2114 enum rtx_code reversed_code
;
2116 /* We can do this in two ways: The preferable way, which can only
2117 be done if this is not an integer comparison, is to reverse
2118 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2119 of the IF_THEN_ELSE. If we can't do either, fail. */
2121 reversed_code
= reversed_comparison_code (comp
, insn
);
2123 if (reversed_code
!= UNKNOWN
)
2125 validate_change (insn
, &XEXP (x
, 0),
2126 gen_rtx_fmt_ee (reversed_code
,
2127 GET_MODE (comp
), XEXP (comp
, 0),
2134 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
2135 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
2141 /* Invert the jump condition of conditional jump insn, INSN.
2143 Return 1 if we can do so, 0 if we cannot find a way to do so that
2144 matches a pattern. */
2150 invert_exp_1 (insn
);
2151 if (num_validated_changes () == 0)
2154 return apply_change_group ();
2157 /* Invert the condition of the jump JUMP, and make it jump to label
2158 NLABEL instead of where it jumps now. Accrue changes into the
2159 change group. Return false if we didn't see how to perform the
2160 inversion and redirection. */
2163 invert_jump_1 (jump
, nlabel
)
2168 ochanges
= num_validated_changes ();
2169 invert_exp_1 (jump
);
2170 if (num_validated_changes () == ochanges
)
2173 return redirect_jump_1 (jump
, nlabel
);
2176 /* Invert the condition of the jump JUMP, and make it jump to label
2177 NLABEL instead of where it jumps now. Return true if successful. */
2180 invert_jump (jump
, nlabel
, delete_unused
)
2184 /* We have to either invert the condition and change the label or
2185 do neither. Either operation could fail. We first try to invert
2186 the jump. If that succeeds, we try changing the label. If that fails,
2187 we invert the jump back to what it was. */
2189 if (! invert_exp (jump
))
2192 if (redirect_jump (jump
, nlabel
, delete_unused
))
2194 invert_br_probabilities (jump
);
2199 if (! invert_exp (jump
))
2200 /* This should just be putting it back the way it was. */
2207 /* Like rtx_equal_p except that it considers two REGs as equal
2208 if they renumber to the same value and considers two commutative
2209 operations to be the same if the order of the operands has been
2212 ??? Addition is not commutative on the PA due to the weird implicit
2213 space register selection rules for memory addresses. Therefore, we
2214 don't consider a + b == b + a.
2216 We could/should make this test a little tighter. Possibly only
2217 disabling it on the PA via some backend macro or only disabling this
2218 case when the PLUS is inside a MEM. */
2221 rtx_renumbered_equal_p (x
, y
)
2225 RTX_CODE code
= GET_CODE (x
);
2231 if ((code
== REG
|| (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == REG
))
2232 && (GET_CODE (y
) == REG
|| (GET_CODE (y
) == SUBREG
2233 && GET_CODE (SUBREG_REG (y
)) == REG
)))
2235 int reg_x
= -1, reg_y
= -1;
2236 int byte_x
= 0, byte_y
= 0;
2238 if (GET_MODE (x
) != GET_MODE (y
))
2241 /* If we haven't done any renumbering, don't
2242 make any assumptions. */
2243 if (reg_renumber
== 0)
2244 return rtx_equal_p (x
, y
);
2248 reg_x
= REGNO (SUBREG_REG (x
));
2249 byte_x
= SUBREG_BYTE (x
);
2251 if (reg_renumber
[reg_x
] >= 0)
2253 reg_x
= subreg_regno_offset (reg_renumber
[reg_x
],
2254 GET_MODE (SUBREG_REG (x
)),
2263 if (reg_renumber
[reg_x
] >= 0)
2264 reg_x
= reg_renumber
[reg_x
];
2267 if (GET_CODE (y
) == SUBREG
)
2269 reg_y
= REGNO (SUBREG_REG (y
));
2270 byte_y
= SUBREG_BYTE (y
);
2272 if (reg_renumber
[reg_y
] >= 0)
2274 reg_y
= subreg_regno_offset (reg_renumber
[reg_y
],
2275 GET_MODE (SUBREG_REG (y
)),
2284 if (reg_renumber
[reg_y
] >= 0)
2285 reg_y
= reg_renumber
[reg_y
];
2288 return reg_x
>= 0 && reg_x
== reg_y
&& byte_x
== byte_y
;
2291 /* Now we have disposed of all the cases
2292 in which different rtx codes can match. */
2293 if (code
!= GET_CODE (y
))
2305 return INTVAL (x
) == INTVAL (y
);
2308 /* We can't assume nonlocal labels have their following insns yet. */
2309 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
2310 return XEXP (x
, 0) == XEXP (y
, 0);
2312 /* Two label-refs are equivalent if they point at labels
2313 in the same position in the instruction stream. */
2314 return (next_real_insn (XEXP (x
, 0))
2315 == next_real_insn (XEXP (y
, 0)));
2318 return XSTR (x
, 0) == XSTR (y
, 0);
2321 /* If we didn't match EQ equality above, they aren't the same. */
2328 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2330 if (GET_MODE (x
) != GET_MODE (y
))
2333 /* For commutative operations, the RTX match if the operand match in any
2334 order. Also handle the simple binary and unary cases without a loop.
2336 ??? Don't consider PLUS a commutative operator; see comments above. */
2337 if ((code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
2339 return ((rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
2340 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)))
2341 || (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 1))
2342 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 0))));
2343 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
2344 return (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
2345 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)));
2346 else if (GET_RTX_CLASS (code
) == '1')
2347 return rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0));
2349 /* Compare the elements. If any pair of corresponding elements
2350 fail to match, return 0 for the whole things. */
2352 fmt
= GET_RTX_FORMAT (code
);
2353 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2359 if (XWINT (x
, i
) != XWINT (y
, i
))
2364 if (XINT (x
, i
) != XINT (y
, i
))
2369 if (XTREE (x
, i
) != XTREE (y
, i
))
2374 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
2379 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
2384 if (XEXP (x
, i
) != XEXP (y
, i
))
2391 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
2393 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2394 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
2405 /* If X is a hard register or equivalent to one or a subregister of one,
2406 return the hard register number. If X is a pseudo register that was not
2407 assigned a hard register, return the pseudo register number. Otherwise,
2408 return -1. Any rtx is valid for X. */
2414 if (GET_CODE (x
) == REG
)
2416 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
2417 return reg_renumber
[REGNO (x
)];
2420 if (GET_CODE (x
) == SUBREG
)
2422 int base
= true_regnum (SUBREG_REG (x
));
2423 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
2424 return base
+ subreg_regno_offset (REGNO (SUBREG_REG (x
)),
2425 GET_MODE (SUBREG_REG (x
)),
2426 SUBREG_BYTE (x
), GET_MODE (x
));
/* Optimize code of the form:
	for (x = a[i]; x; ...)
	   ...
	for (x = a[i]; x; ...)
	   ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	     { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

static int *same_regs;

static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
2474 /* Called via note_stores on each insn between the target of the first
2475 branch and the second branch. It marks any changed registers. */
2478 mark_modified_reg (dest
, x
, data
)
2481 void *data ATTRIBUTE_UNUSED
;
2486 if (GET_CODE (dest
) == SUBREG
)
2487 dest
= SUBREG_REG (dest
);
2489 if (GET_CODE (dest
) == MEM
)
2492 if (GET_CODE (dest
) != REG
)
2495 regno
= REGNO (dest
);
2496 if (regno
>= FIRST_PSEUDO_REGISTER
)
2497 modified_regs
[regno
] = 1;
2498 /* Don't consider a hard condition code register as modified,
2499 if it is only being set. thread_jumps will check if it is set
2500 to the same value. */
2501 else if (GET_MODE_CLASS (GET_MODE (dest
)) != MODE_CC
2502 || GET_CODE (x
) != SET
2503 || ! rtx_equal_p (dest
, SET_DEST (x
))
2504 || HARD_REGNO_NREGS (regno
, GET_MODE (dest
)) != 1)
2505 for (i
= 0; i
< HARD_REGNO_NREGS (regno
, GET_MODE (dest
)); i
++)
2506 modified_regs
[regno
+ i
] = 1;
2509 /* F is the first insn in the chain of insns. */
2512 thread_jumps (f
, max_reg
, flag_before_loop
)
2515 int flag_before_loop
;
2517 /* Basic algorithm is to find a conditional branch,
2518 the label it may branch to, and the branch after
2519 that label. If the two branches test the same condition,
2520 walk back from both branch paths until the insn patterns
2521 differ, or code labels are hit. If we make it back to
2522 the target of the first branch, then we know that the first branch
2523 will either always succeed or always fail depending on the relative
2524 senses of the two branches. So adjust the first branch accordingly
2527 rtx label
, b1
, b2
, t1
, t2
;
2528 enum rtx_code code1
, code2
;
2529 rtx b1op0
, b1op1
, b2op0
, b2op1
;
2533 enum rtx_code reversed_code1
, reversed_code2
;
2535 /* Allocate register tables and quick-reset table. */
2536 modified_regs
= (char *) xmalloc (max_reg
* sizeof (char));
2537 same_regs
= (int *) xmalloc (max_reg
* sizeof (int));
2538 all_reset
= (int *) xmalloc (max_reg
* sizeof (int));
2539 for (i
= 0; i
< max_reg
; i
++)
2546 for (b1
= f
; b1
; b1
= NEXT_INSN (b1
))
2551 /* Get to a candidate branch insn. */
2552 if (GET_CODE (b1
) != JUMP_INSN
2553 || ! any_condjump_p (b1
) || JUMP_LABEL (b1
) == 0)
2556 memset (modified_regs
, 0, max_reg
* sizeof (char));
2559 memcpy (same_regs
, all_reset
, max_reg
* sizeof (int));
2562 label
= JUMP_LABEL (b1
);
2564 /* Look for a branch after the target. Record any registers and
2565 memory modified between the target and the branch. Stop when we
2566 get to a label since we can't know what was changed there. */
2567 for (b2
= NEXT_INSN (label
); b2
; b2
= NEXT_INSN (b2
))
2569 if (GET_CODE (b2
) == CODE_LABEL
)
2572 else if (GET_CODE (b2
) == JUMP_INSN
)
2574 /* If this is an unconditional jump and is the only use of
2575 its target label, we can follow it. */
2576 if (any_uncondjump_p (b2
)
2578 && JUMP_LABEL (b2
) != 0
2579 && LABEL_NUSES (JUMP_LABEL (b2
)) == 1)
2581 b2
= JUMP_LABEL (b2
);
2588 if (GET_CODE (b2
) != CALL_INSN
&& GET_CODE (b2
) != INSN
)
2591 if (GET_CODE (b2
) == CALL_INSN
)
2594 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2595 if (call_used_regs
[i
] && ! fixed_regs
[i
]
2596 && i
!= STACK_POINTER_REGNUM
2597 && i
!= FRAME_POINTER_REGNUM
2598 && i
!= HARD_FRAME_POINTER_REGNUM
2599 && i
!= ARG_POINTER_REGNUM
)
2600 modified_regs
[i
] = 1;
2603 note_stores (PATTERN (b2
), mark_modified_reg
, NULL
);
2606 /* Check the next candidate branch insn from the label
2609 || GET_CODE (b2
) != JUMP_INSN
2611 || !any_condjump_p (b2
)
2612 || !onlyjump_p (b2
))
2617 /* Get the comparison codes and operands, reversing the
2618 codes if appropriate. If we don't have comparison codes,
2619 we can't do anything. */
2620 b1op0
= XEXP (XEXP (SET_SRC (set
), 0), 0);
2621 b1op1
= XEXP (XEXP (SET_SRC (set
), 0), 1);
2622 code1
= GET_CODE (XEXP (SET_SRC (set
), 0));
2623 reversed_code1
= code1
;
2624 if (XEXP (SET_SRC (set
), 1) == pc_rtx
)
2625 code1
= reversed_comparison_code (XEXP (SET_SRC (set
), 0), b1
);
2627 reversed_code1
= reversed_comparison_code (XEXP (SET_SRC (set
), 0), b1
);
2629 b2op0
= XEXP (XEXP (SET_SRC (set2
), 0), 0);
2630 b2op1
= XEXP (XEXP (SET_SRC (set2
), 0), 1);
2631 code2
= GET_CODE (XEXP (SET_SRC (set2
), 0));
2632 reversed_code2
= code2
;
2633 if (XEXP (SET_SRC (set2
), 1) == pc_rtx
)
2634 code2
= reversed_comparison_code (XEXP (SET_SRC (set2
), 0), b2
);
2636 reversed_code2
= reversed_comparison_code (XEXP (SET_SRC (set2
), 0), b2
);
2638 /* If they test the same things and knowing that B1 branches
2639 tells us whether or not B2 branches, check if we
2640 can thread the branch. */
2641 if (rtx_equal_for_thread_p (b1op0
, b2op0
, b2
)
2642 && rtx_equal_for_thread_p (b1op1
, b2op1
, b2
)
2643 && (comparison_dominates_p (code1
, code2
)
2644 || comparison_dominates_p (code1
, reversed_code2
)))
2647 t1
= prev_nonnote_insn (b1
);
2648 t2
= prev_nonnote_insn (b2
);
2650 while (t1
!= 0 && t2
!= 0)
2654 /* We have reached the target of the first branch.
2655 If there are no pending register equivalents,
2656 we know that this branch will either always
2657 succeed (if the senses of the two branches are
2658 the same) or always fail (if not). */
2661 if (num_same_regs
!= 0)
2664 if (comparison_dominates_p (code1
, code2
))
2665 new_label
= JUMP_LABEL (b2
);
2667 new_label
= get_label_after (b2
);
2669 if (JUMP_LABEL (b1
) != new_label
)
2671 rtx prev
= PREV_INSN (new_label
);
2673 if (flag_before_loop
2674 && GET_CODE (prev
) == NOTE
2675 && NOTE_LINE_NUMBER (prev
) == NOTE_INSN_LOOP_BEG
)
2677 /* Don't thread to the loop label. If a loop
2678 label is reused, loop optimization will
2679 be disabled for that loop. */
2680 new_label
= gen_label_rtx ();
2681 emit_label_after (new_label
, PREV_INSN (prev
));
2683 changed
|= redirect_jump (b1
, new_label
, 1);
2688 /* If either of these is not a normal insn (it might be
2689 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
2690 have already been skipped above.) Similarly, fail
2691 if the insns are different. */
2692 if (GET_CODE (t1
) != INSN
|| GET_CODE (t2
) != INSN
2693 || recog_memoized (t1
) != recog_memoized (t2
)
2694 || ! rtx_equal_for_thread_p (PATTERN (t1
),
2698 t1
= prev_nonnote_insn (t1
);
2699 t2
= prev_nonnote_insn (t2
);
2706 free (modified_regs
);
2711 /* This is like RTX_EQUAL_P except that it knows about our handling of
2712 possibly equivalent registers and knows to consider volatile and
2713 modified objects as not equal.
2715 YINSN is the insn containing Y. */
2718 rtx_equal_for_thread_p (x
, y
, yinsn
)
2727 code
= GET_CODE (x
);
2728 /* Rtx's of different codes cannot be equal. */
2729 if (code
!= GET_CODE (y
))
2732 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
2733 (REG:SI x) and (REG:HI x) are NOT equivalent. */
2735 if (GET_MODE (x
) != GET_MODE (y
))
2738 /* For floating-point, consider everything unequal. This is a bit
2739 pessimistic, but this pass would only rarely do anything for FP
2741 if (TARGET_FLOAT_FORMAT
== IEEE_FLOAT_FORMAT
2742 && FLOAT_MODE_P (GET_MODE (x
)) && ! flag_unsafe_math_optimizations
)
2745 /* For commutative operations, the RTX match if the operand match in any
2746 order. Also handle the simple binary and unary cases without a loop. */
2747 if (code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
2748 return ((rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
2749 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
))
2750 || (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 1), yinsn
)
2751 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 0), yinsn
)));
2752 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
2753 return (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
2754 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
));
2755 else if (GET_RTX_CLASS (code
) == '1')
2756 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
2758 /* Handle special-cases first. */
2762 if (REGNO (x
) == REGNO (y
) && ! modified_regs
[REGNO (x
)])
2765 /* If neither is user variable or hard register, check for possible
2767 if (REG_USERVAR_P (x
) || REG_USERVAR_P (y
)
2768 || REGNO (x
) < FIRST_PSEUDO_REGISTER
2769 || REGNO (y
) < FIRST_PSEUDO_REGISTER
)
2772 if (same_regs
[REGNO (x
)] == -1)
2774 same_regs
[REGNO (x
)] = REGNO (y
);
2777 /* If this is the first time we are seeing a register on the `Y'
2778 side, see if it is the last use. If not, we can't thread the
2779 jump, so mark it as not equivalent. */
2780 if (REGNO_LAST_UID (REGNO (y
)) != INSN_UID (yinsn
))
2786 return (same_regs
[REGNO (x
)] == (int) REGNO (y
));
2791 /* If memory modified or either volatile, not equivalent.
2792 Else, check address. */
2793 if (modified_mem
|| MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
2796 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
2799 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
2805 /* Cancel a pending `same_regs' if setting equivalenced registers.
2806 Then process source. */
2807 if (GET_CODE (SET_DEST (x
)) == REG
2808 && GET_CODE (SET_DEST (y
)) == REG
)
2810 if (same_regs
[REGNO (SET_DEST (x
))] == (int) REGNO (SET_DEST (y
)))
2812 same_regs
[REGNO (SET_DEST (x
))] = -1;
2815 else if (REGNO (SET_DEST (x
)) != REGNO (SET_DEST (y
)))
2820 if (rtx_equal_for_thread_p (SET_DEST (x
), SET_DEST (y
), yinsn
) == 0)
2824 return rtx_equal_for_thread_p (SET_SRC (x
), SET_SRC (y
), yinsn
);
2827 return XEXP (x
, 0) == XEXP (y
, 0);
2830 return XSTR (x
, 0) == XSTR (y
, 0);
2839 fmt
= GET_RTX_FORMAT (code
);
2840 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2845 if (XWINT (x
, i
) != XWINT (y
, i
))
2851 if (XINT (x
, i
) != XINT (y
, i
))
2857 /* Two vectors must have the same length. */
2858 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
2861 /* And the corresponding elements must match. */
2862 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2863 if (rtx_equal_for_thread_p (XVECEXP (x
, i
, j
),
2864 XVECEXP (y
, i
, j
), yinsn
) == 0)
2869 if (rtx_equal_for_thread_p (XEXP (x
, i
), XEXP (y
, i
), yinsn
) == 0)
2875 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
2880 /* These are just backpointers, so they don't matter. */
2887 /* It is believed that rtx's at this level will never
2888 contain anything but integers and other rtx's,
2889 except for within LABEL_REFs and SYMBOL_REFs. */