1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
42 #include "hard-reg-set.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info
PARAMS ((rtx
));
63 static void mark_all_labels
PARAMS ((rtx
));
64 static int duplicate_loop_exit_test
PARAMS ((rtx
));
65 static void delete_computation
PARAMS ((rtx
));
66 static void redirect_exp_1
PARAMS ((rtx
*, rtx
, rtx
, rtx
));
67 static int redirect_exp
PARAMS ((rtx
, rtx
, rtx
));
68 static void invert_exp_1
PARAMS ((rtx
));
69 static int invert_exp
PARAMS ((rtx
));
70 static int returnjump_p_1
PARAMS ((rtx
*, void *));
71 static void delete_prior_computation
PARAMS ((rtx
, rtx
));
72 static void mark_modified_reg
PARAMS ((rtx
, rtx
, void *));
74 /* Alternate entry into the jump optimizer. This entry point only rebuilds
75 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
78 rebuild_jump_labels (f
)
84 max_uid
= init_label_info (f
) + 1;
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
92 for (insn
= forced_labels
; insn
; insn
= XEXP (insn
, 1))
93 if (GET_CODE (XEXP (insn
, 0)) == CODE_LABEL
)
94 LABEL_NUSES (XEXP (insn
, 0))++;
96 /* Keep track of labels used for marking handlers for exception
97 regions; they cannot usually be deleted. */
99 for (insn
= exception_handler_labels
; insn
; insn
= XEXP (insn
, 1))
100 if (GET_CODE (XEXP (insn
, 0)) == CODE_LABEL
)
101 LABEL_NUSES (XEXP (insn
, 0))++;
104 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
105 non-fallthru insn. This is not generally true, as multiple barriers
106 may have crept in, or the BARRIER may be separated from the last
107 real insn by one or more NOTEs.
109 This simple pass moves barriers and removes duplicates so that the
115 rtx insn
, next
, prev
;
116 for (insn
= get_insns (); insn
; insn
= next
)
118 next
= NEXT_INSN (insn
);
119 if (GET_CODE (insn
) == BARRIER
)
121 prev
= prev_nonnote_insn (insn
);
122 if (GET_CODE (prev
) == BARRIER
)
123 delete_barrier (insn
);
124 else if (prev
!= PREV_INSN (insn
))
125 reorder_insns (insn
, insn
, prev
);
131 copy_loop_headers (f
)
134 register rtx insn
, next
;
135 /* Now iterate optimizing jumps until nothing changes over one pass. */
136 for (insn
= f
; insn
; insn
= next
)
140 next
= NEXT_INSN (insn
);
142 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
143 jump. Try to optimize by duplicating the loop exit test if so.
144 This is only safe immediately after regscan, because it uses
145 the values of regno_first_uid and regno_last_uid. */
146 if (GET_CODE (insn
) == NOTE
147 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
148 && (temp1
= next_nonnote_insn (insn
)) != 0
149 && any_uncondjump_p (temp1
) && onlyjump_p (temp1
))
151 temp
= PREV_INSN (insn
);
152 if (duplicate_loop_exit_test (insn
))
154 next
= NEXT_INSN (temp
);
161 purge_line_number_notes (f
)
166 /* Delete extraneous line number notes.
167 Note that two consecutive notes for different lines are not really
168 extraneous. There should be some indication where that line belonged,
169 even if it became empty. */
171 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
172 if (GET_CODE (insn
) == NOTE
)
174 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_FUNCTION_BEG
)
175 /* Any previous line note was for the prologue; gdb wants a new
176 note after the prologue even if it is for the same line. */
177 last_note
= NULL_RTX
;
178 else if (NOTE_LINE_NUMBER (insn
) >= 0)
180 /* Delete this note if it is identical to previous note. */
182 && NOTE_SOURCE_FILE (insn
) == NOTE_SOURCE_FILE (last_note
)
183 && NOTE_LINE_NUMBER (insn
) == NOTE_LINE_NUMBER (last_note
))
194 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
195 notes whose labels don't occur in the insn any more. Returns the
196 largest INSN_UID found. */
204 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
206 if (GET_CODE (insn
) == CODE_LABEL
)
207 LABEL_NUSES (insn
) = (LABEL_PRESERVE_P (insn
) != 0);
208 else if (GET_CODE (insn
) == JUMP_INSN
)
209 JUMP_LABEL (insn
) = 0;
210 else if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
214 for (note
= REG_NOTES (insn
); note
; note
= next
)
216 next
= XEXP (note
, 1);
217 if (REG_NOTE_KIND (note
) == REG_LABEL
218 && ! reg_mentioned_p (XEXP (note
, 0), PATTERN (insn
)))
219 remove_note (insn
, note
);
222 if (INSN_UID (insn
) > largest_uid
)
223 largest_uid
= INSN_UID (insn
);
229 /* Mark the label each jump jumps to.
230 Combine consecutive labels, and count uses of labels. */
238 for (insn
= f
; insn
; insn
= NEXT_INSN (insn
))
241 if (GET_CODE (insn
) == CALL_INSN
242 && GET_CODE (PATTERN (insn
)) == CALL_PLACEHOLDER
)
244 mark_all_labels (XEXP (PATTERN (insn
), 0));
245 mark_all_labels (XEXP (PATTERN (insn
), 1));
246 mark_all_labels (XEXP (PATTERN (insn
), 2));
248 /* Canonicalize the tail recursion label attached to the
249 CALL_PLACEHOLDER insn. */
250 if (XEXP (PATTERN (insn
), 3))
252 rtx label_ref
= gen_rtx_LABEL_REF (VOIDmode
,
253 XEXP (PATTERN (insn
), 3));
254 mark_jump_label (label_ref
, insn
, 0);
255 XEXP (PATTERN (insn
), 3) = XEXP (label_ref
, 0);
261 mark_jump_label (PATTERN (insn
), insn
, 0);
262 if (! INSN_DELETED_P (insn
) && GET_CODE (insn
) == JUMP_INSN
)
264 /* When we know the LABEL_REF contained in a REG used in
265 an indirect jump, we'll have a REG_LABEL note so that
266 flow can tell where it's going. */
267 if (JUMP_LABEL (insn
) == 0)
269 rtx label_note
= find_reg_note (insn
, REG_LABEL
, NULL_RTX
);
272 /* But a LABEL_REF around the REG_LABEL note, so
273 that we can canonicalize it. */
274 rtx label_ref
= gen_rtx_LABEL_REF (VOIDmode
,
275 XEXP (label_note
, 0));
277 mark_jump_label (label_ref
, insn
, 0);
278 XEXP (label_note
, 0) = XEXP (label_ref
, 0);
279 JUMP_LABEL (insn
) = XEXP (label_note
, 0);
286 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
287 jump. Assume that this unconditional jump is to the exit test code. If
288 the code is sufficiently simple, make a copy of it before INSN,
289 followed by a jump to the exit of the loop. Then delete the unconditional
292 Return 1 if we made the change, else 0.
294 This is only safe immediately after a regscan pass because it uses the
295 values of regno_first_uid and regno_last_uid. */
298 duplicate_loop_exit_test (loop_start
)
301 rtx insn
, set
, reg
, p
, link
;
302 rtx copy
= 0, first_copy
= 0;
304 rtx exitcode
= NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start
)));
306 int max_reg
= max_reg_num ();
308 rtx loop_pre_header_label
;
310 /* Scan the exit code. We do not perform this optimization if any insn:
314 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
315 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
316 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
319 We also do not do this if we find an insn with ASM_OPERANDS. While
320 this restriction should not be necessary, copying an insn with
321 ASM_OPERANDS can confuse asm_noperands in some cases.
323 Also, don't do this if the exit code is more than 20 insns. */
325 for (insn
= exitcode
;
327 && ! (GET_CODE (insn
) == NOTE
328 && NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
);
329 insn
= NEXT_INSN (insn
))
331 switch (GET_CODE (insn
))
337 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
338 a jump immediately after the loop start that branches outside
339 the loop but within an outer loop, near the exit test.
340 If we copied this exit test and created a phony
341 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
342 before the exit test look like these could be safely moved
343 out of the loop even if they actually may be never executed.
344 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
346 if (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
347 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
)
351 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
352 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
))
353 /* If we were to duplicate this code, we would not move
354 the BLOCK notes, and so debugging the moved code would
355 be difficult. Thus, we only move the code with -O2 or
362 /* The code below would grossly mishandle REG_WAS_0 notes,
363 so get rid of them here. */
364 while ((p
= find_reg_note (insn
, REG_WAS_0
, NULL_RTX
)) != 0)
365 remove_note (insn
, p
);
367 || find_reg_note (insn
, REG_RETVAL
, NULL_RTX
)
368 || find_reg_note (insn
, REG_LIBCALL
, NULL_RTX
))
376 /* Unless INSN is zero, we can do the optimization. */
382 /* See if any insn sets a register only used in the loop exit code and
383 not a user variable. If so, replace it with a new register. */
384 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
385 if (GET_CODE (insn
) == INSN
386 && (set
= single_set (insn
)) != 0
387 && ((reg
= SET_DEST (set
), GET_CODE (reg
) == REG
)
388 || (GET_CODE (reg
) == SUBREG
389 && (reg
= SUBREG_REG (reg
), GET_CODE (reg
) == REG
)))
390 && REGNO (reg
) >= FIRST_PSEUDO_REGISTER
391 && REGNO_FIRST_UID (REGNO (reg
)) == INSN_UID (insn
))
393 for (p
= NEXT_INSN (insn
); p
!= lastexit
; p
= NEXT_INSN (p
))
394 if (REGNO_LAST_UID (REGNO (reg
)) == INSN_UID (p
))
399 /* We can do the replacement. Allocate reg_map if this is the
400 first replacement we found. */
402 reg_map
= (rtx
*) xcalloc (max_reg
, sizeof (rtx
));
404 REG_LOOP_TEST_P (reg
) = 1;
406 reg_map
[REGNO (reg
)] = gen_reg_rtx (GET_MODE (reg
));
409 loop_pre_header_label
= gen_label_rtx ();
411 /* Now copy each insn. */
412 for (insn
= exitcode
; insn
!= lastexit
; insn
= NEXT_INSN (insn
))
414 switch (GET_CODE (insn
))
417 copy
= emit_barrier_before (loop_start
);
420 /* Only copy line-number notes. */
421 if (NOTE_LINE_NUMBER (insn
) >= 0)
423 copy
= emit_note_before (NOTE_LINE_NUMBER (insn
), loop_start
);
424 NOTE_SOURCE_FILE (copy
) = NOTE_SOURCE_FILE (insn
);
429 copy
= emit_insn_before (copy_insn (PATTERN (insn
)), loop_start
);
431 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
433 mark_jump_label (PATTERN (copy
), copy
, 0);
435 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
437 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
438 if (REG_NOTE_KIND (link
) != REG_LABEL
)
440 if (GET_CODE (link
) == EXPR_LIST
)
442 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link
),
447 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link
),
452 if (reg_map
&& REG_NOTES (copy
))
453 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
457 copy
= emit_jump_insn_before (copy_insn (PATTERN (insn
)),
460 replace_regs (PATTERN (copy
), reg_map
, max_reg
, 1);
461 mark_jump_label (PATTERN (copy
), copy
, 0);
462 if (REG_NOTES (insn
))
464 REG_NOTES (copy
) = copy_insn_1 (REG_NOTES (insn
));
466 replace_regs (REG_NOTES (copy
), reg_map
, max_reg
, 1);
469 /* Predict conditional jump that do make loop looping as taken.
470 Other jumps are probably exit conditions, so predict
472 if (any_condjump_p (copy
))
474 rtx label
= JUMP_LABEL (copy
);
477 /* The jump_insn after loop_start should be followed
478 by barrier and loopback label. */
479 if (prev_nonnote_insn (label
)
480 && (prev_nonnote_insn (prev_nonnote_insn (label
))
481 == next_nonnote_insn (loop_start
)))
483 predict_insn_def (copy
, PRED_LOOP_HEADER
, TAKEN
);
484 /* To keep pre-header, we need to redirect all loop
485 entrances before the LOOP_BEG note. */
486 redirect_jump (copy
, loop_pre_header_label
, 0);
489 predict_insn_def (copy
, PRED_LOOP_HEADER
, NOT_TAKEN
);
498 /* Record the first insn we copied. We need it so that we can
499 scan the copied insns for new pseudo registers. */
504 /* Now clean up by emitting a jump to the end label and deleting the jump
505 at the start of the loop. */
506 if (! copy
|| GET_CODE (copy
) != BARRIER
)
508 copy
= emit_jump_insn_before (gen_jump (get_label_after (insn
)),
511 /* Record the first insn we copied. We need it so that we can
512 scan the copied insns for new pseudo registers. This may not
513 be strictly necessary since we should have copied at least one
514 insn above. But I am going to be safe. */
518 mark_jump_label (PATTERN (copy
), copy
, 0);
519 emit_barrier_before (loop_start
);
522 emit_label_before (loop_pre_header_label
, loop_start
);
524 /* Now scan from the first insn we copied to the last insn we copied
525 (copy) for new pseudo registers. Do this after the code to jump to
526 the end label since that might create a new pseudo too. */
527 reg_scan_update (first_copy
, copy
, max_reg
);
529 /* Mark the exit code as the virtual top of the converted loop. */
530 emit_note_before (NOTE_INSN_LOOP_VTOP
, exitcode
);
532 delete_insn (next_nonnote_insn (loop_start
));
541 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
542 notes between START and END out before START. START and END may be such
543 notes. Returns the values of the new starting and ending insns, which
544 may be different if the original ones were such notes. */
547 squeeze_notes (startp
, endp
)
557 rtx past_end
= NEXT_INSN (end
);
559 for (insn
= start
; insn
!= past_end
; insn
= next
)
561 next
= NEXT_INSN (insn
);
562 if (GET_CODE (insn
) == NOTE
563 && (NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_END
564 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_BLOCK_BEG
565 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_BEG
566 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_END
567 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_CONT
568 || NOTE_LINE_NUMBER (insn
) == NOTE_INSN_LOOP_VTOP
))
574 rtx prev
= PREV_INSN (insn
);
575 PREV_INSN (insn
) = PREV_INSN (start
);
576 NEXT_INSN (insn
) = start
;
577 NEXT_INSN (PREV_INSN (insn
)) = insn
;
578 PREV_INSN (NEXT_INSN (insn
)) = insn
;
579 NEXT_INSN (prev
) = next
;
580 PREV_INSN (next
) = prev
;
587 /* There were no real instructions, and we can't represent an empty
589 if (start
== past_end
)
598 /* Return the label before INSN, or put a new label there. */
601 get_label_before (insn
)
606 /* Find an existing label at this point
607 or make a new one if there is none. */
608 label
= prev_nonnote_insn (insn
);
610 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
612 rtx prev
= PREV_INSN (insn
);
614 label
= gen_label_rtx ();
615 emit_label_after (label
, prev
);
616 LABEL_NUSES (label
) = 0;
621 /* Return the label after INSN, or put a new label there. */
624 get_label_after (insn
)
629 /* Find an existing label at this point
630 or make a new one if there is none. */
631 label
= next_nonnote_insn (insn
);
633 if (label
== 0 || GET_CODE (label
) != CODE_LABEL
)
635 label
= gen_label_rtx ();
636 emit_label_after (label
, insn
);
637 LABEL_NUSES (label
) = 0;
642 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
643 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
644 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
645 know whether it's source is floating point or integer comparison. Machine
646 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
647 to help this function avoid overhead in these cases. */
649 reversed_comparison_code_parts (code
, arg0
, arg1
, insn
)
650 rtx insn
, arg0
, arg1
;
653 enum machine_mode mode
;
655 /* If this is not actually a comparison, we can't reverse it. */
656 if (GET_RTX_CLASS (code
) != '<')
659 mode
= GET_MODE (arg0
);
660 if (mode
== VOIDmode
)
661 mode
= GET_MODE (arg1
);
663 /* First see if machine description supply us way to reverse the comparison.
664 Give it priority over everything else to allow machine description to do
666 #ifdef REVERSIBLE_CC_MODE
667 if (GET_MODE_CLASS (mode
) == MODE_CC
668 && REVERSIBLE_CC_MODE (mode
))
670 #ifdef REVERSE_CONDITION
671 return REVERSE_CONDITION (code
, mode
);
673 return reverse_condition (code
);
677 /* Try a few special cases based on the comparison code. */
686 /* It is always safe to reverse EQ and NE, even for the floating
687 point. Similary the unsigned comparisons are never used for
688 floating point so we can reverse them in the default way. */
689 return reverse_condition (code
);
694 /* In case we already see unordered comparison, we can be sure to
695 be dealing with floating point so we don't need any more tests. */
696 return reverse_condition_maybe_unordered (code
);
701 /* We don't have safe way to reverse these yet. */
707 /* In case we give up IEEE compatibility, all comparisons are reversible. */
708 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
709 || flag_unsafe_math_optimizations
)
710 return reverse_condition (code
);
712 if (GET_MODE_CLASS (mode
) == MODE_CC
719 /* Try to search for the comparison to determine the real mode.
720 This code is expensive, but with sane machine description it
721 will be never used, since REVERSIBLE_CC_MODE will return true
726 for (prev
= prev_nonnote_insn (insn
);
727 prev
!= 0 && GET_CODE (prev
) != CODE_LABEL
;
728 prev
= prev_nonnote_insn (prev
))
730 rtx set
= set_of (arg0
, prev
);
731 if (set
&& GET_CODE (set
) == SET
732 && rtx_equal_p (SET_DEST (set
), arg0
))
734 rtx src
= SET_SRC (set
);
736 if (GET_CODE (src
) == COMPARE
)
738 rtx comparison
= src
;
739 arg0
= XEXP (src
, 0);
740 mode
= GET_MODE (arg0
);
741 if (mode
== VOIDmode
)
742 mode
= GET_MODE (XEXP (comparison
, 1));
745 /* We can get past reg-reg moves. This may be usefull for model
746 of i387 comparisons that first move flag registers around. */
753 /* If register is clobbered in some ununderstandable way,
760 /* An integer condition. */
761 if (GET_CODE (arg0
) == CONST_INT
762 || (GET_MODE (arg0
) != VOIDmode
763 && GET_MODE_CLASS (mode
) != MODE_CC
764 && ! FLOAT_MODE_P (mode
)))
765 return reverse_condition (code
);
770 /* An wrapper around the previous function to take COMPARISON as rtx
771 expression. This simplifies many callers. */
773 reversed_comparison_code (comparison
, insn
)
774 rtx comparison
, insn
;
776 if (GET_RTX_CLASS (GET_CODE (comparison
)) != '<')
778 return reversed_comparison_code_parts (GET_CODE (comparison
),
779 XEXP (comparison
, 0),
780 XEXP (comparison
, 1), insn
);
783 /* Given an rtx-code for a comparison, return the code for the negated
784 comparison. If no such code exists, return UNKNOWN.
786 WATCH OUT! reverse_condition is not safe to use on a jump that might
787 be acting on the results of an IEEE floating point comparison, because
788 of the special treatment of non-signaling nans in comparisons.
789 Use reversed_comparison_code instead. */
792 reverse_condition (code
)
835 /* Similar, but we're allowed to generate unordered comparisons, which
836 makes it safe for IEEE floating-point. Of course, we have to recognize
837 that the target will support them too... */
840 reverse_condition_maybe_unordered (code
)
843 /* Non-IEEE formats don't have unordered conditions. */
844 if (TARGET_FLOAT_FORMAT
!= IEEE_FLOAT_FORMAT
)
845 return reverse_condition (code
);
883 /* Similar, but return the code when two operands of a comparison are swapped.
884 This IS safe for IEEE floating-point. */
887 swap_condition (code
)
930 /* Given a comparison CODE, return the corresponding unsigned comparison.
931 If CODE is an equality comparison or already an unsigned comparison,
935 unsigned_condition (code
)
962 /* Similarly, return the signed version of a comparison. */
965 signed_condition (code
)
992 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
993 truth of CODE1 implies the truth of CODE2. */
996 comparison_dominates_p (code1
, code2
)
997 enum rtx_code code1
, code2
;
999 /* UNKNOWN comparison codes can happen as a result of trying to revert
1001 They can't match anything, so we have to reject them here. */
1002 if (code1
== UNKNOWN
|| code2
== UNKNOWN
)
1011 if (code2
== UNLE
|| code2
== UNGE
)
1016 if (code2
== LE
|| code2
== LEU
|| code2
== GE
|| code2
== GEU
1017 || code2
== ORDERED
)
1022 if (code2
== UNLE
|| code2
== NE
)
1027 if (code2
== LE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
1032 if (code2
== UNGE
|| code2
== NE
)
1037 if (code2
== GE
|| code2
== NE
|| code2
== ORDERED
|| code2
== LTGT
)
1043 if (code2
== ORDERED
)
1048 if (code2
== NE
|| code2
== ORDERED
)
1053 if (code2
== LEU
|| code2
== NE
)
1058 if (code2
== GEU
|| code2
== NE
)
1063 if (code2
== NE
|| code2
== UNEQ
|| code2
== UNLE
|| code2
== UNLT
1064 || code2
== UNGE
|| code2
== UNGT
)
1075 /* Return 1 if INSN is an unconditional jump and nothing else. */
1081 return (GET_CODE (insn
) == JUMP_INSN
1082 && GET_CODE (PATTERN (insn
)) == SET
1083 && GET_CODE (SET_DEST (PATTERN (insn
))) == PC
1084 && GET_CODE (SET_SRC (PATTERN (insn
))) == LABEL_REF
);
1087 /* Return nonzero if INSN is a (possibly) conditional jump
1090 Use this function is deprecated, since we need to support combined
1091 branch and compare insns. Use any_condjump_p instead whenever possible. */
1097 register rtx x
= PATTERN (insn
);
1099 if (GET_CODE (x
) != SET
1100 || GET_CODE (SET_DEST (x
)) != PC
)
1104 if (GET_CODE (x
) == LABEL_REF
)
1107 return (GET_CODE (x
) == IF_THEN_ELSE
1108 && ((GET_CODE (XEXP (x
, 2)) == PC
1109 && (GET_CODE (XEXP (x
, 1)) == LABEL_REF
1110 || GET_CODE (XEXP (x
, 1)) == RETURN
))
1111 || (GET_CODE (XEXP (x
, 1)) == PC
1112 && (GET_CODE (XEXP (x
, 2)) == LABEL_REF
1113 || GET_CODE (XEXP (x
, 2)) == RETURN
))));
1118 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1121 Use this function is deprecated, since we need to support combined
1122 branch and compare insns. Use any_condjump_p instead whenever possible. */
1125 condjump_in_parallel_p (insn
)
1128 register rtx x
= PATTERN (insn
);
1130 if (GET_CODE (x
) != PARALLEL
)
1133 x
= XVECEXP (x
, 0, 0);
1135 if (GET_CODE (x
) != SET
)
1137 if (GET_CODE (SET_DEST (x
)) != PC
)
1139 if (GET_CODE (SET_SRC (x
)) == LABEL_REF
)
1141 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
1143 if (XEXP (SET_SRC (x
), 2) == pc_rtx
1144 && (GET_CODE (XEXP (SET_SRC (x
), 1)) == LABEL_REF
1145 || GET_CODE (XEXP (SET_SRC (x
), 1)) == RETURN
))
1147 if (XEXP (SET_SRC (x
), 1) == pc_rtx
1148 && (GET_CODE (XEXP (SET_SRC (x
), 2)) == LABEL_REF
1149 || GET_CODE (XEXP (SET_SRC (x
), 2)) == RETURN
))
1154 /* Return set of PC, otherwise NULL. */
1161 if (GET_CODE (insn
) != JUMP_INSN
)
1163 pat
= PATTERN (insn
);
1165 /* The set is allowed to appear either as the insn pattern or
1166 the first set in a PARALLEL. */
1167 if (GET_CODE (pat
) == PARALLEL
)
1168 pat
= XVECEXP (pat
, 0, 0);
1169 if (GET_CODE (pat
) == SET
&& GET_CODE (SET_DEST (pat
)) == PC
)
1175 /* Return true when insn is an unconditional direct jump,
1176 possibly bundled inside a PARALLEL. */
1179 any_uncondjump_p (insn
)
1182 rtx x
= pc_set (insn
);
1185 if (GET_CODE (SET_SRC (x
)) != LABEL_REF
)
1190 /* Return true when insn is a conditional jump. This function works for
1191 instructions containing PC sets in PARALLELs. The instruction may have
1192 various other effects so before removing the jump you must verify
1195 Note that unlike condjump_p it returns false for unconditional jumps. */
1198 any_condjump_p (insn
)
1201 rtx x
= pc_set (insn
);
1206 if (GET_CODE (SET_SRC (x
)) != IF_THEN_ELSE
)
1209 a
= GET_CODE (XEXP (SET_SRC (x
), 1));
1210 b
= GET_CODE (XEXP (SET_SRC (x
), 2));
1212 return ((b
== PC
&& (a
== LABEL_REF
|| a
== RETURN
))
1213 || (a
== PC
&& (b
== LABEL_REF
|| b
== RETURN
)));
1216 /* Return the label of a conditional jump. */
1219 condjump_label (insn
)
1222 rtx x
= pc_set (insn
);
1227 if (GET_CODE (x
) == LABEL_REF
)
1229 if (GET_CODE (x
) != IF_THEN_ELSE
)
1231 if (XEXP (x
, 2) == pc_rtx
&& GET_CODE (XEXP (x
, 1)) == LABEL_REF
)
1233 if (XEXP (x
, 1) == pc_rtx
&& GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
1238 /* Return true if INSN is a (possibly conditional) return insn. */
1241 returnjump_p_1 (loc
, data
)
1243 void *data ATTRIBUTE_UNUSED
;
1246 return x
&& GET_CODE (x
) == RETURN
;
1253 if (GET_CODE (insn
) != JUMP_INSN
)
1255 return for_each_rtx (&PATTERN (insn
), returnjump_p_1
, NULL
);
1258 /* Return true if INSN is a jump that only transfers control and
1267 if (GET_CODE (insn
) != JUMP_INSN
)
1270 set
= single_set (insn
);
1273 if (GET_CODE (SET_DEST (set
)) != PC
)
1275 if (side_effects_p (SET_SRC (set
)))
1283 /* Return non-zero if X is an RTX that only sets the condition codes
1284 and has no side effects. */
1297 return sets_cc0_p (x
) == 1 && ! side_effects_p (x
);
1300 /* Return 1 if X is an RTX that does nothing but set the condition codes
1301 and CLOBBER or USE registers.
1302 Return -1 if X does explicitly set the condition codes,
1303 but also does other things. */
1316 if (GET_CODE (x
) == SET
&& SET_DEST (x
) == cc0_rtx
)
1318 if (GET_CODE (x
) == PARALLEL
)
1322 int other_things
= 0;
1323 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1325 if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
1326 && SET_DEST (XVECEXP (x
, 0, i
)) == cc0_rtx
)
1328 else if (GET_CODE (XVECEXP (x
, 0, i
)) == SET
)
1331 return ! sets_cc0
? 0 : other_things
? -1 : 1;
1337 /* Follow any unconditional jump at LABEL;
1338 return the ultimate label reached by any such chain of jumps.
1339 If LABEL is not followed by a jump, return LABEL.
1340 If the chain loops or we can't find end, return LABEL,
1341 since that tells caller to avoid changing the insn.
1343 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1344 a USE or CLOBBER. */
1347 follow_jumps (label
)
1352 register rtx value
= label
;
1357 && (insn
= next_active_insn (value
)) != 0
1358 && GET_CODE (insn
) == JUMP_INSN
1359 && ((JUMP_LABEL (insn
) != 0 && any_uncondjump_p (insn
)
1360 && onlyjump_p (insn
))
1361 || GET_CODE (PATTERN (insn
)) == RETURN
)
1362 && (next
= NEXT_INSN (insn
))
1363 && GET_CODE (next
) == BARRIER
);
1366 /* Don't chain through the insn that jumps into a loop
1367 from outside the loop,
1368 since that would create multiple loop entry jumps
1369 and prevent loop optimization. */
1371 if (!reload_completed
)
1372 for (tem
= value
; tem
!= insn
; tem
= NEXT_INSN (tem
))
1373 if (GET_CODE (tem
) == NOTE
1374 && (NOTE_LINE_NUMBER (tem
) == NOTE_INSN_LOOP_BEG
1375 /* ??? Optional. Disables some optimizations, but makes
1376 gcov output more accurate with -O. */
1377 || (flag_test_coverage
&& NOTE_LINE_NUMBER (tem
) > 0)))
1380 /* If we have found a cycle, make the insn jump to itself. */
1381 if (JUMP_LABEL (insn
) == label
)
1384 tem
= next_active_insn (JUMP_LABEL (insn
));
1385 if (tem
&& (GET_CODE (PATTERN (tem
)) == ADDR_VEC
1386 || GET_CODE (PATTERN (tem
)) == ADDR_DIFF_VEC
))
1389 value
= JUMP_LABEL (insn
);
1397 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1398 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1399 in INSN, then store one of them in JUMP_LABEL (INSN).
1400 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1401 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1402 Also, when there are consecutive labels, canonicalize on the last of them.
1404 Note that two labels separated by a loop-beginning note
1405 must be kept distinct if we have not yet done loop-optimization,
1406 because the gap between them is where loop-optimize
1407 will want to move invariant code to. CROSS_JUMP tells us
1408 that loop-optimization is done with. */
1411 mark_jump_label (x
, insn
, in_mem
)
1416 register RTX_CODE code
= GET_CODE (x
);
1418 register const char *fmt
;
1440 /* If this is a constant-pool reference, see if it is a label. */
1441 if (CONSTANT_POOL_ADDRESS_P (x
))
1442 mark_jump_label (get_pool_constant (x
), insn
, in_mem
);
1447 rtx label
= XEXP (x
, 0);
1449 /* Ignore remaining references to unreachable labels that
1450 have been deleted. */
1451 if (GET_CODE (label
) == NOTE
1452 && NOTE_LINE_NUMBER (label
) == NOTE_INSN_DELETED_LABEL
)
1455 if (GET_CODE (label
) != CODE_LABEL
)
1458 /* Ignore references to labels of containing functions. */
1459 if (LABEL_REF_NONLOCAL_P (x
))
1462 XEXP (x
, 0) = label
;
1463 if (! insn
|| ! INSN_DELETED_P (insn
))
1464 ++LABEL_NUSES (label
);
1468 if (GET_CODE (insn
) == JUMP_INSN
)
1469 JUMP_LABEL (insn
) = label
;
1472 /* Add a REG_LABEL note for LABEL unless there already
1473 is one. All uses of a label, except for labels
1474 that are the targets of jumps, must have a
1476 if (! find_reg_note (insn
, REG_LABEL
, label
))
1477 REG_NOTES (insn
) = gen_rtx_INSN_LIST (REG_LABEL
, label
,
1484 /* Do walk the labels in a vector, but not the first operand of an
1485 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1488 if (! INSN_DELETED_P (insn
))
1490 int eltnum
= code
== ADDR_DIFF_VEC
? 1 : 0;
1492 for (i
= 0; i
< XVECLEN (x
, eltnum
); i
++)
1493 mark_jump_label (XVECEXP (x
, eltnum
, i
), NULL_RTX
, in_mem
);
1501 fmt
= GET_RTX_FORMAT (code
);
1502 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1505 mark_jump_label (XEXP (x
, i
), insn
, in_mem
);
1506 else if (fmt
[i
] == 'E')
1509 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1510 mark_jump_label (XVECEXP (x
, i
, j
), insn
, in_mem
);
1515 /* If all INSN does is set the pc, delete it,
1516 and delete the insn that set the condition codes for it
1517 if that's what the previous thing was. */
1523 register rtx set
= single_set (insn
);
1525 if (set
&& GET_CODE (SET_DEST (set
)) == PC
)
1526 delete_computation (insn
);
1529 /* Verify INSN is a BARRIER and delete it. */
1532 delete_barrier (insn
)
1535 if (GET_CODE (insn
) != BARRIER
)
1541 /* Recursively delete prior insns that compute the value (used only by INSN
1542 which the caller is deleting) stored in the register mentioned by NOTE
1543 which is a REG_DEAD note associated with INSN. */
1546 delete_prior_computation (note
, insn
)
1551 rtx reg
= XEXP (note
, 0);
1553 for (our_prev
= prev_nonnote_insn (insn
);
1554 our_prev
&& (GET_CODE (our_prev
) == INSN
1555 || GET_CODE (our_prev
) == CALL_INSN
);
1556 our_prev
= prev_nonnote_insn (our_prev
))
1558 rtx pat
= PATTERN (our_prev
);
1560 /* If we reach a CALL which is not calling a const function
1561 or the callee pops the arguments, then give up. */
1562 if (GET_CODE (our_prev
) == CALL_INSN
1563 && (! CONST_OR_PURE_CALL_P (our_prev
)
1564 || GET_CODE (pat
) != SET
|| GET_CODE (SET_SRC (pat
)) != CALL
))
1567 /* If we reach a SEQUENCE, it is too complex to try to
1568 do anything with it, so give up. */
1569 if (GET_CODE (pat
) == SEQUENCE
)
1572 if (GET_CODE (pat
) == USE
1573 && GET_CODE (XEXP (pat
, 0)) == INSN
)
1574 /* reorg creates USEs that look like this. We leave them
1575 alone because reorg needs them for its own purposes. */
1578 if (reg_set_p (reg
, pat
))
1580 if (side_effects_p (pat
) && GET_CODE (our_prev
) != CALL_INSN
)
1583 if (GET_CODE (pat
) == PARALLEL
)
1585 /* If we find a SET of something else, we can't
1590 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1592 rtx part
= XVECEXP (pat
, 0, i
);
1594 if (GET_CODE (part
) == SET
1595 && SET_DEST (part
) != reg
)
1599 if (i
== XVECLEN (pat
, 0))
1600 delete_computation (our_prev
);
1602 else if (GET_CODE (pat
) == SET
1603 && GET_CODE (SET_DEST (pat
)) == REG
)
1605 int dest_regno
= REGNO (SET_DEST (pat
));
1608 + (dest_regno
< FIRST_PSEUDO_REGISTER
1609 ? HARD_REGNO_NREGS (dest_regno
,
1610 GET_MODE (SET_DEST (pat
))) : 1));
1611 int regno
= REGNO (reg
);
1614 + (regno
< FIRST_PSEUDO_REGISTER
1615 ? HARD_REGNO_NREGS (regno
, GET_MODE (reg
)) : 1));
1617 if (dest_regno
>= regno
1618 && dest_endregno
<= endregno
)
1619 delete_computation (our_prev
);
1621 /* We may have a multi-word hard register and some, but not
1622 all, of the words of the register are needed in subsequent
1623 insns. Write REG_UNUSED notes for those parts that were not
1625 else if (dest_regno
<= regno
1626 && dest_endregno
>= endregno
)
1630 REG_NOTES (our_prev
)
1631 = gen_rtx_EXPR_LIST (REG_UNUSED
, reg
,
1632 REG_NOTES (our_prev
));
1634 for (i
= dest_regno
; i
< dest_endregno
; i
++)
1635 if (! find_regno_note (our_prev
, REG_UNUSED
, i
))
1638 if (i
== dest_endregno
)
1639 delete_computation (our_prev
);
1646 /* If PAT references the register that dies here, it is an
1647 additional use. Hence any prior SET isn't dead. However, this
1648 insn becomes the new place for the REG_DEAD note. */
1649 if (reg_overlap_mentioned_p (reg
, pat
))
1651 XEXP (note
, 1) = REG_NOTES (our_prev
);
1652 REG_NOTES (our_prev
) = note
;
1658 /* Delete INSN and recursively delete insns that compute values used only
1659 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1660 If we are running before flow.c, we need do nothing since flow.c will
1661 delete dead code. We also can't know if the registers being used are
1662 dead or not at this point.
1664 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1665 nothing other than set a register that dies in this insn, we can delete
1668 On machines with CC0, if CC0 is used in this insn, we may be able to
1669 delete the insn that set it. */
1672 delete_computation (insn
)
/* CC0 machines only: if INSN reads cc0, the setter just before it may be
   deletable too (some surrounding lines are elided in this extract).  */
1678 if (reg_referenced_p (cc0_rtx
, PATTERN (insn
)))
1680 rtx prev
= prev_nonnote_insn (insn
);
1681 /* We assume that at this stage
1682 CC's are always set explicitly
1683 and always immediately before the jump that
1684 will use them. So if the previous insn
1685 exists to set the CC's, delete it
1686 (unless it performs auto-increments, etc.). */
1687 if (prev
&& GET_CODE (prev
) == INSN
1688 && sets_cc0_p (PATTERN (prev
)))
/* sets_cc0_p > 0 means PREV does nothing but set cc0; only then is it
   safe to delete it outright (and only if it has no side effects).  */
1690 if (sets_cc0_p (PATTERN (prev
)) > 0
1691 && ! side_effects_p (PATTERN (prev
)))
1692 delete_computation (prev
);
1694 /* Otherwise, show that cc0 won't be used. */
1695 REG_NOTES (prev
) = gen_rtx_EXPR_LIST (REG_UNUSED
,
1696 cc0_rtx
, REG_NOTES (prev
));
/* Walk INSN's REG_DEAD notes; each names a register whose last use is
   here, so its feeder insn may be recursively deletable.  NEXT is cached
   because delete_prior_computation may splice the note list.  */
1701 for (note
= REG_NOTES (insn
); note
; note
= next
)
1703 next
= XEXP (note
, 1);
1705 if (REG_NOTE_KIND (note
) != REG_DEAD
1706 /* Verify that the REG_NOTE is legitimate. */
1707 || GET_CODE (XEXP (note
, 0)) != REG
)
1710 delete_prior_computation (note
, insn
);
1716 /* Delete insn INSN from the chain of insns and update label ref counts.
1717 May delete some following insns as a consequence; may even delete
1718 a label elsewhere and insns that follow it.
1720 Returns the first insn after INSN that was not deleted. */
1726 register rtx next
= NEXT_INSN (insn
);
1727 register rtx prev
= PREV_INSN (insn
);
1728 register int was_code_label
= (GET_CODE (insn
) == CODE_LABEL
);
1729 register int dont_really_delete
= 0;
1732 while (next
&& INSN_DELETED_P (next
))
1733 next
= NEXT_INSN (next
);
1735 /* This insn is already deleted => return first following nondeleted. */
1736 if (INSN_DELETED_P (insn
))
1740 remove_node_from_expr_list (insn
, &nonlocal_goto_handler_labels
);
1742 /* Don't delete user-declared labels. When optimizing, convert them
1743 to special NOTEs instead. When not optimizing, leave them alone. */
1744 if (was_code_label
&& LABEL_NAME (insn
) != 0)
1748 const char *name
= LABEL_NAME (insn
);
1749 PUT_CODE (insn
, NOTE
);
1750 NOTE_LINE_NUMBER (insn
) = NOTE_INSN_DELETED_LABEL
;
1751 NOTE_SOURCE_FILE (insn
) = name
;
1754 dont_really_delete
= 1;
1757 /* Mark this insn as deleted. */
1758 INSN_DELETED_P (insn
) = 1;
1760 /* If instruction is followed by a barrier,
1761 delete the barrier too. */
1763 if (next
!= 0 && GET_CODE (next
) == BARRIER
)
1765 INSN_DELETED_P (next
) = 1;
1766 next
= NEXT_INSN (next
);
1769 /* Patch out INSN (and the barrier if any) */
1771 if (! dont_really_delete
)
1775 NEXT_INSN (prev
) = next
;
1776 if (GET_CODE (prev
) == INSN
&& GET_CODE (PATTERN (prev
)) == SEQUENCE
)
1777 NEXT_INSN (XVECEXP (PATTERN (prev
), 0,
1778 XVECLEN (PATTERN (prev
), 0) - 1)) = next
;
1783 PREV_INSN (next
) = prev
;
1784 if (GET_CODE (next
) == INSN
&& GET_CODE (PATTERN (next
)) == SEQUENCE
)
1785 PREV_INSN (XVECEXP (PATTERN (next
), 0, 0)) = prev
;
1788 if (prev
&& NEXT_INSN (prev
) == 0)
1789 set_last_insn (prev
);
1792 /* If deleting a jump, decrement the count of the label,
1793 and delete the label if it is now unused. */
1795 if (GET_CODE (insn
) == JUMP_INSN
&& JUMP_LABEL (insn
))
1797 rtx lab
= JUMP_LABEL (insn
), lab_next
;
1799 if (--LABEL_NUSES (lab
) == 0)
1801 /* This can delete NEXT or PREV,
1802 either directly if NEXT is JUMP_LABEL (INSN),
1803 or indirectly through more levels of jumps. */
1806 /* I feel a little doubtful about this loop,
1807 but I see no clean and sure alternative way
1808 to find the first insn after INSN that is not now deleted.
1809 I hope this works. */
1810 while (next
&& INSN_DELETED_P (next
))
1811 next
= NEXT_INSN (next
);
1814 else if ((lab_next
= next_nonnote_insn (lab
)) != NULL
1815 && GET_CODE (lab_next
) == JUMP_INSN
1816 && (GET_CODE (PATTERN (lab_next
)) == ADDR_VEC
1817 || GET_CODE (PATTERN (lab_next
)) == ADDR_DIFF_VEC
))
1819 /* If we're deleting the tablejump, delete the dispatch table.
1820 We may not be able to kill the label immediately preceeding
1821 just yet, as it might be referenced in code leading up to
1823 delete_insn (lab_next
);
1827 /* Likewise if we're deleting a dispatch table. */
1829 if (GET_CODE (insn
) == JUMP_INSN
1830 && (GET_CODE (PATTERN (insn
)) == ADDR_VEC
1831 || GET_CODE (PATTERN (insn
)) == ADDR_DIFF_VEC
))
1833 rtx pat
= PATTERN (insn
);
1834 int i
, diff_vec_p
= GET_CODE (pat
) == ADDR_DIFF_VEC
;
1835 int len
= XVECLEN (pat
, diff_vec_p
);
1837 for (i
= 0; i
< len
; i
++)
1838 if (--LABEL_NUSES (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0)) == 0)
1839 delete_insn (XEXP (XVECEXP (pat
, diff_vec_p
, i
), 0));
1840 while (next
&& INSN_DELETED_P (next
))
1841 next
= NEXT_INSN (next
);
1845 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1846 if (GET_CODE (insn
) == INSN
|| GET_CODE (insn
) == CALL_INSN
)
1847 for (note
= REG_NOTES (insn
); note
; note
= XEXP (note
, 1))
1848 if (REG_NOTE_KIND (note
) == REG_LABEL
1849 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1850 && GET_CODE (XEXP (note
, 0)) == CODE_LABEL
)
1851 if (--LABEL_NUSES (XEXP (note
, 0)) == 0)
1852 delete_insn (XEXP (note
, 0));
1854 while (prev
&& (INSN_DELETED_P (prev
) || GET_CODE (prev
) == NOTE
))
1855 prev
= PREV_INSN (prev
);
1857 /* If INSN was a label and a dispatch table follows it,
1858 delete the dispatch table. The tablejump must have gone already.
1859 It isn't useful to fall through into a table. */
1862 && NEXT_INSN (insn
) != 0
1863 && GET_CODE (NEXT_INSN (insn
)) == JUMP_INSN
1864 && (GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_VEC
1865 || GET_CODE (PATTERN (NEXT_INSN (insn
))) == ADDR_DIFF_VEC
))
1866 next
= delete_insn (NEXT_INSN (insn
));
1868 /* If INSN was a label, delete insns following it if now unreachable. */
1870 if (was_code_label
&& prev
&& GET_CODE (prev
) == BARRIER
)
1872 register RTX_CODE code
;
1874 && (GET_RTX_CLASS (code
= GET_CODE (next
)) == 'i'
1875 || code
== NOTE
|| code
== BARRIER
1876 || (code
== CODE_LABEL
&& INSN_DELETED_P (next
))))
1879 && NOTE_LINE_NUMBER (next
) != NOTE_INSN_FUNCTION_END
)
1880 next
= NEXT_INSN (next
);
1881 /* Keep going past other deleted labels to delete what follows. */
1882 else if (code
== CODE_LABEL
&& INSN_DELETED_P (next
))
1883 next
= NEXT_INSN (next
);
1885 /* Note: if this deletes a jump, it can cause more
1886 deletion of unreachable code, after a different label.
1887 As long as the value from this recursive call is correct,
1888 this invocation functions correctly. */
1889 next
= delete_insn (next
);
1896 /* Advance from INSN till reaching something not deleted
1897 then return that. May return INSN itself. */
1900 next_nondeleted_insn (insn
)
/* Skip forward past insns whose INSN_DELETED_P flag is set.
   NOTE(review): the trailing return statement is elided in this extract.  */
1903 while (INSN_DELETED_P (insn
))
1904 insn
= NEXT_INSN (insn
);
1908 /* Delete a range of insns from FROM to TO, inclusive.
1909 This is for the sake of peephole optimization, so assume
1910 that whatever these insns do will still be done by a new
1911 peephole insn that will replace them. */
/* NOTE(review): the loop header iterating INSN from FROM to TO is elided
   in this extract; code text below is kept byte-identical.  */
1914 delete_for_peephole (from
, to
)
1915 register rtx from
, to
;
1917 register rtx insn
= from
;
1921 register rtx next
= NEXT_INSN (insn
);
1922 register rtx prev
= PREV_INSN (insn
);
/* NOTEs are never deleted here -- only real insns are marked deleted
   and spliced out of the chain.  */
1924 if (GET_CODE (insn
) != NOTE
)
1926 INSN_DELETED_P (insn
) = 1;
1928 /* Patch this insn out of the chain. */
1929 /* We don't do this all at once, because we
1930 must preserve all NOTEs. */
1932 NEXT_INSN (prev
) = next
;
1935 PREV_INSN (next
) = prev
;
1943 /* Note that if TO is an unconditional jump
1944 we *do not* delete the BARRIER that follows,
1945 since the peephole that replaces this sequence
1946 is also an unconditional jump in that case. */
1949 /* We have determined that INSN is never reached, and are about to
1950 delete it. Print a warning if the user asked for one.
1952 To try to make this warning more useful, this should only be called
1953 once per basic block not reached, and it only warns when the basic
1954 block contains more than one line from the current function, and
1955 contains at least one operation. CSE and inlining can duplicate insns,
1956 so it's possible to get spurious warnings from this. */
1959 never_reached_warning (avoided_insn
)
/* A_LINE_NOTE remembers the first line-number note seen; the warning is
   attributed to that location.  */
1963 rtx a_line_note
= NULL
;
1964 int two_avoided_lines
= 0;
1965 int contains_insn
= 0;
/* Bail out early unless the user asked for unreachable-code warnings
   (the return itself is elided in this extract).  */
1967 if (! warn_notreached
)
1970 /* Scan forwards, looking at LINE_NUMBER notes, until
1971 we hit a LABEL or we run out of insns. */
1973 for (insn
= avoided_insn
; insn
!= NULL
; insn
= NEXT_INSN (insn
))
1975 if (GET_CODE (insn
) == CODE_LABEL
)
1977 else if (GET_CODE (insn
) == NOTE
/* A line number note? */
1978 && NOTE_LINE_NUMBER (insn
) >= 0)
1980 if (a_line_note
== NULL
)
/* Record whether the dead region spans more than one source line.  */
1983 two_avoided_lines
|= (NOTE_LINE_NUMBER (a_line_note
)
1984 != NOTE_LINE_NUMBER (insn
));
1986 else if (INSN_P (insn
))
/* Warn only for a multi-line region that contains a real operation,
   to cut down on spurious warnings from CSE/inlining duplicates.  */
1989 if (two_avoided_lines
&& contains_insn
)
1990 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note
),
1991 NOTE_LINE_NUMBER (a_line_note
),
1992 "will never be executed");
1995 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1996 NLABEL as a return. Accrue modifications into the change group. */
1999 redirect_exp_1 (loc
, olabel
, nlabel
, insn
)
2004 register rtx x
= *loc
;
2005 register RTX_CODE code
= GET_CODE (x
);
2007 register const char *fmt
;
2009 if (code
== LABEL_REF
)
2011 if (XEXP (x
, 0) == olabel
)
2015 n
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
2017 n
= gen_rtx_RETURN (VOIDmode
);
2019 validate_change (insn
, loc
, n
, 1);
2023 else if (code
== RETURN
&& olabel
== 0)
2025 x
= gen_rtx_LABEL_REF (VOIDmode
, nlabel
);
2026 if (loc
== &PATTERN (insn
))
2027 x
= gen_rtx_SET (VOIDmode
, pc_rtx
, x
);
2028 validate_change (insn
, loc
, x
, 1);
2032 if (code
== SET
&& nlabel
== 0 && SET_DEST (x
) == pc_rtx
2033 && GET_CODE (SET_SRC (x
)) == LABEL_REF
2034 && XEXP (SET_SRC (x
), 0) == olabel
)
2036 validate_change (insn
, loc
, gen_rtx_RETURN (VOIDmode
), 1);
2040 fmt
= GET_RTX_FORMAT (code
);
2041 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2044 redirect_exp_1 (&XEXP (x
, i
), olabel
, nlabel
, insn
);
2045 else if (fmt
[i
] == 'E')
2048 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2049 redirect_exp_1 (&XVECEXP (x
, i
, j
), olabel
, nlabel
, insn
);
2054 /* Similar, but apply the change group and report success or failure. */
2057 redirect_exp (olabel
, nlabel
, insn
)
/* For a PARALLEL, only the first element is the jump SET we rewrite;
   otherwise rewrite the whole pattern in place.  */
2063 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
2064 loc
= &XVECEXP (PATTERN (insn
), 0, 0);
2066 loc
= &PATTERN (insn
);
2068 redirect_exp_1 (loc
, olabel
, nlabel
, insn
);
/* No queued changes means redirect_exp_1 found nothing to rewrite
   (the failure return is elided in this extract).  */
2069 if (num_validated_changes () == 0)
2072 return apply_change_group ();
2075 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2076 the modifications into the change group. Return false if we did
2077 not see how to do that. */
2080 redirect_jump_1 (jump
, nlabel
)
/* Snapshot the pending-change count so we can tell whether
   redirect_exp_1 queued anything for this jump.  */
2083 int ochanges
= num_validated_changes ();
2086 if (GET_CODE (PATTERN (jump
)) == PARALLEL
)
2087 loc
= &XVECEXP (PATTERN (jump
), 0, 0);
2089 loc
= &PATTERN (jump
);
2091 redirect_exp_1 (loc
, JUMP_LABEL (jump
), nlabel
, jump
);
/* Success iff at least one change was accrued; the group is NOT
   applied here -- the caller decides when to commit.  */
2092 return num_validated_changes () > ochanges
;
2095 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2096 jump target label is unused as a result, it and the code following
2099 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2102 The return value will be 1 if the change was made, 0 if it wasn't
2103 (this can only occur for NLABEL == 0). */
2106 redirect_jump (jump
, nlabel
, delete_unused
)
2110 register rtx olabel
= JUMP_LABEL (jump
);
/* Already pointing at NLABEL: nothing to do (the success return is
   elided in this extract).  */
2112 if (nlabel
== olabel
)
2115 if (! redirect_exp (olabel
, nlabel
, jump
))
/* The pattern was rewritten; keep JUMP_LABEL and the target's use
   count in sync with it.  */
2118 JUMP_LABEL (jump
) = nlabel
;
2120 ++LABEL_NUSES (nlabel
);
2122 /* If we're eliding the jump over exception cleanups at the end of a
2123 function, move the function end note so that -Wreturn-type works. */
2124 if (olabel
&& nlabel
2125 && NEXT_INSN (olabel
)
2126 && GET_CODE (NEXT_INSN (olabel
)) == NOTE
2127 && NOTE_LINE_NUMBER (NEXT_INSN (olabel
)) == NOTE_INSN_FUNCTION_END
)
2128 emit_note_after (NOTE_INSN_FUNCTION_END
, nlabel
);
/* Drop the old label if this was its last use and the caller asked
   for unused-label cleanup.  */
2130 if (olabel
&& --LABEL_NUSES (olabel
) == 0 && delete_unused
)
2131 delete_insn (olabel
);
2136 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2137 Accrue the modifications into the change group. */
2143 register RTX_CODE code
;
2144 rtx x
= pc_set (insn
);
2150 code
= GET_CODE (x
);
2152 if (code
== IF_THEN_ELSE
)
2154 register rtx comp
= XEXP (x
, 0);
2156 enum rtx_code reversed_code
;
2158 /* We can do this in two ways: The preferable way, which can only
2159 be done if this is not an integer comparison, is to reverse
2160 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2161 of the IF_THEN_ELSE. If we can't do either, fail. */
2163 reversed_code
= reversed_comparison_code (comp
, insn
);
2165 if (reversed_code
!= UNKNOWN
)
2167 validate_change (insn
, &XEXP (x
, 0),
2168 gen_rtx_fmt_ee (reversed_code
,
2169 GET_MODE (comp
), XEXP (comp
, 0),
2176 validate_change (insn
, &XEXP (x
, 1), XEXP (x
, 2), 1);
2177 validate_change (insn
, &XEXP (x
, 2), tem
, 1);
2183 /* Invert the jump condition of conditional jump insn, INSN.
2185 Return 1 if we can do so, 0 if we cannot find a way to do so that
2186 matches a pattern. */
2192 invert_exp_1 (insn
);
2193 if (num_validated_changes () == 0)
2196 return apply_change_group ();
2199 /* Invert the condition of the jump JUMP, and make it jump to label
2200 NLABEL instead of where it jumps now. Accrue changes into the
2201 change group. Return false if we didn't see how to perform the
2202 inversion and redirection. */
2205 invert_jump_1 (jump
, nlabel
)
/* If invert_exp_1 accrued no changes the inversion failed (the failure
   return is elided in this extract); otherwise chain into the label
   redirection, still within the same change group.  */
2210 ochanges
= num_validated_changes ();
2211 invert_exp_1 (jump
);
2212 if (num_validated_changes () == ochanges
)
2215 return redirect_jump_1 (jump
, nlabel
);
2218 /* Invert the condition of the jump JUMP, and make it jump to label
2219 NLABEL instead of where it jumps now. Return true if successful. */
2222 invert_jump (jump
, nlabel
, delete_unused
)
2226 /* We have to either invert the condition and change the label or
2227 do neither. Either operation could fail. We first try to invert
2228 the jump. If that succeeds, we try changing the label. If that fails,
2229 we invert the jump back to what it was. */
2231 if (! invert_exp (jump
))
2234 if (redirect_jump (jump
, nlabel
, delete_unused
))
/* Both steps succeeded: swap the branch-probability notes to match
   the inverted condition (success return elided in this extract).  */
2236 invert_br_probabilities (jump
);
/* Redirection failed: undo the inversion to restore the original jump.  */
2241 if (! invert_exp (jump
))
2242 /* This should just be putting it back the way it was. */
2249 /* Like rtx_equal_p except that it considers two REGs as equal
2250 if they renumber to the same value and considers two commutative
2251 operations to be the same if the order of the operands has been
2254 ??? Addition is not commutative on the PA due to the weird implicit
2255 space register selection rules for memory addresses. Therefore, we
2256 don't consider a + b == b + a.
2258 We could/should make this test a little tighter. Possibly only
2259 disabling it on the PA via some backend macro or only disabling this
2260 case when the PLUS is inside a MEM. */
2263 rtx_renumbered_equal_p (x
, y
)
2267 register RTX_CODE code
= GET_CODE (x
);
2268 register const char *fmt
;
2273 if ((code
== REG
|| (code
== SUBREG
&& GET_CODE (SUBREG_REG (x
)) == REG
))
2274 && (GET_CODE (y
) == REG
|| (GET_CODE (y
) == SUBREG
2275 && GET_CODE (SUBREG_REG (y
)) == REG
)))
2277 int reg_x
= -1, reg_y
= -1;
2278 int byte_x
= 0, byte_y
= 0;
2280 if (GET_MODE (x
) != GET_MODE (y
))
2283 /* If we haven't done any renumbering, don't
2284 make any assumptions. */
2285 if (reg_renumber
== 0)
2286 return rtx_equal_p (x
, y
);
2290 reg_x
= REGNO (SUBREG_REG (x
));
2291 byte_x
= SUBREG_BYTE (x
);
2293 if (reg_renumber
[reg_x
] >= 0)
2295 reg_x
= subreg_regno_offset (reg_renumber
[reg_x
],
2296 GET_MODE (SUBREG_REG (x
)),
2305 if (reg_renumber
[reg_x
] >= 0)
2306 reg_x
= reg_renumber
[reg_x
];
2309 if (GET_CODE (y
) == SUBREG
)
2311 reg_y
= REGNO (SUBREG_REG (y
));
2312 byte_y
= SUBREG_BYTE (y
);
2314 if (reg_renumber
[reg_y
] >= 0)
2316 reg_y
= subreg_regno_offset (reg_renumber
[reg_y
],
2317 GET_MODE (SUBREG_REG (y
)),
2326 if (reg_renumber
[reg_y
] >= 0)
2327 reg_y
= reg_renumber
[reg_y
];
2330 return reg_x
>= 0 && reg_x
== reg_y
&& byte_x
== byte_y
;
2333 /* Now we have disposed of all the cases
2334 in which different rtx codes can match. */
2335 if (code
!= GET_CODE (y
))
2347 return INTVAL (x
) == INTVAL (y
);
2350 /* We can't assume nonlocal labels have their following insns yet. */
2351 if (LABEL_REF_NONLOCAL_P (x
) || LABEL_REF_NONLOCAL_P (y
))
2352 return XEXP (x
, 0) == XEXP (y
, 0);
2354 /* Two label-refs are equivalent if they point at labels
2355 in the same position in the instruction stream. */
2356 return (next_real_insn (XEXP (x
, 0))
2357 == next_real_insn (XEXP (y
, 0)));
2360 return XSTR (x
, 0) == XSTR (y
, 0);
2363 /* If we didn't match EQ equality above, they aren't the same. */
2370 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2372 if (GET_MODE (x
) != GET_MODE (y
))
2375 /* For commutative operations, the RTX match if the operand match in any
2376 order. Also handle the simple binary and unary cases without a loop.
2378 ??? Don't consider PLUS a commutative operator; see comments above. */
2379 if ((code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
2381 return ((rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
2382 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)))
2383 || (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 1))
2384 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 0))));
2385 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
2386 return (rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0))
2387 && rtx_renumbered_equal_p (XEXP (x
, 1), XEXP (y
, 1)));
2388 else if (GET_RTX_CLASS (code
) == '1')
2389 return rtx_renumbered_equal_p (XEXP (x
, 0), XEXP (y
, 0));
2391 /* Compare the elements. If any pair of corresponding elements
2392 fail to match, return 0 for the whole things. */
2394 fmt
= GET_RTX_FORMAT (code
);
2395 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2401 if (XWINT (x
, i
) != XWINT (y
, i
))
2406 if (XINT (x
, i
) != XINT (y
, i
))
2411 if (XTREE (x
, i
) != XTREE (y
, i
))
2416 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
2421 if (! rtx_renumbered_equal_p (XEXP (x
, i
), XEXP (y
, i
)))
2426 if (XEXP (x
, i
) != XEXP (y
, i
))
2433 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
2435 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2436 if (!rtx_renumbered_equal_p (XVECEXP (x
, i
, j
), XVECEXP (y
, i
, j
)))
2447 /* If X is a hard register or equivalent to one or a subregister of one,
2448 return the hard register number. If X is a pseudo register that was not
2449 assigned a hard register, return the pseudo register number. Otherwise,
2450 return -1. Any rtx is valid for X. */
/* NOTE(review): the function header (presumably `int true_regnum (x)`) and
   the fall-through `return -1' are elided in this extract.  */
2456 if (GET_CODE (x
) == REG
)
/* A renumbered pseudo resolves to its assigned hard register
   (the plain-REGNO return for other REGs is elided here).  */
2458 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
&& reg_renumber
[REGNO (x
)] >= 0)
2459 return reg_renumber
[REGNO (x
)];
2462 if (GET_CODE (x
) == SUBREG
)
/* Recurse on the inner reg; if that lands on a hard register, adjust
   by the subreg's byte offset to get the precise hard regno.  */
2464 int base
= true_regnum (SUBREG_REG (x
));
2465 if (base
>= 0 && base
< FIRST_PSEUDO_REGISTER
)
2466 return base
+ subreg_regno_offset (REGNO (SUBREG_REG (x
)),
2467 GET_MODE (SUBREG_REG (x
)),
2468 SUBREG_BYTE (x
), GET_MODE (x
));
2473 /* Optimize code of the form:
2475 for (x = a[i]; x; ...)
2477 for (x = a[i]; x; ...)
2481 Loop optimize will change the above code into
2485 { ...; if (! (x = ...)) break; }
2488 { ...; if (! (x = ...)) break; }
2491 In general, if the first test fails, the program can branch
2492 directly to `foo' and skip the second try which is doomed to fail.
2493 We run this after loop optimization and before flow analysis. */
2495 /* When comparing the insn patterns, we track the fact that different
2496 pseudo-register numbers may have been used in each computation.
2497 The following array stores an equivalence -- same_regs[I] == J means
2498 that pseudo register I was used in the first set of tests in a context
2499 where J was used in the second set. We also count the number of such
2500 pending equivalences. If nonzero, the expressions really aren't the
2503 static int *same_regs
;
2505 static int num_same_regs
;
2507 /* Track any registers modified between the target of the first jump and
2508 the second jump. They never compare equal. */
2510 static char *modified_regs
;
2512 /* Record if memory was modified. */
/* File-scope scratch state shared by thread_jumps, mark_modified_reg and
   rtx_equal_for_thread_p below.  */
2514 static int modified_mem
2516 /* Called via note_stores on each insn between the target of the first
2517 branch and the second branch. It marks any changed registers. */
2520 mark_modified_reg (dest
, x
, data
)
2523 void *data ATTRIBUTE_UNUSED
;
/* Look through a SUBREG to the underlying register; MEM stores set
   modified_mem elsewhere (that assignment is elided in this extract)
   and anything that is not a REG is ignored.  */
2528 if (GET_CODE (dest
) == SUBREG
)
2529 dest
= SUBREG_REG (dest
);
2531 if (GET_CODE (dest
) == MEM
)
2534 if (GET_CODE (dest
) != REG
)
2537 regno
= REGNO (dest
);
/* A pseudo occupies exactly one slot in modified_regs ...  */
2538 if (regno
>= FIRST_PSEUDO_REGISTER
)
2539 modified_regs
[regno
] = 1;
2540 /* Don't consider a hard condition code register as modified,
2541 if it is only being set. thread_jumps will check if it is set
2542 to the same value. */
2543 else if (GET_MODE_CLASS (GET_MODE (dest
)) != MODE_CC
2544 || GET_CODE (x
) != SET
2545 || ! rtx_equal_p (dest
, SET_DEST (x
))
2546 || HARD_REGNO_NREGS (regno
, GET_MODE (dest
)) != 1)
/* ... while a hard register may span several consecutive regnos,
   all of which must be marked.  */
2547 for (i
= 0; i
< HARD_REGNO_NREGS (regno
, GET_MODE (dest
)); i
++)
2548 modified_regs
[regno
+ i
] = 1;
2551 /* F is the first insn in the chain of insns. */
2554 thread_jumps (f
, max_reg
, flag_before_loop
)
2557 int flag_before_loop
;
2559 /* Basic algorithm is to find a conditional branch,
2560 the label it may branch to, and the branch after
2561 that label. If the two branches test the same condition,
2562 walk back from both branch paths until the insn patterns
2563 differ, or code labels are hit. If we make it back to
2564 the target of the first branch, then we know that the first branch
2565 will either always succeed or always fail depending on the relative
2566 senses of the two branches. So adjust the first branch accordingly
2569 rtx label
, b1
, b2
, t1
, t2
;
2570 enum rtx_code code1
, code2
;
2571 rtx b1op0
, b1op1
, b2op0
, b2op1
;
2575 enum rtx_code reversed_code1
, reversed_code2
;
2577 /* Allocate register tables and quick-reset table. */
2578 modified_regs
= (char *) xmalloc (max_reg
* sizeof (char));
2579 same_regs
= (int *) xmalloc (max_reg
* sizeof (int));
2580 all_reset
= (int *) xmalloc (max_reg
* sizeof (int));
2581 for (i
= 0; i
< max_reg
; i
++)
2588 for (b1
= f
; b1
; b1
= NEXT_INSN (b1
))
2593 /* Get to a candidate branch insn. */
2594 if (GET_CODE (b1
) != JUMP_INSN
2595 || ! any_condjump_p (b1
) || JUMP_LABEL (b1
) == 0)
2598 memset (modified_regs
, 0, max_reg
* sizeof (char));
2601 memcpy (same_regs
, all_reset
, max_reg
* sizeof (int));
2604 label
= JUMP_LABEL (b1
);
2606 /* Look for a branch after the target. Record any registers and
2607 memory modified between the target and the branch. Stop when we
2608 get to a label since we can't know what was changed there. */
2609 for (b2
= NEXT_INSN (label
); b2
; b2
= NEXT_INSN (b2
))
2611 if (GET_CODE (b2
) == CODE_LABEL
)
2614 else if (GET_CODE (b2
) == JUMP_INSN
)
2616 /* If this is an unconditional jump and is the only use of
2617 its target label, we can follow it. */
2618 if (any_uncondjump_p (b2
)
2620 && JUMP_LABEL (b2
) != 0
2621 && LABEL_NUSES (JUMP_LABEL (b2
)) == 1)
2623 b2
= JUMP_LABEL (b2
);
2630 if (GET_CODE (b2
) != CALL_INSN
&& GET_CODE (b2
) != INSN
)
2633 if (GET_CODE (b2
) == CALL_INSN
)
2636 for (i
= 0; i
< FIRST_PSEUDO_REGISTER
; i
++)
2637 if (call_used_regs
[i
] && ! fixed_regs
[i
]
2638 && i
!= STACK_POINTER_REGNUM
2639 && i
!= FRAME_POINTER_REGNUM
2640 && i
!= HARD_FRAME_POINTER_REGNUM
2641 && i
!= ARG_POINTER_REGNUM
)
2642 modified_regs
[i
] = 1;
2645 note_stores (PATTERN (b2
), mark_modified_reg
, NULL
);
2648 /* Check the next candidate branch insn from the label
2651 || GET_CODE (b2
) != JUMP_INSN
2653 || !any_condjump_p (b2
)
2654 || !onlyjump_p (b2
))
2659 /* Get the comparison codes and operands, reversing the
2660 codes if appropriate. If we don't have comparison codes,
2661 we can't do anything. */
2662 b1op0
= XEXP (XEXP (SET_SRC (set
), 0), 0);
2663 b1op1
= XEXP (XEXP (SET_SRC (set
), 0), 1);
2664 code1
= GET_CODE (XEXP (SET_SRC (set
), 0));
2665 reversed_code1
= code1
;
2666 if (XEXP (SET_SRC (set
), 1) == pc_rtx
)
2667 code1
= reversed_comparison_code (XEXP (SET_SRC (set
), 0), b1
);
2669 reversed_code1
= reversed_comparison_code (XEXP (SET_SRC (set
), 0), b1
);
2671 b2op0
= XEXP (XEXP (SET_SRC (set2
), 0), 0);
2672 b2op1
= XEXP (XEXP (SET_SRC (set2
), 0), 1);
2673 code2
= GET_CODE (XEXP (SET_SRC (set2
), 0));
2674 reversed_code2
= code2
;
2675 if (XEXP (SET_SRC (set2
), 1) == pc_rtx
)
2676 code2
= reversed_comparison_code (XEXP (SET_SRC (set2
), 0), b2
);
2678 reversed_code2
= reversed_comparison_code (XEXP (SET_SRC (set2
), 0), b2
);
2680 /* If they test the same things and knowing that B1 branches
2681 tells us whether or not B2 branches, check if we
2682 can thread the branch. */
2683 if (rtx_equal_for_thread_p (b1op0
, b2op0
, b2
)
2684 && rtx_equal_for_thread_p (b1op1
, b2op1
, b2
)
2685 && (comparison_dominates_p (code1
, code2
)
2686 || comparison_dominates_p (code1
, reversed_code2
)))
2689 t1
= prev_nonnote_insn (b1
);
2690 t2
= prev_nonnote_insn (b2
);
2692 while (t1
!= 0 && t2
!= 0)
2696 /* We have reached the target of the first branch.
2697 If there are no pending register equivalents,
2698 we know that this branch will either always
2699 succeed (if the senses of the two branches are
2700 the same) or always fail (if not). */
2703 if (num_same_regs
!= 0)
2706 if (comparison_dominates_p (code1
, code2
))
2707 new_label
= JUMP_LABEL (b2
);
2709 new_label
= get_label_after (b2
);
2711 if (JUMP_LABEL (b1
) != new_label
)
2713 rtx prev
= PREV_INSN (new_label
);
2715 if (flag_before_loop
2716 && GET_CODE (prev
) == NOTE
2717 && NOTE_LINE_NUMBER (prev
) == NOTE_INSN_LOOP_BEG
)
2719 /* Don't thread to the loop label. If a loop
2720 label is reused, loop optimization will
2721 be disabled for that loop. */
2722 new_label
= gen_label_rtx ();
2723 emit_label_after (new_label
, PREV_INSN (prev
));
2725 changed
|= redirect_jump (b1
, new_label
, 1);
2730 /* If either of these is not a normal insn (it might be
2731 a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail. (NOTEs
2732 have already been skipped above.) Similarly, fail
2733 if the insns are different. */
2734 if (GET_CODE (t1
) != INSN
|| GET_CODE (t2
) != INSN
2735 || recog_memoized (t1
) != recog_memoized (t2
)
2736 || ! rtx_equal_for_thread_p (PATTERN (t1
),
2740 t1
= prev_nonnote_insn (t1
);
2741 t2
= prev_nonnote_insn (t2
);
2748 free (modified_regs
);
2753 /* This is like RTX_EQUAL_P except that it knows about our handling of
2754 possibly equivalent registers and knows to consider volatile and
2755 modified objects as not equal.
2757 YINSN is the insn containing Y. */
2760 rtx_equal_for_thread_p (x
, y
, yinsn
)
2766 register enum rtx_code code
;
2767 register const char *fmt
;
2769 code
= GET_CODE (x
);
2770 /* Rtx's of different codes cannot be equal. */
2771 if (code
!= GET_CODE (y
))
2774 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
2775 (REG:SI x) and (REG:HI x) are NOT equivalent. */
2777 if (GET_MODE (x
) != GET_MODE (y
))
2780 /* For floating-point, consider everything unequal. This is a bit
2781 pessimistic, but this pass would only rarely do anything for FP
2783 if (TARGET_FLOAT_FORMAT
== IEEE_FLOAT_FORMAT
2784 && FLOAT_MODE_P (GET_MODE (x
)) && ! flag_unsafe_math_optimizations
)
2787 /* For commutative operations, the RTX match if the operand match in any
2788 order. Also handle the simple binary and unary cases without a loop. */
2789 if (code
== EQ
|| code
== NE
|| GET_RTX_CLASS (code
) == 'c')
2790 return ((rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
2791 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
))
2792 || (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 1), yinsn
)
2793 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 0), yinsn
)));
2794 else if (GET_RTX_CLASS (code
) == '<' || GET_RTX_CLASS (code
) == '2')
2795 return (rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
)
2796 && rtx_equal_for_thread_p (XEXP (x
, 1), XEXP (y
, 1), yinsn
));
2797 else if (GET_RTX_CLASS (code
) == '1')
2798 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
2800 /* Handle special-cases first. */
2804 if (REGNO (x
) == REGNO (y
) && ! modified_regs
[REGNO (x
)])
2807 /* If neither is user variable or hard register, check for possible
2809 if (REG_USERVAR_P (x
) || REG_USERVAR_P (y
)
2810 || REGNO (x
) < FIRST_PSEUDO_REGISTER
2811 || REGNO (y
) < FIRST_PSEUDO_REGISTER
)
2814 if (same_regs
[REGNO (x
)] == -1)
2816 same_regs
[REGNO (x
)] = REGNO (y
);
2819 /* If this is the first time we are seeing a register on the `Y'
2820 side, see if it is the last use. If not, we can't thread the
2821 jump, so mark it as not equivalent. */
2822 if (REGNO_LAST_UID (REGNO (y
)) != INSN_UID (yinsn
))
2828 return (same_regs
[REGNO (x
)] == (int) REGNO (y
));
2833 /* If memory modified or either volatile, not equivalent.
2834 Else, check address. */
2835 if (modified_mem
|| MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
2838 return rtx_equal_for_thread_p (XEXP (x
, 0), XEXP (y
, 0), yinsn
);
2841 if (MEM_VOLATILE_P (x
) || MEM_VOLATILE_P (y
))
2847 /* Cancel a pending `same_regs' if setting equivalenced registers.
2848 Then process source. */
2849 if (GET_CODE (SET_DEST (x
)) == REG
2850 && GET_CODE (SET_DEST (y
)) == REG
)
2852 if (same_regs
[REGNO (SET_DEST (x
))] == (int) REGNO (SET_DEST (y
)))
2854 same_regs
[REGNO (SET_DEST (x
))] = -1;
2857 else if (REGNO (SET_DEST (x
)) != REGNO (SET_DEST (y
)))
2862 if (rtx_equal_for_thread_p (SET_DEST (x
), SET_DEST (y
), yinsn
) == 0)
2866 return rtx_equal_for_thread_p (SET_SRC (x
), SET_SRC (y
), yinsn
);
2869 return XEXP (x
, 0) == XEXP (y
, 0);
2872 return XSTR (x
, 0) == XSTR (y
, 0);
2881 fmt
= GET_RTX_FORMAT (code
);
2882 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2887 if (XWINT (x
, i
) != XWINT (y
, i
))
2893 if (XINT (x
, i
) != XINT (y
, i
))
2899 /* Two vectors must have the same length. */
2900 if (XVECLEN (x
, i
) != XVECLEN (y
, i
))
2903 /* And the corresponding elements must match. */
2904 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2905 if (rtx_equal_for_thread_p (XVECEXP (x
, i
, j
),
2906 XVECEXP (y
, i
, j
), yinsn
) == 0)
2911 if (rtx_equal_for_thread_p (XEXP (x
, i
), XEXP (y
, i
), yinsn
) == 0)
2917 if (strcmp (XSTR (x
, i
), XSTR (y
, i
)))
2922 /* These are just backpointers, so they don't matter. */
2929 /* It is believed that rtx's at this level will never
2930 contain anything but integers and other rtx's,
2931 except for within LABEL_REFs and SYMBOL_REFs. */