* Makefile.in (rtlanal.o): Depend on $(TM_P_H).
[official-gcc.git] / gcc / jump.c
blobc5a10b0a9be801bbe54d4f66d1325cdd5930a4a9
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
24 operate with jumps.
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
37 #include "config.h"
38 #include "system.h"
39 #include "rtl.h"
40 #include "tm_p.h"
41 #include "flags.h"
42 #include "hard-reg-set.h"
43 #include "regs.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
46 #include "recog.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "real.h"
50 #include "except.h"
51 #include "toplev.h"
52 #include "reload.h"
53 #include "predict.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info PARAMS ((rtx));
63 static void mark_all_labels PARAMS ((rtx));
64 static int duplicate_loop_exit_test PARAMS ((rtx));
65 static void delete_computation PARAMS ((rtx));
66 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
67 static int redirect_exp PARAMS ((rtx, rtx, rtx));
68 static void invert_exp_1 PARAMS ((rtx));
69 static int invert_exp PARAMS ((rtx));
70 static int returnjump_p_1 PARAMS ((rtx *, void *));
71 static void delete_prior_computation PARAMS ((rtx, rtx));
72 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
74 /* Alternate entry into the jump optimizer. This entry point only rebuilds
75 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
76 instructions. */
77 void
78 rebuild_jump_labels (f)
79 rtx f;
81 rtx insn;
82 int max_uid = 0;
84 max_uid = init_label_info (f) + 1;
86 mark_all_labels (f);
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
92 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
93 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
94 LABEL_NUSES (XEXP (insn, 0))++;
96 /* Keep track of labels used for marking handlers for exception
97 regions; they cannot usually be deleted. */
99 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
100 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
101 LABEL_NUSES (XEXP (insn, 0))++;
104 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
105 non-fallthru insn. This is not generally true, as multiple barriers
106 may have crept in, or the BARRIER may be separated from the last
107 real insn by one or more NOTEs.
109 This simple pass moves barriers and removes duplicates so that the
110 old code is happy.
112 void
113 cleanup_barriers ()
115 rtx insn, next, prev;
116 for (insn = get_insns (); insn; insn = next)
118 next = NEXT_INSN (insn);
119 if (GET_CODE (insn) == BARRIER)
121 prev = prev_nonnote_insn (insn);
122 if (GET_CODE (prev) == BARRIER)
123 delete_barrier (insn);
124 else if (prev != PREV_INSN (insn))
125 reorder_insns (insn, insn, prev);
130 void
131 copy_loop_headers (f)
132 rtx f;
134 rtx insn, next;
135 /* Now iterate optimizing jumps until nothing changes over one pass. */
136 for (insn = f; insn; insn = next)
138 rtx temp, temp1;
140 next = NEXT_INSN (insn);
142 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
143 jump. Try to optimize by duplicating the loop exit test if so.
144 This is only safe immediately after regscan, because it uses
145 the values of regno_first_uid and regno_last_uid. */
146 if (GET_CODE (insn) == NOTE
147 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
148 && (temp1 = next_nonnote_insn (insn)) != 0
149 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
151 temp = PREV_INSN (insn);
152 if (duplicate_loop_exit_test (insn))
154 next = NEXT_INSN (temp);
160 void
161 purge_line_number_notes (f)
162 rtx f;
164 rtx last_note = 0;
165 rtx insn;
166 /* Delete extraneous line number notes.
167 Note that two consecutive notes for different lines are not really
168 extraneous. There should be some indication where that line belonged,
169 even if it became empty. */
171 for (insn = f; insn; insn = NEXT_INSN (insn))
172 if (GET_CODE (insn) == NOTE)
174 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
175 /* Any previous line note was for the prologue; gdb wants a new
176 note after the prologue even if it is for the same line. */
177 last_note = NULL_RTX;
178 else if (NOTE_LINE_NUMBER (insn) >= 0)
180 /* Delete this note if it is identical to previous note. */
181 if (last_note
182 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
183 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
185 delete_related_insns (insn);
186 continue;
189 last_note = insn;
194 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
195 notes whose labels don't occur in the insn any more. Returns the
196 largest INSN_UID found. */
197 static int
198 init_label_info (f)
199 rtx f;
201 int largest_uid = 0;
202 rtx insn;
204 for (insn = f; insn; insn = NEXT_INSN (insn))
206 if (GET_CODE (insn) == CODE_LABEL)
207 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
208 else if (GET_CODE (insn) == JUMP_INSN)
209 JUMP_LABEL (insn) = 0;
210 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
212 rtx note, next;
214 for (note = REG_NOTES (insn); note; note = next)
216 next = XEXP (note, 1);
217 if (REG_NOTE_KIND (note) == REG_LABEL
218 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
219 remove_note (insn, note);
222 if (INSN_UID (insn) > largest_uid)
223 largest_uid = INSN_UID (insn);
226 return largest_uid;
229 /* Mark the label each jump jumps to.
230 Combine consecutive labels, and count uses of labels. */
232 static void
233 mark_all_labels (f)
234 rtx f;
236 rtx insn;
238 for (insn = f; insn; insn = NEXT_INSN (insn))
239 if (INSN_P (insn))
241 if (GET_CODE (insn) == CALL_INSN
242 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
244 mark_all_labels (XEXP (PATTERN (insn), 0));
245 mark_all_labels (XEXP (PATTERN (insn), 1));
246 mark_all_labels (XEXP (PATTERN (insn), 2));
248 /* Canonicalize the tail recursion label attached to the
249 CALL_PLACEHOLDER insn. */
250 if (XEXP (PATTERN (insn), 3))
252 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
253 XEXP (PATTERN (insn), 3));
254 mark_jump_label (label_ref, insn, 0);
255 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
258 continue;
261 mark_jump_label (PATTERN (insn), insn, 0);
262 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
264 /* When we know the LABEL_REF contained in a REG used in
265 an indirect jump, we'll have a REG_LABEL note so that
266 flow can tell where it's going. */
267 if (JUMP_LABEL (insn) == 0)
269 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
270 if (label_note)
272 /* But a LABEL_REF around the REG_LABEL note, so
273 that we can canonicalize it. */
274 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
275 XEXP (label_note, 0));
277 mark_jump_label (label_ref, insn, 0);
278 XEXP (label_note, 0) = XEXP (label_ref, 0);
279 JUMP_LABEL (insn) = XEXP (label_note, 0);
286 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
287 jump. Assume that this unconditional jump is to the exit test code. If
288 the code is sufficiently simple, make a copy of it before INSN,
289 followed by a jump to the exit of the loop. Then delete the unconditional
290 jump after INSN.
292 Return 1 if we made the change, else 0.
294 This is only safe immediately after a regscan pass because it uses the
295 values of regno_first_uid and regno_last_uid. */
297 static int
298 duplicate_loop_exit_test (loop_start)
299 rtx loop_start;
301 rtx insn, set, reg, p, link;
302 rtx copy = 0, first_copy = 0;
303 int num_insns = 0;
/* The exit-test code starts right after the label the initial jump targets. */
304 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
305 rtx lastexit;
306 int max_reg = max_reg_num ();
307 rtx *reg_map = 0;
/* Label for the new pre-header, so loop entries can be redirected before
   the LOOP_BEG note.  */
308 rtx loop_pre_header_label;
310 /* Scan the exit code. We do not perform this optimization if any insn:
312 is a CALL_INSN
313 is a CODE_LABEL
314 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
315 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
316 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
317 is not valid.
319 We also do not do this if we find an insn with ASM_OPERANDS. While
320 this restriction should not be necessary, copying an insn with
321 ASM_OPERANDS can confuse asm_noperands in some cases.
323 Also, don't do this if the exit code is more than 20 insns. */
/* Phase 1: vet every insn up to the LOOP_END note; bail out (return 0)
   on anything from the list above.  */
325 for (insn = exitcode;
326 insn
327 && ! (GET_CODE (insn) == NOTE
328 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
329 insn = NEXT_INSN (insn))
331 switch (GET_CODE (insn))
333 case CODE_LABEL:
334 case CALL_INSN:
335 return 0;
336 case NOTE:
337 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
338 a jump immediately after the loop start that branches outside
339 the loop but within an outer loop, near the exit test.
340 If we copied this exit test and created a phony
341 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
342 before the exit test look like these could be safely moved
343 out of the loop even if they actually may be never executed.
344 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
346 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
347 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
348 return 0;
350 if (optimize < 2
351 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
352 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
353 /* If we were to duplicate this code, we would not move
354 the BLOCK notes, and so debugging the moved code would
355 be difficult. Thus, we only move the code with -O2 or
356 higher. */
357 return 0;
359 break;
360 case JUMP_INSN:
361 case INSN:
362 /* The code below would grossly mishandle REG_WAS_0 notes,
363 so get rid of them here. */
364 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
365 remove_note (insn, p);
366 if (++num_insns > 20
367 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
368 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
369 return 0;
370 break;
371 default:
372 break;
376 /* Unless INSN is zero, we can do the optimization. */
377 if (insn == 0)
378 return 0;
380 lastexit = insn;
382 /* See if any insn sets a register only used in the loop exit code and
383 not a user variable. If so, replace it with a new register. */
/* Phase 2: registers born and dying entirely inside the exit-test range
   get a fresh pseudo in the copy, recorded in reg_map.  */
384 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
385 if (GET_CODE (insn) == INSN
386 && (set = single_set (insn)) != 0
387 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
388 || (GET_CODE (reg) == SUBREG
389 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
390 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
391 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
393 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
394 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
395 break;
397 if (p != lastexit)
399 /* We can do the replacement. Allocate reg_map if this is the
400 first replacement we found. */
401 if (reg_map == 0)
402 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx))
404 REG_LOOP_TEST_P (reg) = 1;
406 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
409 loop_pre_header_label = gen_label_rtx ();
411 /* Now copy each insn. */
/* Phase 3: emit a copy of each exit-test insn before LOOP_START,
   remapping registers via reg_map and re-marking jump labels.  */
412 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
414 switch (GET_CODE (insn))
416 case BARRIER:
417 copy = emit_barrier_before (loop_start);
418 break;
419 case NOTE:
420 /* Only copy line-number notes. */
421 if (NOTE_LINE_NUMBER (insn) >= 0)
423 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
424 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
426 break;
428 case INSN:
429 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
430 if (reg_map)
431 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
433 mark_jump_label (PATTERN (copy), copy, 0);
435 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
436 make them. */
437 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
438 if (REG_NOTE_KIND (link) != REG_LABEL)
440 if (GET_CODE (link) == EXPR_LIST)
441 REG_NOTES (copy)
442 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
443 XEXP (link, 0),
444 REG_NOTES (copy)));
445 else
446 REG_NOTES (copy)
447 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
448 XEXP (link, 0),
449 REG_NOTES (copy)));
452 if (reg_map && REG_NOTES (copy))
453 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
454 break;
456 case JUMP_INSN:
457 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
458 loop_start);
459 if (reg_map)
460 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
461 mark_jump_label (PATTERN (copy), copy, 0);
462 if (REG_NOTES (insn))
464 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
465 if (reg_map)
466 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
469 /* Predict conditional jump that do make loop looping as taken.
470 Other jumps are probably exit conditions, so predict
471 them as untaken. */
472 if (any_condjump_p (copy))
474 rtx label = JUMP_LABEL (copy);
475 if (label)
477 /* The jump_insn after loop_start should be followed
478 by barrier and loopback label. */
479 if (prev_nonnote_insn (label)
480 && (prev_nonnote_insn (prev_nonnote_insn (label))
481 == next_nonnote_insn (loop_start)))
483 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
484 /* To keep pre-header, we need to redirect all loop
485 entrances before the LOOP_BEG note. */
486 redirect_jump (copy, loop_pre_header_label, 0);
488 else
489 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
492 break;
494 default:
495 abort ();
498 /* Record the first insn we copied. We need it so that we can
499 scan the copied insns for new pseudo registers. */
500 if (! first_copy)
501 first_copy = copy;
504 /* Now clean up by emitting a jump to the end label and deleting the jump
505 at the start of the loop. */
506 if (! copy || GET_CODE (copy) != BARRIER)
508 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
509 loop_start);
511 /* Record the first insn we copied. We need it so that we can
512 scan the copied insns for new pseudo registers. This may not
513 be strictly necessary since we should have copied at least one
514 insn above. But I am going to be safe. */
515 if (! first_copy)
516 first_copy = copy;
518 mark_jump_label (PATTERN (copy), copy, 0);
519 emit_barrier_before (loop_start);
522 emit_label_before (loop_pre_header_label, loop_start);
524 /* Now scan from the first insn we copied to the last insn we copied
525 (copy) for new pseudo registers. Do this after the code to jump to
526 the end label since that might create a new pseudo too. */
527 reg_scan_update (first_copy, copy, max_reg);
529 /* Mark the exit code as the virtual top of the converted loop. */
530 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
532 delete_related_insns (next_nonnote_insn (loop_start));
534 /* Clean up. */
535 if (reg_map)
536 free (reg_map);
538 return 1;
541 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
542 notes between START and END out before START. START and END may be such
543 notes. Returns the values of the new starting and ending insns, which
544 may be different if the original ones were such notes. */
546 void
547 squeeze_notes (startp, endp)
548 rtx* startp;
549 rtx* endp;
551 rtx start = *startp;
552 rtx end = *endp;
554 rtx insn;
555 rtx next;
/* Last non-moved (real) insn seen; becomes the new END.  */
556 rtx last = NULL;
557 rtx past_end = NEXT_INSN (end);
/* Walk the whole range; each block/loop note found is unlinked from its
   position and relinked just before the current START.  */
559 for (insn = start; insn != past_end; insn = next)
561 next = NEXT_INSN (insn);
562 if (GET_CODE (insn) == NOTE
563 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
564 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
565 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
566 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
567 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
568 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
570 if (insn == start)
571 start = next;
572 else
/* Splice INSN out of its place and back in immediately before START,
   by direct PREV_INSN/NEXT_INSN surgery.  */
574 rtx prev = PREV_INSN (insn);
575 PREV_INSN (insn) = PREV_INSN (start);
576 NEXT_INSN (insn) = start;
577 NEXT_INSN (PREV_INSN (insn)) = insn;
578 PREV_INSN (NEXT_INSN (insn)) = insn;
579 NEXT_INSN (prev) = next;
580 PREV_INSN (next) = prev;
583 else
584 last = insn;
587 /* There were no real instructions, and we can't represent an empty
588 range. Die. */
589 if (start == past_end)
590 abort ();
592 end = last;
594 *startp = start;
595 *endp = end;
598 /* Return the label before INSN, or put a new label there. */
601 get_label_before (insn)
602 rtx insn;
604 rtx label;
606 /* Find an existing label at this point
607 or make a new one if there is none. */
608 label = prev_nonnote_insn (insn);
610 if (label == 0 || GET_CODE (label) != CODE_LABEL)
612 rtx prev = PREV_INSN (insn);
614 label = gen_label_rtx ();
615 emit_label_after (label, prev);
616 LABEL_NUSES (label) = 0;
618 return label;
621 /* Return the label after INSN, or put a new label there. */
624 get_label_after (insn)
625 rtx insn;
627 rtx label;
629 /* Find an existing label at this point
630 or make a new one if there is none. */
631 label = next_nonnote_insn (insn);
633 if (label == 0 || GET_CODE (label) != CODE_LABEL)
635 label = gen_label_rtx ();
636 emit_label_after (label, insn);
637 LABEL_NUSES (label) = 0;
639 return label;
642 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
643 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
644 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
645 know whether it's source is floating point or integer comparison. Machine
646 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
647 to help this function avoid overhead in these cases. */
648 enum rtx_code
649 reversed_comparison_code_parts (code, arg0, arg1, insn)
650 rtx insn, arg0, arg1;
651 enum rtx_code code;
653 enum machine_mode mode;
655 /* If this is not actually a comparison, we can't reverse it. */
656 if (GET_RTX_CLASS (code) != '<')
657 return UNKNOWN;
659 mode = GET_MODE (arg0);
660 if (mode == VOIDmode)
661 mode = GET_MODE (arg1);
663 /* First see if machine description supply us way to reverse the comparison.
664 Give it priority over everything else to allow machine description to do
665 tricks. */
666 #ifdef REVERSIBLE_CC_MODE
667 if (GET_MODE_CLASS (mode) == MODE_CC
668 && REVERSIBLE_CC_MODE (mode))
670 #ifdef REVERSE_CONDITION
671 return REVERSE_CONDITION (code, mode);
672 #endif
673 return reverse_condition (code);
675 #endif
677 /* Try a few special cases based on the comparison code. */
678 switch (code)
680 case GEU:
681 case GTU:
682 case LEU:
683 case LTU:
684 case NE:
685 case EQ:
686 /* It is always safe to reverse EQ and NE, even for the floating
687 point. Similary the unsigned comparisons are never used for
688 floating point so we can reverse them in the default way. */
689 return reverse_condition (code);
690 case ORDERED:
691 case UNORDERED:
692 case LTGT:
693 case UNEQ:
694 /* In case we already see unordered comparison, we can be sure to
695 be dealing with floating point so we don't need any more tests. */
696 return reverse_condition_maybe_unordered (code);
697 case UNLT:
698 case UNLE:
699 case UNGT:
700 case UNGE:
701 /* We don't have safe way to reverse these yet. */
702 return UNKNOWN;
703 default:
704 break;
707 /* In case we give up IEEE compatibility, all comparisons are reversible. */
708 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
709 || flag_unsafe_math_optimizations)
710 return reverse_condition (code);
712 if (GET_MODE_CLASS (mode) == MODE_CC
713 #ifdef HAVE_cc0
714 || arg0 == cc0_rtx
715 #endif
718 rtx prev;
719 /* Try to search for the comparison to determine the real mode.
720 This code is expensive, but with sane machine description it
721 will be never used, since REVERSIBLE_CC_MODE will return true
722 in all cases. */
723 if (! insn)
724 return UNKNOWN;
726 for (prev = prev_nonnote_insn (insn);
727 prev != 0 && GET_CODE (prev) != CODE_LABEL;
728 prev = prev_nonnote_insn (prev))
730 rtx set = set_of (arg0, prev);
731 if (set && GET_CODE (set) == SET
732 && rtx_equal_p (SET_DEST (set), arg0))
734 rtx src = SET_SRC (set);
736 if (GET_CODE (src) == COMPARE)
738 rtx comparison = src;
739 arg0 = XEXP (src, 0);
740 mode = GET_MODE (arg0);
741 if (mode == VOIDmode)
742 mode = GET_MODE (XEXP (comparison, 1));
743 break;
745 /* We can get past reg-reg moves. This may be usefull for model
746 of i387 comparisons that first move flag registers around. */
747 if (REG_P (src))
749 arg0 = src;
750 continue;
753 /* If register is clobbered in some ununderstandable way,
754 give up. */
755 if (set)
756 return UNKNOWN;
760 /* An integer condition. */
761 if (GET_CODE (arg0) == CONST_INT
762 || (GET_MODE (arg0) != VOIDmode
763 && GET_MODE_CLASS (mode) != MODE_CC
764 && ! FLOAT_MODE_P (mode)))
765 return reverse_condition (code);
767 return UNKNOWN;
770 /* An wrapper around the previous function to take COMPARISON as rtx
771 expression. This simplifies many callers. */
772 enum rtx_code
773 reversed_comparison_code (comparison, insn)
774 rtx comparison, insn;
776 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
777 return UNKNOWN;
778 return reversed_comparison_code_parts (GET_CODE (comparison),
779 XEXP (comparison, 0),
780 XEXP (comparison, 1), insn);
783 /* Given an rtx-code for a comparison, return the code for the negated
784 comparison. If no such code exists, return UNKNOWN.
786 WATCH OUT! reverse_condition is not safe to use on a jump that might
787 be acting on the results of an IEEE floating point comparison, because
788 of the special treatment of non-signaling nans in comparisons.
789 Use reversed_comparison_code instead. */
791 enum rtx_code
792 reverse_condition (code)
793 enum rtx_code code;
795 switch (code)
797 case EQ:
798 return NE;
799 case NE:
800 return EQ;
801 case GT:
802 return LE;
803 case GE:
804 return LT;
805 case LT:
806 return GE;
807 case LE:
808 return GT;
809 case GTU:
810 return LEU;
811 case GEU:
812 return LTU;
813 case LTU:
814 return GEU;
815 case LEU:
816 return GTU;
817 case UNORDERED:
818 return ORDERED;
819 case ORDERED:
820 return UNORDERED;
822 case UNLT:
823 case UNLE:
824 case UNGT:
825 case UNGE:
826 case UNEQ:
827 case LTGT:
828 return UNKNOWN;
830 default:
831 abort ();
835 /* Similar, but we're allowed to generate unordered comparisons, which
836 makes it safe for IEEE floating-point. Of course, we have to recognize
837 that the target will support them too... */
839 enum rtx_code
840 reverse_condition_maybe_unordered (code)
841 enum rtx_code code;
843 /* Non-IEEE formats don't have unordered conditions. */
844 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
845 return reverse_condition (code);
847 switch (code)
849 case EQ:
850 return NE;
851 case NE:
852 return EQ;
853 case GT:
854 return UNLE;
855 case GE:
856 return UNLT;
857 case LT:
858 return UNGE;
859 case LE:
860 return UNGT;
861 case LTGT:
862 return UNEQ;
863 case UNORDERED:
864 return ORDERED;
865 case ORDERED:
866 return UNORDERED;
867 case UNLT:
868 return GE;
869 case UNLE:
870 return GT;
871 case UNGT:
872 return LE;
873 case UNGE:
874 return LT;
875 case UNEQ:
876 return LTGT;
878 default:
879 abort ();
883 /* Similar, but return the code when two operands of a comparison are swapped.
884 This IS safe for IEEE floating-point. */
886 enum rtx_code
887 swap_condition (code)
888 enum rtx_code code;
890 switch (code)
892 case EQ:
893 case NE:
894 case UNORDERED:
895 case ORDERED:
896 case UNEQ:
897 case LTGT:
898 return code;
900 case GT:
901 return LT;
902 case GE:
903 return LE;
904 case LT:
905 return GT;
906 case LE:
907 return GE;
908 case GTU:
909 return LTU;
910 case GEU:
911 return LEU;
912 case LTU:
913 return GTU;
914 case LEU:
915 return GEU;
916 case UNLT:
917 return UNGT;
918 case UNLE:
919 return UNGE;
920 case UNGT:
921 return UNLT;
922 case UNGE:
923 return UNLE;
925 default:
926 abort ();
930 /* Given a comparison CODE, return the corresponding unsigned comparison.
931 If CODE is an equality comparison or already an unsigned comparison,
932 CODE is returned. */
934 enum rtx_code
935 unsigned_condition (code)
936 enum rtx_code code;
938 switch (code)
940 case EQ:
941 case NE:
942 case GTU:
943 case GEU:
944 case LTU:
945 case LEU:
946 return code;
948 case GT:
949 return GTU;
950 case GE:
951 return GEU;
952 case LT:
953 return LTU;
954 case LE:
955 return LEU;
957 default:
958 abort ();
962 /* Similarly, return the signed version of a comparison. */
964 enum rtx_code
965 signed_condition (code)
966 enum rtx_code code;
968 switch (code)
970 case EQ:
971 case NE:
972 case GT:
973 case GE:
974 case LT:
975 case LE:
976 return code;
978 case GTU:
979 return GT;
980 case GEU:
981 return GE;
982 case LTU:
983 return LT;
984 case LEU:
985 return LE;
987 default:
988 abort ();
992 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
993 truth of CODE1 implies the truth of CODE2. */
996 comparison_dominates_p (code1, code2)
997 enum rtx_code code1, code2;
999 /* UNKNOWN comparison codes can happen as a result of trying to revert
1000 comparison codes.
1001 They can't match anything, so we have to reject them here. */
1002 if (code1 == UNKNOWN || code2 == UNKNOWN)
1003 return 0;
1005 if (code1 == code2)
1006 return 1;
1008 switch (code1)
1010 case UNEQ:
1011 if (code2 == UNLE || code2 == UNGE)
1012 return 1;
1013 break;
1015 case EQ:
1016 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1017 || code2 == ORDERED)
1018 return 1;
1019 break;
1021 case UNLT:
1022 if (code2 == UNLE || code2 == NE)
1023 return 1;
1024 break;
1026 case LT:
1027 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1028 return 1;
1029 break;
1031 case UNGT:
1032 if (code2 == UNGE || code2 == NE)
1033 return 1;
1034 break;
1036 case GT:
1037 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1038 return 1;
1039 break;
1041 case GE:
1042 case LE:
1043 if (code2 == ORDERED)
1044 return 1;
1045 break;
1047 case LTGT:
1048 if (code2 == NE || code2 == ORDERED)
1049 return 1;
1050 break;
1052 case LTU:
1053 if (code2 == LEU || code2 == NE)
1054 return 1;
1055 break;
1057 case GTU:
1058 if (code2 == GEU || code2 == NE)
1059 return 1;
1060 break;
1062 case UNORDERED:
1063 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1064 || code2 == UNGE || code2 == UNGT)
1065 return 1;
1066 break;
1068 default:
1069 break;
1072 return 0;
1075 /* Return 1 if INSN is an unconditional jump and nothing else. */
1078 simplejump_p (insn)
1079 rtx insn;
1081 return (GET_CODE (insn) == JUMP_INSN
1082 && GET_CODE (PATTERN (insn)) == SET
1083 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1084 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1087 /* Return nonzero if INSN is a (possibly) conditional jump
1088 and nothing more.
1090 Use this function is deprecated, since we need to support combined
1091 branch and compare insns. Use any_condjump_p instead whenever possible. */
1094 condjump_p (insn)
1095 rtx insn;
1097 rtx x = PATTERN (insn);
1099 if (GET_CODE (x) != SET
1100 || GET_CODE (SET_DEST (x)) != PC)
1101 return 0;
1103 x = SET_SRC (x);
1104 if (GET_CODE (x) == LABEL_REF)
1105 return 1;
1106 else
1107 return (GET_CODE (x) == IF_THEN_ELSE
1108 && ((GET_CODE (XEXP (x, 2)) == PC
1109 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1110 || GET_CODE (XEXP (x, 1)) == RETURN))
1111 || (GET_CODE (XEXP (x, 1)) == PC
1112 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1113 || GET_CODE (XEXP (x, 2)) == RETURN))));
1115 return 0;
1118 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1119 PARALLEL.
1121 Use this function is deprecated, since we need to support combined
1122 branch and compare insns. Use any_condjump_p instead whenever possible. */
1125 condjump_in_parallel_p (insn)
1126 rtx insn;
1128 rtx x = PATTERN (insn);
1130 if (GET_CODE (x) != PARALLEL)
1131 return 0;
1132 else
1133 x = XVECEXP (x, 0, 0);
1135 if (GET_CODE (x) != SET)
1136 return 0;
1137 if (GET_CODE (SET_DEST (x)) != PC)
1138 return 0;
1139 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1140 return 1;
1141 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1142 return 0;
1143 if (XEXP (SET_SRC (x), 2) == pc_rtx
1144 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1145 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1146 return 1;
1147 if (XEXP (SET_SRC (x), 1) == pc_rtx
1148 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1149 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1150 return 1;
1151 return 0;
1154 /* Return set of PC, otherwise NULL. */
1157 pc_set (insn)
1158 rtx insn;
1160 rtx pat;
1161 if (GET_CODE (insn) != JUMP_INSN)
1162 return NULL_RTX;
1163 pat = PATTERN (insn);
1165 /* The set is allowed to appear either as the insn pattern or
1166 the first set in a PARALLEL. */
1167 if (GET_CODE (pat) == PARALLEL)
1168 pat = XVECEXP (pat, 0, 0);
1169 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1170 return pat;
1172 return NULL_RTX;
1175 /* Return true when insn is an unconditional direct jump,
1176 possibly bundled inside a PARALLEL. */
1179 any_uncondjump_p (insn)
1180 rtx insn;
1182 rtx x = pc_set (insn);
1183 if (!x)
1184 return 0;
1185 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1186 return 0;
1187 return 1;
1190 /* Return true when insn is a conditional jump. This function works for
1191 instructions containing PC sets in PARALLELs. The instruction may have
1192 various other effects so before removing the jump you must verify
1193 onlyjump_p.
1195 Note that unlike condjump_p it returns false for unconditional jumps. */
1198 any_condjump_p (insn)
1199 rtx insn;
1201 rtx x = pc_set (insn);
1202 enum rtx_code a, b;
1204 if (!x)
1205 return 0;
1206 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1207 return 0;
1209 a = GET_CODE (XEXP (SET_SRC (x), 1));
1210 b = GET_CODE (XEXP (SET_SRC (x), 2));
1212 return ((b == PC && (a == LABEL_REF || a == RETURN))
1213 || (a == PC && (b == LABEL_REF || b == RETURN)));
1216 /* Return the label of a conditional jump. */
1219 condjump_label (insn)
1220 rtx insn;
1222 rtx x = pc_set (insn);
1224 if (!x)
1225 return NULL_RTX;
1226 x = SET_SRC (x);
1227 if (GET_CODE (x) == LABEL_REF)
1228 return x;
1229 if (GET_CODE (x) != IF_THEN_ELSE)
1230 return NULL_RTX;
1231 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1232 return XEXP (x, 1);
1233 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1234 return XEXP (x, 2);
1235 return NULL_RTX;
1238 /* Return true if INSN is a (possibly conditional) return insn. */
1240 static int
1241 returnjump_p_1 (loc, data)
1242 rtx *loc;
1243 void *data ATTRIBUTE_UNUSED;
1245 rtx x = *loc;
1246 return x && GET_CODE (x) == RETURN;
1250 returnjump_p (insn)
1251 rtx insn;
1253 if (GET_CODE (insn) != JUMP_INSN)
1254 return 0;
1255 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1258 /* Return true if INSN is a jump that only transfers control and
1259 nothing more. */
1262 onlyjump_p (insn)
1263 rtx insn;
1265 rtx set;
1267 if (GET_CODE (insn) != JUMP_INSN)
1268 return 0;
1270 set = single_set (insn);
1271 if (set == NULL)
1272 return 0;
1273 if (GET_CODE (SET_DEST (set)) != PC)
1274 return 0;
1275 if (side_effects_p (SET_SRC (set)))
1276 return 0;
1278 return 1;
1281 #ifdef HAVE_cc0
1283 /* Return non-zero if X is an RTX that only sets the condition codes
1284 and has no side effects. */
1287 only_sets_cc0_p (x)
1288 rtx x;
1291 if (! x)
1292 return 0;
1294 if (INSN_P (x))
1295 x = PATTERN (x);
1297 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1300 /* Return 1 if X is an RTX that does nothing but set the condition codes
1301 and CLOBBER or USE registers.
1302 Return -1 if X does explicitly set the condition codes,
1303 but also does other things. */
1306 sets_cc0_p (x)
1307 rtx x;
1310 if (! x)
1311 return 0;
1313 if (INSN_P (x))
1314 x = PATTERN (x);
1316 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1317 return 1;
1318 if (GET_CODE (x) == PARALLEL)
1320 int i;
1321 int sets_cc0 = 0;
1322 int other_things = 0;
1323 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1325 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1326 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1327 sets_cc0 = 1;
1328 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1329 other_things = 1;
1331 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1333 return 0;
1335 #endif
1337 /* Follow any unconditional jump at LABEL;
1338 return the ultimate label reached by any such chain of jumps.
1339 If LABEL is not followed by a jump, return LABEL.
1340 If the chain loops or we can't find end, return LABEL,
1341 since that tells caller to avoid changing the insn.
1343 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1344 a USE or CLOBBER. */
1347 follow_jumps (label)
1348 rtx label;
1350 rtx insn;
1351 rtx next;
1352 rtx value = label;
1353 int depth;
/* Chain through at most 10 hops.  Each hop requires an active insn
   after VALUE that is a simple unconditional jump (or a RETURN
   pattern) immediately followed by a BARRIER.  */
1355 for (depth = 0;
1356 (depth < 10
1357 && (insn = next_active_insn (value)) != 0
1358 && GET_CODE (insn) == JUMP_INSN
1359 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1360 && onlyjump_p (insn))
1361 || GET_CODE (PATTERN (insn)) == RETURN)
1362 && (next = NEXT_INSN (insn))
1363 && GET_CODE (next) == BARRIER);
1364 depth++)
1366 /* Don't chain through the insn that jumps into a loop
1367 from outside the loop,
1368 since that would create multiple loop entry jumps
1369 and prevent loop optimization. */
1370 rtx tem;
1371 if (!reload_completed)
1372 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1373 if (GET_CODE (tem) == NOTE
1374 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1375 /* ??? Optional. Disables some optimizations, but makes
1376 gcov output more accurate with -O. */
1377 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1378 return value;
1380 /* If we have found a cycle, make the insn jump to itself. */
1381 if (JUMP_LABEL (insn) == label)
1382 return label;
/* Don't chain into a dispatch table: its label must be preserved.  */
1384 tem = next_active_insn (JUMP_LABEL (insn));
1385 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1386 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1387 break;
1389 value = JUMP_LABEL (insn);
/* Hitting the depth limit is treated as "chain too long / possible
   loop": tell the caller not to change anything.  */
1391 if (depth == 10)
1392 return label;
1393 return value;
1397 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1398 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1399 in INSN, then store one of them in JUMP_LABEL (INSN).
1400 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1401 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1402 Also, when there are consecutive labels, canonicalize on the last of them.
1404 Note that two labels separated by a loop-beginning note
1405 must be kept distinct if we have not yet done loop-optimization,
1406 because the gap between them is where loop-optimize
1407 will want to move invariant code to. CROSS_JUMP tells us
1408 that loop-optimization is done with. */
1410 void
1411 mark_jump_label (x, insn, in_mem)
1412 rtx x;
1413 rtx insn;
1414 int in_mem;
1416 RTX_CODE code = GET_CODE (x);
1417 int i;
1418 const char *fmt;
1420 switch (code)
/* These codes can contain no labels; stop recursing.  */
1422 case PC:
1423 case CC0:
1424 case REG:
1425 case SUBREG:
1426 case CONST_INT:
1427 case CONST_DOUBLE:
1428 case CLOBBER:
1429 case CALL:
1430 return;
1432 case MEM:
1433 in_mem = 1;
1434 break;
/* A SYMBOL_REF matters only inside a MEM, where it may address a
   constant-pool entry that itself mentions a label.  */
1436 case SYMBOL_REF:
1437 if (!in_mem)
1438 return;
1440 /* If this is a constant-pool reference, see if it is a label. */
1441 if (CONSTANT_POOL_ADDRESS_P (x))
1442 mark_jump_label (get_pool_constant (x), insn, in_mem);
1443 break;
1445 case LABEL_REF:
1447 rtx label = XEXP (x, 0);
1449 /* Ignore remaining references to unreachable labels that
1450 have been deleted. */
1451 if (GET_CODE (label) == NOTE
1452 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1453 break;
1455 if (GET_CODE (label) != CODE_LABEL)
1456 abort ();
1458 /* Ignore references to labels of containing functions. */
1459 if (LABEL_REF_NONLOCAL_P (x))
1460 break;
1462 XEXP (x, 0) = label;
1463 if (! insn || ! INSN_DELETED_P (insn))
1464 ++LABEL_NUSES (label);
1466 if (insn)
1468 if (GET_CODE (insn) == JUMP_INSN)
1469 JUMP_LABEL (insn) = label;
1470 else
1472 /* Add a REG_LABEL note for LABEL unless there already
1473 is one. All uses of a label, except for labels
1474 that are the targets of jumps, must have a
1475 REG_LABEL note. */
1476 if (! find_reg_note (insn, REG_LABEL, label))
1477 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1478 REG_NOTES (insn));
1481 return;
1484 /* Do walk the labels in a vector, but not the first operand of an
1485 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1486 case ADDR_VEC:
1487 case ADDR_DIFF_VEC:
1488 if (! INSN_DELETED_P (insn))
1490 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
/* NULL_RTX as INSN: count label uses but set no JUMP_LABEL/notes.  */
1492 for (i = 0; i < XVECLEN (x, eltnum); i++)
1493 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1495 return;
1497 default:
1498 break;
/* Recurse into all other operands of X.  */
1501 fmt = GET_RTX_FORMAT (code);
1502 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1504 if (fmt[i] == 'e')
1505 mark_jump_label (XEXP (x, i), insn, in_mem);
1506 else if (fmt[i] == 'E')
1508 int j;
1509 for (j = 0; j < XVECLEN (x, i); j++)
1510 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1515 /* If all INSN does is set the pc, delete it,
1516 and delete the insn that set the condition codes for it
1517 if that's what the previous thing was. */
1519 void
1520 delete_jump (insn)
1521 rtx insn;
1523 rtx set = single_set (insn);
1525 if (set && GET_CODE (SET_DEST (set)) == PC)
1526 delete_computation (insn);
1529 /* Verify INSN is a BARRIER and delete it. */
1531 void
1532 delete_barrier (insn)
1533 rtx insn;
1535 if (GET_CODE (insn) != BARRIER)
1536 abort ();
1538 delete_insn (insn);
1541 /* Recursively delete prior insns that compute the value (used only by INSN
1542 which the caller is deleting) stored in the register mentioned by NOTE
1543 which is a REG_DEAD note associated with INSN. */
1545 static void
1546 delete_prior_computation (note, insn)
1547 rtx note;
1548 rtx insn;
1550 rtx our_prev;
1551 rtx reg = XEXP (note, 0);
/* Scan backwards over ordinary insns and calls only; the loop
   condition stops at labels, jumps, and barriers.  */
1553 for (our_prev = prev_nonnote_insn (insn);
1554 our_prev && (GET_CODE (our_prev) == INSN
1555 || GET_CODE (our_prev) == CALL_INSN);
1556 our_prev = prev_nonnote_insn (our_prev))
1558 rtx pat = PATTERN (our_prev);
1560 /* If we reach a CALL which is not calling a const function
1561 or the callee pops the arguments, then give up. */
1562 if (GET_CODE (our_prev) == CALL_INSN
1563 && (! CONST_OR_PURE_CALL_P (our_prev)
1564 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1565 break;
1567 /* If we reach a SEQUENCE, it is too complex to try to
1568 do anything with it, so give up. */
1569 if (GET_CODE (pat) == SEQUENCE)
1570 break;
1572 if (GET_CODE (pat) == USE
1573 && GET_CODE (XEXP (pat, 0)) == INSN)
1574 /* reorg creates USEs that look like this. We leave them
1575 alone because reorg needs them for its own purposes. */
1576 break;
/* Found the insn that sets REG.  Decide whether it can itself be
   deleted (all of its effects dead) or must merely be annotated.  */
1578 if (reg_set_p (reg, pat))
1580 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1581 break;
1583 if (GET_CODE (pat) == PARALLEL)
1585 /* If we find a SET of something else, we can't
1586 delete the insn. */
1588 int i;
1590 for (i = 0; i < XVECLEN (pat, 0); i++)
1592 rtx part = XVECEXP (pat, 0, i);
1594 if (GET_CODE (part) == SET
1595 && SET_DEST (part) != reg)
1596 break;
1599 if (i == XVECLEN (pat, 0))
1600 delete_computation (our_prev);
1602 else if (GET_CODE (pat) == SET
1603 && GET_CODE (SET_DEST (pat)) == REG)
/* Compare hard-register ranges: [dest_regno, dest_endregno) is the
   span written by the prior insn, [regno, endregno) the span dying
   here.  Pseudos always span exactly one "register".  */
1605 int dest_regno = REGNO (SET_DEST (pat));
1606 int dest_endregno
1607 = (dest_regno
1608 + (dest_regno < FIRST_PSEUDO_REGISTER
1609 ? HARD_REGNO_NREGS (dest_regno,
1610 GET_MODE (SET_DEST (pat))) : 1));
1611 int regno = REGNO (reg);
1612 int endregno
1613 = (regno
1614 + (regno < FIRST_PSEUDO_REGISTER
1615 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1617 if (dest_regno >= regno
1618 && dest_endregno <= endregno)
1619 delete_computation (our_prev);
1621 /* We may have a multi-word hard register and some, but not
1622 all, of the words of the register are needed in subsequent
1623 insns. Write REG_UNUSED notes for those parts that were not
1624 needed. */
1625 else if (dest_regno <= regno
1626 && dest_endregno >= endregno)
1628 int i;
1630 REG_NOTES (our_prev)
1631 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1632 REG_NOTES (our_prev));
1634 for (i = dest_regno; i < dest_endregno; i++)
1635 if (! find_regno_note (our_prev, REG_UNUSED, i))
1636 break;
/* Every word is now marked unused, so the whole insn is dead.  */
1638 if (i == dest_endregno)
1639 delete_computation (our_prev);
1643 break;
1646 /* If PAT references the register that dies here, it is an
1647 additional use. Hence any prior SET isn't dead. However, this
1648 insn becomes the new place for the REG_DEAD note. */
1649 if (reg_overlap_mentioned_p (reg, pat))
1651 XEXP (note, 1) = REG_NOTES (our_prev);
1652 REG_NOTES (our_prev) = note;
1653 break;
1658 /* Delete INSN and recursively delete insns that compute values used only
1659 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1660 If we are running before flow.c, we need do nothing since flow.c will
1661 delete dead code. We also can't know if the registers being used are
1662 dead or not at this point.
1664 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1665 nothing other than set a register that dies in this insn, we can delete
1666 that insn as well.
1668 On machines with CC0, if CC0 is used in this insn, we may be able to
1669 delete the insn that set it. */
1671 static void
1672 delete_computation (insn)
1673 rtx insn;
1675 rtx note, next;
1677 #ifdef HAVE_cc0
1678 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1680 rtx prev = prev_nonnote_insn (insn);
1681 /* We assume that at this stage
1682 CC's are always set explicitly
1683 and always immediately before the jump that
1684 will use them. So if the previous insn
1685 exists to set the CC's, delete it
1686 (unless it performs auto-increments, etc.). */
1687 if (prev && GET_CODE (prev) == INSN
1688 && sets_cc0_p (PATTERN (prev)))
/* sets_cc0_p > 0 means PREV does nothing but set cc0; it can go.  */
1690 if (sets_cc0_p (PATTERN (prev)) > 0
1691 && ! side_effects_p (PATTERN (prev)))
1692 delete_computation (prev);
1693 else
1694 /* Otherwise, show that cc0 won't be used. */
1695 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1696 cc0_rtx, REG_NOTES (prev));
1699 #endif
/* NEXT is captured first because delete_prior_computation may splice
   NOTE out of INSN's note list.  */
1701 for (note = REG_NOTES (insn); note; note = next)
1703 next = XEXP (note, 1);
1705 if (REG_NOTE_KIND (note) != REG_DEAD
1706 /* Verify that the REG_NOTE is legitimate. */
1707 || GET_CODE (XEXP (note, 0)) != REG)
1708 continue;
1710 delete_prior_computation (note, insn);
1713 delete_related_insns (insn);
1716 /* Delete insn INSN from the chain of insns and update label ref counts
1717 and delete insns now unreachable.
1719 Returns the first insn after INSN that was not deleted.
1721 Usage of this instruction is deprecated. Use delete_insn instead and
1722 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1725 delete_related_insns (insn)
1726 rtx insn;
1728 int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1729 rtx note;
1730 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1732 while (next && INSN_DELETED_P (next))
1733 next = NEXT_INSN (next);
1735 /* This insn is already deleted => return first following nondeleted. */
1736 if (INSN_DELETED_P (insn))
1737 return next;
1739 delete_insn (insn);
1741 /* If instruction is followed by a barrier,
1742 delete the barrier too. */
1744 if (next != 0 && GET_CODE (next) == BARRIER)
1745 delete_insn (next);
1747 /* If deleting a jump, decrement the count of the label,
1748 and delete the label if it is now unused. */
1750 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1752 rtx lab = JUMP_LABEL (insn), lab_next;
1754 if (LABEL_NUSES (lab) == 0)
1756 /* This can delete NEXT or PREV,
1757 either directly if NEXT is JUMP_LABEL (INSN),
1758 or indirectly through more levels of jumps. */
1759 delete_related_insns (lab);
1761 /* I feel a little doubtful about this loop,
1762 but I see no clean and sure alternative way
1763 to find the first insn after INSN that is not now deleted.
1764 I hope this works. */
1765 while (next && INSN_DELETED_P (next))
1766 next = NEXT_INSN (next);
1767 return next;
1769 else if ((lab_next = next_nonnote_insn (lab)) != NULL
1770 && GET_CODE (lab_next) == JUMP_INSN
1771 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1772 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1774 /* If we're deleting the tablejump, delete the dispatch table.
1775 We may not be able to kill the label immediately preceding
1776 just yet, as it might be referenced in code leading up to
1777 the tablejump. */
1778 delete_related_insns (lab_next);
1782 /* Likewise if we're deleting a dispatch table. */
1784 if (GET_CODE (insn) == JUMP_INSN
1785 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1786 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1788 rtx pat = PATTERN (insn);
/* ADDR_DIFF_VEC keeps its labels in operand 1, ADDR_VEC in operand 0.  */
1789 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1790 int len = XVECLEN (pat, diff_vec_p);
1792 for (i = 0; i < len; i++)
1793 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1794 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1795 while (next && INSN_DELETED_P (next))
1796 next = NEXT_INSN (next);
1797 return next;
1800 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1801 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1802 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1803 if (REG_NOTE_KIND (note) == REG_LABEL
1804 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1805 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1806 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1807 delete_related_insns (XEXP (note, 0));
/* Back up over deleted insns and notes to find the live predecessor.  */
1809 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1810 prev = PREV_INSN (prev);
1812 /* If INSN was a label and a dispatch table follows it,
1813 delete the dispatch table. The tablejump must have gone already.
1814 It isn't useful to fall through into a table. */
1816 if (was_code_label
1817 && NEXT_INSN (insn) != 0
1818 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1819 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1820 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1821 next = delete_related_insns (NEXT_INSN (insn));
1823 /* If INSN was a label, delete insns following it if now unreachable. */
1825 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1827 RTX_CODE code;
1828 while (next != 0
1829 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1830 || code == NOTE || code == BARRIER
1831 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1833 if (code == NOTE
1834 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1835 next = NEXT_INSN (next);
1836 /* Keep going past other deleted labels to delete what follows. */
1837 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1838 next = NEXT_INSN (next);
1839 else
1840 /* Note: if this deletes a jump, it can cause more
1841 deletion of unreachable code, after a different label.
1842 As long as the value from this recursive call is correct,
1843 this invocation functions correctly. */
1844 next = delete_related_insns (next);
1848 return next;
1851 /* Advance from INSN till reaching something not deleted
1852 then return that. May return INSN itself. */
1855 next_nondeleted_insn (insn)
1856 rtx insn;
1858 while (INSN_DELETED_P (insn))
1859 insn = NEXT_INSN (insn);
1860 return insn;
1863 /* Delete a range of insns from FROM to TO, inclusive.
1864 This is for the sake of peephole optimization, so assume
1865 that whatever these insns do will still be done by a new
1866 peephole insn that will replace them. */
1868 void
1869 delete_for_peephole (from, to)
1870 rtx from, to;
1872 rtx insn = from;
1874 while (1)
1876 rtx next = NEXT_INSN (insn);
1877 rtx prev = PREV_INSN (insn);
/* NOTEs are preserved: they are skipped rather than unlinked.  */
1879 if (GET_CODE (insn) != NOTE)
1881 INSN_DELETED_P (insn) = 1;
1883 /* Patch this insn out of the chain. */
1884 /* We don't do this all at once, because we
1885 must preserve all NOTEs. */
1886 if (prev)
1887 NEXT_INSN (prev) = next;
1889 if (next)
1890 PREV_INSN (next) = prev;
1893 if (insn == to)
1894 break;
1895 insn = next;
1898 /* Note that if TO is an unconditional jump
1899 we *do not* delete the BARRIER that follows,
1900 since the peephole that replaces this sequence
1901 is also an unconditional jump in that case. */
1904 /* We have determined that INSN is never reached, and are about to
1905 delete it. Print a warning if the user asked for one.
1907 To try to make this warning more useful, this should only be called
1908 once per basic block not reached, and it only warns when the basic
1909 block contains more than one line from the current function, and
1910 contains at least one operation. CSE and inlining can duplicate insns,
1911 so it's possible to get spurious warnings from this. */
1913 void
1914 never_reached_warning (avoided_insn)
1915 rtx avoided_insn;
1917 rtx insn;
1918 rtx a_line_note = NULL;
1919 int two_avoided_lines = 0;
1920 int contains_insn = 0;
1922 if (! warn_notreached)
1923 return;
1925 /* Scan forwards, looking at LINE_NUMBER notes, until
1926 we hit a LABEL or we run out of insns. */
1928 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
1930 if (GET_CODE (insn) == CODE_LABEL)
1931 break;
1932 else if (GET_CODE (insn) == NOTE /* A line number note? */
1933 && NOTE_LINE_NUMBER (insn) >= 0)
/* Remember the first line note; flag when a later one differs, i.e.
   the dead region spans at least two source lines.  */
1935 if (a_line_note == NULL)
1936 a_line_note = insn;
1937 else
1938 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1939 != NOTE_LINE_NUMBER (insn));
1941 else if (INSN_P (insn))
1942 contains_insn = 1;
1944 if (two_avoided_lines && contains_insn)
1945 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1946 NOTE_LINE_NUMBER (a_line_note),
1947 "will never be executed")
1950 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1951 NLABEL as a return. Accrue modifications into the change group. */
1953 static void
1954 redirect_exp_1 (loc, olabel, nlabel, insn)
1955 rtx *loc;
1956 rtx olabel, nlabel;
1957 rtx insn;
1959 rtx x = *loc;
1960 RTX_CODE code = GET_CODE (x);
1961 int i;
1962 const char *fmt;
/* LABEL_REF -> either a new LABEL_REF or a RETURN (when NLABEL is 0).
   All replacements are queued via validate_change with in_group=1;
   the caller decides whether to apply or cancel the group.  */
1964 if (code == LABEL_REF)
1966 if (XEXP (x, 0) == olabel)
1968 rtx n;
1969 if (nlabel)
1970 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1971 else
1972 n = gen_rtx_RETURN (VOIDmode);
1974 validate_change (insn, loc, n, 1);
1975 return;
/* RETURN -> LABEL_REF: the inverse redirection, used when retargeting
   a return insn at a real label.  */
1978 else if (code == RETURN && olabel == 0)
1980 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1981 if (loc == &PATTERN (insn))
1982 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1983 validate_change (insn, loc, x, 1);
1984 return;
1987 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1988 && GET_CODE (SET_SRC (x)) == LABEL_REF
1989 && XEXP (SET_SRC (x), 0) == olabel)
1991 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1992 return;
/* Recurse into all sub-expressions.  */
1995 fmt = GET_RTX_FORMAT (code);
1996 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1998 if (fmt[i] == 'e')
1999 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
2000 else if (fmt[i] == 'E')
2002 int j;
2003 for (j = 0; j < XVECLEN (x, i); j++)
2004 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2009 /* Similar, but apply the change group and report success or failure. */
2011 static int
2012 redirect_exp (olabel, nlabel, insn)
2013 rtx olabel, nlabel;
2014 rtx insn;
2016 rtx *loc;
2018 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2019 loc = &XVECEXP (PATTERN (insn), 0, 0);
2020 else
2021 loc = &PATTERN (insn);
2023 redirect_exp_1 (loc, olabel, nlabel, insn);
2024 if (num_validated_changes () == 0)
2025 return 0;
2027 return apply_change_group ();
2030 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2031 the modifications into the change group. Return false if we did
2032 not see how to do that. */
2035 redirect_jump_1 (jump, nlabel)
2036 rtx jump, nlabel;
2038 int ochanges = num_validated_changes ();
2039 rtx *loc;
2041 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2042 loc = &XVECEXP (PATTERN (jump), 0, 0);
2043 else
2044 loc = &PATTERN (jump);
2046 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2047 return num_validated_changes () > ochanges;
2050 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2051 jump target label is unused as a result, it and the code following
2052 it may be deleted.
2054 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2055 RETURN insn.
2057 The return value will be 1 if the change was made, 0 if it wasn't
2058 (this can only occur for NLABEL == 0). */
2061 redirect_jump (jump, nlabel, delete_unused)
2062 rtx jump, nlabel;
2063 int delete_unused;
2065 rtx olabel = JUMP_LABEL (jump);
2067 if (nlabel == olabel)
2068 return 1;
2070 if (! redirect_exp (olabel, nlabel, jump))
2071 return 0;
/* The pattern was rewritten; now keep the bookkeeping in step:
   JUMP_LABEL field and the labels' reference counts.  */
2073 JUMP_LABEL (jump) = nlabel;
2074 if (nlabel)
2075 ++LABEL_NUSES (nlabel);
2077 /* If we're eliding the jump over exception cleanups at the end of a
2078 function, move the function end note so that -Wreturn-type works. */
2079 if (olabel && nlabel
2080 && NEXT_INSN (olabel)
2081 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2082 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2083 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2085 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
2086 delete_related_insns (olabel);
2088 return 1;
2091 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2092 Accrue the modifications into the change group. */
2094 static void
2095 invert_exp_1 (insn)
2096 rtx insn;
2098 RTX_CODE code;
2099 rtx x = pc_set (insn);
2101 if (!x)
2102 abort ();
2103 x = SET_SRC (x);
2105 code = GET_CODE (x);
2107 if (code == IF_THEN_ELSE)
2109 rtx comp = XEXP (x, 0);
2110 rtx tem;
2111 enum rtx_code reversed_code;
2113 /* We can do this in two ways: The preferable way, which can only
2114 be done if this is not an integer comparison, is to reverse
2115 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2116 of the IF_THEN_ELSE. If we can't do either, fail. */
2118 reversed_code = reversed_comparison_code (comp, insn);
2120 if (reversed_code != UNKNOWN)
2122 validate_change (insn, &XEXP (x, 0),
2123 gen_rtx_fmt_ee (reversed_code,
2124 GET_MODE (comp), XEXP (comp, 0),
2125 XEXP (comp, 1)),
2127 return;
/* Fallback: swap the two arms of the IF_THEN_ELSE instead of
   reversing the comparison itself.  */
2130 tem = XEXP (x, 1);
2131 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2132 validate_change (insn, &XEXP (x, 2), tem, 1);
2134 else
2135 abort ();
2138 /* Invert the jump condition of conditional jump insn, INSN.
2140 Return 1 if we can do so, 0 if we cannot find a way to do so that
2141 matches a pattern. */
2143 static int
2144 invert_exp (insn)
2145 rtx insn;
2147 invert_exp_1 (insn);
2148 if (num_validated_changes () == 0)
2149 return 0;
2151 return apply_change_group ();
2154 /* Invert the condition of the jump JUMP, and make it jump to label
2155 NLABEL instead of where it jumps now. Accrue changes into the
2156 change group. Return false if we didn't see how to perform the
2157 inversion and redirection. */
2160 invert_jump_1 (jump, nlabel)
2161 rtx jump, nlabel;
2163 int ochanges;
2165 ochanges = num_validated_changes ();
2166 invert_exp_1 (jump);
2167 if (num_validated_changes () == ochanges)
2168 return 0;
2170 return redirect_jump_1 (jump, nlabel);
2173 /* Invert the condition of the jump JUMP, and make it jump to label
2174 NLABEL instead of where it jumps now. Return true if successful. */
2177 invert_jump (jump, nlabel, delete_unused)
2178 rtx jump, nlabel;
2179 int delete_unused;
2181 /* We have to either invert the condition and change the label or
2182 do neither. Either operation could fail. We first try to invert
2183 the jump. If that succeeds, we try changing the label. If that fails,
2184 we invert the jump back to what it was. */
2186 if (! invert_exp (jump))
2187 return 0;
2189 if (redirect_jump (jump, nlabel, delete_unused))
2191 invert_br_probabilities (jump);
2193 return 1;
2196 if (! invert_exp (jump))
2197 /* This should just be putting it back the way it was. */
2198 abort ();
2200 return 0;
2204 /* Like rtx_equal_p except that it considers two REGs as equal
2205 if they renumber to the same value and considers two commutative
2206 operations to be the same if the order of the operands has been
2207 reversed.
2209 ??? Addition is not commutative on the PA due to the weird implicit
2210 space register selection rules for memory addresses. Therefore, we
2211 don't consider a + b == b + a.
2213 We could/should make this test a little tighter. Possibly only
2214 disabling it on the PA via some backend macro or only disabling this
2215 case when the PLUS is inside a MEM. */
2218 rtx_renumbered_equal_p (x, y)
2219 rtx x, y;
2221 int i;
2222 RTX_CODE code = GET_CODE (x);
2223 const char *fmt;
2225 if (x == y)
2226 return 1;
/* Registers and subregs of registers are compared by the hard
   register numbers they were renumbered to, folding SUBREG offsets
   into the register number where possible.  */
2228 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2229 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2230 && GET_CODE (SUBREG_REG (y)) == REG)))
2232 int reg_x = -1, reg_y = -1;
2233 int byte_x = 0, byte_y = 0;
2235 if (GET_MODE (x) != GET_MODE (y))
2236 return 0;
2238 /* If we haven't done any renumbering, don't
2239 make any assumptions. */
2240 if (reg_renumber == 0)
2241 return rtx_equal_p (x, y);
2243 if (code == SUBREG)
2245 reg_x = REGNO (SUBREG_REG (x));
2246 byte_x = SUBREG_BYTE (x);
2248 if (reg_renumber[reg_x] >= 0)
2250 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2251 GET_MODE (SUBREG_REG (x)),
2252 byte_x,
2253 GET_MODE (x));
2254 byte_x = 0;
2257 else
2259 reg_x = REGNO (x);
2260 if (reg_renumber[reg_x] >= 0)
2261 reg_x = reg_renumber[reg_x];
2264 if (GET_CODE (y) == SUBREG)
2266 reg_y = REGNO (SUBREG_REG (y));
2267 byte_y = SUBREG_BYTE (y);
2269 if (reg_renumber[reg_y] >= 0)
2271 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2272 GET_MODE (SUBREG_REG (y)),
2273 byte_y,
2274 GET_MODE (y));
2275 byte_y = 0;
2278 else
2280 reg_y = REGNO (y);
2281 if (reg_renumber[reg_y] >= 0)
2282 reg_y = reg_renumber[reg_y];
2285 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2288 /* Now we have disposed of all the cases
2289 in which different rtx codes can match. */
2290 if (code != GET_CODE (y))
2291 return 0;
2293 switch (code)
2295 case PC:
2296 case CC0:
2297 case ADDR_VEC:
2298 case ADDR_DIFF_VEC:
2299 return 0;
2301 case CONST_INT:
2302 return INTVAL (x) == INTVAL (y);
2304 case LABEL_REF:
2305 /* We can't assume nonlocal labels have their following insns yet. */
2306 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2307 return XEXP (x, 0) == XEXP (y, 0);
2309 /* Two label-refs are equivalent if they point at labels
2310 in the same position in the instruction stream. */
2311 return (next_real_insn (XEXP (x, 0))
2312 == next_real_insn (XEXP (y, 0)));
2314 case SYMBOL_REF:
2315 return XSTR (x, 0) == XSTR (y, 0);
2317 case CODE_LABEL:
2318 /* If we didn't match EQ equality above, they aren't the same. */
2319 return 0;
2321 default:
2322 break;
2325 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2327 if (GET_MODE (x) != GET_MODE (y))
2328 return 0;
2330 /* For commutative operations, the RTX match if the operand match in any
2331 order. Also handle the simple binary and unary cases without a loop.
2333 ??? Don't consider PLUS a commutative operator; see comments above. */
2334 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2335 && code != PLUS)
2336 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2337 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2338 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2339 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2340 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2341 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2342 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2343 else if (GET_RTX_CLASS (code) == '1')
2344 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2346 /* Compare the elements. If any pair of corresponding elements
2347 fail to match, return 0 for the whole things. */
2349 fmt = GET_RTX_FORMAT (code);
2350 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2352 int j;
2353 switch (fmt[i])
2355 case 'w':
2356 if (XWINT (x, i) != XWINT (y, i))
2357 return 0;
2358 break;
2360 case 'i':
2361 if (XINT (x, i) != XINT (y, i))
2362 return 0;
2363 break;
2365 case 't':
2366 if (XTREE (x, i) != XTREE (y, i))
2367 return 0;
2368 break;
2370 case 's':
2371 if (strcmp (XSTR (x, i), XSTR (y, i)))
2372 return 0;
2373 break;
2375 case 'e':
2376 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2377 return 0;
2378 break;
2380 case 'u':
2381 if (XEXP (x, i) != XEXP (y, i))
2382 return 0;
2383 /* fall through. */
2384 case '0':
2385 break;
2387 case 'E':
2388 if (XVECLEN (x, i) != XVECLEN (y, i))
2389 return 0;
2390 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2391 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2392 return 0;
2393 break;
2395 default:
2396 abort ();
2399 return 1;
2402 /* If X is a hard register or equivalent to one or a subregister of one,
2403 return the hard register number. If X is a pseudo register that was not
2404 assigned a hard register, return the pseudo register number. Otherwise,
2405 return -1. Any rtx is valid for X. */
2408 true_regnum (x)
2409 rtx x;
2411 if (GET_CODE (x) == REG)
2413 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2414 return reg_renumber[REGNO (x)];
2415 return REGNO (x);
2417 if (GET_CODE (x) == SUBREG)
2419 int base = true_regnum (SUBREG_REG (x));
2420 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2421 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2422 GET_MODE (SUBREG_REG (x)),
2423 SUBREG_BYTE (x), GET_MODE (x));
2425 return -1;
2428 /* Optimize code of the form:
2430 for (x = a[i]; x; ...)
2432 for (x = a[i]; x; ...)
2434 foo:
2436 Loop optimize will change the above code into
2438 if (x = a[i])
2439 for (;;)
2440 { ...; if (! (x = ...)) break; }
2441 if (x = a[i])
2442 for (;;)
2443 { ...; if (! (x = ...)) break; }
2444 foo:
2446 In general, if the first test fails, the program can branch
2447 directly to `foo' and skip the second try which is doomed to fail.
2448 We run this after loop optimization and before flow analysis. */
2450 /* When comparing the insn patterns, we track the fact that different
2451 pseudo-register numbers may have been used in each computation.
2452 The following array stores an equivalence -- same_regs[I] == J means
2453 that pseudo register I was used in the first set of tests in a context
2454 where J was used in the second set. We also count the number of such
2455 pending equivalences. If nonzero, the expressions really aren't the
2456 same. */
2458 static int *same_regs;
/* Count of pending entries in same_regs.  */
2460 static int num_same_regs;
2462 /* Track any registers modified between the target of the first jump and
2463 the second jump. They never compare equal. */
2465 static char *modified_regs;
2467 /* Record if memory was modified. */
2469 static int modified_mem;
2471 /* Called via note_stores on each insn between the target of the first
2472 branch and the second branch. It marks any changed registers. */
2474 static void
2475 mark_modified_reg (dest, x, data)
2476 rtx dest;
2477 rtx x;
2478 void *data ATTRIBUTE_UNUSED;
2480 int regno;
2481 unsigned int i;
2483 if (GET_CODE (dest) == SUBREG)
2484 dest = SUBREG_REG (dest);
2486 if (GET_CODE (dest) == MEM)
2487 modified_mem = 1;
2489 if (GET_CODE (dest) != REG)
2490 return;
2492 regno = REGNO (dest);
2493 if (regno >= FIRST_PSEUDO_REGISTER)
2494 modified_regs[regno] = 1;
2495 /* Don't consider a hard condition code register as modified,
2496 if it is only being set. thread_jumps will check if it is set
2497 to the same value. */
2498 else if (GET_MODE_CLASS (GET_MODE (dest)) != MODE_CC
2499 || GET_CODE (x) != SET
2500 || ! rtx_equal_p (dest, SET_DEST (x))
2501 || HARD_REGNO_NREGS (regno, GET_MODE (dest)) != 1)
2502 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
2503 modified_regs[regno + i] = 1;
/* Thread conditional jumps: if a conditional branch B1 targets a label
   followed (after equivalent setup code) by a second conditional branch
   B2 testing a dominated condition, redirect B1 past B2's redundant test.

   F is the first insn in the chain of insns.  MAX_REG is the number of
   registers, used to size the per-register tracking tables.
   FLAG_BEFORE_LOOP is nonzero when this runs before loop optimization,
   in which case we must not redirect a jump to a loop-beginning label
   (reusing a loop label would disable loop optimization for that loop).  */

void
thread_jumps (f, max_reg, flag_before_loop)
     rtx f;
     int max_reg;
     int flag_before_loop;
{
  /* Basic algorithm is to find a conditional branch,
     the label it may branch to, and the branch after
     that label.  If the two branches test the same condition,
     walk back from both branch paths until the insn patterns
     differ, or code labels are hit.  If we make it back to
     the target of the first branch, then we know that the first branch
     will either always succeed or always fail depending on the relative
     senses of the two branches.  So adjust the first branch accordingly
     in this case.  */

  rtx label, b1, b2, t1, t2;
  enum rtx_code code1, code2;
  rtx b1op0, b1op1, b2op0, b2op1;
  int changed = 1;
  int i;
  int *all_reset;
  enum rtx_code reversed_code1, reversed_code2;

  /* Allocate register tables and quick-reset table.  all_reset is a
     pre-built "-1 everywhere" image memcpy'd into same_regs per branch.  */
  modified_regs = (char *) xmalloc (max_reg * sizeof (char));
  same_regs = (int *) xmalloc (max_reg * sizeof (int));
  all_reset = (int *) xmalloc (max_reg * sizeof (int));
  for (i = 0; i < max_reg; i++)
    all_reset[i] = -1;

  /* Iterate to a fixed point: a successful redirection can expose
     further threading opportunities.  */
  while (changed)
    {
      changed = 0;

      for (b1 = f; b1; b1 = NEXT_INSN (b1))
	{
	  rtx set;
	  rtx set2;

	  /* Get to a candidate branch insn.  */
	  if (GET_CODE (b1) != JUMP_INSN
	      || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
	    continue;

	  /* Reset the per-candidate tracking state.  */
	  memset (modified_regs, 0, max_reg * sizeof (char));
	  modified_mem = 0;

	  memcpy (same_regs, all_reset, max_reg * sizeof (int));
	  num_same_regs = 0;

	  label = JUMP_LABEL (b1);

	  /* Look for a branch after the target.  Record any registers and
	     memory modified between the target and the branch.  Stop when we
	     get to a label since we can't know what was changed there.  */
	  for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
	    {
	      if (GET_CODE (b2) == CODE_LABEL)
		break;

	      else if (GET_CODE (b2) == JUMP_INSN)
		{
		  /* If this is an unconditional jump and is the only use of
		     its target label, we can follow it.  */
		  if (any_uncondjump_p (b2)
		      && onlyjump_p (b2)
		      && JUMP_LABEL (b2) != 0
		      && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
		    {
		      b2 = JUMP_LABEL (b2);
		      continue;
		    }
		  else
		    break;
		}

	      if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
		continue;

	      /* A call clobbers memory and every call-used, non-fixed
		 register other than the pointer registers.  */
	      if (GET_CODE (b2) == CALL_INSN)
		{
		  modified_mem = 1;
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    if (call_used_regs[i] && ! fixed_regs[i]
			&& i != STACK_POINTER_REGNUM
			&& i != FRAME_POINTER_REGNUM
			&& i != HARD_FRAME_POINTER_REGNUM
			&& i != ARG_POINTER_REGNUM)
		      modified_regs[i] = 1;
		}

	      note_stores (PATTERN (b2), mark_modified_reg, NULL);
	    }

	  /* Check the next candidate branch insn from the label
	     of the first.  */
	  if (b2 == 0
	      || GET_CODE (b2) != JUMP_INSN
	      || b2 == b1
	      || !any_condjump_p (b2)
	      || !onlyjump_p (b2))
	    continue;
	  set = pc_set (b1);
	  set2 = pc_set (b2);

	  /* Get the comparison codes and operands, reversing the
	     codes if appropriate.  If we don't have comparison codes,
	     we can't do anything.  A branch whose taken arm is the
	     fall-through (pc in arm 0) tests the reverse of its code.  */
	  b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
	  b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
	  code1 = GET_CODE (XEXP (SET_SRC (set), 0));
	  reversed_code1 = code1;
	  if (XEXP (SET_SRC (set), 1) == pc_rtx)
	    code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
	  else
	    reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0),
						       b1);

	  b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
	  b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
	  code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
	  reversed_code2 = code2;
	  if (XEXP (SET_SRC (set2), 1) == pc_rtx)
	    code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
	  else
	    reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0),
						       b2);

	  /* If they test the same things and knowing that B1 branches
	     tells us whether or not B2 branches, check if we
	     can thread the branch.  */
	  if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
	      && rtx_equal_for_thread_p (b1op1, b2op1, b2)
	      && (comparison_dominates_p (code1, code2)
		  || comparison_dominates_p (code1, reversed_code2)))
	    {
	      /* Walk both paths backwards in lock-step, insn by insn.  */
	      t1 = prev_nonnote_insn (b1);
	      t2 = prev_nonnote_insn (b2);

	      while (t1 != 0 && t2 != 0)
		{
		  if (t2 == label)
		    {
		      /* We have reached the target of the first branch.
			 If there are no pending register equivalents,
			 we know that this branch will either always
			 succeed (if the senses of the two branches are
			 the same) or always fail (if not).  */
		      rtx new_label;

		      if (num_same_regs != 0)
			break;

		      if (comparison_dominates_p (code1, code2))
			new_label = JUMP_LABEL (b2);
		      else
			new_label = get_label_after (b2);

		      if (JUMP_LABEL (b1) != new_label)
			{
			  rtx prev = PREV_INSN (new_label);

			  if (flag_before_loop
			      && GET_CODE (prev) == NOTE
			      && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
			    {
			      /* Don't thread to the loop label.  If a loop
				 label is reused, loop optimization will
				 be disabled for that loop.  */
			      new_label = gen_label_rtx ();
			      emit_label_after (new_label, PREV_INSN (prev));
			    }
			  changed |= redirect_jump (b1, new_label, 1);
			}
		      break;
		    }

		  /* If either of these is not a normal insn (it might be
		     a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail.  (NOTEs
		     have already been skipped above.)  Similarly, fail
		     if the insns are different.  */
		  if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
		      || recog_memoized (t1) != recog_memoized (t2)
		      || ! rtx_equal_for_thread_p (PATTERN (t1),
						   PATTERN (t2), t2))
		    break;

		  t1 = prev_nonnote_insn (t1);
		  t2 = prev_nonnote_insn (t2);
		}
	    }
	}
    }

  /* Clean up.  */
  free (modified_regs);
  free (same_regs);
  free (all_reset);
}
/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.

   Returns 1 if X and Y are equivalent for jump threading, else 0.
   NOTE: not a pure predicate -- it records pseudo-register pairings in
   the file-static same_regs/num_same_regs tables as a side effect, and
   consults modified_regs/modified_mem filled in by thread_jumps.  */

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  int i;
  int j;
  enum rtx_code code;
  const char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For floating-point, consider everything unequal.  This is a bit
     pessimistic, but this pass would only rarely do anything for FP
     anyway.  */
  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	     && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
	    || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
		&& rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	    && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

  /* Handle special-cases first.  */
  switch (code)
    {
    case REG:
      /* Identical unmodified registers trivially match.  */
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
	return 1;

      /* If neither is user variable or hard register, check for possible
	 equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  || REGNO (y) < FIRST_PSEUDO_REGISTER)
	return 0;

      if (same_regs[REGNO (x)] == -1)
	{
	  /* Record a new pairing X <-> Y.  */
	  same_regs[REGNO (x)] = REGNO (y);
	  num_same_regs++;

	  /* If this is the first time we are seeing a register on the `Y'
	     side, see if it is the last use.  If not, we can't thread the
	     jump, so mark it as not equivalent.  */
	  if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
	    return 0;

	  return 1;
	}
      else
	/* A pairing already exists; it must be with this same Y.  */
	return (same_regs[REGNO (x)] == (int) REGNO (y));

      break;

    case MEM:
      /* If memory modified or either volatile, not equivalent.
	 Else, check address.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
	 Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
	  && GET_CODE (SET_DEST (y)) == REG)
	{
	  if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
	    {
	      same_regs[REGNO (SET_DEST (x))] = -1;
	      num_same_regs--;
	    }
	  else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
	    return 0;
	}
      else
	{
	  if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	    return 0;
	}

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  if (x == y)
    return 1;

  /* Generic structural walk, driven by this code's format string.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  /* Wide integer operand.  */
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  /* Plain integer operand.  */
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j), yinsn) == 0)
	      return 0;
	  break;

	case 'e':
	  /* Sub-expression: recurse.  */
	  if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  /* String operand: compare contents, not pointers.  */
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}