* parse.y: Use VA_OPEN/VA_CLOSE/VA_FIXEDARG throughout.
[official-gcc.git] / gcc / jump.c
blobe8a859423edca21b6368ba60ecc2c1f1d73736ea
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it basically contains a set of utility functions to
24 operate on jumps.
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
37 #include "config.h"
38 #include "system.h"
39 #include "rtl.h"
40 #include "tm_p.h"
41 #include "flags.h"
42 #include "hard-reg-set.h"
43 #include "regs.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
46 #include "recog.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "real.h"
50 #include "except.h"
51 #include "toplev.h"
52 #include "reload.h"
53 #include "predict.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info PARAMS ((rtx));
63 static void mark_all_labels PARAMS ((rtx));
64 static int duplicate_loop_exit_test PARAMS ((rtx));
65 static void delete_computation PARAMS ((rtx));
66 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
67 static int redirect_exp PARAMS ((rtx, rtx, rtx));
68 static void invert_exp_1 PARAMS ((rtx));
69 static int invert_exp PARAMS ((rtx));
70 static int returnjump_p_1 PARAMS ((rtx *, void *));
71 static void delete_prior_computation PARAMS ((rtx, rtx));
72 static void mark_modified_reg PARAMS ((rtx, rtx, void *));
74 /* Alternate entry into the jump optimizer. This entry point only rebuilds
75 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
76 instructions. */
77 void
78 rebuild_jump_labels (f)
79 rtx f;
81 register rtx insn;
82 int max_uid = 0;
/* init_label_info resets LABEL_NUSES and JUMP_LABEL and returns the
   largest INSN_UID seen.  NOTE(review): max_uid is not referenced again
   in the visible code -- confirm whether it is still needed.  */
84 max_uid = init_label_info (f) + 1;
/* Set JUMP_LABEL on every jump and recount label uses (see
   mark_all_labels below).  */
86 mark_all_labels (f);
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
92 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
93 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
94 LABEL_NUSES (XEXP (insn, 0))++;
96 /* Keep track of labels used for marking handlers for exception
97 regions; they cannot usually be deleted. */
99 for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
100 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
101 LABEL_NUSES (XEXP (insn, 0))++;
104 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
105 non-fallthru insn. This is not generally true, as multiple barriers
106 may have crept in, or the BARRIER may be separated from the last
107 real insn by one or more NOTEs.
109 This simple pass moves barriers and removes duplicates so that the
110 old code is happy. */
112 void
113 cleanup_barriers ()
115 rtx insn, next, prev;
116 for (insn = get_insns (); insn; insn = next)
118 next = NEXT_INSN (insn);
119 if (GET_CODE (insn) == BARRIER)
121 prev = prev_nonnote_insn (insn);
122 if (GET_CODE (prev) == BARRIER)
123 delete_barrier (insn);
124 else if (prev != PREV_INSN (insn))
125 reorder_insns (insn, insn, prev);
130 void
131 copy_loop_headers (f)
132 rtx f;
134 register rtx insn, next;
135 /* Now iterate optimizing jumps until nothing changes over one pass. */
136 for (insn = f; insn; insn = next)
138 rtx temp, temp1;
140 next = NEXT_INSN (insn);
142 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
143 jump. Try to optimize by duplicating the loop exit test if so.
144 This is only safe immediately after regscan, because it uses
145 the values of regno_first_uid and regno_last_uid. */
146 if (GET_CODE (insn) == NOTE
147 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
148 && (temp1 = next_nonnote_insn (insn)) != 0
149 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
151 temp = PREV_INSN (insn);
152 if (duplicate_loop_exit_test (insn))
154 next = NEXT_INSN (temp);
160 void
161 purge_line_number_notes (f)
162 rtx f;
164 rtx last_note = 0;
165 rtx insn;
166 /* Delete extraneous line number notes.
167 Note that two consecutive notes for different lines are not really
168 extraneous. There should be some indication where that line belonged,
169 even if it became empty. */
171 for (insn = f; insn; insn = NEXT_INSN (insn))
172 if (GET_CODE (insn) == NOTE)
174 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
175 /* Any previous line note was for the prologue; gdb wants a new
176 note after the prologue even if it is for the same line. */
177 last_note = NULL_RTX;
178 else if (NOTE_LINE_NUMBER (insn) >= 0)
180 /* Delete this note if it is identical to previous note. */
181 if (last_note
182 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
183 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
185 delete_insn (insn);
186 continue;
189 last_note = insn;
194 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
195 notes whose labels don't occur in the insn any more. Returns the
196 largest INSN_UID found. */
197 static int
198 init_label_info (f)
199 rtx f;
201 int largest_uid = 0;
202 rtx insn;
204 for (insn = f; insn; insn = NEXT_INSN (insn))
206 if (GET_CODE (insn) == CODE_LABEL)
207 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
208 else if (GET_CODE (insn) == JUMP_INSN)
209 JUMP_LABEL (insn) = 0;
210 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
212 rtx note, next;
214 for (note = REG_NOTES (insn); note; note = next)
216 next = XEXP (note, 1);
217 if (REG_NOTE_KIND (note) == REG_LABEL
218 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
219 remove_note (insn, note);
222 if (INSN_UID (insn) > largest_uid)
223 largest_uid = INSN_UID (insn);
226 return largest_uid;
229 /* Mark the label each jump jumps to.
230 Combine consecutive labels, and count uses of labels. */
232 static void
233 mark_all_labels (f)
234 rtx f;
236 rtx insn;
238 for (insn = f; insn; insn = NEXT_INSN (insn))
239 if (INSN_P (insn))
241 if (GET_CODE (insn) == CALL_INSN
242 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
244 mark_all_labels (XEXP (PATTERN (insn), 0));
245 mark_all_labels (XEXP (PATTERN (insn), 1));
246 mark_all_labels (XEXP (PATTERN (insn), 2));
248 /* Canonicalize the tail recursion label attached to the
249 CALL_PLACEHOLDER insn. */
250 if (XEXP (PATTERN (insn), 3))
252 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
253 XEXP (PATTERN (insn), 3));
254 mark_jump_label (label_ref, insn, 0);
255 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
258 continue;
261 mark_jump_label (PATTERN (insn), insn, 0);
262 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
264 /* When we know the LABEL_REF contained in a REG used in
265 an indirect jump, we'll have a REG_LABEL note so that
266 flow can tell where it's going. */
267 if (JUMP_LABEL (insn) == 0)
269 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
270 if (label_note)
272 /* But a LABEL_REF around the REG_LABEL note, so
273 that we can canonicalize it. */
274 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
275 XEXP (label_note, 0));
277 mark_jump_label (label_ref, insn, 0);
278 XEXP (label_note, 0) = XEXP (label_ref, 0);
279 JUMP_LABEL (insn) = XEXP (label_note, 0);
286 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
287 jump. Assume that this unconditional jump is to the exit test code. If
288 the code is sufficiently simple, make a copy of it before INSN,
289 followed by a jump to the exit of the loop. Then delete the unconditional
290 jump after INSN.
292 Return 1 if we made the change, else 0.
294 This is only safe immediately after a regscan pass because it uses the
295 values of regno_first_uid and regno_last_uid. */
297 static int
298 duplicate_loop_exit_test (loop_start)
299 rtx loop_start;
301 rtx insn, set, reg, p, link;
302 rtx copy = 0, first_copy = 0;
303 int num_insns = 0;
/* EXITCODE is the first insn of the exit-test sequence: the insn after
   the label targeted by the unconditional jump that follows LOOP_START.  */
304 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
305 rtx lastexit;
306 int max_reg = max_reg_num ();
307 rtx *reg_map = 0;
308 rtx loop_pre_header_label;
310 /* Scan the exit code. We do not perform this optimization if any insn:
312 is a CALL_INSN
313 is a CODE_LABEL
314 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
315 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
316 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
317 is not valid.
319 We also do not do this if we find an insn with ASM_OPERANDS. While
320 this restriction should not be necessary, copying an insn with
321 ASM_OPERANDS can confuse asm_noperands in some cases.
323 Also, don't do this if the exit code is more than 20 insns. */
325 for (insn = exitcode;
326 insn
327 && ! (GET_CODE (insn) == NOTE
328 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
329 insn = NEXT_INSN (insn))
331 switch (GET_CODE (insn))
333 case CODE_LABEL:
334 case CALL_INSN:
335 return 0;
336 case NOTE:
337 /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
338 a jump immediately after the loop start that branches outside
339 the loop but within an outer loop, near the exit test.
340 If we copied this exit test and created a phony
341 NOTE_INSN_LOOP_VTOP, this could make instructions immediately
342 before the exit test look like these could be safely moved
343 out of the loop even if they actually may be never executed.
344 This can be avoided by checking here for NOTE_INSN_LOOP_CONT. */
346 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
347 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
348 return 0;
350 if (optimize < 2
351 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
352 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
353 /* If we were to duplicate this code, we would not move
354 the BLOCK notes, and so debugging the moved code would
355 be difficult. Thus, we only move the code with -O2 or
356 higher. */
357 return 0;
359 break;
360 case JUMP_INSN:
361 case INSN:
362 /* The code below would grossly mishandle REG_WAS_0 notes,
363 so get rid of them here. */
364 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
365 remove_note (insn, p);
366 if (++num_insns > 20
367 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
368 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
369 return 0;
370 break;
371 default:
372 break;
376 /* Unless INSN is zero, we can do the optimization. */
377 if (insn == 0)
378 return 0;
380 lastexit = insn;
382 /* See if any insn sets a register only used in the loop exit code and
383 not a user variable. If so, replace it with a new register. */
384 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
385 if (GET_CODE (insn) == INSN
386 && (set = single_set (insn)) != 0
387 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
388 || (GET_CODE (reg) == SUBREG
389 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
390 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
391 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
/* REG's first use is here; see whether its last use is also inside the
   exit code.  If so the register is local to the exit test.  */
393 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
394 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
395 break;
397 if (p != lastexit)
399 /* We can do the replacement. Allocate reg_map if this is the
400 first replacement we found. */
401 if (reg_map == 0)
402 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
404 REG_LOOP_TEST_P (reg) = 1;
406 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
/* Label for the loop pre-header; conditional jumps that loop back are
   redirected to it below, so the duplicated test stays the sole entry.  */
409 loop_pre_header_label = gen_label_rtx ();
411 /* Now copy each insn. */
412 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
414 switch (GET_CODE (insn))
416 case BARRIER:
417 copy = emit_barrier_before (loop_start);
418 break;
419 case NOTE:
420 /* Only copy line-number notes. */
421 if (NOTE_LINE_NUMBER (insn) >= 0)
423 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
424 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
426 break;
428 case INSN:
429 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
430 if (reg_map)
431 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
433 mark_jump_label (PATTERN (copy), copy, 0);
435 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
436 make them. */
437 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
438 if (REG_NOTE_KIND (link) != REG_LABEL)
440 if (GET_CODE (link) == EXPR_LIST)
441 REG_NOTES (copy)
442 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
443 XEXP (link, 0),
444 REG_NOTES (copy)));
445 else
446 REG_NOTES (copy)
447 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
448 XEXP (link, 0),
449 REG_NOTES (copy)));
452 if (reg_map && REG_NOTES (copy))
453 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
454 break;
456 case JUMP_INSN:
457 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
458 loop_start);
459 if (reg_map)
460 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
461 mark_jump_label (PATTERN (copy), copy, 0);
462 if (REG_NOTES (insn))
464 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
465 if (reg_map)
466 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
469 /* Predict conditional jumps that keep the loop looping as taken.
470 Other jumps are probably exit conditions, so predict
471 them as untaken. */
472 if (any_condjump_p (copy))
474 rtx label = JUMP_LABEL (copy);
475 if (label)
477 /* The jump_insn after loop_start should be followed
478 by barrier and loopback label. */
479 if (prev_nonnote_insn (label)
480 && (prev_nonnote_insn (prev_nonnote_insn (label))
481 == next_nonnote_insn (loop_start)))
483 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
484 /* To keep pre-header, we need to redirect all loop
485 entrances before the LOOP_BEG note. */
486 redirect_jump (copy, loop_pre_header_label, 0);
488 else
489 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
492 break;
494 default:
495 abort ();
498 /* Record the first insn we copied. We need it so that we can
499 scan the copied insns for new pseudo registers. */
500 if (! first_copy)
501 first_copy = copy;
504 /* Now clean up by emitting a jump to the end label and deleting the jump
505 at the start of the loop. */
506 if (! copy || GET_CODE (copy) != BARRIER)
508 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
509 loop_start);
511 /* Record the first insn we copied. We need it so that we can
512 scan the copied insns for new pseudo registers. This may not
513 be strictly necessary since we should have copied at least one
514 insn above. But I am going to be safe. */
515 if (! first_copy)
516 first_copy = copy;
518 mark_jump_label (PATTERN (copy), copy, 0);
519 emit_barrier_before (loop_start);
522 emit_label_before (loop_pre_header_label, loop_start);
524 /* Now scan from the first insn we copied to the last insn we copied
525 (copy) for new pseudo registers. Do this after the code to jump to
526 the end label since that might create a new pseudo too. */
527 reg_scan_update (first_copy, copy, max_reg);
529 /* Mark the exit code as the virtual top of the converted loop. */
530 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
/* Delete the now-redundant unconditional jump to the exit test.  */
532 delete_insn (next_nonnote_insn (loop_start));
534 /* Clean up. */
535 if (reg_map)
536 free (reg_map);
538 return 1;
541 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
542 notes between START and END out before START. START and END may be such
543 notes. Returns the values of the new starting and ending insns, which
544 may be different if the original ones were such notes. */
546 void
547 squeeze_notes (startp, endp)
548 rtx* startp;
549 rtx* endp;
551 rtx start = *startp;
552 rtx end = *endp;
554 rtx insn;
555 rtx next;
/* LAST tracks the last insn left in place; it becomes the new END.  */
556 rtx last = NULL;
557 rtx past_end = NEXT_INSN (end);
559 for (insn = start; insn != past_end; insn = next)
561 next = NEXT_INSN (insn);
562 if (GET_CODE (insn) == NOTE
563 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
564 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
565 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
566 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
567 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
568 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
570 if (insn == start)
571 start = next;
572 else
/* Unlink INSN and splice it back in just before START.  The
   assignment order matters: INSN's new links are set first, then
   its new neighbors are pointed at it, then its old neighbors are
   joined to each other.  */
574 rtx prev = PREV_INSN (insn);
575 PREV_INSN (insn) = PREV_INSN (start);
576 NEXT_INSN (insn) = start;
577 NEXT_INSN (PREV_INSN (insn)) = insn;
578 PREV_INSN (NEXT_INSN (insn)) = insn;
579 NEXT_INSN (prev) = next;
580 PREV_INSN (next) = prev;
583 else
584 last = insn;
587 /* There were no real instructions, and we can't represent an empty
588 range. Die. */
589 if (start == past_end)
590 abort ();
592 end = last;
594 *startp = start;
595 *endp = end;
598 /* Return the label before INSN, or put a new label there. */
601 get_label_before (insn)
602 rtx insn;
604 rtx label;
606 /* Find an existing label at this point
607 or make a new one if there is none. */
608 label = prev_nonnote_insn (insn);
610 if (label == 0 || GET_CODE (label) != CODE_LABEL)
612 rtx prev = PREV_INSN (insn);
614 label = gen_label_rtx ();
615 emit_label_after (label, prev);
616 LABEL_NUSES (label) = 0;
618 return label;
621 /* Return the label after INSN, or put a new label there. */
624 get_label_after (insn)
625 rtx insn;
627 rtx label;
629 /* Find an existing label at this point
630 or make a new one if there is none. */
631 label = next_nonnote_insn (insn);
633 if (label == 0 || GET_CODE (label) != CODE_LABEL)
635 label = gen_label_rtx ();
636 emit_label_after (label, insn);
637 LABEL_NUSES (label) = 0;
639 return label;
642 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
643 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
644 UNKNOWN may be returned in case of a CC_MODE compare when we don't
645 know whether its source is a floating point or an integer comparison. The
646 machine description should define the REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
647 to help this function avoid overhead in these cases. */
648 enum rtx_code
649 reversed_comparison_code_parts (code, arg0, arg1, insn)
650 rtx insn, arg0, arg1;
651 enum rtx_code code;
653 enum machine_mode mode;
655 /* If this is not actually a comparison, we can't reverse it. */
656 if (GET_RTX_CLASS (code) != '<')
657 return UNKNOWN;
659 mode = GET_MODE (arg0);
660 if (mode == VOIDmode)
661 mode = GET_MODE (arg1);
663 /* First see if the machine description supplies a way to reverse the
664 comparison. Give it priority over everything else to allow the
665 machine description to do tricks. */
666 #ifdef REVERSIBLE_CC_MODE
667 if (GET_MODE_CLASS (mode) == MODE_CC
668 && REVERSIBLE_CC_MODE (mode))
670 #ifdef REVERSE_CONDITION
671 return REVERSE_CONDITION (code, mode);
672 #endif
673 return reverse_condition (code);
675 #endif
677 /* Try a few special cases based on the comparison code. */
678 switch (code)
680 case GEU:
681 case GTU:
682 case LEU:
683 case LTU:
684 case NE:
685 case EQ:
686 /* It is always safe to reverse EQ and NE, even for the floating
687 point. Similarly the unsigned comparisons are never used for
688 floating point so we can reverse them in the default way. */
689 return reverse_condition (code);
690 case ORDERED:
691 case UNORDERED:
692 case LTGT:
693 case UNEQ:
694 /* In case we already see unordered comparison, we can be sure to
695 be dealing with floating point so we don't need any more tests. */
696 return reverse_condition_maybe_unordered (code);
697 case UNLT:
698 case UNLE:
699 case UNGT:
700 case UNGE:
701 /* We don't have safe way to reverse these yet. */
702 return UNKNOWN;
703 default:
704 break;
707 /* In case we give up IEEE compatibility, all comparisons are reversible. */
708 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
709 || flag_unsafe_math_optimizations)
710 return reverse_condition (code);
/* Mode is CC (or cc0): the comparison's real semantics live in an
   earlier flags-setting insn, so search backwards for it.  */
712 if (GET_MODE_CLASS (mode) == MODE_CC
713 #ifdef HAVE_cc0
714 || arg0 == cc0_rtx
715 #endif
718 rtx prev;
719 /* Try to search for the comparison to determine the real mode.
720 This code is expensive, but with sane machine description it
721 will be never used, since REVERSIBLE_CC_MODE will return true
722 in all cases. */
723 if (! insn)
724 return UNKNOWN;
/* Walk backwards, stopping at a CODE_LABEL since control flow merges
   there and the search would no longer be sound.  */
726 for (prev = prev_nonnote_insn (insn);
727 prev != 0 && GET_CODE (prev) != CODE_LABEL;
728 prev = prev_nonnote_insn (prev))
730 rtx set = set_of (arg0, prev);
731 if (set && GET_CODE (set) == SET
732 && rtx_equal_p (SET_DEST (set), arg0))
734 rtx src = SET_SRC (set);
736 if (GET_CODE (src) == COMPARE)
738 rtx comparison = src;
739 arg0 = XEXP (src, 0);
740 mode = GET_MODE (arg0);
741 if (mode == VOIDmode)
742 mode = GET_MODE (XEXP (comparison, 1));
743 break;
745 /* We can get past reg-reg moves. This may be useful for the model
746 of i387 comparisons that first move flag registers around. */
747 if (REG_P (src))
749 arg0 = src;
750 continue;
753 /* If the register is clobbered in some way we do not understand,
754 give up. */
755 if (set)
756 return UNKNOWN;
760 /* An integer condition. */
761 if (GET_CODE (arg0) == CONST_INT
762 || (GET_MODE (arg0) != VOIDmode
763 && GET_MODE_CLASS (mode) != MODE_CC
764 && ! FLOAT_MODE_P (mode)))
765 return reverse_condition (code);
767 return UNKNOWN;
770 /* A wrapper around the previous function that takes COMPARISON as an rtx
771 expression. This simplifies many callers. */
772 enum rtx_code
773 reversed_comparison_code (comparison, insn)
774 rtx comparison, insn;
776 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
777 return UNKNOWN;
778 return reversed_comparison_code_parts (GET_CODE (comparison),
779 XEXP (comparison, 0),
780 XEXP (comparison, 1), insn);
783 /* Given an rtx-code for a comparison, return the code for the negated
784 comparison. If no such code exists, return UNKNOWN.
786 WATCH OUT! reverse_condition is not safe to use on a jump that might
787 be acting on the results of an IEEE floating point comparison, because
788 of the special treatment of non-signaling nans in comparisons.
789 Use reversed_comparison_code instead. */
791 enum rtx_code
792 reverse_condition (code)
793 enum rtx_code code;
795 switch (code)
797 case EQ:
798 return NE;
799 case NE:
800 return EQ;
801 case GT:
802 return LE;
803 case GE:
804 return LT;
805 case LT:
806 return GE;
807 case LE:
808 return GT;
809 case GTU:
810 return LEU;
811 case GEU:
812 return LTU;
813 case LTU:
814 return GEU;
815 case LEU:
816 return GTU;
817 case UNORDERED:
818 return ORDERED;
819 case ORDERED:
820 return UNORDERED;
822 case UNLT:
823 case UNLE:
824 case UNGT:
825 case UNGE:
826 case UNEQ:
827 case LTGT:
828 return UNKNOWN;
830 default:
831 abort ();
835 /* Similar, but we're allowed to generate unordered comparisons, which
836 makes it safe for IEEE floating-point. Of course, we have to recognize
837 that the target will support them too... */
839 enum rtx_code
840 reverse_condition_maybe_unordered (code)
841 enum rtx_code code;
843 /* Non-IEEE formats don't have unordered conditions. */
844 if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
845 return reverse_condition (code);
847 switch (code)
849 case EQ:
850 return NE;
851 case NE:
852 return EQ;
853 case GT:
854 return UNLE;
855 case GE:
856 return UNLT;
857 case LT:
858 return UNGE;
859 case LE:
860 return UNGT;
861 case LTGT:
862 return UNEQ;
863 case UNORDERED:
864 return ORDERED;
865 case ORDERED:
866 return UNORDERED;
867 case UNLT:
868 return GE;
869 case UNLE:
870 return GT;
871 case UNGT:
872 return LE;
873 case UNGE:
874 return LT;
875 case UNEQ:
876 return LTGT;
878 default:
879 abort ();
883 /* Similar, but return the code when two operands of a comparison are swapped.
884 This IS safe for IEEE floating-point. */
886 enum rtx_code
887 swap_condition (code)
888 enum rtx_code code;
890 switch (code)
892 case EQ:
893 case NE:
894 case UNORDERED:
895 case ORDERED:
896 case UNEQ:
897 case LTGT:
898 return code;
900 case GT:
901 return LT;
902 case GE:
903 return LE;
904 case LT:
905 return GT;
906 case LE:
907 return GE;
908 case GTU:
909 return LTU;
910 case GEU:
911 return LEU;
912 case LTU:
913 return GTU;
914 case LEU:
915 return GEU;
916 case UNLT:
917 return UNGT;
918 case UNLE:
919 return UNGE;
920 case UNGT:
921 return UNLT;
922 case UNGE:
923 return UNLE;
925 default:
926 abort ();
930 /* Given a comparison CODE, return the corresponding unsigned comparison.
931 If CODE is an equality comparison or already an unsigned comparison,
932 CODE is returned. */
934 enum rtx_code
935 unsigned_condition (code)
936 enum rtx_code code;
938 switch (code)
940 case EQ:
941 case NE:
942 case GTU:
943 case GEU:
944 case LTU:
945 case LEU:
946 return code;
948 case GT:
949 return GTU;
950 case GE:
951 return GEU;
952 case LT:
953 return LTU;
954 case LE:
955 return LEU;
957 default:
958 abort ();
962 /* Similarly, return the signed version of a comparison. */
964 enum rtx_code
965 signed_condition (code)
966 enum rtx_code code;
968 switch (code)
970 case EQ:
971 case NE:
972 case GT:
973 case GE:
974 case LT:
975 case LE:
976 return code;
978 case GTU:
979 return GT;
980 case GEU:
981 return GE;
982 case LTU:
983 return LT;
984 case LEU:
985 return LE;
987 default:
988 abort ();
992 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
993 truth of CODE1 implies the truth of CODE2. */
996 comparison_dominates_p (code1, code2)
997 enum rtx_code code1, code2;
999 /* UNKNOWN comparison codes can happen as a result of trying to revert
1000 comparison codes.
1001 They can't match anything, so we have to reject them here. */
1002 if (code1 == UNKNOWN || code2 == UNKNOWN)
1003 return 0;
1005 if (code1 == code2)
1006 return 1;
1008 switch (code1)
1010 case UNEQ:
1011 if (code2 == UNLE || code2 == UNGE)
1012 return 1;
1013 break;
1015 case EQ:
1016 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1017 || code2 == ORDERED)
1018 return 1;
1019 break;
1021 case UNLT:
1022 if (code2 == UNLE || code2 == NE)
1023 return 1;
1024 break;
1026 case LT:
1027 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1028 return 1;
1029 break;
1031 case UNGT:
1032 if (code2 == UNGE || code2 == NE)
1033 return 1;
1034 break;
1036 case GT:
1037 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1038 return 1;
1039 break;
1041 case GE:
1042 case LE:
1043 if (code2 == ORDERED)
1044 return 1;
1045 break;
1047 case LTGT:
1048 if (code2 == NE || code2 == ORDERED)
1049 return 1;
1050 break;
1052 case LTU:
1053 if (code2 == LEU || code2 == NE)
1054 return 1;
1055 break;
1057 case GTU:
1058 if (code2 == GEU || code2 == NE)
1059 return 1;
1060 break;
1062 case UNORDERED:
1063 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1064 || code2 == UNGE || code2 == UNGT)
1065 return 1;
1066 break;
1068 default:
1069 break;
1072 return 0;
1075 /* Return 1 if INSN is an unconditional jump and nothing else. */
1078 simplejump_p (insn)
1079 rtx insn;
1081 return (GET_CODE (insn) == JUMP_INSN
1082 && GET_CODE (PATTERN (insn)) == SET
1083 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1084 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1087 /* Return nonzero if INSN is a (possibly) conditional jump
1088 and nothing more.
1090 Use of this function is deprecated, since we need to support combined
1091 branch and compare insns. Use any_condjump_p instead whenever possible. */
1094 condjump_p (insn)
1095 rtx insn;
1097 register rtx x = PATTERN (insn);
1099 if (GET_CODE (x) != SET
1100 || GET_CODE (SET_DEST (x)) != PC)
1101 return 0;
1103 x = SET_SRC (x);
1104 if (GET_CODE (x) == LABEL_REF)
1105 return 1;
1106 else
1107 return (GET_CODE (x) == IF_THEN_ELSE
1108 && ((GET_CODE (XEXP (x, 2)) == PC
1109 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1110 || GET_CODE (XEXP (x, 1)) == RETURN))
1111 || (GET_CODE (XEXP (x, 1)) == PC
1112 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1113 || GET_CODE (XEXP (x, 2)) == RETURN))));
1115 return 0;
1118 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1119 PARALLEL.
1121 Use of this function is deprecated, since we need to support combined
1122 branch and compare insns. Use any_condjump_p instead whenever possible. */
1125 condjump_in_parallel_p (insn)
1126 rtx insn;
1128 register rtx x = PATTERN (insn);
1130 if (GET_CODE (x) != PARALLEL)
1131 return 0;
1132 else
1133 x = XVECEXP (x, 0, 0);
1135 if (GET_CODE (x) != SET)
1136 return 0;
1137 if (GET_CODE (SET_DEST (x)) != PC)
1138 return 0;
1139 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1140 return 1;
1141 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1142 return 0;
1143 if (XEXP (SET_SRC (x), 2) == pc_rtx
1144 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1145 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1146 return 1;
1147 if (XEXP (SET_SRC (x), 1) == pc_rtx
1148 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1149 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1150 return 1;
1151 return 0;
1154 /* Return set of PC, otherwise NULL. */
1157 pc_set (insn)
1158 rtx insn;
1160 rtx pat;
1161 if (GET_CODE (insn) != JUMP_INSN)
1162 return NULL_RTX;
1163 pat = PATTERN (insn);
1165 /* The set is allowed to appear either as the insn pattern or
1166 the first set in a PARALLEL. */
1167 if (GET_CODE (pat) == PARALLEL)
1168 pat = XVECEXP (pat, 0, 0);
1169 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1170 return pat;
1172 return NULL_RTX;
1175 /* Return true when insn is an unconditional direct jump,
1176 possibly bundled inside a PARALLEL. */
1179 any_uncondjump_p (insn)
1180 rtx insn;
1182 rtx x = pc_set (insn);
1183 if (!x)
1184 return 0;
1185 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1186 return 0;
1187 return 1;
1190 /* Return true when insn is a conditional jump. This function works for
1191 instructions containing PC sets in PARALLELs. The instruction may have
1192 various other effects so before removing the jump you must verify
1193 onlyjump_p.
1195 Note that unlike condjump_p it returns false for unconditional jumps. */
1198 any_condjump_p (insn)
1199 rtx insn;
1201 rtx x = pc_set (insn);
1202 enum rtx_code a, b;
1204 if (!x)
1205 return 0;
1206 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1207 return 0;
1209 a = GET_CODE (XEXP (SET_SRC (x), 1));
1210 b = GET_CODE (XEXP (SET_SRC (x), 2));
1212 return ((b == PC && (a == LABEL_REF || a == RETURN))
1213 || (a == PC && (b == LABEL_REF || b == RETURN)));
1216 /* Return the label of a conditional jump. */
1219 condjump_label (insn)
1220 rtx insn;
1222 rtx x = pc_set (insn);
1224 if (!x)
1225 return NULL_RTX;
1226 x = SET_SRC (x);
1227 if (GET_CODE (x) == LABEL_REF)
1228 return x;
1229 if (GET_CODE (x) != IF_THEN_ELSE)
1230 return NULL_RTX;
1231 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1232 return XEXP (x, 1);
1233 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1234 return XEXP (x, 2);
1235 return NULL_RTX;
1238 /* Return true if INSN is a (possibly conditional) return insn. */
1240 static int
1241 returnjump_p_1 (loc, data)
1242 rtx *loc;
1243 void *data ATTRIBUTE_UNUSED;
1245 rtx x = *loc;
1246 return x && GET_CODE (x) == RETURN;
1250 returnjump_p (insn)
1251 rtx insn;
1253 if (GET_CODE (insn) != JUMP_INSN)
1254 return 0;
1255 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1258 /* Return true if INSN is a jump that only transfers control and
1259 nothing more. */
1262 onlyjump_p (insn)
1263 rtx insn;
1265 rtx set;
1267 if (GET_CODE (insn) != JUMP_INSN)
1268 return 0;
1270 set = single_set (insn);
1271 if (set == NULL)
1272 return 0;
1273 if (GET_CODE (SET_DEST (set)) != PC)
1274 return 0;
1275 if (side_effects_p (SET_SRC (set)))
1276 return 0;
1278 return 1;
1281 #ifdef HAVE_cc0
1283 /* Return non-zero if X is an RTX that only sets the condition codes
1284 and has no side effects. */
1287 only_sets_cc0_p (x)
1288 rtx x;
1291 if (! x)
1292 return 0;
1294 if (INSN_P (x))
1295 x = PATTERN (x);
1297 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1300 /* Return 1 if X is an RTX that does nothing but set the condition codes
1301 and CLOBBER or USE registers.
1302 Return -1 if X does explicitly set the condition codes,
1303 but also does other things. */
1306 sets_cc0_p (x)
1307 rtx x;
1310 if (! x)
1311 return 0;
1313 if (INSN_P (x))
1314 x = PATTERN (x);
1316 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1317 return 1;
1318 if (GET_CODE (x) == PARALLEL)
1320 int i;
1321 int sets_cc0 = 0;
1322 int other_things = 0;
1323 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1325 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1326 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1327 sets_cc0 = 1;
1328 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1329 other_things = 1;
1331 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1333 return 0;
1335 #endif
1337 /* Follow any unconditional jump at LABEL;
1338 return the ultimate label reached by any such chain of jumps.
1339 If LABEL is not followed by a jump, return LABEL.
1340 If the chain loops or we can't find end, return LABEL,
1341 since that tells caller to avoid changing the insn.
1343 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1344 a USE or CLOBBER. */
1347 follow_jumps (label)
1348 rtx label;
1350 register rtx insn;
1351 register rtx next;
1352 register rtx value = label;
1353 register int depth;
1355 for (depth = 0;
1356 (depth < 10
1357 && (insn = next_active_insn (value)) != 0
1358 && GET_CODE (insn) == JUMP_INSN
1359 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1360 && onlyjump_p (insn))
1361 || GET_CODE (PATTERN (insn)) == RETURN)
1362 && (next = NEXT_INSN (insn))
1363 && GET_CODE (next) == BARRIER);
1364 depth++)
1366 /* Don't chain through the insn that jumps into a loop
1367 from outside the loop,
1368 since that would create multiple loop entry jumps
1369 and prevent loop optimization. */
1370 rtx tem;
1371 if (!reload_completed)
1372 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1373 if (GET_CODE (tem) == NOTE
1374 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1375 /* ??? Optional. Disables some optimizations, but makes
1376 gcov output more accurate with -O. */
1377 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1378 return value;
1380 /* If we have found a cycle, make the insn jump to itself. */
1381 if (JUMP_LABEL (insn) == label)
1382 return label;
1384 tem = next_active_insn (JUMP_LABEL (insn));
1385 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1386 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1387 break;
1389 value = JUMP_LABEL (insn);
1391 if (depth == 10)
1392 return label;
1393 return value;
1397 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1398 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1399 in INSN, then store one of them in JUMP_LABEL (INSN).
1400 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1401 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1402 Also, when there are consecutive labels, canonicalize on the last of them.
1404 Note that two labels separated by a loop-beginning note
1405 must be kept distinct if we have not yet done loop-optimization,
1406 because the gap between them is where loop-optimize
1407 will want to move invariant code to. CROSS_JUMP tells us
1408 that loop-optimization is done with. */
1410 void
1411 mark_jump_label (x, insn, in_mem)
1412 register rtx x;
1413 rtx insn;
1414 int in_mem;
1416 register RTX_CODE code = GET_CODE (x);
1417 register int i;
1418 register const char *fmt;
1420 switch (code)
1422 case PC:
1423 case CC0:
1424 case REG:
1425 case SUBREG:
1426 case CONST_INT:
1427 case CONST_DOUBLE:
1428 case CLOBBER:
1429 case CALL:
1430 return;
1432 case MEM:
1433 in_mem = 1;
1434 break;
1436 case SYMBOL_REF:
1437 if (!in_mem)
1438 return;
1440 /* If this is a constant-pool reference, see if it is a label. */
1441 if (CONSTANT_POOL_ADDRESS_P (x))
1442 mark_jump_label (get_pool_constant (x), insn, in_mem);
1443 break;
1445 case LABEL_REF:
1447 rtx label = XEXP (x, 0);
1449 /* Ignore remaining references to unreachable labels that
1450 have been deleted. */
1451 if (GET_CODE (label) == NOTE
1452 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1453 break;
1455 if (GET_CODE (label) != CODE_LABEL)
1456 abort ();
1458 /* Ignore references to labels of containing functions. */
1459 if (LABEL_REF_NONLOCAL_P (x))
1460 break;
1462 XEXP (x, 0) = label;
1463 if (! insn || ! INSN_DELETED_P (insn))
1464 ++LABEL_NUSES (label);
1466 if (insn)
1468 if (GET_CODE (insn) == JUMP_INSN)
1469 JUMP_LABEL (insn) = label;
1470 else
1472 /* Add a REG_LABEL note for LABEL unless there already
1473 is one. All uses of a label, except for labels
1474 that are the targets of jumps, must have a
1475 REG_LABEL note. */
1476 if (! find_reg_note (insn, REG_LABEL, label))
1477 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1478 REG_NOTES (insn));
1481 return;
1484 /* Do walk the labels in a vector, but not the first operand of an
1485 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1486 case ADDR_VEC:
1487 case ADDR_DIFF_VEC:
1488 if (! INSN_DELETED_P (insn))
1490 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1492 for (i = 0; i < XVECLEN (x, eltnum); i++)
1493 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1495 return;
1497 default:
1498 break;
1501 fmt = GET_RTX_FORMAT (code);
1502 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1504 if (fmt[i] == 'e')
1505 mark_jump_label (XEXP (x, i), insn, in_mem);
1506 else if (fmt[i] == 'E')
1508 register int j;
1509 for (j = 0; j < XVECLEN (x, i); j++)
1510 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1515 /* If all INSN does is set the pc, delete it,
1516 and delete the insn that set the condition codes for it
1517 if that's what the previous thing was. */
1519 void
1520 delete_jump (insn)
1521 rtx insn;
1523 register rtx set = single_set (insn);
1525 if (set && GET_CODE (SET_DEST (set)) == PC)
1526 delete_computation (insn);
1529 /* Verify INSN is a BARRIER and delete it. */
1531 void
1532 delete_barrier (insn)
1533 rtx insn;
1535 if (GET_CODE (insn) != BARRIER)
1536 abort ();
1538 delete_insn (insn);
1541 /* Recursively delete prior insns that compute the value (used only by INSN
1542 which the caller is deleting) stored in the register mentioned by NOTE
1543 which is a REG_DEAD note associated with INSN. */
1545 static void
1546 delete_prior_computation (note, insn)
1547 rtx note;
1548 rtx insn;
1550 rtx our_prev;
1551 rtx reg = XEXP (note, 0);
1553 for (our_prev = prev_nonnote_insn (insn);
1554 our_prev && (GET_CODE (our_prev) == INSN
1555 || GET_CODE (our_prev) == CALL_INSN);
1556 our_prev = prev_nonnote_insn (our_prev))
1558 rtx pat = PATTERN (our_prev);
1560 /* If we reach a CALL which is not calling a const function
1561 or the callee pops the arguments, then give up. */
1562 if (GET_CODE (our_prev) == CALL_INSN
1563 && (! CONST_OR_PURE_CALL_P (our_prev)
1564 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1565 break;
1567 /* If we reach a SEQUENCE, it is too complex to try to
1568 do anything with it, so give up. */
1569 if (GET_CODE (pat) == SEQUENCE)
1570 break;
1572 if (GET_CODE (pat) == USE
1573 && GET_CODE (XEXP (pat, 0)) == INSN)
1574 /* reorg creates USEs that look like this. We leave them
1575 alone because reorg needs them for its own purposes. */
1576 break;
1578 if (reg_set_p (reg, pat))
1580 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1581 break;
1583 if (GET_CODE (pat) == PARALLEL)
1585 /* If we find a SET of something else, we can't
1586 delete the insn. */
1588 int i;
1590 for (i = 0; i < XVECLEN (pat, 0); i++)
1592 rtx part = XVECEXP (pat, 0, i);
1594 if (GET_CODE (part) == SET
1595 && SET_DEST (part) != reg)
1596 break;
1599 if (i == XVECLEN (pat, 0))
1600 delete_computation (our_prev);
1602 else if (GET_CODE (pat) == SET
1603 && GET_CODE (SET_DEST (pat)) == REG)
1605 int dest_regno = REGNO (SET_DEST (pat));
1606 int dest_endregno
1607 = (dest_regno
1608 + (dest_regno < FIRST_PSEUDO_REGISTER
1609 ? HARD_REGNO_NREGS (dest_regno,
1610 GET_MODE (SET_DEST (pat))) : 1));
1611 int regno = REGNO (reg);
1612 int endregno
1613 = (regno
1614 + (regno < FIRST_PSEUDO_REGISTER
1615 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1617 if (dest_regno >= regno
1618 && dest_endregno <= endregno)
1619 delete_computation (our_prev);
1621 /* We may have a multi-word hard register and some, but not
1622 all, of the words of the register are needed in subsequent
1623 insns. Write REG_UNUSED notes for those parts that were not
1624 needed. */
1625 else if (dest_regno <= regno
1626 && dest_endregno >= endregno)
1628 int i;
1630 REG_NOTES (our_prev)
1631 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1632 REG_NOTES (our_prev));
1634 for (i = dest_regno; i < dest_endregno; i++)
1635 if (! find_regno_note (our_prev, REG_UNUSED, i))
1636 break;
1638 if (i == dest_endregno)
1639 delete_computation (our_prev);
1643 break;
1646 /* If PAT references the register that dies here, it is an
1647 additional use. Hence any prior SET isn't dead. However, this
1648 insn becomes the new place for the REG_DEAD note. */
1649 if (reg_overlap_mentioned_p (reg, pat))
1651 XEXP (note, 1) = REG_NOTES (our_prev);
1652 REG_NOTES (our_prev) = note;
1653 break;
1658 /* Delete INSN and recursively delete insns that compute values used only
1659 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1660 If we are running before flow.c, we need do nothing since flow.c will
1661 delete dead code. We also can't know if the registers being used are
1662 dead or not at this point.
1664 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1665 nothing other than set a register that dies in this insn, we can delete
1666 that insn as well.
1668 On machines with CC0, if CC0 is used in this insn, we may be able to
1669 delete the insn that set it. */
1671 static void
1672 delete_computation (insn)
1673 rtx insn;
1675 rtx note, next;
1677 #ifdef HAVE_cc0
1678 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1680 rtx prev = prev_nonnote_insn (insn);
1681 /* We assume that at this stage
1682 CC's are always set explicitly
1683 and always immediately before the jump that
1684 will use them. So if the previous insn
1685 exists to set the CC's, delete it
1686 (unless it performs auto-increments, etc.). */
1687 if (prev && GET_CODE (prev) == INSN
1688 && sets_cc0_p (PATTERN (prev)))
1690 if (sets_cc0_p (PATTERN (prev)) > 0
1691 && ! side_effects_p (PATTERN (prev)))
1692 delete_computation (prev);
1693 else
1694 /* Otherwise, show that cc0 won't be used. */
1695 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1696 cc0_rtx, REG_NOTES (prev));
1699 #endif
1701 for (note = REG_NOTES (insn); note; note = next)
1703 next = XEXP (note, 1);
1705 if (REG_NOTE_KIND (note) != REG_DEAD
1706 /* Verify that the REG_NOTE is legitimate. */
1707 || GET_CODE (XEXP (note, 0)) != REG)
1708 continue;
1710 delete_prior_computation (note, insn);
1713 delete_insn (insn);
1716 /* Delete insn INSN from the chain of insns and update label ref counts.
1717 May delete some following insns as a consequence; may even delete
1718 a label elsewhere and insns that follow it.
1720 Returns the first insn after INSN that was not deleted. */
1723 delete_insn (insn)
1724 register rtx insn;
1726 register rtx next = NEXT_INSN (insn);
1727 register rtx prev = PREV_INSN (insn);
1728 register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1729 register int dont_really_delete = 0;
1730 rtx note;
1732 while (next && INSN_DELETED_P (next))
1733 next = NEXT_INSN (next);
1735 /* This insn is already deleted => return first following nondeleted. */
1736 if (INSN_DELETED_P (insn))
1737 return next;
1739 if (was_code_label)
1740 remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);
1742 /* Don't delete user-declared labels. When optimizing, convert them
1743 to special NOTEs instead. When not optimizing, leave them alone. */
1744 if (was_code_label && LABEL_NAME (insn) != 0)
1746 if (optimize)
1748 const char *name = LABEL_NAME (insn);
1749 PUT_CODE (insn, NOTE);
1750 NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
1751 NOTE_SOURCE_FILE (insn) = name;
1754 dont_really_delete = 1;
1756 else
1757 /* Mark this insn as deleted. */
1758 INSN_DELETED_P (insn) = 1;
1760 /* If instruction is followed by a barrier,
1761 delete the barrier too. */
1763 if (next != 0 && GET_CODE (next) == BARRIER)
1765 INSN_DELETED_P (next) = 1;
1766 next = NEXT_INSN (next);
1769 /* Patch out INSN (and the barrier if any) */
1771 if (! dont_really_delete)
1773 if (prev)
1775 NEXT_INSN (prev) = next;
1776 if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
1777 NEXT_INSN (XVECEXP (PATTERN (prev), 0,
1778 XVECLEN (PATTERN (prev), 0) - 1)) = next;
1781 if (next)
1783 PREV_INSN (next) = prev;
1784 if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
1785 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
1788 if (prev && NEXT_INSN (prev) == 0)
1789 set_last_insn (prev);
1792 /* If deleting a jump, decrement the count of the label,
1793 and delete the label if it is now unused. */
1795 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1797 rtx lab = JUMP_LABEL (insn), lab_next;
1799 if (--LABEL_NUSES (lab) == 0)
1801 /* This can delete NEXT or PREV,
1802 either directly if NEXT is JUMP_LABEL (INSN),
1803 or indirectly through more levels of jumps. */
1804 delete_insn (lab);
1806 /* I feel a little doubtful about this loop,
1807 but I see no clean and sure alternative way
1808 to find the first insn after INSN that is not now deleted.
1809 I hope this works. */
1810 while (next && INSN_DELETED_P (next))
1811 next = NEXT_INSN (next);
1812 return next;
1814 else if ((lab_next = next_nonnote_insn (lab)) != NULL
1815 && GET_CODE (lab_next) == JUMP_INSN
1816 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1817 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1819 /* If we're deleting the tablejump, delete the dispatch table.
1820 We may not be able to kill the label immediately preceeding
1821 just yet, as it might be referenced in code leading up to
1822 the tablejump. */
1823 delete_insn (lab_next);
1827 /* Likewise if we're deleting a dispatch table. */
1829 if (GET_CODE (insn) == JUMP_INSN
1830 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1831 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1833 rtx pat = PATTERN (insn);
1834 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1835 int len = XVECLEN (pat, diff_vec_p);
1837 for (i = 0; i < len; i++)
1838 if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1839 delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1840 while (next && INSN_DELETED_P (next))
1841 next = NEXT_INSN (next);
1842 return next;
1845 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1846 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1847 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1848 if (REG_NOTE_KIND (note) == REG_LABEL
1849 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1850 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1851 if (--LABEL_NUSES (XEXP (note, 0)) == 0)
1852 delete_insn (XEXP (note, 0));
1854 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1855 prev = PREV_INSN (prev);
1857 /* If INSN was a label and a dispatch table follows it,
1858 delete the dispatch table. The tablejump must have gone already.
1859 It isn't useful to fall through into a table. */
1861 if (was_code_label
1862 && NEXT_INSN (insn) != 0
1863 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1864 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1865 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1866 next = delete_insn (NEXT_INSN (insn));
1868 /* If INSN was a label, delete insns following it if now unreachable. */
1870 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1872 register RTX_CODE code;
1873 while (next != 0
1874 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1875 || code == NOTE || code == BARRIER
1876 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1878 if (code == NOTE
1879 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1880 next = NEXT_INSN (next);
1881 /* Keep going past other deleted labels to delete what follows. */
1882 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1883 next = NEXT_INSN (next);
1884 else
1885 /* Note: if this deletes a jump, it can cause more
1886 deletion of unreachable code, after a different label.
1887 As long as the value from this recursive call is correct,
1888 this invocation functions correctly. */
1889 next = delete_insn (next);
1893 return next;
1896 /* Advance from INSN till reaching something not deleted
1897 then return that. May return INSN itself. */
1900 next_nondeleted_insn (insn)
1901 rtx insn;
1903 while (INSN_DELETED_P (insn))
1904 insn = NEXT_INSN (insn);
1905 return insn;
1908 /* Delete a range of insns from FROM to TO, inclusive.
1909 This is for the sake of peephole optimization, so assume
1910 that whatever these insns do will still be done by a new
1911 peephole insn that will replace them. */
1913 void
1914 delete_for_peephole (from, to)
1915 register rtx from, to;
1917 register rtx insn = from;
1919 while (1)
1921 register rtx next = NEXT_INSN (insn);
1922 register rtx prev = PREV_INSN (insn);
1924 if (GET_CODE (insn) != NOTE)
1926 INSN_DELETED_P (insn) = 1;
1928 /* Patch this insn out of the chain. */
1929 /* We don't do this all at once, because we
1930 must preserve all NOTEs. */
1931 if (prev)
1932 NEXT_INSN (prev) = next;
1934 if (next)
1935 PREV_INSN (next) = prev;
1938 if (insn == to)
1939 break;
1940 insn = next;
1943 /* Note that if TO is an unconditional jump
1944 we *do not* delete the BARRIER that follows,
1945 since the peephole that replaces this sequence
1946 is also an unconditional jump in that case. */
1949 /* We have determined that INSN is never reached, and are about to
1950 delete it. Print a warning if the user asked for one.
1952 To try to make this warning more useful, this should only be called
1953 once per basic block not reached, and it only warns when the basic
1954 block contains more than one line from the current function, and
1955 contains at least one operation. CSE and inlining can duplicate insns,
1956 so it's possible to get spurious warnings from this. */
1958 void
1959 never_reached_warning (avoided_insn)
1960 rtx avoided_insn;
1962 rtx insn;
1963 rtx a_line_note = NULL;
1964 int two_avoided_lines = 0;
1965 int contains_insn = 0;
1967 if (! warn_notreached)
1968 return;
1970 /* Scan forwards, looking at LINE_NUMBER notes, until
1971 we hit a LABEL or we run out of insns. */
1973 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
1975 if (GET_CODE (insn) == CODE_LABEL)
1976 break;
1977 else if (GET_CODE (insn) == NOTE /* A line number note? */
1978 && NOTE_LINE_NUMBER (insn) >= 0)
1980 if (a_line_note == NULL)
1981 a_line_note = insn;
1982 else
1983 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1984 != NOTE_LINE_NUMBER (insn));
1986 else if (INSN_P (insn))
1987 contains_insn = 1;
1989 if (two_avoided_lines && contains_insn)
1990 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1991 NOTE_LINE_NUMBER (a_line_note),
1992 "will never be executed");
1995 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1996 NLABEL as a return. Accrue modifications into the change group. */
1998 static void
1999 redirect_exp_1 (loc, olabel, nlabel, insn)
2000 rtx *loc;
2001 rtx olabel, nlabel;
2002 rtx insn;
2004 register rtx x = *loc;
2005 register RTX_CODE code = GET_CODE (x);
2006 register int i;
2007 register const char *fmt;
2009 if (code == LABEL_REF)
2011 if (XEXP (x, 0) == olabel)
2013 rtx n;
2014 if (nlabel)
2015 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
2016 else
2017 n = gen_rtx_RETURN (VOIDmode);
2019 validate_change (insn, loc, n, 1);
2020 return;
2023 else if (code == RETURN && olabel == 0)
2025 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
2026 if (loc == &PATTERN (insn))
2027 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
2028 validate_change (insn, loc, x, 1);
2029 return;
2032 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
2033 && GET_CODE (SET_SRC (x)) == LABEL_REF
2034 && XEXP (SET_SRC (x), 0) == olabel)
2036 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
2037 return;
2040 fmt = GET_RTX_FORMAT (code);
2041 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2043 if (fmt[i] == 'e')
2044 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
2045 else if (fmt[i] == 'E')
2047 register int j;
2048 for (j = 0; j < XVECLEN (x, i); j++)
2049 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2054 /* Similar, but apply the change group and report success or failure. */
2056 static int
2057 redirect_exp (olabel, nlabel, insn)
2058 rtx olabel, nlabel;
2059 rtx insn;
2061 rtx *loc;
2063 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2064 loc = &XVECEXP (PATTERN (insn), 0, 0);
2065 else
2066 loc = &PATTERN (insn);
2068 redirect_exp_1 (loc, olabel, nlabel, insn);
2069 if (num_validated_changes () == 0)
2070 return 0;
2072 return apply_change_group ();
2075 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2076 the modifications into the change group. Return false if we did
2077 not see how to do that. */
2080 redirect_jump_1 (jump, nlabel)
2081 rtx jump, nlabel;
2083 int ochanges = num_validated_changes ();
2084 rtx *loc;
2086 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2087 loc = &XVECEXP (PATTERN (jump), 0, 0);
2088 else
2089 loc = &PATTERN (jump);
2091 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2092 return num_validated_changes () > ochanges;
2095 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2096 jump target label is unused as a result, it and the code following
2097 it may be deleted.
2099 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2100 RETURN insn.
2102 The return value will be 1 if the change was made, 0 if it wasn't
2103 (this can only occur for NLABEL == 0). */
2106 redirect_jump (jump, nlabel, delete_unused)
2107 rtx jump, nlabel;
2108 int delete_unused;
2110 register rtx olabel = JUMP_LABEL (jump);
2112 if (nlabel == olabel)
2113 return 1;
2115 if (! redirect_exp (olabel, nlabel, jump))
2116 return 0;
2118 JUMP_LABEL (jump) = nlabel;
2119 if (nlabel)
2120 ++LABEL_NUSES (nlabel);
2122 /* If we're eliding the jump over exception cleanups at the end of a
2123 function, move the function end note so that -Wreturn-type works. */
2124 if (olabel && nlabel
2125 && NEXT_INSN (olabel)
2126 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2127 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2128 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2130 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
2131 delete_insn (olabel);
2133 return 1;
2136 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2137 Accrue the modifications into the change group. */
2139 static void
2140 invert_exp_1 (insn)
2141 rtx insn;
2143 register RTX_CODE code;
2144 rtx x = pc_set (insn);
2146 if (!x)
2147 abort ();
2148 x = SET_SRC (x);
2150 code = GET_CODE (x);
2152 if (code == IF_THEN_ELSE)
2154 register rtx comp = XEXP (x, 0);
2155 register rtx tem;
2156 enum rtx_code reversed_code;
2158 /* We can do this in two ways: The preferable way, which can only
2159 be done if this is not an integer comparison, is to reverse
2160 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2161 of the IF_THEN_ELSE. If we can't do either, fail. */
2163 reversed_code = reversed_comparison_code (comp, insn);
2165 if (reversed_code != UNKNOWN)
2167 validate_change (insn, &XEXP (x, 0),
2168 gen_rtx_fmt_ee (reversed_code,
2169 GET_MODE (comp), XEXP (comp, 0),
2170 XEXP (comp, 1)),
2172 return;
2175 tem = XEXP (x, 1);
2176 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2177 validate_change (insn, &XEXP (x, 2), tem, 1);
2179 else
2180 abort ();
2183 /* Invert the jump condition of conditional jump insn, INSN.
2185 Return 1 if we can do so, 0 if we cannot find a way to do so that
2186 matches a pattern. */
2188 static int
2189 invert_exp (insn)
2190 rtx insn;
2192 invert_exp_1 (insn);
2193 if (num_validated_changes () == 0)
2194 return 0;
2196 return apply_change_group ();
2199 /* Invert the condition of the jump JUMP, and make it jump to label
2200 NLABEL instead of where it jumps now. Accrue changes into the
2201 change group. Return false if we didn't see how to perform the
2202 inversion and redirection. */
2205 invert_jump_1 (jump, nlabel)
2206 rtx jump, nlabel;
2208 int ochanges;
2210 ochanges = num_validated_changes ();
2211 invert_exp_1 (jump);
2212 if (num_validated_changes () == ochanges)
2213 return 0;
2215 return redirect_jump_1 (jump, nlabel);
2218 /* Invert the condition of the jump JUMP, and make it jump to label
2219 NLABEL instead of where it jumps now. Return true if successful. */
2222 invert_jump (jump, nlabel, delete_unused)
2223 rtx jump, nlabel;
2224 int delete_unused;
2226 /* We have to either invert the condition and change the label or
2227 do neither. Either operation could fail. We first try to invert
2228 the jump. If that succeeds, we try changing the label. If that fails,
2229 we invert the jump back to what it was. */
2231 if (! invert_exp (jump))
2232 return 0;
2234 if (redirect_jump (jump, nlabel, delete_unused))
2236 invert_br_probabilities (jump);
2238 return 1;
2241 if (! invert_exp (jump))
2242 /* This should just be putting it back the way it was. */
2243 abort ();
2245 return 0;
2249 /* Like rtx_equal_p except that it considers two REGs as equal
2250 if they renumber to the same value and considers two commutative
2251 operations to be the same if the order of the operands has been
2252 reversed.
2254 ??? Addition is not commutative on the PA due to the weird implicit
2255 space register selection rules for memory addresses. Therefore, we
2256 don't consider a + b == b + a.
2258 We could/should make this test a little tighter. Possibly only
2259 disabling it on the PA via some backend macro or only disabling this
2260 case when the PLUS is inside a MEM. */
2263 rtx_renumbered_equal_p (x, y)
2264 rtx x, y;
2266 register int i;
2267 register RTX_CODE code = GET_CODE (x);
2268 register const char *fmt;
2270 if (x == y)
2271 return 1;
2273 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2274 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2275 && GET_CODE (SUBREG_REG (y)) == REG)))
2277 int reg_x = -1, reg_y = -1;
2278 int byte_x = 0, byte_y = 0;
2280 if (GET_MODE (x) != GET_MODE (y))
2281 return 0;
2283 /* If we haven't done any renumbering, don't
2284 make any assumptions. */
2285 if (reg_renumber == 0)
2286 return rtx_equal_p (x, y);
2288 if (code == SUBREG)
2290 reg_x = REGNO (SUBREG_REG (x));
2291 byte_x = SUBREG_BYTE (x);
2293 if (reg_renumber[reg_x] >= 0)
2295 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2296 GET_MODE (SUBREG_REG (x)),
2297 byte_x,
2298 GET_MODE (x));
2299 byte_x = 0;
2302 else
2304 reg_x = REGNO (x);
2305 if (reg_renumber[reg_x] >= 0)
2306 reg_x = reg_renumber[reg_x];
2309 if (GET_CODE (y) == SUBREG)
2311 reg_y = REGNO (SUBREG_REG (y));
2312 byte_y = SUBREG_BYTE (y);
2314 if (reg_renumber[reg_y] >= 0)
2316 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2317 GET_MODE (SUBREG_REG (y)),
2318 byte_y,
2319 GET_MODE (y));
2320 byte_y = 0;
2323 else
2325 reg_y = REGNO (y);
2326 if (reg_renumber[reg_y] >= 0)
2327 reg_y = reg_renumber[reg_y];
2330 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2333 /* Now we have disposed of all the cases
2334 in which different rtx codes can match. */
2335 if (code != GET_CODE (y))
2336 return 0;
2338 switch (code)
2340 case PC:
2341 case CC0:
2342 case ADDR_VEC:
2343 case ADDR_DIFF_VEC:
2344 return 0;
2346 case CONST_INT:
2347 return INTVAL (x) == INTVAL (y);
2349 case LABEL_REF:
2350 /* We can't assume nonlocal labels have their following insns yet. */
2351 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2352 return XEXP (x, 0) == XEXP (y, 0);
2354 /* Two label-refs are equivalent if they point at labels
2355 in the same position in the instruction stream. */
2356 return (next_real_insn (XEXP (x, 0))
2357 == next_real_insn (XEXP (y, 0)));
2359 case SYMBOL_REF:
2360 return XSTR (x, 0) == XSTR (y, 0);
2362 case CODE_LABEL:
2363 /* If we didn't match EQ equality above, they aren't the same. */
2364 return 0;
2366 default:
2367 break;
2370 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2372 if (GET_MODE (x) != GET_MODE (y))
2373 return 0;
2375 /* For commutative operations, the RTX match if the operand match in any
2376 order. Also handle the simple binary and unary cases without a loop.
2378 ??? Don't consider PLUS a commutative operator; see comments above. */
2379 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2380 && code != PLUS)
2381 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2382 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2383 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2384 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2385 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2386 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2387 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2388 else if (GET_RTX_CLASS (code) == '1')
2389 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2391 /* Compare the elements. If any pair of corresponding elements
2392 fail to match, return 0 for the whole things. */
2394 fmt = GET_RTX_FORMAT (code);
2395 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2397 register int j;
2398 switch (fmt[i])
2400 case 'w':
2401 if (XWINT (x, i) != XWINT (y, i))
2402 return 0;
2403 break;
2405 case 'i':
2406 if (XINT (x, i) != XINT (y, i))
2407 return 0;
2408 break;
2410 case 't':
2411 if (XTREE (x, i) != XTREE (y, i))
2412 return 0;
2413 break;
2415 case 's':
2416 if (strcmp (XSTR (x, i), XSTR (y, i)))
2417 return 0;
2418 break;
2420 case 'e':
2421 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2422 return 0;
2423 break;
2425 case 'u':
2426 if (XEXP (x, i) != XEXP (y, i))
2427 return 0;
2428 /* fall through. */
2429 case '0':
2430 break;
2432 case 'E':
2433 if (XVECLEN (x, i) != XVECLEN (y, i))
2434 return 0;
2435 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2436 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2437 return 0;
2438 break;
2440 default:
2441 abort ();
2444 return 1;
2447 /* If X is a hard register or equivalent to one or a subregister of one,
2448 return the hard register number. If X is a pseudo register that was not
2449 assigned a hard register, return the pseudo register number. Otherwise,
2450 return -1. Any rtx is valid for X. */
2453 true_regnum (x)
2454 rtx x;
2456 if (GET_CODE (x) == REG)
2458 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2459 return reg_renumber[REGNO (x)];
2460 return REGNO (x);
2462 if (GET_CODE (x) == SUBREG)
2464 int base = true_regnum (SUBREG_REG (x));
2465 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2466 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2467 GET_MODE (SUBREG_REG (x)),
2468 SUBREG_BYTE (x), GET_MODE (x));
2470 return -1;
2473 /* Optimize code of the form:
2475 for (x = a[i]; x; ...)
2477 for (x = a[i]; x; ...)
2479 foo:
2481 Loop optimize will change the above code into
2483 if (x = a[i])
2484 for (;;)
2485 { ...; if (! (x = ...)) break; }
2486 if (x = a[i])
2487 for (;;)
2488 { ...; if (! (x = ...)) break; }
2489 foo:
2491 In general, if the first test fails, the program can branch
2492 directly to `foo' and skip the second try which is doomed to fail.
2493 We run this after loop optimization and before flow analysis. */
/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

/* Equivalence map, indexed by pseudo register number; -1 means no
   pending equivalence.  Allocated and reset by thread_jumps.  */
static int *same_regs;

/* Number of entries of same_regs that currently hold a pending
   (unconfirmed) equivalence.  */
static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

/* One flag byte per register, indexed by register number; set by
   mark_modified_reg, consulted by rtx_equal_for_thread_p.  */
static char *modified_regs;

/* Record if memory was modified.  */

/* Nonzero once any store to memory (or a CALL_INSN) has been seen
   between the two branches; makes all MEMs compare unequal.  */
static int modified_mem;
2516 /* Called via note_stores on each insn between the target of the first
2517 branch and the second branch. It marks any changed registers. */
2519 static void
2520 mark_modified_reg (dest, x, data)
2521 rtx dest;
2522 rtx x;
2523 void *data ATTRIBUTE_UNUSED;
2525 int regno;
2526 unsigned int i;
2528 if (GET_CODE (dest) == SUBREG)
2529 dest = SUBREG_REG (dest);
2531 if (GET_CODE (dest) == MEM)
2532 modified_mem = 1;
2534 if (GET_CODE (dest) != REG)
2535 return;
2537 regno = REGNO (dest);
2538 if (regno >= FIRST_PSEUDO_REGISTER)
2539 modified_regs[regno] = 1;
2540 /* Don't consider a hard condition code register as modified,
2541 if it is only being set. thread_jumps will check if it is set
2542 to the same value. */
2543 else if (GET_MODE_CLASS (GET_MODE (dest)) != MODE_CC
2544 || GET_CODE (x) != SET
2545 || ! rtx_equal_p (dest, SET_DEST (x))
2546 || HARD_REGNO_NREGS (regno, GET_MODE (dest)) != 1)
2547 for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
2548 modified_regs[regno + i] = 1;
/* F is the first insn in the chain of insns.

   MAX_REG is the number of registers (used to size the per-register
   tables), and FLAG_BEFORE_LOOP is nonzero when this runs before loop
   optimization, in which case we avoid threading to a loop-begin label.  */

void
thread_jumps (f, max_reg, flag_before_loop)
     rtx f;
     int max_reg;
     int flag_before_loop;
{
  /* Basic algorithm is to find a conditional branch,
     the label it may branch to, and the branch after
     that label.  If the two branches test the same condition,
     walk back from both branch paths until the insn patterns
     differ, or code labels are hit.  If we make it back to
     the target of the first branch, then we know that the first branch
     will either always succeed or always fail depending on the relative
     senses of the two branches.  So adjust the first branch accordingly
     in this case.  */

  rtx label, b1, b2, t1, t2;
  enum rtx_code code1, code2;
  rtx b1op0, b1op1, b2op0, b2op1;
  int changed = 1;
  int i;
  int *all_reset;
  enum rtx_code reversed_code1, reversed_code2;

  /* Allocate register tables and quick-reset table.  */
  modified_regs = (char *) xmalloc (max_reg * sizeof (char));
  same_regs = (int *) xmalloc (max_reg * sizeof (int));
  all_reset = (int *) xmalloc (max_reg * sizeof (int));
  for (i = 0; i < max_reg; i++)
    all_reset[i] = -1;

  /* Iterate until no branch was redirected in a whole pass.  */
  while (changed)
    {
      changed = 0;

      for (b1 = f; b1; b1 = NEXT_INSN (b1))
	{
	  rtx set;
	  rtx set2;

	  /* Get to a candidate branch insn.  */
	  if (GET_CODE (b1) != JUMP_INSN
	      || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
	    continue;

	  /* Reset the per-candidate state: no registers or memory
	     modified yet, no pending register equivalences.  */
	  memset (modified_regs, 0, max_reg * sizeof (char));
	  modified_mem = 0;

	  memcpy (same_regs, all_reset, max_reg * sizeof (int));
	  num_same_regs = 0;

	  label = JUMP_LABEL (b1);

	  /* Look for a branch after the target.  Record any registers and
	     memory modified between the target and the branch.  Stop when we
	     get to a label since we can't know what was changed there.  */
	  for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
	    {
	      if (GET_CODE (b2) == CODE_LABEL)
		break;

	      else if (GET_CODE (b2) == JUMP_INSN)
		{
		  /* If this is an unconditional jump and is the only use of
		     its target label, we can follow it.  */
		  if (any_uncondjump_p (b2)
		      && onlyjump_p (b2)
		      && JUMP_LABEL (b2) != 0
		      && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
		    {
		      b2 = JUMP_LABEL (b2);
		      continue;
		    }
		  else
		    break;
		}

	      if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
		continue;

	      if (GET_CODE (b2) == CALL_INSN)
		{
		  /* A call clobbers memory and all call-used registers
		     other than the stack/frame/arg pointers.  */
		  modified_mem = 1;
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    if (call_used_regs[i] && ! fixed_regs[i]
			&& i != STACK_POINTER_REGNUM
			&& i != FRAME_POINTER_REGNUM
			&& i != HARD_FRAME_POINTER_REGNUM
			&& i != ARG_POINTER_REGNUM)
		      modified_regs[i] = 1;
		}

	      note_stores (PATTERN (b2), mark_modified_reg, NULL);
	    }

	  /* Check the next candidate branch insn from the label
	     of the first.  */
	  if (b2 == 0
	      || GET_CODE (b2) != JUMP_INSN
	      || b2 == b1
	      || !any_condjump_p (b2)
	      || !onlyjump_p (b2))
	    continue;
	  set = pc_set (b1);
	  set2 = pc_set (b2);

	  /* Get the comparison codes and operands, reversing the
	     codes if appropriate.  If we don't have comparison codes,
	     we can't do anything.  */
	  b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
	  b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
	  code1 = GET_CODE (XEXP (SET_SRC (set), 0));
	  reversed_code1 = code1;
	  /* A branch whose taken arm is the fall-through (pc in operand 1)
	     really tests the reversed condition.  */
	  if (XEXP (SET_SRC (set), 1) == pc_rtx)
	    code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
	  else
	    reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0),
						       b1);

	  b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
	  b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
	  code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
	  reversed_code2 = code2;
	  if (XEXP (SET_SRC (set2), 1) == pc_rtx)
	    code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
	  else
	    reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0),
						       b2);

	  /* If they test the same things and knowing that B1 branches
	     tells us whether or not B2 branches, check if we
	     can thread the branch.  */
	  if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
	      && rtx_equal_for_thread_p (b1op1, b2op1, b2)
	      && (comparison_dominates_p (code1, code2)
		  || comparison_dominates_p (code1, reversed_code2)))
	    {
	      /* Walk backwards from both branches in lock step, comparing
		 insn patterns, until we reach LABEL or the patterns
		 diverge.  */
	      t1 = prev_nonnote_insn (b1);
	      t2 = prev_nonnote_insn (b2);

	      while (t1 != 0 && t2 != 0)
		{
		  if (t2 == label)
		    {
		      /* We have reached the target of the first branch.
			 If there are no pending register equivalents,
			 we know that this branch will either always
			 succeed (if the senses of the two branches are
			 the same) or always fail (if not).  */
		      rtx new_label;

		      if (num_same_regs != 0)
			break;

		      if (comparison_dominates_p (code1, code2))
			new_label = JUMP_LABEL (b2);
		      else
			new_label = get_label_after (b2);

		      if (JUMP_LABEL (b1) != new_label)
			{
			  rtx prev = PREV_INSN (new_label);

			  if (flag_before_loop
			      && GET_CODE (prev) == NOTE
			      && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
			    {
			      /* Don't thread to the loop label.  If a loop
				 label is reused, loop optimization will
				 be disabled for that loop.  */
			      new_label = gen_label_rtx ();
			      emit_label_after (new_label, PREV_INSN (prev));
			    }
			  changed |= redirect_jump (b1, new_label, 1);
			}
		      break;
		    }

		  /* If either of these is not a normal insn (it might be
		     a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail.  (NOTEs
		     have already been skipped above.)  Similarly, fail
		     if the insns are different.  */
		  if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
		      || recog_memoized (t1) != recog_memoized (t2)
		      || ! rtx_equal_for_thread_p (PATTERN (t1),
						   PATTERN (t2), t2))
		    break;

		  t1 = prev_nonnote_insn (t1);
		  t2 = prev_nonnote_insn (t2);
		}
	    }
	}
    }

  /* Clean up.  */
  free (modified_regs);
  free (same_regs);
  free (all_reset);
}
/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.

   Returns 1 for equivalent, 0 for not.  As a side effect, may record a
   pending pseudo-register equivalence in same_regs/num_same_regs, or
   cancel one when a SET overwrites equivalenced registers.  */

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register const char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For floating-point, consider everything unequal.  This is a bit
     pessimistic, but this pass would only rarely do anything for FP
     anyway.  */
  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	     && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
	    || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
		&& rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	    && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

  /* Handle special-cases first.  */
  switch (code)
    {
    case REG:
      /* Identical register numbers are equal only if the register has
	 not been modified between the two branches.  */
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
	return 1;

      /* If neither is user variable or hard register, check for possible
	 equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  || REGNO (y) < FIRST_PSEUDO_REGISTER)
	return 0;

      if (same_regs[REGNO (x)] == -1)
	{
	  /* No equivalence recorded yet for X; tentatively pair it
	     with Y.  */
	  same_regs[REGNO (x)] = REGNO (y);
	  num_same_regs++;

	  /* If this is the first time we are seeing a register on the `Y'
	     side, see if it is the last use.  If not, we can't thread the
	     jump, so mark it as not equivalent.  */
	  if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
	    return 0;

	  return 1;
	}
      else
	/* An equivalence already exists for X; it must name Y.  */
	return (same_regs[REGNO (x)] == (int) REGNO (y));

      break;

    case MEM:
      /* If memory modified or either volatile, not equivalent.
	 Else, check address.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' if setting equivalenced registers.
	 Then process source.  */
      if (GET_CODE (SET_DEST (x)) == REG
	  && GET_CODE (SET_DEST (y)) == REG)
	{
	  if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
	    {
	      /* The stores overwrite the paired pseudos, so the pending
		 equivalence is now satisfied: drop it.  */
	      same_regs[REGNO (SET_DEST (x))] = -1;
	      num_same_regs--;
	    }
	  else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
	    return 0;
	}
      else
	{
	  if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	    return 0;
	}

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      /* Symbol names are shared, so pointer equality suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  if (x == y)
    return 1;

  /* Fall back to a generic element-by-element comparison driven by the
     rtx format string.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j), yinsn) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}