/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */
/* This is the pathetic reminder of the old fame of the jump-optimization
   pass of the compiler.  Now it contains basically a set of utility
   functions to operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines delete_insn, redirect_jump, and invert_jump are used
   from other passes as well.  */
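/* To illustrate the bookkeeping described above (an editorial sketch with
   made-up insn UIDs, not taken from the original sources), a direct jump
   and its target might print as

	(jump_insn 10 9 11 (set (pc) (label_ref 23)) -1 (nil))
	    ...
	(code_label 23 ...)

   Here JUMP_LABEL of insn 10 points at code_label 23, and LABEL_NUSES of
   code_label 23 counts this jump among its references; deleting the jump
   decrements the count, and a count of zero lets the label itself be
   deleted.  */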
#include "config.h"
#include "system.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */
static int init_label_info		PARAMS ((rtx));
static void mark_all_labels		PARAMS ((rtx));
static int duplicate_loop_exit_test	PARAMS ((rtx));
static void delete_computation		PARAMS ((rtx));
static void redirect_exp_1		PARAMS ((rtx *, rtx, rtx, rtx));
static int redirect_exp		PARAMS ((rtx, rtx, rtx));
static void invert_exp_1		PARAMS ((rtx));
static int invert_exp			PARAMS ((rtx));
static int returnjump_p_1		PARAMS ((rtx *, void *));
static void delete_prior_computation	PARAMS ((rtx, rtx));
static void mark_modified_reg		PARAMS ((rtx, rtx, void *));
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (f)
     rtx f;
{
  register rtx insn;
  int max_uid = 0;

  max_uid = init_label_info (f) + 1;

  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
      LABEL_NUSES (XEXP (insn, 0))++;

  /* Keep track of labels used for marking handlers for exception
     regions; they cannot usually be deleted.  */

  for (insn = exception_handler_labels; insn; insn = XEXP (insn, 1))
    if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
      LABEL_NUSES (XEXP (insn, 0))++;
}
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */

void
cleanup_barriers ()
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == BARRIER)
	{
	  prev = prev_nonnote_insn (insn);
	  if (GET_CODE (prev) == BARRIER)
	    delete_barrier (insn);
	  else if (prev != PREV_INSN (insn))
	    reorder_insns (insn, insn, prev);
	}
    }
}
void
copy_loop_headers (f)
     rtx f;
{
  register rtx insn, next;
  /* Now iterate optimizing jumps until nothing changes over one pass.  */
  for (insn = f; insn; insn = next)
    {
      rtx temp, temp1;

      next = NEXT_INSN (insn);

      /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
	 jump.  Try to optimize by duplicating the loop exit test if so.
	 This is only safe immediately after regscan, because it uses
	 the values of regno_first_uid and regno_last_uid.  */
      if (GET_CODE (insn) == NOTE
	  && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	  && (temp1 = next_nonnote_insn (insn)) != 0
	  && any_uncondjump_p (temp1) && onlyjump_p (temp1))
	{
	  temp = PREV_INSN (insn);
	  if (duplicate_loop_exit_test (insn))
	    {
	      next = NEXT_INSN (temp);
	    }
	}
    }
}
void
purge_line_number_notes (f)
     rtx f;
{
  rtx last_note = 0;
  rtx insn;
  /* Delete extraneous line number notes.
     Note that two consecutive notes for different lines are not really
     extraneous.  There should be some indication where that line belonged,
     even if it became empty.  */

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == NOTE)
      {
	if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
	  /* Any previous line note was for the prologue; gdb wants a new
	     note after the prologue even if it is for the same line.  */
	  last_note = NULL_RTX;
	else if (NOTE_LINE_NUMBER (insn) >= 0)
	  {
	    /* Delete this note if it is identical to previous note.  */
	    if (last_note
		&& NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
		&& NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
	      {
		delete_insn (insn);
		continue;
	      }

	    last_note = insn;
	  }
      }
}
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  Returns the
   largest INSN_UID found.  */
static int
init_label_info (f)
     rtx f;
{
  int largest_uid = 0;
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
      else if (GET_CODE (insn) == JUMP_INSN)
	JUMP_LABEL (insn) = 0;
      else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
	{
	  rtx note, next;

	  for (note = REG_NOTES (insn); note; note = next)
	    {
	      next = XEXP (note, 1);
	      if (REG_NOTE_KIND (note) == REG_LABEL
		  && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
		remove_note (insn, note);
	    }
	}
      if (INSN_UID (insn) > largest_uid)
	largest_uid = INSN_UID (insn);
    }

  return largest_uid;
}
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (f)
     rtx f;
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	if (GET_CODE (insn) == CALL_INSN
	    && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
	  {
	    mark_all_labels (XEXP (PATTERN (insn), 0));
	    mark_all_labels (XEXP (PATTERN (insn), 1));
	    mark_all_labels (XEXP (PATTERN (insn), 2));

	    /* Canonicalize the tail recursion label attached to the
	       CALL_PLACEHOLDER insn.  */
	    if (XEXP (PATTERN (insn), 3))
	      {
		rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
						   XEXP (PATTERN (insn), 3));
		mark_jump_label (label_ref, insn, 0);
		XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
	      }

	    continue;
	  }

	mark_jump_label (PATTERN (insn), insn, 0);
	if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
	  {
	    /* When we know the LABEL_REF contained in a REG used in
	       an indirect jump, we'll have a REG_LABEL note so that
	       flow can tell where it's going.  */
	    if (JUMP_LABEL (insn) == 0)
	      {
		rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
		if (label_note)
		  {
		    /* Put a LABEL_REF around the REG_LABEL note, so
		       that we can canonicalize it.  */
		    rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
						       XEXP (label_note, 0));

		    mark_jump_label (label_ref, insn, 0);
		    XEXP (label_note, 0) = XEXP (label_ref, 0);
		    JUMP_LABEL (insn) = XEXP (label_note, 0);
		  }
	      }
	  }
      }
}
/* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an
   unconditional jump.  Assume that this unconditional jump is to the exit
   test code.  If the code is sufficiently simple, make a copy of it before
   LOOP_START, followed by a jump to the exit of the loop.  Then delete the
   unconditional jump after LOOP_START.

   Return 1 if we made the change, else 0.

   This is only safe immediately after a regscan pass because it uses the
   values of regno_first_uid and regno_last_uid.  */
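/* Illustrative sketch of the transformation (editorial, not from the
   original sources): at this point a typical loop is laid out as

	LOOP_BEG note
	jump to TEST		<- the unconditional jump handled here
     BODY:
	...body insns...
     TEST:
	...exit-test insns...
	conditional jump to BODY
	LOOP_END note

   Copying the exit test in front of the LOOP_BEG note and deleting the
   jump lets control fall through into BODY, leaving a loop with a single
   bottom test instead of an extra jump executed on entry.  */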
static int
duplicate_loop_exit_test (loop_start)
     rtx loop_start;
{
  rtx insn, set, reg, p, link;
  rtx copy = 0, first_copy = 0;
  int num_insns = 0;
  rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
  rtx lastexit;
  int max_reg = max_reg_num ();
  rtx *reg_map = 0;
  rtx loop_pre_header_label;

  /* Scan the exit code.  We do not perform this optimization if any insn:

	 is a CALL_INSN
	 is a CODE_LABEL
	 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
	 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
	 is a NOTE_INSN_BLOCK_{BEG,END} because duplicating these notes
	      is not valid.

     We also do not do this if we find an insn with ASM_OPERANDS.  While
     this restriction should not be necessary, copying an insn with
     ASM_OPERANDS can confuse asm_noperands in some cases.

     Also, don't do this if the exit code is more than 20 insns.  */
  for (insn = exitcode;
       insn
       && ! (GET_CODE (insn) == NOTE
	     && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
       insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case CODE_LABEL:
	case CALL_INSN:
	  return 0;
	case NOTE:
	  /* We could be in front of the wrong NOTE_INSN_LOOP_END if there is
	     a jump immediately after the loop start that branches outside
	     the loop but within an outer loop, near the exit test.
	     If we copied this exit test and created a phony
	     NOTE_INSN_LOOP_VTOP, this could make instructions immediately
	     before the exit test look like they could be safely moved
	     out of the loop even though they may never actually be
	     executed.  This can be avoided by checking here for
	     NOTE_INSN_LOOP_CONT.  */

	  if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT)
	    return 0;

	  if (optimize < 2
	      && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
		  || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
	    /* If we were to duplicate this code, we would not move
	       the BLOCK notes, and so debugging the moved code would
	       be difficult.  Thus, we only move the code with -O2 or
	       higher.  */
	    return 0;

	  break;
	case JUMP_INSN:
	case INSN:
	  /* The code below would grossly mishandle REG_WAS_0 notes,
	     so get rid of them here.  */
	  while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
	    remove_note (insn, p);
	  if (++num_insns > 20
	      || find_reg_note (insn, REG_RETVAL, NULL_RTX)
	      || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
	    return 0;
	  break;
	default:
	  break;
	}
    }
  /* Unless INSN is zero, we can do the optimization.  */
  if (insn == 0)
    return 0;

  lastexit = insn;

  /* See if any insn sets a register only used in the loop exit code and
     not a user variable.  If so, replace it with a new register.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    if (GET_CODE (insn) == INSN
	&& (set = single_set (insn)) != 0
	&& ((reg = SET_DEST (set), GET_CODE (reg) == REG)
	    || (GET_CODE (reg) == SUBREG
		&& (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
	&& REGNO (reg) >= FIRST_PSEUDO_REGISTER
	&& REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
      {
	for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
	  if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
	    break;

	if (p != lastexit)
	  {
	    /* We can do the replacement.  Allocate reg_map if this is the
	       first replacement we found.  */
	    if (reg_map == 0)
	      reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));

	    REG_LOOP_TEST_P (reg) = 1;

	    reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
	  }
      }
  loop_pre_header_label = gen_label_rtx ();
  /* Now copy each insn.  */
  for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
    {
      switch (GET_CODE (insn))
	{
	case BARRIER:
	  copy = emit_barrier_before (loop_start);
	  break;
	case NOTE:
	  /* Only copy line-number notes.  */
	  if (NOTE_LINE_NUMBER (insn) >= 0)
	    {
	      copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
	      NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
	    }
	  break;

	case INSN:
	  copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
	  if (reg_map)
	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);

	  mark_jump_label (PATTERN (copy), copy, 0);

	  /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
	     make them.  */
	  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
	    if (REG_NOTE_KIND (link) != REG_LABEL)
	      {
		if (GET_CODE (link) == EXPR_LIST)
		  REG_NOTES (copy)
		    = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
						      XEXP (link, 0),
						      REG_NOTES (copy)));
		else
		  REG_NOTES (copy)
		    = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
						      XEXP (link, 0),
						      REG_NOTES (copy)));
	      }

	  if (reg_map && REG_NOTES (copy))
	    replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	  break;
	case JUMP_INSN:
	  copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
					loop_start);
	  if (reg_map)
	    replace_regs (PATTERN (copy), reg_map, max_reg, 1);
	  mark_jump_label (PATTERN (copy), copy, 0);
	  if (REG_NOTES (insn))
	    {
	      REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
	      if (reg_map)
		replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
	    }

	  /* Predict conditional jumps that keep the loop looping as taken.
	     Other jumps are probably exit conditions, so predict
	     them as untaken.  */
	  if (any_condjump_p (copy))
	    {
	      rtx label = JUMP_LABEL (copy);
	      if (label)
		{
		  /* The jump_insn after loop_start should be followed
		     by barrier and loopback label.  */
		  if (prev_nonnote_insn (label)
		      && (prev_nonnote_insn (prev_nonnote_insn (label))
			  == next_nonnote_insn (loop_start)))
		    {
		      predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
		      /* To keep the pre-header, we need to redirect all
			 loop entrances before the LOOP_BEG note.  */
		      redirect_jump (copy, loop_pre_header_label, 0);
		    }
		  else
		    predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
		}
	    }
	  break;

	default:
	  abort ();
	}

      /* Record the first insn we copied.  We need it so that we can
	 scan the copied insns for new pseudo registers.  */
      if (! first_copy)
	first_copy = copy;
    }
  /* Now clean up by emitting a jump to the end label and deleting the jump
     at the start of the loop.  */
  if (! copy || GET_CODE (copy) != BARRIER)
    {
      copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
				    loop_start);

      /* Record the first insn we copied.  We need it so that we can
	 scan the copied insns for new pseudo registers.  This may not
	 be strictly necessary since we should have copied at least one
	 insn above.  But I am going to be safe.  */
      if (! first_copy)
	first_copy = copy;

      mark_jump_label (PATTERN (copy), copy, 0);
      emit_barrier_before (loop_start);
    }

  emit_label_before (loop_pre_header_label, loop_start);

  /* Now scan from the first insn we copied to the last insn we copied
     (copy) for new pseudo registers.  Do this after the code to jump to
     the end label since that might create a new pseudo too.  */
  reg_scan_update (first_copy, copy, max_reg);

  /* Mark the exit code as the virtual top of the converted loop.  */
  emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);

  delete_insn (next_nonnote_insn (loop_start));

  /* Clean up.  */
  if (reg_map)
    free (reg_map);

  return 1;
}
/* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, and
   loop-end notes between START and END out before START.  Assume that END
   is not such a note.  START may be such a note.  Returns the value of the
   new starting insn, which may be different if the original start was such
   a note.  */

rtx
squeeze_notes (start, end)
     rtx start, end;
{
  rtx insn;
  rtx next;

  for (insn = start; insn != end; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE
	  && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
	      || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
	{
	  if (insn == start)
	    start = next;
	  else
	    {
	      rtx prev = PREV_INSN (insn);
	      PREV_INSN (insn) = PREV_INSN (start);
	      NEXT_INSN (insn) = start;
	      NEXT_INSN (PREV_INSN (insn)) = insn;
	      PREV_INSN (NEXT_INSN (insn)) = insn;
	      NEXT_INSN (prev) = next;
	      PREV_INSN (next) = prev;
	    }
	}
    }

  return start;
}
/* Return the label before INSN, or put a new label there.  */

rtx
get_label_before (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = prev_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      rtx prev = PREV_INSN (insn);

      label = gen_label_rtx ();
      emit_label_after (label, prev);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
/* Return the label after INSN, or put a new label there.  */

rtx
get_label_after (insn)
     rtx insn;
{
  rtx label;

  /* Find an existing label at this point
     or make a new one if there is none.  */
  label = next_nonnote_insn (insn);

  if (label == 0 || GET_CODE (label) != CODE_LABEL)
    {
      label = gen_label_rtx ();
      emit_label_after (label, insn);
      LABEL_NUSES (label) = 0;
    }
  return label;
}
/* Given a comparison (CODE ARG0 ARG1) inside insn INSN, return the code of
   the reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may also be returned when we have a CC-mode compare
   and cannot tell whether its source is a floating-point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in
   these cases.  */
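/* As an illustration (an editorial sketch, not copied from any real target
   header), a target whose only non-reversible CC mode were the one produced
   by floating-point compares might say in its *.h file

	#define REVERSIBLE_CC_MODE(MODE)  ((MODE) != CCFPmode)
	#define REVERSE_CONDITION(CODE, MODE)  reverse_condition (CODE)

   so that this function can reverse everything except CCFPmode compares
   without scanning backwards for the comparison insn.  */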
enum rtx_code
reversed_comparison_code_parts (code, arg0, arg1, insn)
     rtx insn, arg0, arg1;
     enum rtx_code code;
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != '<')
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us with a way to reverse
     the comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
#ifdef REVERSIBLE_CC_MODE
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }
#endif

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
	 point.  Similarly the unsigned comparisons are never used for
	 floating point, so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* In case we already see an unordered comparison, we can be sure to
	 be dealing with floating point, so we don't need any more tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  /* In case we give up IEEE compatibility, all comparisons are
     reversible.  */
  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT
      || flag_unsafe_math_optimizations)
    return reverse_condition (code);

  if (GET_MODE_CLASS (mode) == MODE_CC
#ifdef HAVE_cc0
      || arg0 == cc0_rtx
#endif
      )
    {
      rtx prev;
      /* Try to search for the comparison to determine the real mode.
	 This code is expensive, but with a sane machine description it
	 will never be used, since REVERSIBLE_CC_MODE will return true
	 in all cases.  */
      if (! insn)
	return UNKNOWN;

      for (prev = prev_nonnote_insn (insn);
	   prev != 0 && GET_CODE (prev) != CODE_LABEL;
	   prev = prev_nonnote_insn (prev))
	{
	  rtx set = set_of (arg0, prev);
	  if (set && GET_CODE (set) == SET
	      && rtx_equal_p (SET_DEST (set), arg0))
	    {
	      rtx src = SET_SRC (set);

	      if (GET_CODE (src) == COMPARE)
		{
		  rtx comparison = src;
		  arg0 = XEXP (src, 0);
		  mode = GET_MODE (arg0);
		  if (mode == VOIDmode)
		    mode = GET_MODE (XEXP (comparison, 1));
		  break;
		}
	      /* We can get past reg-reg moves.  This may be useful for
		 the model of i387 comparisons that first move flag
		 registers around.  */
	      if (REG_P (src))
		{
		  arg0 = src;
		  continue;
		}
	    }
	  /* If the register is clobbered in some way we cannot
	     understand, give up.  */
	  if (set)
	    return UNKNOWN;
	}
    }

  /* An integer condition.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
	  && GET_MODE_CLASS (mode) != MODE_CC
	  && ! FLOAT_MODE_P (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
/* A wrapper around the previous function to take COMPARISON as an rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (comparison, insn)
     rtx comparison, insn;
{
  if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
					 XEXP (comparison, 0),
					 XEXP (comparison, 1), insn);
}
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling NaNs in comparisons.
   Use reversed_comparison_code instead.  */
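/* A concrete example of the hazard (editorial illustration, not from the
   original sources): if either operand is a NaN, IEEE semantics make
   (gt x y) and (le x y) both false, so LE is not a valid reversal of GT;
   exactly one of a test and its reversal must hold.  The IEEE-safe
   reversal of GT is UNLE, which is what reverse_condition_maybe_unordered
   below produces.  */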
enum rtx_code
reverse_condition (code)
     enum rtx_code code;
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      abort ();
    }
}
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (code)
     enum rtx_code code;
{
  /* Non-IEEE formats don't have unordered conditions.  */
  if (TARGET_FLOAT_FORMAT != IEEE_FLOAT_FORMAT)
    return reverse_condition (code);

  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      abort ();
    }
}
/* Similar, but return the code when two operands of a comparison are
   swapped.  This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (code)
     enum rtx_code code;
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      abort ();
    }
}
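/* For instance (editorial illustration): (lt x y) and (gt y x) agree on
   all inputs, including NaN operands where both are simply false, which
   is why operand swapping, unlike condition reversal, needs no IEEE
   caution.  */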
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (code)
     enum rtx_code code;
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      abort ();
    }
}
/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (code)
     enum rtx_code code;
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      abort ();
    }
}
/* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */
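/* For example (editorial illustration, not from the original comment):
   comparison_dominates_p (GT, NE) is 1, because x > y implies x != y, so
   a branch already known taken on GT need not retest NE.  The converse
   comparison_dominates_p (NE, GT) is 0, since x != y also allows
   x < y.  */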
int
comparison_dominates_p (code1, code2)
     enum rtx_code code1, code2;
{
  /* UNKNOWN comparison codes can happen as a result of trying to reverse
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
	return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
	  || code2 == ORDERED)
	return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
	return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
	return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
	return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
	return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
	return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
	return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
	return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
	  || code2 == UNGE || code2 == UNGT)
	return 1;
      break;

    default:
      break;
    }

  return 0;
}
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (insn)
     rtx insn;
{
  return (GET_CODE (insn) == JUMP_INSN
	  && GET_CODE (PATTERN (insn)) == SET
	  && GET_CODE (SET_DEST (PATTERN (insn))) == PC
	  && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}
/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever
   possible.  */

int
condjump_p (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
	    && ((GET_CODE (XEXP (x, 2)) == PC
		 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
		     || GET_CODE (XEXP (x, 1)) == RETURN))
		|| (GET_CODE (XEXP (x, 1)) == PC
		    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
			|| GET_CODE (XEXP (x, 2)) == RETURN))));
}
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever
   possible.  */

int
condjump_in_parallel_p (insn)
     rtx insn;
{
  register rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
	  || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}
/* Return the SET of the pc if INSN has one, otherwise NULL_RTX.  */

rtx
pc_set (insn)
     rtx insn;
{
  rtx pat;
  if (GET_CODE (insn) != JUMP_INSN)
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}
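/* For illustration (a sketch with made-up operands, not from the original
   sources), pc_set accepts both of these jump patterns and returns the
   inner SET in each case:

	(set (pc) (if_then_else (eq (reg 100) (const_int 0))
				(label_ref 42) (pc)))

	(parallel [(set (pc) (label_ref 42))
		   (clobber (reg:SI 1))])
  */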
/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (insn)
     rtx insn;
{
  rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  return 1;
}
/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (insn)
     rtx insn;
{
  rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
	  || (a == PC && (b == LABEL_REF || b == RETURN)));
}
/* Return the label of a conditional jump.  */

rtx
condjump_label (insn)
     rtx insn;
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}
/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (loc, data)
     rtx *loc;
     void *data ATTRIBUTE_UNUSED;
{
  rtx x = *loc;
  return x && GET_CODE (x) == RETURN;
}

int
returnjump_p (insn)
     rtx insn;
{
  if (GET_CODE (insn) != JUMP_INSN)
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}
/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (insn)
     rtx insn;
{
  rtx set;

  if (GET_CODE (insn) != JUMP_INSN)
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}
#ifdef HAVE_cc0

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */
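/* For example (an illustrative pattern with made-up operands, not from
   the original sources), for a compare-and-set pair such as

	(parallel [(set (cc0) (compare (reg 100) (const_int 0)))
		   (set (reg 101) (const_int 1))])

   sets_cc0_p returns -1: cc0 is set, but so is something else.  A bare
   (set (cc0) (compare ...)) yields 1, and a pattern not setting cc0 at
   all yields 0.  */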
int
sets_cc0_p (x)
     rtx x ATTRIBUTE_UNUSED;
{
  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find the end, return LABEL,
   since that tells the caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */
rtx
follow_jumps (label)
     rtx label;
{
  register rtx insn;
  register rtx next;
  register rtx value = label;
  register int depth;

  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
	     && onlyjump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  if (depth == 10)
    return label;
  return value;
}
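/* A small worked example (editorial, with made-up labels): given the chain

	L1:  jump L2
	L2:  jump L3
	L3:  ...real code...

   follow_jumps (L1) returns L3, so a branch to L1 can be redirected
   straight to L3.  The depth limit of 10 and the cycle check above keep
   a degenerate chain such as "L1: jump L1" from looping forever.  */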
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  */
void
mark_jump_label (x, insn, in_mem)
     register rtx x;
     rtx insn;
     int in_mem;
{
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case SUBREG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (GET_CODE (label) == NOTE
	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;
	    else
	      {
		/* Add a REG_LABEL note for LABEL unless there already
		   is one.  All uses of a label, except for labels
		   that are the targets of jumps, must have a
		   REG_LABEL note.  */
		if (! find_reg_note (insn, REG_LABEL, label))
		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
							REG_NOTES (insn));
	      }
	  }
	return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
	}
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
	}
    }
}
/* If all INSN does is set the pc, delete it,
   and delete the insn that set the condition codes for it
   if that's what the previous thing was.  */

void
delete_jump (insn)
     rtx insn;
{
  register rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == PC)
    delete_computation (insn);
}
/* Verify INSN is a BARRIER and delete it.  */

void
delete_barrier (insn)
     rtx insn;
{
  if (GET_CODE (insn) != BARRIER)
    abort ();

  delete_insn (insn);
}
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.  */

static void
delete_prior_computation (note, insn)
     rtx note;
     rtx insn;
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (GET_CODE (our_prev) == INSN
		    || GET_CODE (our_prev) == CALL_INSN);
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (GET_CODE (our_prev) == CALL_INSN
	  && (! CONST_OR_PURE_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && GET_CODE (XEXP (pat, 0)) == INSN)
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	break;

      if (reg_set_p (reg, pat))
	{
	  if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && GET_CODE (SET_DEST (pat)) == REG)
	    {
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= (dest_regno
		   + (dest_regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (dest_regno,
					  GET_MODE (SET_DEST (pat))) : 1));
	      int regno = REGNO (reg);
	      int endregno
		= (regno
		   + (regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));

	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));

		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */
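/* A sketch of the idea (made-up insns and register numbers, not from the
   original sources): when deleting insn B in

	A:  (set (reg 100) (plus (reg 101) (const_int 4)))
	B:  (set (pc) ...)   carrying the note (REG_DEAD (reg 100))

   the REG_DEAD note tells us reg 100 was used only by B, so once B is
   gone delete_prior_computation can delete A as well, and then recurse
   on whatever fed A.  */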
static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      delete_prior_computation (note, insn);
    }

  delete_insn (insn);
}
/* Delete insn INSN from the chain of insns and update label ref counts.
   May delete some following insns as a consequence; may even delete
   a label elsewhere and insns that follow it.

   Returns the first insn after INSN that was not deleted.  */
rtx
delete_insn (insn)
     register rtx insn;
{
  register rtx next = NEXT_INSN (insn);
  register rtx prev = PREV_INSN (insn);
  register int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  register int dont_really_delete = 0;
  rtx note;

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  if (was_code_label)
    remove_node_from_expr_list (insn, &nonlocal_goto_handler_labels);

  /* Don't delete user-declared labels.  When optimizing, convert them
     to special NOTEs instead.  When not optimizing, leave them alone.  */
  if (was_code_label && LABEL_NAME (insn) != 0)
    {
      if (optimize)
	{
	  const char *name = LABEL_NAME (insn);
	  PUT_CODE (insn, NOTE);
	  NOTE_LINE_NUMBER (insn) = NOTE_INSN_DELETED_LABEL;
	  NOTE_SOURCE_FILE (insn) = name;
	}

      dont_really_delete = 1;
    }
  else
    /* Mark this insn as deleted.  */
    INSN_DELETED_P (insn) = 1;

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    {
      INSN_DELETED_P (next) = 1;
      next = NEXT_INSN (next);
    }

  /* Patch out INSN (and the barrier if any).  */

  if (! dont_really_delete)
    {
      if (prev)
	{
	  NEXT_INSN (prev) = next;
	  if (GET_CODE (prev) == INSN && GET_CODE (PATTERN (prev)) == SEQUENCE)
	    NEXT_INSN (XVECEXP (PATTERN (prev), 0,
				XVECLEN (PATTERN (prev), 0) - 1)) = next;
	}

      if (next)
	{
	  PREV_INSN (next) = prev;
	  if (GET_CODE (next) == INSN && GET_CODE (PATTERN (next)) == SEQUENCE)
	    PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
	}

      if (prev && NEXT_INSN (prev) == 0)
	set_last_insn (prev);
    }
  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (--LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_insn (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if ((lab_next = next_nonnote_insn (lab)) != NULL
	       && GET_CODE (lab_next) == JUMP_INSN
	       && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
		   || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_insn (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
	if (--LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_insn (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }
  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
	if (--LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_insn (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_insn (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      register RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_insn (next);
	}
    }

  return next;
}
/* Advance from INSN till reaching something not deleted
   then return that.  May return INSN itself.  */

rtx
next_nondeleted_insn (insn)
     rtx insn;
{
  while (INSN_DELETED_P (insn))
    insn = NEXT_INSN (insn);
  return insn;
}
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (from, to)
     register rtx from, to;
{
  register rtx insn = from;

  while (1)
    {
      register rtx next = NEXT_INSN (insn);
      register rtx prev = PREV_INSN (insn);

      if (GET_CODE (insn) != NOTE)
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
/* We have determined that INSN is never reached, and are about to
   delete it.  Print a warning if the user asked for one.

   To try to make this warning more useful, this should only be called
   once per basic block not reached, and it only warns when the basic
   block contains more than one line from the current function, and
   contains at least one operation.  CSE and inlining can duplicate insns,
   so it's possible to get spurious warnings from this.  */

void
never_reached_warning (avoided_insn)
     rtx avoided_insn;
{
  rtx insn;
  rtx a_line_note = NULL;
  int two_avoided_lines = 0;
  int contains_insn = 0;

  if (! warn_notreached)
    return;

  /* Scan forwards, looking at LINE_NUMBER notes, until
     we hit a LABEL or we run out of insns.  */

  for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
    {
      if (GET_CODE (insn) == CODE_LABEL)
	break;
      else if (GET_CODE (insn) == NOTE	/* A line number note?  */
	       && NOTE_LINE_NUMBER (insn) >= 0)
	{
	  if (a_line_note == NULL)
	    a_line_note = insn;
	  else
	    two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
				  != NOTE_LINE_NUMBER (insn));
	}
      else if (INSN_P (insn))
	contains_insn = 1;
    }
  if (two_avoided_lines && contains_insn)
    warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
				NOTE_LINE_NUMBER (a_line_note),
				"will never be executed");
}
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  register rtx x = *loc;
  register RTX_CODE code = GET_CODE (x);
  register int i;
  register const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  if (nlabel)
	    n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
	  else
	    n = gen_rtx_RETURN (VOIDmode);

	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  register int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
/* Similar, but apply the change group and report success or failure.  */

static int
redirect_exp (olabel, nlabel, insn)
     rtx olabel, nlabel;
     rtx insn;
{
  rtx *loc;

  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    loc = &XVECEXP (PATTERN (insn), 0, 0);
  else
    loc = &PATTERN (insn);

  redirect_exp_1 (loc, olabel, nlabel, insn);
  if (num_validated_changes () == 0)
    return 0;

  return apply_change_group ();
}
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (jump, nlabel)
     rtx jump, nlabel;
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel, delete_unused)
     rtx jump, nlabel;
     int delete_unused;
{
  register rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (olabel, nlabel, jump))
    return 0;

  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && GET_CODE (NEXT_INSN (olabel)) == NOTE
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused)
    delete_insn (olabel);

  return 1;
}
/* Invert the jump condition of the IF_THEN_ELSE found in jump insn INSN.
   Accrue the modifications into the change group.  */

static void
invert_exp_1 (insn)
     rtx insn;
{
  register RTX_CODE code;
  rtx x = pc_set (insn);

  if (!x)
    abort ();
  x = SET_SRC (x);

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      register rtx comp = XEXP (x, 0);
      register rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return;
	}

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
    }
  else
    abort ();
}
/* Invert the jump condition of conditional jump insn INSN.

   Return 1 if we can do so, 0 if we cannot find a way to do so that
   matches a pattern.  */

static int
invert_exp (insn)
     rtx insn;
{
  invert_exp_1 (insn);
  if (num_validated_changes () == 0)
    return 0;

  return apply_change_group ();
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (jump, nlabel)
     rtx jump, nlabel;
{
  int ochanges;

  ochanges = num_validated_changes ();
  invert_exp_1 (jump);
  if (num_validated_changes () == ochanges)
    return 0;

  return redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (jump, nlabel, delete_unused)
     rtx jump, nlabel;
     int delete_unused;
{
  /* We have to either invert the condition and change the label or
     do neither.  Either operation could fail.  We first try to invert
     the jump.  If that succeeds, we try changing the label.  If that fails,
     we invert the jump back to what it was.  */

  if (! invert_exp (jump))
    return 0;

  if (redirect_jump (jump, nlabel, delete_unused))
    {
      invert_br_probabilities (jump);
      return 1;
    }

  if (! invert_exp (jump))
    /* This should just be putting it back the way it was.  */
    abort ();

  return 0;
}
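
/* Usage sketch (editor's illustration; JUMP and OTHER_LABEL are
   hypothetical).  A caller that wants the old target to become the
   fall-through path writes:

	if (invert_jump (jump, other_label, 1))
	  ...

   On success the branch probability notes have been flipped as well,
   via invert_br_probabilities.  */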

/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.

   ??? Addition is not commutative on the PA due to the weird implicit
   space register selection rules for memory addresses.  Therefore, we
   don't consider a + b == b + a.

   We could/should make this test a little tighter.  Possibly only
   disabling it on the PA via some backend macro or only disabling this
   case when the PLUS is inside a MEM.  */

int
rtx_renumbered_equal_p (x, y)
     rtx x, y;
{
  register int i;
  register RTX_CODE code = GET_CODE (x);
  register const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
      && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
				  && GET_CODE (SUBREG_REG (y)) == REG)))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
					   GET_MODE (SUBREG_REG (x)),
					   byte_x,
					   GET_MODE (x));
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
					   GET_MODE (SUBREG_REG (y)),
					   byte_y,
					   GET_MODE (y));
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.

     ??? Don't consider PLUS a commutative operator; see comments above.  */
  if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
      && code != PLUS)
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      register int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  abort ();
	}
    }
  return 1;
}
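
/* For example (editor's illustration): if reg_renumber assigns hard
   register 3 to pseudo 101, then

	(plus:SI (reg:SI 101) (const_int 4))
	(plus:SI (reg:SI 3) (const_int 4))

   compare equal here, although rtx_equal_p would reject them.  */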

/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (x)
     rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
	return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
	return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
					   GET_MODE (SUBREG_REG (x)),
					   SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}
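
/* For example (editor's illustration): if reload assigned hard register
   3 to pseudo 101, true_regnum returns 3 for (reg:SI 101); for a pseudo
   left without a hard register it returns the pseudo number itself, and
   it returns -1 for anything else (e.g. a MEM).  */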

/* Optimize code of the form:

	for (x = a[i]; x; ...)
	  ...
	for (x = a[i]; x; ...)
	  ...
      foo:

   Loop optimize will change the above code into

	if (x = a[i])
	  for (;;)
	    { ...; if (! (x = ...)) break; }
	if (x = a[i])
	  for (;;)
	    { ...; if (! (x = ...)) break; }
      foo:

   In general, if the first test fails, the program can branch
   directly to `foo' and skip the second try which is doomed to fail.
   We run this after loop optimization and before flow analysis.  */

/* When comparing the insn patterns, we track the fact that different
   pseudo-register numbers may have been used in each computation.
   The following array stores an equivalence -- same_regs[I] == J means
   that pseudo register I was used in the first set of tests in a context
   where J was used in the second set.  We also count the number of such
   pending equivalences.  If nonzero, the expressions really aren't the
   same.  */

static int *same_regs;

static int num_same_regs;

/* Track any registers modified between the target of the first jump and
   the second jump.  They never compare equal.  */

static char *modified_regs;

/* Record if memory was modified.  */

static int modified_mem;
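
/* For example (editor's illustration): if the first sequence computed
   its condition into pseudo 101 where the second used pseudo 107, the
   backward walk records same_regs[101] == 107 and increments
   num_same_regs; the branch can only be threaded once every such
   pending equivalence has been cancelled by a matching pair of SETs.  */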

/* Called via note_stores on each insn between the target of the first
   branch and the second branch.  It marks any changed registers.  */

static void
mark_modified_reg (dest, x, data)
     rtx dest;
     rtx x;
     void *data ATTRIBUTE_UNUSED;
{
  int regno;
  unsigned int i;

  if (GET_CODE (dest) == SUBREG)
    dest = SUBREG_REG (dest);

  if (GET_CODE (dest) == MEM)
    modified_mem = 1;

  if (GET_CODE (dest) != REG)
    return;

  regno = REGNO (dest);
  if (regno >= FIRST_PSEUDO_REGISTER)
    modified_regs[regno] = 1;
  /* Don't consider a hard condition code register as modified if it is
     only being set; thread_jumps will check whether it is set to the
     same value.  */
  else if (GET_MODE_CLASS (GET_MODE (dest)) != MODE_CC
	   || GET_CODE (x) != SET
	   || ! rtx_equal_p (dest, SET_DEST (x))
	   || HARD_REGNO_NREGS (regno, GET_MODE (dest)) != 1)
    for (i = 0; i < HARD_REGNO_NREGS (regno, GET_MODE (dest)); i++)
      modified_regs[regno + i] = 1;
}

/* F is the first insn in the chain of insns.  */

void
thread_jumps (f, max_reg, flag_before_loop)
     rtx f;
     int max_reg;
     int flag_before_loop;
{
  /* The basic algorithm is to find a conditional branch,
     the label it may branch to, and the branch after
     that label.  If the two branches test the same condition,
     walk back from both branch paths until the insn patterns
     differ, or code labels are hit.  If we make it back to
     the target of the first branch, then we know that the first branch
     will either always succeed or always fail depending on the relative
     senses of the two branches.  So adjust the first branch accordingly
     in this case.  */

  rtx label, b1, b2, t1, t2;
  enum rtx_code code1, code2;
  rtx b1op0, b1op1, b2op0, b2op1;
  int changed = 1;
  int i;
  int *all_reset;
  enum rtx_code reversed_code1, reversed_code2;

  /* Allocate register tables and quick-reset table.  */
  modified_regs = (char *) xmalloc (max_reg * sizeof (char));
  same_regs = (int *) xmalloc (max_reg * sizeof (int));
  all_reset = (int *) xmalloc (max_reg * sizeof (int));
  for (i = 0; i < max_reg; i++)
    all_reset[i] = -1;

  while (changed)
    {
      changed = 0;

      for (b1 = f; b1; b1 = NEXT_INSN (b1))
	{
	  rtx set;
	  rtx set2;

	  /* Get to a candidate branch insn.  */
	  if (GET_CODE (b1) != JUMP_INSN
	      || ! any_condjump_p (b1) || JUMP_LABEL (b1) == 0)
	    continue;

	  memset (modified_regs, 0, max_reg * sizeof (char));
	  modified_mem = 0;

	  memcpy (same_regs, all_reset, max_reg * sizeof (int));
	  num_same_regs = 0;

	  label = JUMP_LABEL (b1);

	  /* Look for a branch after the target.  Record any registers and
	     memory modified between the target and the branch.  Stop when we
	     get to a label since we can't know what was changed there.  */
	  for (b2 = NEXT_INSN (label); b2; b2 = NEXT_INSN (b2))
	    {
	      if (GET_CODE (b2) == CODE_LABEL)
		break;

	      else if (GET_CODE (b2) == JUMP_INSN)
		{
		  /* If this is an unconditional jump and is the only use of
		     its target label, we can follow it.  */
		  if (any_uncondjump_p (b2)
		      && onlyjump_p (b2)
		      && JUMP_LABEL (b2) != 0
		      && LABEL_NUSES (JUMP_LABEL (b2)) == 1)
		    {
		      b2 = JUMP_LABEL (b2);
		      continue;
		    }
		  else
		    break;
		}

	      if (GET_CODE (b2) != CALL_INSN && GET_CODE (b2) != INSN)
		continue;

	      if (GET_CODE (b2) == CALL_INSN)
		{
		  modified_mem = 1;
		  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
		    if (call_used_regs[i] && ! fixed_regs[i]
			&& i != STACK_POINTER_REGNUM
			&& i != FRAME_POINTER_REGNUM
			&& i != HARD_FRAME_POINTER_REGNUM
			&& i != ARG_POINTER_REGNUM)
		      modified_regs[i] = 1;
		}

	      note_stores (PATTERN (b2), mark_modified_reg, NULL);
	    }

	  /* Check the next candidate branch insn from the label
	     of the first.  */
	  if (b2 == 0
	      || GET_CODE (b2) != JUMP_INSN
	      || b2 == b1
	      || !any_condjump_p (b2)
	      || !onlyjump_p (b2))
	    continue;
	  set = pc_set (b1);
	  set2 = pc_set (b2);

	  /* Get the comparison codes and operands, reversing the
	     codes if appropriate.  If we don't have comparison codes,
	     we can't do anything.  */
	  b1op0 = XEXP (XEXP (SET_SRC (set), 0), 0);
	  b1op1 = XEXP (XEXP (SET_SRC (set), 0), 1);
	  code1 = GET_CODE (XEXP (SET_SRC (set), 0));
	  reversed_code1 = code1;
	  if (XEXP (SET_SRC (set), 1) == pc_rtx)
	    code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);
	  else
	    reversed_code1 = reversed_comparison_code (XEXP (SET_SRC (set), 0), b1);

	  b2op0 = XEXP (XEXP (SET_SRC (set2), 0), 0);
	  b2op1 = XEXP (XEXP (SET_SRC (set2), 0), 1);
	  code2 = GET_CODE (XEXP (SET_SRC (set2), 0));
	  reversed_code2 = code2;
	  if (XEXP (SET_SRC (set2), 1) == pc_rtx)
	    code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);
	  else
	    reversed_code2 = reversed_comparison_code (XEXP (SET_SRC (set2), 0), b2);

	  /* If they test the same things and knowing that B1 branches
	     tells us whether or not B2 branches, check if we
	     can thread the branch.  */
	  if (rtx_equal_for_thread_p (b1op0, b2op0, b2)
	      && rtx_equal_for_thread_p (b1op1, b2op1, b2)
	      && (comparison_dominates_p (code1, code2)
		  || comparison_dominates_p (code1, reversed_code2)))
	    {
	      t1 = prev_nonnote_insn (b1);
	      t2 = prev_nonnote_insn (b2);

	      while (t1 != 0 && t2 != 0)
		{
		  if (t2 == label)
		    {
		      /* We have reached the target of the first branch.
			 If there are no pending register equivalents,
			 we know that this branch will either always
			 succeed (if the senses of the two branches are
			 the same) or always fail (if not).  */
		      rtx new_label;

		      if (num_same_regs != 0)
			break;

		      if (comparison_dominates_p (code1, code2))
			new_label = JUMP_LABEL (b2);
		      else
			new_label = get_label_after (b2);

		      if (JUMP_LABEL (b1) != new_label)
			{
			  rtx prev = PREV_INSN (new_label);

			  if (flag_before_loop
			      && GET_CODE (prev) == NOTE
			      && NOTE_LINE_NUMBER (prev) == NOTE_INSN_LOOP_BEG)
			    {
			      /* Don't thread to the loop label.  If a loop
				 label is reused, loop optimization will
				 be disabled for that loop.  */
			      new_label = gen_label_rtx ();
			      emit_label_after (new_label, PREV_INSN (prev));
			    }
			  changed |= redirect_jump (b1, new_label, 1);
			}
		      break;
		    }

		  /* If either of these is not a normal insn (it might be
		     a JUMP_INSN, CALL_INSN, or CODE_LABEL) we fail.  (NOTEs
		     have already been skipped above.)  Similarly, fail
		     if the insns are different.  */
		  if (GET_CODE (t1) != INSN || GET_CODE (t2) != INSN
		      || recog_memoized (t1) != recog_memoized (t2)
		      || ! rtx_equal_for_thread_p (PATTERN (t1),
						   PATTERN (t2), t2))
		    break;

		  t1 = prev_nonnote_insn (t1);
		  t2 = prev_nonnote_insn (t2);
		}
	    }
	}
    }

  /* Clean up.  */
  free (modified_regs);
  free (same_regs);
  free (all_reset);
}
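
/* Invocation sketch (editor's illustration): the pass is driven over
   the whole insn chain, e.g.

	thread_jumps (get_insns (), max_reg_num (), 1);

   with the last argument nonzero when running before loop optimization,
   so that jumps are not threaded to loop begin labels.  */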

/* This is like RTX_EQUAL_P except that it knows about our handling of
   possibly equivalent registers and knows to consider volatile and
   modified objects as not equal.

   YINSN is the insn containing Y.  */

int
rtx_equal_for_thread_p (x, y, yinsn)
     rtx x, y;
     rtx yinsn;
{
  register int i;
  register int j;
  register enum rtx_code code;
  register const char *fmt;

  code = GET_CODE (x);
  /* Rtx's of different codes cannot be equal.  */
  if (code != GET_CODE (y))
    return 0;

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.
     (REG:SI x) and (REG:HI x) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For floating-point, consider everything unequal.  This is a bit
     pessimistic, but this pass would only rarely do anything for FP
     anyway.  */
  if (TARGET_FLOAT_FORMAT == IEEE_FLOAT_FORMAT
      && FLOAT_MODE_P (GET_MODE (x)) && ! flag_unsafe_math_optimizations)
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
    return ((rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	     && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn))
	    || (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 1), yinsn)
		&& rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 0), yinsn)));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn)
	    && rtx_equal_for_thread_p (XEXP (x, 1), XEXP (y, 1), yinsn));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

  /* Handle special-cases first.  */
  switch (code)
    {
    case REG:
      if (REGNO (x) == REGNO (y) && ! modified_regs[REGNO (x)])
	return 1;

      /* If neither is a user variable or a hard register, check for
	 possible equivalence.  */
      if (REG_USERVAR_P (x) || REG_USERVAR_P (y)
	  || REGNO (x) < FIRST_PSEUDO_REGISTER
	  || REGNO (y) < FIRST_PSEUDO_REGISTER)
	return 0;

      if (same_regs[REGNO (x)] == -1)
	{
	  same_regs[REGNO (x)] = REGNO (y);
	  num_same_regs++;

	  /* If this is the first time we are seeing a register on the `Y'
	     side, see if it is the last use.  If not, we can't thread the
	     jump, so mark it as not equivalent.  */
	  if (REGNO_LAST_UID (REGNO (y)) != INSN_UID (yinsn))
	    return 0;

	  return 1;
	}
      else
	return (same_regs[REGNO (x)] == (int) REGNO (y));

      break;

    case MEM:
      /* If memory was modified or either reference is volatile, they are
	 not equivalent; otherwise compare the addresses.  */
      if (modified_mem || MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      return rtx_equal_for_thread_p (XEXP (x, 0), XEXP (y, 0), yinsn);

    case ASM_INPUT:
      if (MEM_VOLATILE_P (x) || MEM_VOLATILE_P (y))
	return 0;

      break;

    case SET:
      /* Cancel a pending `same_regs' entry if setting equivalenced
	 registers.  Then process the sources.  */
      if (GET_CODE (SET_DEST (x)) == REG
	  && GET_CODE (SET_DEST (y)) == REG)
	{
	  if (same_regs[REGNO (SET_DEST (x))] == (int) REGNO (SET_DEST (y)))
	    {
	      same_regs[REGNO (SET_DEST (x))] = -1;
	      num_same_regs--;
	    }
	  else if (REGNO (SET_DEST (x)) != REGNO (SET_DEST (y)))
	    return 0;
	}
      else
	{
	  if (rtx_equal_for_thread_p (SET_DEST (x), SET_DEST (y), yinsn) == 0)
	    return 0;
	}

      return rtx_equal_for_thread_p (SET_SRC (x), SET_SRC (y), yinsn);

    case LABEL_REF:
      return XEXP (x, 0) == XEXP (y, 0);

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    default:
      break;
    }

  if (x == y)
    return 1;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'n':
	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 'V':
	case 'E':
	  /* Two vectors must have the same length.  */
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;

	  /* And the corresponding elements must match.  */
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (rtx_equal_for_thread_p (XVECEXP (x, i, j),
					XVECEXP (y, i, j), yinsn) == 0)
	      return 0;
	  break;

	case 'e':
	  if (rtx_equal_for_thread_p (XEXP (x, i), XEXP (y, i), yinsn) == 0)
	    return 0;
	  break;

	case 'S':
	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* These are just backpointers, so they don't matter.  */
	  break;

	case '0':
	case 't':
	  break;

	  /* It is believed that rtx's at this level will never
	     contain anything but integers and other rtx's,
	     except for within LABEL_REFs and SYMBOL_REFs.  */
	default:
	  abort ();
	}
    }
  return 1;
}
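
/* For example (editor's illustration): two syntactically identical
   (mem:SI (reg:SI 101)) references compare unequal here once
   modified_mem is set or either MEM is volatile, even though
   rtx_equal_p would accept them.  */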