* doc/tm.texi (Storage Layout): Remove ROUND_TYPE_SIZE and
[official-gcc.git] / gcc / jump.c
blobf15e2abae107d66e240f802f65d7d57790e3d09c
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This is the pathetic remainder of the old fame of the jump-optimization
23    pass of the compiler.  Now it contains basically a set of utility
24    functions to operate on jumps.
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tm_p.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "regs.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
48 #include "recog.h"
49 #include "function.h"
50 #include "expr.h"
51 #include "real.h"
52 #include "except.h"
53 #include "toplev.h"
54 #include "reload.h"
55 #include "predict.h"
56 #include "timevar.h"
57 #include "diagnostic.h"
59 /* Optimize jump y; x: ... y: jumpif... x?
60 Don't know if it is worth bothering with. */
61 /* Optimize two cases of conditional jump to conditional jump?
62 This can never delete any instruction or make anything dead,
63 or even change what is live at any point.
64 So perhaps let combiner do it. */
66 static rtx next_nonnote_insn_in_loop PARAMS ((rtx));
67 static void init_label_info PARAMS ((rtx));
68 static void mark_all_labels PARAMS ((rtx));
69 static int duplicate_loop_exit_test PARAMS ((rtx));
70 static void delete_computation PARAMS ((rtx));
71 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
72 static int redirect_exp PARAMS ((rtx, rtx, rtx));
73 static void invert_exp_1 PARAMS ((rtx));
74 static int invert_exp PARAMS ((rtx));
75 static int returnjump_p_1 PARAMS ((rtx *, void *));
76 static void delete_prior_computation PARAMS ((rtx, rtx));
78 /* Alternate entry into the jump optimizer. This entry point only rebuilds
79 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
80 instructions. */
/* F is the first insn of the current function's insn chain.  */
81 void
82 rebuild_jump_labels (f)
83 rtx f;
85 rtx insn;
87 timevar_push (TV_REBUILD_JUMP);
/* Reset all label use counts and jump labels, then recompute them.  */
88 init_label_info (f);
89 mark_all_labels (f);
91 /* Keep track of labels used from static data; we don't track them
92 closely enough to delete them here, so make sure their reference
93 count doesn't drop to zero. */
95 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
96 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
97 LABEL_NUSES (XEXP (insn, 0))++;
98 timevar_pop (TV_REBUILD_JUMP);
101 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
102 non-fallthru insn. This is not generally true, as multiple barriers
103 may have crept in, or the BARRIER may be separated from the last
104 real insn by one or more NOTEs.
106 This simple pass moves barriers and removes duplicates so that the
107 old code is happy.
109 void
110 cleanup_barriers ()
112 rtx insn, next, prev;
113 for (insn = get_insns (); insn; insn = next)
/* Cache the successor first: INSN may be deleted or moved below.  */
115 next = NEXT_INSN (insn);
116 if (GET_CODE (insn) == BARRIER)
118 prev = prev_nonnote_insn (insn);
/* Two adjacent barriers (ignoring notes): the second is redundant.  */
119 if (GET_CODE (prev) == BARRIER)
120 delete_barrier (insn);
/* Notes separate the barrier from the real insn: move it up.  */
121 else if (prev != PREV_INSN (insn))
122 reorder_insns (insn, insn, prev);
127 /* Return the next insn after INSN that is not a NOTE and is in the loop,
128 i.e. when there is no such INSN before NOTE_INSN_LOOP_END return NULL_RTX.
129 This routine does not look inside SEQUENCEs. */
131 static rtx
132 next_nonnote_insn_in_loop (insn)
133 rtx insn;
135 while (insn)
137 insn = NEXT_INSN (insn);
/* Stop at the first non-note (or at the end of the chain).  */
138 if (insn == 0 || GET_CODE (insn) != NOTE)
139 break;
/* Hitting the loop-end note means there is no non-note insn left
   inside the loop.  */
140 if (GET_CODE (insn) == NOTE
141 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
142 return NULL_RTX;
145 return insn;
/* Walk the insn chain F and duplicate the loop exit test into the loop
   pre-header for each loop that starts with an unconditional jump to
   its exit test.  */
148 void
149 copy_loop_headers (f)
150 rtx f;
152 rtx insn, next;
153 /* Now iterate optimizing jumps until nothing changes over one pass. */
154 for (insn = f; insn; insn = next)
156 rtx temp, temp1;
158 next = NEXT_INSN (insn);
160 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
161 jump. Try to optimize by duplicating the loop exit test if so.
162 This is only safe immediately after regscan, because it uses
163 the values of regno_first_uid and regno_last_uid. */
164 if (GET_CODE (insn) == NOTE
165 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
166 && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
167 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
/* Remember the insn before the note so we can resume scanning
   from the newly emitted copies if the duplication succeeds.  */
169 temp = PREV_INSN (insn);
170 if (duplicate_loop_exit_test (insn))
172 next = NEXT_INSN (temp);
/* Remove consecutive duplicate line-number notes from the insn chain F,
   keeping only the first of each run for the same file and line.  */
178 void
179 purge_line_number_notes (f)
180 rtx f;
182 rtx last_note = 0;
183 rtx insn;
184 /* Delete extraneous line number notes.
185 Note that two consecutive notes for different lines are not really
186 extraneous. There should be some indication where that line belonged,
187 even if it became empty. */
189 for (insn = f; insn; insn = NEXT_INSN (insn))
190 if (GET_CODE (insn) == NOTE)
192 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
193 /* Any previous line note was for the prologue; gdb wants a new
194 note after the prologue even if it is for the same line. */
195 last_note = NULL_RTX;
/* Non-negative NOTE_LINE_NUMBER means a real source-line note.  */
196 else if (NOTE_LINE_NUMBER (insn) >= 0)
198 /* Delete this note if it is identical to previous note. */
199 if (last_note
200 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
201 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
203 delete_related_insns (insn);
204 continue;
207 last_note = insn;
212 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
213 notes whose labels don't occur in the insn any more.  (An older version
214 of this function returned the largest INSN_UID found; it is now void.) */
215 static void
216 init_label_info (f)
217 rtx f;
219 rtx insn;
221 for (insn = f; insn; insn = NEXT_INSN (insn))
/* A preserved label starts with one implicit use so it is never
   deleted; all others start at zero and are recounted later.  */
222 if (GET_CODE (insn) == CODE_LABEL)
223 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
224 else if (GET_CODE (insn) == JUMP_INSN)
225 JUMP_LABEL (insn) = 0;
226 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
228 rtx note, next;
230 for (note = REG_NOTES (insn); note; note = next)
/* Cache the link before possibly removing NOTE from the list.  */
232 next = XEXP (note, 1);
233 if (REG_NOTE_KIND (note) == REG_LABEL
234 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
235 remove_note (insn, note);
240 /* Mark the label each jump jumps to.
241 Combine consecutive labels, and count uses of labels. */
243 static void
244 mark_all_labels (f)
245 rtx f;
247 rtx insn;
249 for (insn = f; insn; insn = NEXT_INSN (insn))
250 if (INSN_P (insn))
/* A CALL_PLACEHOLDER holds up to three alternative insn sequences
   (normal call, tail call, tail recursion); recurse into each.  */
252 if (GET_CODE (insn) == CALL_INSN
253 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
255 mark_all_labels (XEXP (PATTERN (insn), 0));
256 mark_all_labels (XEXP (PATTERN (insn), 1));
257 mark_all_labels (XEXP (PATTERN (insn), 2));
259 /* Canonicalize the tail recursion label attached to the
260 CALL_PLACEHOLDER insn. */
261 if (XEXP (PATTERN (insn), 3))
263 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
264 XEXP (PATTERN (insn), 3));
265 mark_jump_label (label_ref, insn, 0);
266 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
269 continue;
272 mark_jump_label (PATTERN (insn), insn, 0);
273 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
275 /* When we know the LABEL_REF contained in a REG used in
276 an indirect jump, we'll have a REG_LABEL note so that
277 flow can tell where it's going. */
278 if (JUMP_LABEL (insn) == 0)
280 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
281 if (label_note)
283 /* Put a LABEL_REF around the REG_LABEL note, so
284 that we can canonicalize it. */
285 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
286 XEXP (label_note, 0));
288 mark_jump_label (label_ref, insn, 0);
289 XEXP (label_note, 0) = XEXP (label_ref, 0);
290 JUMP_LABEL (insn) = XEXP (label_note, 0);
297 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
298 jump. Assume that this unconditional jump is to the exit test code. If
299 the code is sufficiently simple, make a copy of it before INSN,
300 followed by a jump to the exit of the loop. Then delete the unconditional
301 jump after INSN.
303 Return 1 if we made the change, else 0.
305 This is only safe immediately after a regscan pass because it uses the
306 values of regno_first_uid and regno_last_uid. */
308 static int
309 duplicate_loop_exit_test (loop_start)
310 rtx loop_start;
312 rtx insn, set, reg, p, link;
313 rtx copy = 0, first_copy = 0;
314 int num_insns = 0;
/* First insn of the exit-test code: the insn after the label that the
   initial unconditional jump targets.  */
315 rtx exitcode
316 = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
317 rtx lastexit;
318 int max_reg = max_reg_num ();
/* Lazily-allocated map from old pseudo regno to fresh replacement reg,
   used for exit-test-local pseudos (see below).  */
319 rtx *reg_map = 0;
320 rtx loop_pre_header_label;
322 /* Scan the exit code. We do not perform this optimization if any insn:
324 is a CALL_INSN
325 is a CODE_LABEL
326 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
327 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
329 We also do not do this if we find an insn with ASM_OPERANDS. While
330 this restriction should not be necessary, copying an insn with
331 ASM_OPERANDS can confuse asm_noperands in some cases.
333 Also, don't do this if the exit code is more than 20 insns. */
335 for (insn = exitcode;
336 insn
337 && ! (GET_CODE (insn) == NOTE
338 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
339 insn = NEXT_INSN (insn))
341 switch (GET_CODE (insn))
343 case CODE_LABEL:
344 case CALL_INSN:
345 return 0;
346 case NOTE:
348 if (optimize < 2
349 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
350 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
351 /* If we were to duplicate this code, we would not move
352 the BLOCK notes, and so debugging the moved code would
353 be difficult. Thus, we only move the code with -O2 or
354 higher. */
355 return 0;
357 break;
358 case JUMP_INSN:
359 case INSN:
360 /* The code below would grossly mishandle REG_WAS_0 notes,
361 so get rid of them here. */
362 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
363 remove_note (insn, p);
364 if (++num_insns > 20
365 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
366 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
367 return 0;
368 break;
369 default:
370 break;
374 /* Unless INSN is zero, we can do the optimization. */
375 if (insn == 0)
376 return 0;
/* INSN is now the NOTE_INSN_LOOP_END note that terminates the scan.  */
378 lastexit = insn;
380 /* See if any insn sets a register only used in the loop exit code and
381 not a user variable. If so, replace it with a new register. */
382 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
383 if (GET_CODE (insn) == INSN
384 && (set = single_set (insn)) != 0
385 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
386 || (GET_CODE (reg) == SUBREG
387 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
388 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
389 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
/* Check that the pseudo's last use is also inside the exit code.  */
391 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
392 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
393 break;
395 if (p != lastexit)
397 /* We can do the replacement. Allocate reg_map if this is the
398 first replacement we found. */
399 if (reg_map == 0)
400 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
402 REG_LOOP_TEST_P (reg) = 1;
404 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
/* Label for the loop pre-header; loop-entry jumps in the copied test
   are redirected here so the pre-header is preserved.  */
407 loop_pre_header_label = gen_label_rtx ();
409 /* Now copy each insn. */
410 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
412 switch (GET_CODE (insn))
414 case BARRIER:
415 copy = emit_barrier_before (loop_start);
416 break;
417 case NOTE:
418 /* Only copy line-number notes. */
419 if (NOTE_LINE_NUMBER (insn) >= 0)
421 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
422 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
424 break;
426 case INSN:
427 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
428 if (reg_map)
429 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
431 mark_jump_label (PATTERN (copy), copy, 0);
432 INSN_SCOPE (copy) = INSN_SCOPE (insn);
434 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
435 make them. */
436 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
437 if (REG_NOTE_KIND (link) != REG_LABEL)
439 if (GET_CODE (link) == EXPR_LIST)
440 REG_NOTES (copy)
441 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
442 XEXP (link, 0),
443 REG_NOTES (copy)));
444 else
445 REG_NOTES (copy)
446 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
447 XEXP (link, 0),
448 REG_NOTES (copy)));
451 if (reg_map && REG_NOTES (copy))
452 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
453 break;
455 case JUMP_INSN:
456 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
457 loop_start);
458 INSN_SCOPE (copy) = INSN_SCOPE (insn);
459 if (reg_map)
460 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
461 mark_jump_label (PATTERN (copy), copy, 0);
462 if (REG_NOTES (insn))
464 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
465 if (reg_map)
466 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
469 /* Predict conditional jumps that keep the loop iterating as taken.
470 Other jumps are probably exit conditions, so predict
471 them as untaken. */
472 if (any_condjump_p (copy))
474 rtx label = JUMP_LABEL (copy);
475 if (label)
477 /* The jump_insn after loop_start should be followed
478 by barrier and loopback label. */
479 if (prev_nonnote_insn (label)
480 && (prev_nonnote_insn (prev_nonnote_insn (label))
481 == next_nonnote_insn (loop_start)))
483 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
484 /* To keep pre-header, we need to redirect all loop
485 entrances before the LOOP_BEG note. */
486 redirect_jump (copy, loop_pre_header_label, 0);
488 else
489 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
492 break;
494 default:
495 abort ();
498 /* Record the first insn we copied. We need it so that we can
499 scan the copied insns for new pseudo registers. */
500 if (! first_copy)
501 first_copy = copy;
504 /* Now clean up by emitting a jump to the end label and deleting the jump
505 at the start of the loop. */
506 if (! copy || GET_CODE (copy) != BARRIER)
508 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
509 loop_start);
511 /* Record the first insn we copied. We need it so that we can
512 scan the copied insns for new pseudo registers. This may not
513 be strictly necessary since we should have copied at least one
514 insn above. But I am going to be safe. */
515 if (! first_copy)
516 first_copy = copy;
518 mark_jump_label (PATTERN (copy), copy, 0);
519 emit_barrier_before (loop_start);
522 emit_label_before (loop_pre_header_label, loop_start);
524 /* Now scan from the first insn we copied to the last insn we copied
525 (copy) for new pseudo registers. Do this after the code to jump to
526 the end label since that might create a new pseudo too. */
527 reg_scan_update (first_copy, copy, max_reg);
529 /* Mark the exit code as the virtual top of the converted loop. */
530 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
/* Delete the original unconditional jump that followed LOOP_START.  */
532 delete_related_insns (next_nonnote_insn (loop_start));
534 /* Clean up. */
535 if (reg_map)
536 free (reg_map);
538 return 1;
541 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
542 notes between START and END out before START. START and END may be such
543 notes. Returns the values of the new starting and ending insns, which
544 may be different if the original ones were such notes.
545 Return true if there were only such notes and no real instructions. */
547 bool
548 squeeze_notes (startp, endp)
549 rtx* startp;
550 rtx* endp;
552 rtx start = *startp;
553 rtx end = *endp;
555 rtx insn;
556 rtx next;
/* Last non-squeezed insn seen; becomes the new END.  */
557 rtx last = NULL;
558 rtx past_end = NEXT_INSN (end);
560 for (insn = start; insn != past_end; insn = next)
562 next = NEXT_INSN (insn);
563 if (GET_CODE (insn) == NOTE
564 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
565 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
566 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
567 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
568 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
569 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* A note at the very start just shifts the window forward;
   otherwise unlink the note and relink it before START.  */
571 if (insn == start)
572 start = next;
573 else
575 rtx prev = PREV_INSN (insn);
576 PREV_INSN (insn) = PREV_INSN (start);
577 NEXT_INSN (insn) = start;
578 NEXT_INSN (PREV_INSN (insn)) = insn;
579 PREV_INSN (NEXT_INSN (insn)) = insn;
580 NEXT_INSN (prev) = next;
581 PREV_INSN (next) = prev;
584 else
585 last = insn;
588 /* There were no real instructions. */
589 if (start == past_end)
590 return true;
592 end = last;
594 *startp = start;
595 *endp = end;
596 return false;
599 /* Return the label before INSN, or put a new label there. */
602 get_label_before (insn)
603 rtx insn;
605 rtx label;
607 /* Find an existing label at this point
608 or make a new one if there is none. */
609 label = prev_nonnote_insn (insn);
611 if (label == 0 || GET_CODE (label) != CODE_LABEL)
613 rtx prev = PREV_INSN (insn);
615 label = gen_label_rtx ();
616 emit_label_after (label, prev);
/* Start the use count at zero; callers bump it as needed.  */
617 LABEL_NUSES (label) = 0;
619 return label;
622 /* Return the label after INSN, or put a new label there. */
625 get_label_after (insn)
626 rtx insn;
628 rtx label;
630 /* Find an existing label at this point
631 or make a new one if there is none. */
632 label = next_nonnote_insn (insn);
634 if (label == 0 || GET_CODE (label) != CODE_LABEL)
636 label = gen_label_rtx ();
637 emit_label_after (label, insn);
/* Start the use count at zero; callers bump it as needed.  */
638 LABEL_NUSES (label) = 0;
640 return label;
643 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
644 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
645 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
646 know whether its source is floating point or integer comparison. Machine
647 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
648 to help this function avoid overhead in these cases. */
649 enum rtx_code
650 reversed_comparison_code_parts (code, arg0, arg1, insn)
651 rtx insn, arg0, arg1;
652 enum rtx_code code;
654 enum machine_mode mode;
656 /* If this is not actually a comparison, we can't reverse it. */
657 if (GET_RTX_CLASS (code) != '<')
658 return UNKNOWN;
660 mode = GET_MODE (arg0);
661 if (mode == VOIDmode)
662 mode = GET_MODE (arg1);
664 /* First see if the machine description supplies us a way to reverse the
665 comparison. Give it priority over everything else to allow machine
666 description to do tricks. */
667 #ifdef REVERSIBLE_CC_MODE
668 if (GET_MODE_CLASS (mode) == MODE_CC
669 && REVERSIBLE_CC_MODE (mode))
671 #ifdef REVERSE_CONDITION
672 return REVERSE_CONDITION (code, mode);
673 #endif
674 return reverse_condition (code);
676 #endif
678 /* Try a few special cases based on the comparison code. */
679 switch (code)
681 case GEU:
682 case GTU:
683 case LEU:
684 case LTU:
685 case NE:
686 case EQ:
687 /* It is always safe to reverse EQ and NE, even for the floating
688 point. Similarly the unsigned comparisons are never used for
689 floating point so we can reverse them in the default way. */
690 return reverse_condition (code);
691 case ORDERED:
692 case UNORDERED:
693 case LTGT:
694 case UNEQ:
695 /* In case we already see unordered comparison, we can be sure to
696 be dealing with floating point so we don't need any more tests. */
697 return reverse_condition_maybe_unordered (code);
698 case UNLT:
699 case UNLE:
700 case UNGT:
701 case UNGE:
702 /* We don't have safe way to reverse these yet. */
703 return UNKNOWN;
704 default:
705 break;
708 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
710 rtx prev;
711 /* Try to search for the comparison to determine the real mode.
712 This code is expensive, but with sane machine description it
713 will be never used, since REVERSIBLE_CC_MODE will return true
714 in all cases. */
715 if (! insn)
716 return UNKNOWN;
/* Scan backwards within the basic block (stop at a CODE_LABEL) for
   the insn that sets the flags register ARG0.  */
718 for (prev = prev_nonnote_insn (insn);
719 prev != 0 && GET_CODE (prev) != CODE_LABEL;
720 prev = prev_nonnote_insn (prev))
722 rtx set = set_of (arg0, prev);
723 if (set && GET_CODE (set) == SET
724 && rtx_equal_p (SET_DEST (set), arg0))
726 rtx src = SET_SRC (set);
728 if (GET_CODE (src) == COMPARE)
730 rtx comparison = src;
731 arg0 = XEXP (src, 0);
732 mode = GET_MODE (arg0);
733 if (mode == VOIDmode)
734 mode = GET_MODE (XEXP (comparison, 1));
735 break;
737 /* We can get past reg-reg moves. This may be useful for model
738 of i387 comparisons that first move flag registers around. */
739 if (REG_P (src))
741 arg0 = src;
742 continue;
745 /* If the register is clobbered in some way we do not understand,
746 give up. */
747 if (set)
748 return UNKNOWN;
752 /* Test for an integer condition, or a floating-point comparison
753 in which NaNs can be ignored. */
754 if (GET_CODE (arg0) == CONST_INT
755 || (GET_MODE (arg0) != VOIDmode
756 && GET_MODE_CLASS (mode) != MODE_CC
757 && !HONOR_NANS (mode)))
758 return reverse_condition (code);
760 return UNKNOWN;
763 /* A wrapper around the previous function to take COMPARISON as an rtx
764 expression. This simplifies many callers. */
765 enum rtx_code
766 reversed_comparison_code (comparison, insn)
767 rtx comparison, insn;
/* Only comparison rtxes (class '<') can be reversed.  */
769 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
770 return UNKNOWN;
771 return reversed_comparison_code_parts (GET_CODE (comparison),
772 XEXP (comparison, 0),
773 XEXP (comparison, 1), insn);
776 /* Given an rtx-code for a comparison, return the code for the negated
777 comparison. If no such code exists, return UNKNOWN.
779 WATCH OUT! reverse_condition is not safe to use on a jump that might
780 be acting on the results of an IEEE floating point comparison, because
781 of the special treatment of non-signaling nans in comparisons.
782 Use reversed_comparison_code instead. */
784 enum rtx_code
785 reverse_condition (code)
786 enum rtx_code code;
788 switch (code)
790 case EQ:
791 return NE;
792 case NE:
793 return EQ;
794 case GT:
795 return LE;
796 case GE:
797 return LT;
798 case LT:
799 return GE;
800 case LE:
801 return GT;
802 case GTU:
803 return LEU;
804 case GEU:
805 return LTU;
806 case LTU:
807 return GEU;
808 case LEU:
809 return GTU;
810 case UNORDERED:
811 return ORDERED;
812 case ORDERED:
813 return UNORDERED;
/* The UN* codes and LTGT cannot be negated without the ability to
   generate unordered comparisons; see
   reverse_condition_maybe_unordered.  */
815 case UNLT:
816 case UNLE:
817 case UNGT:
818 case UNGE:
819 case UNEQ:
820 case LTGT:
821 return UNKNOWN;
823 default:
824 abort ();
828 /* Similar, but we're allowed to generate unordered comparisons, which
829 makes it safe for IEEE floating-point. Of course, we have to recognize
830 that the target will support them too... */
832 enum rtx_code
833 reverse_condition_maybe_unordered (code)
834 enum rtx_code code;
836 switch (code)
838 case EQ:
839 return NE;
840 case NE:
841 return EQ;
/* Negating an ordered comparison yields the unordered-or-opposite
   form, so NaN operands make the reversed test true.  */
842 case GT:
843 return UNLE;
844 case GE:
845 return UNLT;
846 case LT:
847 return UNGE;
848 case LE:
849 return UNGT;
850 case LTGT:
851 return UNEQ;
852 case UNORDERED:
853 return ORDERED;
854 case ORDERED:
855 return UNORDERED;
856 case UNLT:
857 return GE;
858 case UNLE:
859 return GT;
860 case UNGT:
861 return LE;
862 case UNGE:
863 return LT;
864 case UNEQ:
865 return LTGT;
867 default:
868 abort ();
872 /* Similar, but return the code when two operands of a comparison are swapped.
873 This IS safe for IEEE floating-point. */
875 enum rtx_code
876 swap_condition (code)
877 enum rtx_code code;
879 switch (code)
/* Symmetric comparisons are unchanged by swapping operands.  */
881 case EQ:
882 case NE:
883 case UNORDERED:
884 case ORDERED:
885 case UNEQ:
886 case LTGT:
887 return code;
889 case GT:
890 return LT;
891 case GE:
892 return LE;
893 case LT:
894 return GT;
895 case LE:
896 return GE;
897 case GTU:
898 return LTU;
899 case GEU:
900 return LEU;
901 case LTU:
902 return GTU;
903 case LEU:
904 return GEU;
905 case UNLT:
906 return UNGT;
907 case UNLE:
908 return UNGE;
909 case UNGT:
910 return UNLT;
911 case UNGE:
912 return UNLE;
914 default:
915 abort ();
919 /* Given a comparison CODE, return the corresponding unsigned comparison.
920 If CODE is an equality comparison or already an unsigned comparison,
921 CODE is returned. */
923 enum rtx_code
924 unsigned_condition (code)
925 enum rtx_code code;
927 switch (code)
/* Equality and the unsigned codes pass through unchanged.  */
929 case EQ:
930 case NE:
931 case GTU:
932 case GEU:
933 case LTU:
934 case LEU:
935 return code;
937 case GT:
938 return GTU;
939 case GE:
940 return GEU;
941 case LT:
942 return LTU;
943 case LE:
944 return LEU;
/* Floating-point-only codes (UN*, LTGT) have no unsigned form.  */
946 default:
947 abort ();
951 /* Similarly, return the signed version of a comparison. */
953 enum rtx_code
954 signed_condition (code)
955 enum rtx_code code;
957 switch (code)
/* Equality and the signed codes pass through unchanged.  */
959 case EQ:
960 case NE:
961 case GT:
962 case GE:
963 case LT:
964 case LE:
965 return code;
967 case GTU:
968 return GT;
969 case GEU:
970 return GE;
971 case LTU:
972 return LT;
973 case LEU:
974 return LE;
/* Floating-point-only codes (UN*, LTGT) have no signed form.  */
976 default:
977 abort ();
981 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
982 truth of CODE1 implies the truth of CODE2. */
985 comparison_dominates_p (code1, code2)
986 enum rtx_code code1, code2;
988 /* UNKNOWN comparison codes can happen as a result of trying to revert
989 comparison codes.
990 They can't match anything, so we have to reject them here. */
991 if (code1 == UNKNOWN || code2 == UNKNOWN)
992 return 0;
/* Every comparison trivially implies itself.  */
994 if (code1 == code2)
995 return 1;
997 switch (code1)
999 case UNEQ:
1000 if (code2 == UNLE || code2 == UNGE)
1001 return 1;
1002 break;
1004 case EQ:
1005 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1006 || code2 == ORDERED)
1007 return 1;
1008 break;
1010 case UNLT:
1011 if (code2 == UNLE || code2 == NE)
1012 return 1;
1013 break;
1015 case LT:
1016 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1017 return 1;
1018 break;
1020 case UNGT:
1021 if (code2 == UNGE || code2 == NE)
1022 return 1;
1023 break;
1025 case GT:
1026 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1027 return 1;
1028 break;
1030 case GE:
1031 case LE:
1032 if (code2 == ORDERED)
1033 return 1;
1034 break;
1036 case LTGT:
1037 if (code2 == NE || code2 == ORDERED)
1038 return 1;
1039 break;
1041 case LTU:
1042 if (code2 == LEU || code2 == NE)
1043 return 1;
1044 break;
1046 case GTU:
1047 if (code2 == GEU || code2 == NE)
1048 return 1;
1049 break;
1051 case UNORDERED:
1052 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1053 || code2 == UNGE || code2 == UNGT)
1054 return 1;
1055 break;
1057 default:
1058 break;
1061 return 0;
1064 /* Return 1 if INSN is an unconditional jump and nothing else. */
1067 simplejump_p (insn)
1068 rtx insn;
/* The pattern must be exactly (set (pc) (label_ref ...)); a PARALLEL
   or IF_THEN_ELSE form does not qualify.  */
1070 return (GET_CODE (insn) == JUMP_INSN
1071 && GET_CODE (PATTERN (insn)) == SET
1072 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1073 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1076 /* Return nonzero if INSN is a (possibly) conditional jump
1077 and nothing more.
1079 Use of this function is deprecated, since we need to support combined
1080 branch and compare insns. Use any_condjump_p instead whenever possible. */
1083 condjump_p (insn)
1084 rtx insn;
1086 rtx x = PATTERN (insn);
1088 if (GET_CODE (x) != SET
1089 || GET_CODE (SET_DEST (x)) != PC)
1090 return 0;
1092 x = SET_SRC (x);
/* An unconditional (set (pc) (label_ref ...)) also counts.  */
1093 if (GET_CODE (x) == LABEL_REF)
1094 return 1;
1095 else
1096 return (GET_CODE (x) == IF_THEN_ELSE
1097 && ((GET_CODE (XEXP (x, 2)) == PC
1098 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1099 || GET_CODE (XEXP (x, 1)) == RETURN))
1100 || (GET_CODE (XEXP (x, 1)) == PC
1101 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1102 || GET_CODE (XEXP (x, 2)) == RETURN))));
/* Unreachable: both branches of the if/else above return.  */
1104 return 0;
1107 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1108 PARALLEL.
1110 Use of this function is deprecated, since we need to support combined
1111 branch and compare insns. Use any_condjump_p instead whenever possible. */
1114 condjump_in_parallel_p (insn)
1115 rtx insn;
1117 rtx x = PATTERN (insn);
1119 if (GET_CODE (x) != PARALLEL)
1120 return 0;
1121 else
/* Only the first element of the PARALLEL is examined.  */
1122 x = XVECEXP (x, 0, 0);
1124 if (GET_CODE (x) != SET)
1125 return 0;
1126 if (GET_CODE (SET_DEST (x)) != PC)
1127 return 0;
1128 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1129 return 1;
1130 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1131 return 0;
1132 if (XEXP (SET_SRC (x), 2) == pc_rtx
1133 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1134 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1135 return 1;
1136 if (XEXP (SET_SRC (x), 1) == pc_rtx
1137 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1138 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1139 return 1;
1140 return 0;
1143 /* Return the SET of PC in INSN's pattern, otherwise NULL. */
1146 pc_set (insn)
1147 rtx insn;
1149 rtx pat;
1150 if (GET_CODE (insn) != JUMP_INSN)
1151 return NULL_RTX;
1152 pat = PATTERN (insn);
1154 /* The set is allowed to appear either as the insn pattern or
1155 the first set in a PARALLEL. */
1156 if (GET_CODE (pat) == PARALLEL)
1157 pat = XVECEXP (pat, 0, 0);
1158 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1159 return pat;
1161 return NULL_RTX;
1164 /* Return true when insn is an unconditional direct jump,
1165 possibly bundled inside a PARALLEL. */
1168 any_uncondjump_p (insn)
1169 rtx insn;
/* pc_set filters out non-jump insns and finds the (set (pc) ...).  */
1171 rtx x = pc_set (insn);
1172 if (!x)
1173 return 0;
1174 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1175 return 0;
1176 return 1;
1179 /* Return true when insn is a conditional jump. This function works for
1180 instructions containing PC sets in PARALLELs. The instruction may have
1181 various other effects so before removing the jump you must verify
1182 onlyjump_p.
1184 Note that unlike condjump_p it returns false for unconditional jumps. */
1187 any_condjump_p (insn)
1188 rtx insn;
1190 rtx x = pc_set (insn);
1191 enum rtx_code a, b;
1193 if (!x)
1194 return 0;
1195 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1196 return 0;
/* Exactly one arm must fall through (PC); the other must be a label
   or a return.  */
1198 a = GET_CODE (XEXP (SET_SRC (x), 1));
1199 b = GET_CODE (XEXP (SET_SRC (x), 2));
1201 return ((b == PC && (a == LABEL_REF || a == RETURN))
1202 || (a == PC && (b == LABEL_REF || b == RETURN)));
1205 /* Return the LABEL_REF of a conditional jump, or NULL_RTX if none. */
1208 condjump_label (insn)
1209 rtx insn;
1211 rtx x = pc_set (insn);
1213 if (!x)
1214 return NULL_RTX;
1215 x = SET_SRC (x);
/* Unconditional jump: the source is the label itself.  */
1216 if (GET_CODE (x) == LABEL_REF)
1217 return x;
1218 if (GET_CODE (x) != IF_THEN_ELSE)
1219 return NULL_RTX;
/* Conditional jump: return whichever arm is the label (the other arm
   must be the fall-through PC).  */
1220 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1221 return XEXP (x, 1);
1222 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1223 return XEXP (x, 2);
1224 return NULL_RTX;
1227 /* Return true if INSN is a (possibly conditional) return insn. */
1229 static int
1230 returnjump_p_1 (loc, data)
1231 rtx *loc;
1232 void *data ATTRIBUTE_UNUSED;
1234 rtx x = *loc;
1236 return x && (GET_CODE (x) == RETURN
1237 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
/* Return true if INSN is a (possibly conditional) return insn.  Walks
   the whole pattern with for_each_rtx using returnjump_p_1.  */
1241 returnjump_p (insn)
1242 rtx insn;
1244 if (GET_CODE (insn) != JUMP_INSN)
1245 return 0;
1246 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1249 /* Return true if INSN is a jump that only transfers control and
1250 nothing more. */
1253 onlyjump_p (insn)
1254 rtx insn;
1256 rtx set;
1258 if (GET_CODE (insn) != JUMP_INSN)
1259 return 0;
/* Require a single SET whose destination is PC and whose source has
   no side effects.  */
1261 set = single_set (insn);
1262 if (set == NULL)
1263 return 0;
1264 if (GET_CODE (SET_DEST (set)) != PC)
1265 return 0;
1266 if (side_effects_p (SET_SRC (set)))
1267 return 0;
1269 return 1;
1272 #ifdef HAVE_cc0
1274 /* Return nonzero if X is an RTX that only sets the condition codes
1275 and has no side effects. */
1278 only_sets_cc0_p (x)
1279 rtx x;
1281 if (! x)
1282 return 0;
1284 if (INSN_P (x))
1285 x = PATTERN (x);
1287 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1290 /* Return 1 if X is an RTX that does nothing but set the condition codes
1291 and CLOBBER or USE registers.
1292 Return -1 if X does explicitly set the condition codes,
1293 but also does other things. */
1296 sets_cc0_p (x)
1297 rtx x;
1299 if (! x)
1300 return 0;
1302 if (INSN_P (x))
1303 x = PATTERN (x);
1305 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1306 return 1;
1307 if (GET_CODE (x) == PARALLEL)
1309 int i;
1310 int sets_cc0 = 0;
1311 int other_things = 0;
1312 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1314 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1315 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1316 sets_cc0 = 1;
1317 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1318 other_things = 1;
1320 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1322 return 0;
1324 #endif
1326 /* Follow any unconditional jump at LABEL;
1327 return the ultimate label reached by any such chain of jumps.
1328 If LABEL is not followed by a jump, return LABEL.
1329 If the chain loops or we can't find end, return LABEL,
1330 since that tells caller to avoid changing the insn.
1332 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1333 a USE or CLOBBER. */
1336 follow_jumps (label)
1337 rtx label;
1339 rtx insn;
1340 rtx next;
1341 rtx value = label;
1342 int depth;
/* Follow at most 10 hops: each iteration requires the next active insn
   after VALUE to be a simple unconditional jump (or RETURN) followed by
   a BARRIER; otherwise the chain ends.  */
1344 for (depth = 0;
1345 (depth < 10
1346 && (insn = next_active_insn (value)) != 0
1347 && GET_CODE (insn) == JUMP_INSN
1348 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1349 && onlyjump_p (insn))
1350 || GET_CODE (PATTERN (insn)) == RETURN)
1351 && (next = NEXT_INSN (insn))
1352 && GET_CODE (next) == BARRIER);
1353 depth++)
1355 /* Don't chain through the insn that jumps into a loop
1356 from outside the loop,
1357 since that would create multiple loop entry jumps
1358 and prevent loop optimization. */
1359 rtx tem;
1360 if (!reload_completed)
1361 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1362 if (GET_CODE (tem) == NOTE
1363 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1364 /* ??? Optional. Disables some optimizations, but makes
1365 gcov output more accurate with -O. */
1366 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1367 return value;
1369 /* If we have found a cycle, make the insn jump to itself. */
1370 if (JUMP_LABEL (insn) == label)
1371 return label;
/* Don't follow a jump whose target lands in a dispatch table.  */
1373 tem = next_active_insn (JUMP_LABEL (insn));
1374 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1375 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1376 break;
1378 value = JUMP_LABEL (insn);
/* If we bailed out only because of the depth limit, report failure by
   returning the original label.  */
1380 if (depth == 10)
1381 return label;
1382 return value;
1386 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1387 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1388 in INSN, then store one of them in JUMP_LABEL (INSN).
1389 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1390 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1391 Also, when there are consecutive labels, canonicalize on the last of them.
1393 Note that two labels separated by a loop-beginning note
1394 must be kept distinct if we have not yet done loop-optimization,
1395 because the gap between them is where loop-optimize
1396 will want to move invariant code to. CROSS_JUMP tells us
1397 that loop-optimization is done with. */
1399 void
1400 mark_jump_label (x, insn, in_mem)
1401 rtx x;
1402 rtx insn;
1403 int in_mem;
1405 RTX_CODE code = GET_CODE (x);
1406 int i;
1407 const char *fmt;
1409 switch (code)
/* These codes can contain no label references; stop recursing.  */
1411 case PC:
1412 case CC0:
1413 case REG:
1414 case CONST_INT:
1415 case CONST_DOUBLE:
1416 case CLOBBER:
1417 case CALL:
1418 return;
/* Record that we are inside a MEM so SYMBOL_REFs below can be
   checked as possible constant-pool label references.  */
1420 case MEM:
1421 in_mem = 1;
1422 break;
1424 case SYMBOL_REF:
1425 if (!in_mem)
1426 return;
1428 /* If this is a constant-pool reference, see if it is a label. */
1429 if (CONSTANT_POOL_ADDRESS_P (x))
1430 mark_jump_label (get_pool_constant (x), insn, in_mem);
1431 break;
1433 case LABEL_REF:
1435 rtx label = XEXP (x, 0);
1437 /* Ignore remaining references to unreachable labels that
1438 have been deleted. */
1439 if (GET_CODE (label) == NOTE
1440 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1441 break;
1443 if (GET_CODE (label) != CODE_LABEL)
1444 abort ();
1446 /* Ignore references to labels of containing functions. */
1447 if (LABEL_REF_NONLOCAL_P (x))
1448 break;
/* Count this use unless the referencing insn is already deleted.  */
1450 XEXP (x, 0) = label;
1451 if (! insn || ! INSN_DELETED_P (insn))
1452 ++LABEL_NUSES (label);
1454 if (insn)
1456 if (GET_CODE (insn) == JUMP_INSN)
1457 JUMP_LABEL (insn) = label;
1458 else
1460 /* Add a REG_LABEL note for LABEL unless there already
1461 is one. All uses of a label, except for labels
1462 that are the targets of jumps, must have a
1463 REG_LABEL note. */
1464 if (! find_reg_note (insn, REG_LABEL, label))
1465 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1466 REG_NOTES (insn));
1469 return;
1472 /* Do walk the labels in a vector, but not the first operand of an
1473 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1474 case ADDR_VEC:
1475 case ADDR_DIFF_VEC:
1476 if (! INSN_DELETED_P (insn))
1478 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1480 for (i = 0; i < XVECLEN (x, eltnum); i++)
1481 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1483 return;
1485 default:
1486 break;
/* Generic recursion over the remaining operands of X.  */
1489 fmt = GET_RTX_FORMAT (code);
1490 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1492 if (fmt[i] == 'e')
1493 mark_jump_label (XEXP (x, i), insn, in_mem);
1494 else if (fmt[i] == 'E')
1496 int j;
1497 for (j = 0; j < XVECLEN (x, i); j++)
1498 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1503 /* If all INSN does is set the pc, delete it,
1504 and delete the insn that set the condition codes for it
1505 if that's what the previous thing was. */
1507 void
1508 delete_jump (insn)
1509 rtx insn;
1511 rtx set = single_set (insn);
1513 if (set && GET_CODE (SET_DEST (set)) == PC)
1514 delete_computation (insn);
1517 /* Verify INSN is a BARRIER and delete it. */
1519 void
1520 delete_barrier (insn)
1521 rtx insn;
1523 if (GET_CODE (insn) != BARRIER)
1524 abort ();
1526 delete_insn (insn);
1529 /* Recursively delete prior insns that compute the value (used only by INSN
1530 which the caller is deleting) stored in the register mentioned by NOTE
1531 which is a REG_DEAD note associated with INSN. */
1533 static void
1534 delete_prior_computation (note, insn)
1535 rtx note;
1536 rtx insn;
1538 rtx our_prev;
1539 rtx reg = XEXP (note, 0);
/* Walk backwards over plain INSNs and CALL_INSNs (stopping at jumps,
   labels, etc.) looking for the insn that computed REG.  */
1541 for (our_prev = prev_nonnote_insn (insn);
1542 our_prev && (GET_CODE (our_prev) == INSN
1543 || GET_CODE (our_prev) == CALL_INSN);
1544 our_prev = prev_nonnote_insn (our_prev))
1546 rtx pat = PATTERN (our_prev);
1548 /* If we reach a CALL which is not calling a const function
1549 or the callee pops the arguments, then give up. */
1550 if (GET_CODE (our_prev) == CALL_INSN
1551 && (! CONST_OR_PURE_CALL_P (our_prev)
1552 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1553 break;
1555 /* If we reach a SEQUENCE, it is too complex to try to
1556 do anything with it, so give up. We can be run during
1557 and after reorg, so SEQUENCE rtl can legitimately show
1558 up here. */
1559 if (GET_CODE (pat) == SEQUENCE)
1560 break;
1562 if (GET_CODE (pat) == USE
1563 && GET_CODE (XEXP (pat, 0)) == INSN)
1564 /* reorg creates USEs that look like this. We leave them
1565 alone because reorg needs them for its own purposes. */
1566 break;
/* Found an insn that writes REG; decide whether it can be deleted.  */
1568 if (reg_set_p (reg, pat))
1570 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1571 break;
1573 if (GET_CODE (pat) == PARALLEL)
1575 /* If we find a SET of something else, we can't
1576 delete the insn. */
1578 int i;
1580 for (i = 0; i < XVECLEN (pat, 0); i++)
1582 rtx part = XVECEXP (pat, 0, i);
1584 if (GET_CODE (part) == SET
1585 && SET_DEST (part) != reg)
1586 break;
1589 if (i == XVECLEN (pat, 0))
1590 delete_computation (our_prev);
1592 else if (GET_CODE (pat) == SET
1593 && GET_CODE (SET_DEST (pat)) == REG)
/* Compute the hard-register ranges covered by the destination
   and by REG so we can compare them for containment.  */
1595 int dest_regno = REGNO (SET_DEST (pat));
1596 int dest_endregno
1597 = (dest_regno
1598 + (dest_regno < FIRST_PSEUDO_REGISTER
1599 ? HARD_REGNO_NREGS (dest_regno,
1600 GET_MODE (SET_DEST (pat))) : 1));
1601 int regno = REGNO (reg);
1602 int endregno
1603 = (regno
1604 + (regno < FIRST_PSEUDO_REGISTER
1605 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1607 if (dest_regno >= regno
1608 && dest_endregno <= endregno)
1609 delete_computation (our_prev);
1611 /* We may have a multi-word hard register and some, but not
1612 all, of the words of the register are needed in subsequent
1613 insns. Write REG_UNUSED notes for those parts that were not
1614 needed. */
1615 else if (dest_regno <= regno
1616 && dest_endregno >= endregno)
1618 int i;
1620 REG_NOTES (our_prev)
1621 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1622 REG_NOTES (our_prev));
/* Only delete the setter once every word it sets is marked
   REG_UNUSED.  */
1624 for (i = dest_regno; i < dest_endregno; i++)
1625 if (! find_regno_note (our_prev, REG_UNUSED, i))
1626 break;
1628 if (i == dest_endregno)
1629 delete_computation (our_prev);
1633 break;
1636 /* If PAT references the register that dies here, it is an
1637 additional use. Hence any prior SET isn't dead. However, this
1638 insn becomes the new place for the REG_DEAD note. */
1639 if (reg_overlap_mentioned_p (reg, pat))
1641 XEXP (note, 1) = REG_NOTES (our_prev);
1642 REG_NOTES (our_prev) = note;
1643 break;
1648 /* Delete INSN and recursively delete insns that compute values used only
1649 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1650 If we are running before flow.c, we need do nothing since flow.c will
1651 delete dead code. We also can't know if the registers being used are
1652 dead or not at this point.
1654 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1655 nothing other than set a register that dies in this insn, we can delete
1656 that insn as well.
1658 On machines with CC0, if CC0 is used in this insn, we may be able to
1659 delete the insn that set it. */
1661 static void
1662 delete_computation (insn)
1663 rtx insn;
1665 rtx note, next;
1667 #ifdef HAVE_cc0
1668 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1670 rtx prev = prev_nonnote_insn (insn);
1671 /* We assume that at this stage
1672 CC's are always set explicitly
1673 and always immediately before the jump that
1674 will use them. So if the previous insn
1675 exists to set the CC's, delete it
1676 (unless it performs auto-increments, etc.). */
1677 if (prev && GET_CODE (prev) == INSN
1678 && sets_cc0_p (PATTERN (prev)))
1680 if (sets_cc0_p (PATTERN (prev)) > 0
1681 && ! side_effects_p (PATTERN (prev)))
1682 delete_computation (prev);
1683 else
1684 /* Otherwise, show that cc0 won't be used. */
1685 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1686 cc0_rtx, REG_NOTES (prev));
1689 #endif
/* For each register that dies here, try to delete the insn that
   computed it.  NEXT is saved first because delete_prior_computation
   may splice NOTE into another insn's note list.  */
1691 for (note = REG_NOTES (insn); note; note = next)
1693 next = XEXP (note, 1);
1695 if (REG_NOTE_KIND (note) != REG_DEAD
1696 /* Verify that the REG_NOTE is legitimate. */
1697 || GET_CODE (XEXP (note, 0)) != REG)
1698 continue;
1700 delete_prior_computation (note, insn);
1703 delete_related_insns (insn);
1706 /* Delete insn INSN from the chain of insns and update label ref counts
1707 and delete insns now unreachable.
1709 Returns the first insn after INSN that was not deleted.
1711 Usage of this instruction is deprecated. Use delete_insn instead and
1712 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1715 delete_related_insns (insn)
1716 rtx insn;
1718 int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1719 rtx note;
1720 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
/* Skip forward over insns already deleted on a previous call.  */
1722 while (next && INSN_DELETED_P (next))
1723 next = NEXT_INSN (next);
1725 /* This insn is already deleted => return first following nondeleted. */
1726 if (INSN_DELETED_P (insn))
1727 return next;
1729 delete_insn (insn);
1731 /* If instruction is followed by a barrier,
1732 delete the barrier too. */
1734 if (next != 0 && GET_CODE (next) == BARRIER)
1735 delete_insn (next);
1737 /* If deleting a jump, decrement the count of the label,
1738 and delete the label if it is now unused. */
1740 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1742 rtx lab = JUMP_LABEL (insn), lab_next;
1744 if (LABEL_NUSES (lab) == 0)
1746 /* This can delete NEXT or PREV,
1747 either directly if NEXT is JUMP_LABEL (INSN),
1748 or indirectly through more levels of jumps. */
1749 delete_related_insns (lab);
1751 /* I feel a little doubtful about this loop,
1752 but I see no clean and sure alternative way
1753 to find the first insn after INSN that is not now deleted.
1754 I hope this works. */
1755 while (next && INSN_DELETED_P (next))
1756 next = NEXT_INSN (next);
1757 return next;
1759 else if (tablejump_p (insn, NULL, &lab_next))
1761 /* If we're deleting the tablejump, delete the dispatch table.
1762 We may not be able to kill the label immediately preceding
1763 just yet, as it might be referenced in code leading up to
1764 the tablejump. */
1765 delete_related_insns (lab_next);
1769 /* Likewise if we're deleting a dispatch table. */
1771 if (GET_CODE (insn) == JUMP_INSN
1772 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1773 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1775 rtx pat = PATTERN (insn);
1776 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1777 int len = XVECLEN (pat, diff_vec_p);
/* Delete each table entry's label if this table held its last use.  */
1779 for (i = 0; i < len; i++)
1780 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1781 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1782 while (next && INSN_DELETED_P (next))
1783 next = NEXT_INSN (next);
1784 return next;
1787 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1788 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1789 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1790 if (REG_NOTE_KIND (note) == REG_LABEL
1791 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1792 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1793 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1794 delete_related_insns (XEXP (note, 0));
/* Find the nearest live predecessor, skipping deleted insns and notes.  */
1796 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1797 prev = PREV_INSN (prev);
1799 /* If INSN was a label and a dispatch table follows it,
1800 delete the dispatch table. The tablejump must have gone already.
1801 It isn't useful to fall through into a table. */
1803 if (was_code_label
1804 && NEXT_INSN (insn) != 0
1805 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1806 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1807 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1808 next = delete_related_insns (NEXT_INSN (insn));
1810 /* If INSN was a label, delete insns following it if now unreachable. */
1812 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1814 RTX_CODE code;
1815 while (next != 0
1816 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1817 || code == NOTE || code == BARRIER
1818 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1820 if (code == NOTE
1821 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1822 next = NEXT_INSN (next);
1823 /* Keep going past other deleted labels to delete what follows. */
1824 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1825 next = NEXT_INSN (next);
1826 else
1827 /* Note: if this deletes a jump, it can cause more
1828 deletion of unreachable code, after a different label.
1829 As long as the value from this recursive call is correct,
1830 this invocation functions correctly. */
1831 next = delete_related_insns (next);
1835 return next;
1838 /* Advance from INSN till reaching something not deleted
1839 then return that. May return INSN itself. */
1842 next_nondeleted_insn (insn)
1843 rtx insn;
1845 while (INSN_DELETED_P (insn))
1846 insn = NEXT_INSN (insn);
1847 return insn;
1850 /* Delete a range of insns from FROM to TO, inclusive.
1851 This is for the sake of peephole optimization, so assume
1852 that whatever these insns do will still be done by a new
1853 peephole insn that will replace them. */
1855 void
1856 delete_for_peephole (from, to)
1857 rtx from, to;
1859 rtx insn = from;
1861 while (1)
1863 rtx next = NEXT_INSN (insn);
1864 rtx prev = PREV_INSN (insn);
/* Mark non-NOTE insns deleted and unlink them one at a time so that
   any NOTEs inside the range stay threaded in the chain.  */
1866 if (GET_CODE (insn) != NOTE)
1868 INSN_DELETED_P (insn) = 1;
1870 /* Patch this insn out of the chain. */
1871 /* We don't do this all at once, because we
1872 must preserve all NOTEs. */
1873 if (prev)
1874 NEXT_INSN (prev) = next;
1876 if (next)
1877 PREV_INSN (next) = prev;
1880 if (insn == to)
1881 break;
1882 insn = next;
1885 /* Note that if TO is an unconditional jump
1886 we *do not* delete the BARRIER that follows,
1887 since the peephole that replaces this sequence
1888 is also an unconditional jump in that case. */
1891 /* We have determined that AVOIDED_INSN is never reached, and are
1892 about to delete it. If the insn chain between AVOIDED_INSN and
1893 FINISH contains more than one line from the current function, and
1894 contains at least one operation, print a warning if the user asked
1895 for it. If FINISH is NULL, look between AVOIDED_INSN and a LABEL.
1897 CSE and inlining can duplicate insns, so it's possible to get
1898 spurious warnings from this. */
1900 void
1901 never_reached_warning (avoided_insn, finish)
1902 rtx avoided_insn, finish;
1904 rtx insn;
1905 rtx a_line_note = NULL;
1906 int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;
1908 if (!warn_notreached)
1909 return;
1911 /* Back up to the first of any NOTEs preceding avoided_insn; flow passes
1912 us the head of a block, a NOTE_INSN_BASIC_BLOCK, which often follows
1913 the line note. */
1914 insn = avoided_insn;
1915 while (1)
1917 rtx prev = PREV_INSN (insn);
1918 if (prev == NULL_RTX
1919 || GET_CODE (prev) != NOTE)
1920 break;
1921 insn = prev;
1924 /* Scan forwards, looking at LINE_NUMBER notes, until we hit a LABEL
1925 in case FINISH is NULL, otherwise until we run out of insns. */
1927 for (; insn != NULL; insn = NEXT_INSN (insn))
1929 if ((finish == NULL && GET_CODE (insn) == CODE_LABEL)
1930 || GET_CODE (insn) == BARRIER)
1931 break;
1933 if (GET_CODE (insn) == NOTE /* A line number note? */
1934 && NOTE_LINE_NUMBER (insn) >= 0)
/* Remember the first line note; flag if a later one names a
   different line.  */
1936 if (a_line_note == NULL)
1937 a_line_note = insn;
1938 else
1939 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1940 != NOTE_LINE_NUMBER (insn));
1942 else if (INSN_P (insn))
1944 if (reached_end)
1945 break;
1946 contains_insn = 1;
1949 if (insn == finish)
1950 reached_end = 1;
/* Warn only when the dead range spans multiple source lines and
   contains at least one real operation.  */
1952 if (two_avoided_lines && contains_insn)
1954 location_t locus;
1955 locus.file = NOTE_SOURCE_FILE (a_line_note);
1956 locus.line = NOTE_LINE_NUMBER (a_line_note);
1957 warning ("%Hwill never be executed", &locus);
1961 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1962 NLABEL as a return. Accrue modifications into the change group. */
1964 static void
1965 redirect_exp_1 (loc, olabel, nlabel, insn)
1966 rtx *loc;
1967 rtx olabel, nlabel;
1968 rtx insn;
1970 rtx x = *loc;
1971 RTX_CODE code = GET_CODE (x);
1972 int i;
1973 const char *fmt;
/* A reference to the old label: replace it with a reference to the
   new label, or with RETURN when NLABEL is null.  */
1975 if (code == LABEL_REF)
1977 if (XEXP (x, 0) == olabel)
1979 rtx n;
1980 if (nlabel)
1981 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1982 else
1983 n = gen_rtx_RETURN (VOIDmode);
1985 validate_change (insn, loc, n, 1);
1986 return;
/* Null OLABEL means redirect a RETURN to NLABEL instead.  */
1989 else if (code == RETURN && olabel == 0)
1991 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1992 if (loc == &PATTERN (insn))
1993 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1994 validate_change (insn, loc, x, 1);
1995 return;
/* Turn a whole (set (pc) (label_ref OLABEL)) into a bare RETURN when
   NLABEL is null.  */
1998 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1999 && GET_CODE (SET_SRC (x)) == LABEL_REF
2000 && XEXP (SET_SRC (x), 0) == olabel)
2002 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
2003 return;
/* Otherwise recurse over the operands of X.  */
2006 fmt = GET_RTX_FORMAT (code);
2007 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2009 if (fmt[i] == 'e')
2010 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
2011 else if (fmt[i] == 'E')
2013 int j;
2014 for (j = 0; j < XVECLEN (x, i); j++)
2015 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2020 /* Similar, but apply the change group and report success or failure. */
2022 static int
2023 redirect_exp (olabel, nlabel, insn)
2024 rtx olabel, nlabel;
2025 rtx insn;
2027 rtx *loc;
2029 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2030 loc = &XVECEXP (PATTERN (insn), 0, 0);
2031 else
2032 loc = &PATTERN (insn);
2034 redirect_exp_1 (loc, olabel, nlabel, insn);
2035 if (num_validated_changes () == 0)
2036 return 0;
2038 return apply_change_group ();
2041 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2042 the modifications into the change group. Return false if we did
2043 not see how to do that. */
2046 redirect_jump_1 (jump, nlabel)
2047 rtx jump, nlabel;
2049 int ochanges = num_validated_changes ();
2050 rtx *loc;
2052 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2053 loc = &XVECEXP (PATTERN (jump), 0, 0);
2054 else
2055 loc = &PATTERN (jump);
2057 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2058 return num_validated_changes () > ochanges;
2061 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2062 jump target label is unused as a result, it and the code following
2063 it may be deleted.
2065 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2066 RETURN insn.
2068 The return value will be 1 if the change was made, 0 if it wasn't
2069 (this can only occur for NLABEL == 0). */
2072 redirect_jump (jump, nlabel, delete_unused)
2073 rtx jump, nlabel;
2074 int delete_unused;
2076 rtx olabel = JUMP_LABEL (jump);
2077 rtx note;
2079 if (nlabel == olabel)
2080 return 1;
2082 if (! redirect_exp (olabel, nlabel, jump))
2083 return 0;
/* The pattern was changed; keep the JUMP_LABEL field and the label
   use counts in sync with it.  */
2085 JUMP_LABEL (jump) = nlabel;
2086 if (nlabel)
2087 ++LABEL_NUSES (nlabel);
2089 /* Update labels in any REG_EQUAL note. */
2090 if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
2092 if (nlabel && olabel)
2094 rtx dest = XEXP (note, 0);
2096 if (GET_CODE (dest) == IF_THEN_ELSE)
2098 if (GET_CODE (XEXP (dest, 1)) == LABEL_REF
2099 && XEXP (XEXP (dest, 1), 0) == olabel)
2100 XEXP (XEXP (dest, 1), 0) = nlabel;
2101 if (GET_CODE (XEXP (dest, 2)) == LABEL_REF
2102 && XEXP (XEXP (dest, 2), 0) == olabel)
2103 XEXP (XEXP (dest, 2), 0) = nlabel;
2105 else
2106 remove_note (jump, note);
2108 else
2109 remove_note (jump, note);
2112 /* If we're eliding the jump over exception cleanups at the end of a
2113 function, move the function end note so that -Wreturn-type works. */
2114 if (olabel && nlabel
2115 && NEXT_INSN (olabel)
2116 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2117 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2118 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2120 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
2121 /* Undefined labels will remain outside the insn stream. */
2122 && INSN_UID (olabel))
2123 delete_related_insns (olabel);
2125 return 1;
2128 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2129 Accrue the modifications into the change group. */
2131 static void
2132 invert_exp_1 (insn)
2133 rtx insn;
2135 RTX_CODE code;
2136 rtx x = pc_set (insn);
/* Callers must pass a conditional jump; a missing PC set is a bug.  */
2138 if (!x)
2139 abort ();
2140 x = SET_SRC (x);
2142 code = GET_CODE (x);
2144 if (code == IF_THEN_ELSE)
2146 rtx comp = XEXP (x, 0);
2147 rtx tem;
2148 enum rtx_code reversed_code;
2150 /* We can do this in two ways: The preferable way, which can only
2151 be done if this is not an integer comparison, is to reverse
2152 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2153 of the IF_THEN_ELSE. If we can't do either, fail. */
2155 reversed_code = reversed_comparison_code (comp, insn);
2157 if (reversed_code != UNKNOWN)
2159 validate_change (insn, &XEXP (x, 0),
2160 gen_rtx_fmt_ee (reversed_code,
2161 GET_MODE (comp), XEXP (comp, 0),
2162 XEXP (comp, 1)),
2164 return;
/* Fallback: swap the two arms of the IF_THEN_ELSE instead.  */
2167 tem = XEXP (x, 1);
2168 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2169 validate_change (insn, &XEXP (x, 2), tem, 1);
2171 else
2172 abort ();
2175 /* Invert the jump condition of conditional jump insn, INSN.
2177 Return 1 if we can do so, 0 if we cannot find a way to do so that
2178 matches a pattern. */
2180 static int
2181 invert_exp (insn)
2182 rtx insn;
2184 invert_exp_1 (insn);
2185 if (num_validated_changes () == 0)
2186 return 0;
2188 return apply_change_group ();
2191 /* Invert the condition of the jump JUMP, and make it jump to label
2192 NLABEL instead of where it jumps now. Accrue changes into the
2193 change group. Return false if we didn't see how to perform the
2194 inversion and redirection. */
2197 invert_jump_1 (jump, nlabel)
2198 rtx jump, nlabel;
2200 int ochanges;
2202 ochanges = num_validated_changes ();
2203 invert_exp_1 (jump);
2204 if (num_validated_changes () == ochanges)
2205 return 0;
2207 return redirect_jump_1 (jump, nlabel);
2210 /* Invert the condition of the jump JUMP, and make it jump to label
2211 NLABEL instead of where it jumps now. Return true if successful. */
2214 invert_jump (jump, nlabel, delete_unused)
2215 rtx jump, nlabel;
2216 int delete_unused;
2218 /* We have to either invert the condition and change the label or
2219 do neither. Either operation could fail. We first try to invert
2220 the jump. If that succeeds, we try changing the label. If that fails,
2221 we invert the jump back to what it was. */
2223 if (! invert_exp (jump))
2224 return 0;
2226 if (redirect_jump (jump, nlabel, delete_unused))
2228 /* Remove REG_EQUAL note if we have one. */
2229 rtx note = find_reg_note (jump, REG_EQUAL, NULL_RTX);
2230 if (note)
2231 remove_note (jump, note);
2233 invert_br_probabilities (jump);
2235 return 1;
2238 if (! invert_exp (jump))
2239 /* This should just be putting it back the way it was. */
2240 abort ();
2242 return 0;
2246 /* Like rtx_equal_p except that it considers two REGs as equal
2247 if they renumber to the same value and considers two commutative
2248 operations to be the same if the order of the operands has been
2249 reversed.
2251 ??? Addition is not commutative on the PA due to the weird implicit
2252 space register selection rules for memory addresses. Therefore, we
2253 don't consider a + b == b + a.
2255 We could/should make this test a little tighter. Possibly only
2256 disabling it on the PA via some backend macro or only disabling this
2257 case when the PLUS is inside a MEM. */
2260 rtx_renumbered_equal_p (x, y)
2261 rtx x, y;
2263 int i;
2264 RTX_CODE code = GET_CODE (x);
2265 const char *fmt;
2267 if (x == y)
2268 return 1;
/* Compare REGs and SUBREGs of REGs by the hard register numbers they
   map to after register renumbering.  */
2270 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2271 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2272 && GET_CODE (SUBREG_REG (y)) == REG)))
2274 int reg_x = -1, reg_y = -1;
2275 int byte_x = 0, byte_y = 0;
2277 if (GET_MODE (x) != GET_MODE (y))
2278 return 0;
2280 /* If we haven't done any renumbering, don't
2281 make any assumptions. */
2282 if (reg_renumber == 0)
2283 return rtx_equal_p (x, y);
2285 if (code == SUBREG)
2287 reg_x = REGNO (SUBREG_REG (x));
2288 byte_x = SUBREG_BYTE (x);
2290 if (reg_renumber[reg_x] >= 0)
2292 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2293 GET_MODE (SUBREG_REG (x)),
2294 byte_x,
2295 GET_MODE (x));
2296 byte_x = 0;
2299 else
2301 reg_x = REGNO (x);
2302 if (reg_renumber[reg_x] >= 0)
2303 reg_x = reg_renumber[reg_x];
2306 if (GET_CODE (y) == SUBREG)
2308 reg_y = REGNO (SUBREG_REG (y));
2309 byte_y = SUBREG_BYTE (y);
2311 if (reg_renumber[reg_y] >= 0)
2313 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2314 GET_MODE (SUBREG_REG (y)),
2315 byte_y,
2316 GET_MODE (y));
2317 byte_y = 0;
2320 else
2322 reg_y = REGNO (y);
2323 if (reg_renumber[reg_y] >= 0)
2324 reg_y = reg_renumber[reg_y];
2327 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2330 /* Now we have disposed of all the cases
2331 in which different rtx codes can match. */
2332 if (code != GET_CODE (y))
2333 return 0;
2335 switch (code)
2337 case PC:
2338 case CC0:
2339 case ADDR_VEC:
2340 case ADDR_DIFF_VEC:
2341 return 0;
2343 case CONST_INT:
2344 return INTVAL (x) == INTVAL (y);
2346 case LABEL_REF:
2347 /* We can't assume nonlocal labels have their following insns yet. */
2348 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2349 return XEXP (x, 0) == XEXP (y, 0);
2351 /* Two label-refs are equivalent if they point at labels
2352 in the same position in the instruction stream. */
2353 return (next_real_insn (XEXP (x, 0))
2354 == next_real_insn (XEXP (y, 0)));
2356 case SYMBOL_REF:
2357 return XSTR (x, 0) == XSTR (y, 0);
2359 case CODE_LABEL:
2360 /* If we didn't match EQ equality above, they aren't the same. */
2361 return 0;
2363 default:
2364 break;
2367 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2369 if (GET_MODE (x) != GET_MODE (y))
2370 return 0;
2372 /* For commutative operations, the RTX match if the operand match in any
2373 order. Also handle the simple binary and unary cases without a loop.
2375 ??? Don't consider PLUS a commutative operator; see comments above. */
2376 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2377 && code != PLUS)
2378 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2379 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2380 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2381 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2382 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2383 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2384 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2385 else if (GET_RTX_CLASS (code) == '1')
2386 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2388 /* Compare the elements. If any pair of corresponding elements
2389 fail to match, return 0 for the whole things. */
2391 fmt = GET_RTX_FORMAT (code);
2392 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2394 int j;
2395 switch (fmt[i])
2397 case 'w':
2398 if (XWINT (x, i) != XWINT (y, i))
2399 return 0;
2400 break;
2402 case 'i':
2403 if (XINT (x, i) != XINT (y, i))
2404 return 0;
2405 break;
2407 case 't':
2408 if (XTREE (x, i) != XTREE (y, i))
2409 return 0;
2410 break;
2412 case 's':
2413 if (strcmp (XSTR (x, i), XSTR (y, i)))
2414 return 0;
2415 break;
2417 case 'e':
2418 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2419 return 0;
2420 break;
2422 case 'u':
2423 if (XEXP (x, i) != XEXP (y, i))
2424 return 0;
2425 /* fall through. */
2426 case '0':
2427 break;
2429 case 'E':
2430 if (XVECLEN (x, i) != XVECLEN (y, i))
2431 return 0;
2432 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2433 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2434 return 0;
2435 break;
2437 default:
2438 abort ();
/* All operands matched.  */
2441 return 1;
2444 /* If X is a hard register or equivalent to one or a subregister of one,
2445 return the hard register number. If X is a pseudo register that was not
2446 assigned a hard register, return the pseudo register number. Otherwise,
2447 return -1. Any rtx is valid for X. */
2450 true_regnum (x)
2451 rtx x;
2453 if (GET_CODE (x) == REG)
2455 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2456 return reg_renumber[REGNO (x)];
2457 return REGNO (x);
2459 if (GET_CODE (x) == SUBREG)
2461 int base = true_regnum (SUBREG_REG (x));
2462 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2463 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2464 GET_MODE (SUBREG_REG (x)),
2465 SUBREG_BYTE (x), GET_MODE (x));
2467 return -1;
2470 /* Return regno of the register REG and handle subregs too. */
2471 unsigned int
2472 reg_or_subregno (reg)
2473 rtx reg;
2475 if (REG_P (reg))
2476 return REGNO (reg);
2477 if (GET_CODE (reg) == SUBREG)
2478 return REGNO (SUBREG_REG (reg));
2479 abort ();