PR libstdc++/9527, PR libstdc++/8713
[official-gcc.git] / gcc / jump.c
blob2e9119b46535916c16c37609f75cd5bba1fed454
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler.  Now it basically contains a set of utility functions to
24 operate with jumps.
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tm_p.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "regs.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
48 #include "recog.h"
49 #include "function.h"
50 #include "expr.h"
51 #include "real.h"
52 #include "except.h"
53 #include "toplev.h"
54 #include "reload.h"
55 #include "predict.h"
57 /* Optimize jump y; x: ... y: jumpif... x?
58 Don't know if it is worth bothering with. */
59 /* Optimize two cases of conditional jump to conditional jump?
60 This can never delete any instruction or make anything dead,
61 or even change what is live at any point.
62 So perhaps let combiner do it. */
64 static rtx next_nonnote_insn_in_loop PARAMS ((rtx));
65 static void init_label_info PARAMS ((rtx));
66 static void mark_all_labels PARAMS ((rtx));
67 static int duplicate_loop_exit_test PARAMS ((rtx));
68 static void delete_computation PARAMS ((rtx));
69 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
70 static int redirect_exp PARAMS ((rtx, rtx, rtx));
71 static void invert_exp_1 PARAMS ((rtx));
72 static int invert_exp PARAMS ((rtx));
73 static int returnjump_p_1 PARAMS ((rtx *, void *));
74 static void delete_prior_computation PARAMS ((rtx, rtx));
76 /* Alternate entry into the jump optimizer. This entry point only rebuilds
77 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
78 instructions. */
79 void
80 rebuild_jump_labels (f)
81 rtx f;
83 rtx insn;
85 init_label_info (f);
86 mark_all_labels (f);
88 /* Keep track of labels used from static data; we don't track them
89 closely enough to delete them here, so make sure their reference
90 count doesn't drop to zero. */
92 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
93 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
94 LABEL_NUSES (XEXP (insn, 0))++;
97 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
98 non-fallthru insn. This is not generally true, as multiple barriers
99 may have crept in, or the BARRIER may be separated from the last
100 real insn by one or more NOTEs.
102 This simple pass moves barriers and removes duplicates so that the
103 old code is happy.
105 void
106 cleanup_barriers ()
108 rtx insn, next, prev;
109 for (insn = get_insns (); insn; insn = next)
111 next = NEXT_INSN (insn);
112 if (GET_CODE (insn) == BARRIER)
114 prev = prev_nonnote_insn (insn);
115 if (GET_CODE (prev) == BARRIER)
116 delete_barrier (insn);
117 else if (prev != PREV_INSN (insn))
118 reorder_insns (insn, insn, prev);
123 /* Return the next insn after INSN that is not a NOTE and is in the loop,
124 i.e. when there is no such INSN before NOTE_INSN_LOOP_END return NULL_RTX.
125 This routine does not look inside SEQUENCEs. */
127 static rtx
128 next_nonnote_insn_in_loop (insn)
129 rtx insn;
131 while (insn)
133 insn = NEXT_INSN (insn);
134 if (insn == 0 || GET_CODE (insn) != NOTE)
135 break;
136 if (GET_CODE (insn) == NOTE
137 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
138 return NULL_RTX;
141 return insn;
/* Scan the insn chain starting at F for loops and, where profitable,
   duplicate each loop's exit test in front of the loop body (see
   duplicate_loop_exit_test).  */
144 void
145 copy_loop_headers (f)
146 rtx f;
148 rtx insn, next;
149 /* Now iterate optimizing jumps until nothing changes over one pass. */
150 for (insn = f; insn; insn = next)
152 rtx temp, temp1;
154 next = NEXT_INSN (insn);
156 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
157 jump. Try to optimize by duplicating the loop exit test if so.
158 This is only safe immediately after regscan, because it uses
159 the values of regno_first_uid and regno_last_uid. */
160 if (GET_CODE (insn) == NOTE
161 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
162 && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
163 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
/* Remember the insn before the LOOP_BEG note: the duplication inserts
   new insns there, and the outer scan must revisit them.  */
165 temp = PREV_INSN (insn);
166 if (duplicate_loop_exit_test (insn))
/* Resume scanning at the first newly inserted insn.  */
168 next = NEXT_INSN (temp);
/* Remove redundant line-number notes from the insn chain starting at F:
   a line note is deleted when it repeats the source file and line of the
   previous surviving line note.  */
174 void
175 purge_line_number_notes (f)
176 rtx f;
178 rtx last_note = 0;
179 rtx insn;
180 /* Delete extraneous line number notes.
181 Note that two consecutive notes for different lines are not really
182 extraneous. There should be some indication where that line belonged,
183 even if it became empty. */
185 for (insn = f; insn; insn = NEXT_INSN (insn))
186 if (GET_CODE (insn) == NOTE)
188 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
189 /* Any previous line note was for the prologue; gdb wants a new
190 note after the prologue even if it is for the same line. */
191 last_note = NULL_RTX;
/* A nonnegative NOTE_LINE_NUMBER marks a real source-line note.  */
192 else if (NOTE_LINE_NUMBER (insn) >= 0)
194 /* Delete this note if it is identical to previous note. */
195 if (last_note
196 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
197 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
199 delete_related_insns (insn);
200 continue;
/* Otherwise this note survives and becomes the comparison point.  */
203 last_note = insn;
208 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
209 notes whose labels don't occur in the insn any more. Returns the
210 largest INSN_UID found. */
211 static void
212 init_label_info (f)
213 rtx f;
215 rtx insn;
217 for (insn = f; insn; insn = NEXT_INSN (insn))
218 if (GET_CODE (insn) == CODE_LABEL)
219 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
220 else if (GET_CODE (insn) == JUMP_INSN)
221 JUMP_LABEL (insn) = 0;
222 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
224 rtx note, next;
226 for (note = REG_NOTES (insn); note; note = next)
228 next = XEXP (note, 1);
229 if (REG_NOTE_KIND (note) == REG_LABEL
230 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
231 remove_note (insn, note);
236 /* Mark the label each jump jumps to.
237 Combine consecutive labels, and count uses of labels. */
239 static void
240 mark_all_labels (f)
241 rtx f;
243 rtx insn;
245 for (insn = f; insn; insn = NEXT_INSN (insn))
246 if (INSN_P (insn))
/* A CALL_PLACEHOLDER holds alternative insn sequences (normal call,
   tail call, tail recursion); recurse into each of them.  */
248 if (GET_CODE (insn) == CALL_INSN
249 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
251 mark_all_labels (XEXP (PATTERN (insn), 0));
252 mark_all_labels (XEXP (PATTERN (insn), 1));
253 mark_all_labels (XEXP (PATTERN (insn), 2));
255 /* Canonicalize the tail recursion label attached to the
256 CALL_PLACEHOLDER insn. */
257 if (XEXP (PATTERN (insn), 3))
/* Wrap the label in a LABEL_REF so mark_jump_label can canonicalize
   it, then store the possibly-updated label back.  */
259 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
260 XEXP (PATTERN (insn), 3));
261 mark_jump_label (label_ref, insn, 0);
262 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
265 continue;
268 mark_jump_label (PATTERN (insn), insn, 0);
269 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
271 /* When we know the LABEL_REF contained in a REG used in
272 an indirect jump, we'll have a REG_LABEL note so that
273 flow can tell where it's going. */
274 if (JUMP_LABEL (insn) == 0)
276 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
277 if (label_note)
279 /* But a LABEL_REF around the REG_LABEL note, so
280 that we can canonicalize it. */
281 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
282 XEXP (label_note, 0));
284 mark_jump_label (label_ref, insn, 0);
285 XEXP (label_note, 0) = XEXP (label_ref, 0);
/* Record the canonical label as the jump target.  */
286 JUMP_LABEL (insn) = XEXP (label_note, 0);
293 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
294 jump. Assume that this unconditional jump is to the exit test code. If
295 the code is sufficiently simple, make a copy of it before INSN,
296 followed by a jump to the exit of the loop. Then delete the unconditional
297 jump after INSN.
299 Return 1 if we made the change, else 0.
301 This is only safe immediately after a regscan pass because it uses the
302 values of regno_first_uid and regno_last_uid. */
304 static int
305 duplicate_loop_exit_test (loop_start)
306 rtx loop_start;
308 rtx insn, set, reg, p, link;
309 rtx copy = 0, first_copy = 0;
310 int num_insns = 0;
/* EXITCODE is the first insn of the exit test: the insn after the label
   targeted by the unconditional jump that follows LOOP_START.  */
311 rtx exitcode
312 = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
313 rtx lastexit;
314 int max_reg = max_reg_num ();
315 rtx *reg_map = 0;
316 rtx loop_pre_header_label;
318 /* Scan the exit code. We do not perform this optimization if any insn:
320 is a CALL_INSN
321 is a CODE_LABEL
322 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
323 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
325 We also do not do this if we find an insn with ASM_OPERANDS. While
326 this restriction should not be necessary, copying an insn with
327 ASM_OPERANDS can confuse asm_noperands in some cases.
329 Also, don't do this if the exit code is more than 20 insns. */
331 for (insn = exitcode;
332 insn
333 && ! (GET_CODE (insn) == NOTE
334 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
335 insn = NEXT_INSN (insn))
337 switch (GET_CODE (insn))
339 case CODE_LABEL:
340 case CALL_INSN:
341 return 0;
342 case NOTE:
344 if (optimize < 2
345 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
346 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
347 /* If we were to duplicate this code, we would not move
348 the BLOCK notes, and so debugging the moved code would
349 be difficult. Thus, we only move the code with -O2 or
350 higher. */
351 return 0;
353 break;
354 case JUMP_INSN:
355 case INSN:
356 /* The code below would grossly mishandle REG_WAS_0 notes,
357 so get rid of them here. */
358 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
359 remove_note (insn, p);
360 if (++num_insns > 20
361 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
362 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
363 return 0;
364 break;
365 default:
366 break;
370 /* Unless INSN is zero, we can do the optimization. */
371 if (insn == 0)
372 return 0;
374 lastexit = insn;
376 /* See if any insn sets a register only used in the loop exit code and
377 not a user variable. If so, replace it with a new register. */
378 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
379 if (GET_CODE (insn) == INSN
380 && (set = single_set (insn)) != 0
381 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
382 || (GET_CODE (reg) == SUBREG
383 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
384 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
385 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
/* The pseudo's first use is here; if its last use is also inside the
   exit code, it is private to the exit test and may be renamed.  */
387 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
388 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
389 break;
391 if (p != lastexit)
393 /* We can do the replacement. Allocate reg_map if this is the
394 first replacement we found. */
395 if (reg_map == 0)
396 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
398 REG_LOOP_TEST_P (reg) = 1;
400 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
/* Label that will collect redirected loop entrances, so the copied test
   becomes a proper pre-header for the loop.  */
403 loop_pre_header_label = gen_label_rtx ();
405 /* Now copy each insn. */
406 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
408 switch (GET_CODE (insn))
410 case BARRIER:
411 copy = emit_barrier_before (loop_start);
412 break;
413 case NOTE:
414 /* Only copy line-number notes. */
415 if (NOTE_LINE_NUMBER (insn) >= 0)
417 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
418 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
420 break;
422 case INSN:
423 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
424 if (reg_map)
425 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
427 mark_jump_label (PATTERN (copy), copy, 0);
428 INSN_SCOPE (copy) = INSN_SCOPE (insn);
430 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
431 make them. */
432 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
433 if (REG_NOTE_KIND (link) != REG_LABEL)
435 if (GET_CODE (link) == EXPR_LIST)
436 REG_NOTES (copy)
437 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
438 XEXP (link, 0),
439 REG_NOTES (copy)));
440 else
441 REG_NOTES (copy)
442 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
443 XEXP (link, 0),
444 REG_NOTES (copy)));
447 if (reg_map && REG_NOTES (copy))
448 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
449 break;
451 case JUMP_INSN:
452 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
453 loop_start);
454 INSN_SCOPE (copy) = INSN_SCOPE (insn);
455 if (reg_map)
456 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
457 mark_jump_label (PATTERN (copy), copy, 0);
458 if (REG_NOTES (insn))
460 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
461 if (reg_map)
462 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
465 /* Predict conditional jump that do make loop looping as taken.
466 Other jumps are probably exit conditions, so predict
467 them as untaken. */
468 if (any_condjump_p (copy))
470 rtx label = JUMP_LABEL (copy);
471 if (label)
473 /* The jump_insn after loop_start should be followed
474 by barrier and loopback label. */
475 if (prev_nonnote_insn (label)
476 && (prev_nonnote_insn (prev_nonnote_insn (label))
477 == next_nonnote_insn (loop_start)))
479 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
480 /* To keep pre-header, we need to redirect all loop
481 entrances before the LOOP_BEG note. */
482 redirect_jump (copy, loop_pre_header_label, 0);
484 else
485 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
488 break;
490 default:
491 abort ();
494 /* Record the first insn we copied. We need it so that we can
495 scan the copied insns for new pseudo registers. */
496 if (! first_copy)
497 first_copy = copy;
500 /* Now clean up by emitting a jump to the end label and deleting the jump
501 at the start of the loop. */
502 if (! copy || GET_CODE (copy) != BARRIER)
504 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
505 loop_start);
507 /* Record the first insn we copied. We need it so that we can
508 scan the copied insns for new pseudo registers. This may not
509 be strictly necessary since we should have copied at least one
510 insn above. But I am going to be safe. */
511 if (! first_copy)
512 first_copy = copy;
514 mark_jump_label (PATTERN (copy), copy, 0);
515 emit_barrier_before (loop_start);
/* Emit the pre-header label that the copied conditional jumps were
   redirected to above.  */
518 emit_label_before (loop_pre_header_label, loop_start);
520 /* Now scan from the first insn we copied to the last insn we copied
521 (copy) for new pseudo registers. Do this after the code to jump to
522 the end label since that might create a new pseudo too. */
523 reg_scan_update (first_copy, copy, max_reg);
525 /* Mark the exit code as the virtual top of the converted loop. */
526 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
/* Delete the original unconditional jump after LOOP_START; the copied
   exit test now does its job.  */
528 delete_related_insns (next_nonnote_insn (loop_start));
530 /* Clean up. */
531 if (reg_map)
532 free (reg_map);
534 return 1;
537 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
538 notes between START and END out before START. START and END may be such
539 notes. Returns the values of the new starting and ending insns, which
540 may be different if the original ones were such notes.
541 Return true if there were only such notes and no real instructions. */
543 bool
544 squeeze_notes (startp, endp)
545 rtx* startp;
546 rtx* endp;
548 rtx start = *startp;
549 rtx end = *endp;
551 rtx insn;
552 rtx next;
553 rtx last = NULL;
554 rtx past_end = NEXT_INSN (end);
556 for (insn = start; insn != past_end; insn = next)
558 next = NEXT_INSN (insn);
559 if (GET_CODE (insn) == NOTE
560 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
561 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
562 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
563 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
564 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
565 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
/* A note at the head of the range just shrinks the range.  */
567 if (insn == start)
568 start = next;
569 else
/* Unlink INSN from its place and relink it immediately before START,
   keeping both doubly-linked chains consistent.  */
571 rtx prev = PREV_INSN (insn);
572 PREV_INSN (insn) = PREV_INSN (start);
573 NEXT_INSN (insn) = start;
574 NEXT_INSN (PREV_INSN (insn)) = insn;
575 PREV_INSN (NEXT_INSN (insn)) = insn;
576 NEXT_INSN (prev) = next;
577 PREV_INSN (next) = prev;
580 else
581 last = insn;
584 /* There were no real instructions. */
585 if (start == past_end)
586 return true;
/* LAST is the last non-note insn seen; it becomes the new range end.  */
588 end = last;
590 *startp = start;
591 *endp = end;
592 return false;
595 /* Return the label before INSN, or put a new label there. */
598 get_label_before (insn)
599 rtx insn;
601 rtx label;
603 /* Find an existing label at this point
604 or make a new one if there is none. */
605 label = prev_nonnote_insn (insn);
607 if (label == 0 || GET_CODE (label) != CODE_LABEL)
609 rtx prev = PREV_INSN (insn);
611 label = gen_label_rtx ();
612 emit_label_after (label, prev);
613 LABEL_NUSES (label) = 0;
615 return label;
618 /* Return the label after INSN, or put a new label there. */
621 get_label_after (insn)
622 rtx insn;
624 rtx label;
626 /* Find an existing label at this point
627 or make a new one if there is none. */
628 label = next_nonnote_insn (insn);
630 if (label == 0 || GET_CODE (label) != CODE_LABEL)
632 label = gen_label_rtx ();
633 emit_label_after (label, insn);
634 LABEL_NUSES (label) = 0;
636 return label;
639 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
640 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
641 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
642 know whether it's source is floating point or integer comparison. Machine
643 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
644 to help this function avoid overhead in these cases. */
645 enum rtx_code
646 reversed_comparison_code_parts (code, arg0, arg1, insn)
647 rtx insn, arg0, arg1;
648 enum rtx_code code;
650 enum machine_mode mode;
652 /* If this is not actually a comparison, we can't reverse it. */
653 if (GET_RTX_CLASS (code) != '<')
654 return UNKNOWN;
656 mode = GET_MODE (arg0);
657 if (mode == VOIDmode)
658 mode = GET_MODE (arg1);
660 /* First see if machine description supply us way to reverse the comparison.
661 Give it priority over everything else to allow machine description to do
662 tricks. */
663 #ifdef REVERSIBLE_CC_MODE
664 if (GET_MODE_CLASS (mode) == MODE_CC
665 && REVERSIBLE_CC_MODE (mode))
667 #ifdef REVERSE_CONDITION
668 return REVERSE_CONDITION (code, mode);
669 #endif
670 return reverse_condition (code);
672 #endif
674 /* Try a few special cases based on the comparison code. */
675 switch (code)
677 case GEU:
678 case GTU:
679 case LEU:
680 case LTU:
681 case NE:
682 case EQ:
683 /* It is always safe to reverse EQ and NE, even for the floating
684 point. Similary the unsigned comparisons are never used for
685 floating point so we can reverse them in the default way. */
686 return reverse_condition (code);
687 case ORDERED:
688 case UNORDERED:
689 case LTGT:
690 case UNEQ:
691 /* In case we already see unordered comparison, we can be sure to
692 be dealing with floating point so we don't need any more tests. */
693 return reverse_condition_maybe_unordered (code);
694 case UNLT:
695 case UNLE:
696 case UNGT:
697 case UNGE:
698 /* We don't have safe way to reverse these yet. */
699 return UNKNOWN;
700 default:
701 break;
/* For CC-mode (or cc0) comparisons the mode alone does not reveal whether
   the comparison is integral or floating; scan backwards for the insn
   that actually set the flags.  */
704 if (GET_MODE_CLASS (mode) == MODE_CC
705 #ifdef HAVE_cc0
706 || arg0 == cc0_rtx
707 #endif
710 rtx prev;
711 /* Try to search for the comparison to determine the real mode.
712 This code is expensive, but with sane machine description it
713 will be never used, since REVERSIBLE_CC_MODE will return true
714 in all cases. */
715 if (! insn)
716 return UNKNOWN;
/* Stop at a CODE_LABEL: beyond it the flags source is unknown.  */
718 for (prev = prev_nonnote_insn (insn);
719 prev != 0 && GET_CODE (prev) != CODE_LABEL;
720 prev = prev_nonnote_insn (prev))
722 rtx set = set_of (arg0, prev);
723 if (set && GET_CODE (set) == SET
724 && rtx_equal_p (SET_DEST (set), arg0))
726 rtx src = SET_SRC (set);
728 if (GET_CODE (src) == COMPARE)
730 rtx comparison = src;
731 arg0 = XEXP (src, 0);
732 mode = GET_MODE (arg0);
733 if (mode == VOIDmode)
734 mode = GET_MODE (XEXP (comparison, 1));
735 break;
737 /* We can get past reg-reg moves. This may be useful for model
738 of i387 comparisons that first move flag registers around. */
739 if (REG_P (src))
741 arg0 = src;
742 continue;
745 /* If register is clobbered in some ununderstandable way,
746 give up. */
747 if (set)
748 return UNKNOWN;
752 /* Test for an integer condition, or a floating-point comparison
753 in which NaNs can be ignored. */
754 if (GET_CODE (arg0) == CONST_INT
755 || (GET_MODE (arg0) != VOIDmode
756 && GET_MODE_CLASS (mode) != MODE_CC
757 && !HONOR_NANS (mode)))
758 return reverse_condition (code);
760 return UNKNOWN;
763 /* An wrapper around the previous function to take COMPARISON as rtx
764 expression. This simplifies many callers. */
765 enum rtx_code
766 reversed_comparison_code (comparison, insn)
767 rtx comparison, insn;
769 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
770 return UNKNOWN;
771 return reversed_comparison_code_parts (GET_CODE (comparison),
772 XEXP (comparison, 0),
773 XEXP (comparison, 1), insn);
776 /* Given an rtx-code for a comparison, return the code for the negated
777 comparison. If no such code exists, return UNKNOWN.
779 WATCH OUT! reverse_condition is not safe to use on a jump that might
780 be acting on the results of an IEEE floating point comparison, because
781 of the special treatment of non-signaling nans in comparisons.
782 Use reversed_comparison_code instead. */
784 enum rtx_code
785 reverse_condition (code)
786 enum rtx_code code;
788 switch (code)
790 case EQ:
791 return NE;
792 case NE:
793 return EQ;
794 case GT:
795 return LE;
796 case GE:
797 return LT;
798 case LT:
799 return GE;
800 case LE:
801 return GT;
802 case GTU:
803 return LEU;
804 case GEU:
805 return LTU;
806 case LTU:
807 return GEU;
808 case LEU:
809 return GTU;
810 case UNORDERED:
811 return ORDERED;
812 case ORDERED:
813 return UNORDERED;
815 case UNLT:
816 case UNLE:
817 case UNGT:
818 case UNGE:
819 case UNEQ:
820 case LTGT:
821 return UNKNOWN;
823 default:
824 abort ();
828 /* Similar, but we're allowed to generate unordered comparisons, which
829 makes it safe for IEEE floating-point. Of course, we have to recognize
830 that the target will support them too... */
832 enum rtx_code
833 reverse_condition_maybe_unordered (code)
834 enum rtx_code code;
836 switch (code)
838 case EQ:
839 return NE;
840 case NE:
841 return EQ;
842 case GT:
843 return UNLE;
844 case GE:
845 return UNLT;
846 case LT:
847 return UNGE;
848 case LE:
849 return UNGT;
850 case LTGT:
851 return UNEQ;
852 case UNORDERED:
853 return ORDERED;
854 case ORDERED:
855 return UNORDERED;
856 case UNLT:
857 return GE;
858 case UNLE:
859 return GT;
860 case UNGT:
861 return LE;
862 case UNGE:
863 return LT;
864 case UNEQ:
865 return LTGT;
867 default:
868 abort ();
872 /* Similar, but return the code when two operands of a comparison are swapped.
873 This IS safe for IEEE floating-point. */
875 enum rtx_code
876 swap_condition (code)
877 enum rtx_code code;
879 switch (code)
881 case EQ:
882 case NE:
883 case UNORDERED:
884 case ORDERED:
885 case UNEQ:
886 case LTGT:
887 return code;
889 case GT:
890 return LT;
891 case GE:
892 return LE;
893 case LT:
894 return GT;
895 case LE:
896 return GE;
897 case GTU:
898 return LTU;
899 case GEU:
900 return LEU;
901 case LTU:
902 return GTU;
903 case LEU:
904 return GEU;
905 case UNLT:
906 return UNGT;
907 case UNLE:
908 return UNGE;
909 case UNGT:
910 return UNLT;
911 case UNGE:
912 return UNLE;
914 default:
915 abort ();
919 /* Given a comparison CODE, return the corresponding unsigned comparison.
920 If CODE is an equality comparison or already an unsigned comparison,
921 CODE is returned. */
923 enum rtx_code
924 unsigned_condition (code)
925 enum rtx_code code;
927 switch (code)
929 case EQ:
930 case NE:
931 case GTU:
932 case GEU:
933 case LTU:
934 case LEU:
935 return code;
937 case GT:
938 return GTU;
939 case GE:
940 return GEU;
941 case LT:
942 return LTU;
943 case LE:
944 return LEU;
946 default:
947 abort ();
951 /* Similarly, return the signed version of a comparison. */
953 enum rtx_code
954 signed_condition (code)
955 enum rtx_code code;
957 switch (code)
959 case EQ:
960 case NE:
961 case GT:
962 case GE:
963 case LT:
964 case LE:
965 return code;
967 case GTU:
968 return GT;
969 case GEU:
970 return GE;
971 case LTU:
972 return LT;
973 case LEU:
974 return LE;
976 default:
977 abort ();
981 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
982 truth of CODE1 implies the truth of CODE2. */
985 comparison_dominates_p (code1, code2)
986 enum rtx_code code1, code2;
988 /* UNKNOWN comparison codes can happen as a result of trying to revert
989 comparison codes.
990 They can't match anything, so we have to reject them here. */
991 if (code1 == UNKNOWN || code2 == UNKNOWN)
992 return 0;
994 if (code1 == code2)
995 return 1;
997 switch (code1)
999 case UNEQ:
1000 if (code2 == UNLE || code2 == UNGE)
1001 return 1;
1002 break;
1004 case EQ:
1005 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1006 || code2 == ORDERED)
1007 return 1;
1008 break;
1010 case UNLT:
1011 if (code2 == UNLE || code2 == NE)
1012 return 1;
1013 break;
1015 case LT:
1016 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1017 return 1;
1018 break;
1020 case UNGT:
1021 if (code2 == UNGE || code2 == NE)
1022 return 1;
1023 break;
1025 case GT:
1026 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1027 return 1;
1028 break;
1030 case GE:
1031 case LE:
1032 if (code2 == ORDERED)
1033 return 1;
1034 break;
1036 case LTGT:
1037 if (code2 == NE || code2 == ORDERED)
1038 return 1;
1039 break;
1041 case LTU:
1042 if (code2 == LEU || code2 == NE)
1043 return 1;
1044 break;
1046 case GTU:
1047 if (code2 == GEU || code2 == NE)
1048 return 1;
1049 break;
1051 case UNORDERED:
1052 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1053 || code2 == UNGE || code2 == UNGT)
1054 return 1;
1055 break;
1057 default:
1058 break;
1061 return 0;
1064 /* Return 1 if INSN is an unconditional jump and nothing else. */
1067 simplejump_p (insn)
1068 rtx insn;
1070 return (GET_CODE (insn) == JUMP_INSN
1071 && GET_CODE (PATTERN (insn)) == SET
1072 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1073 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1076 /* Return nonzero if INSN is a (possibly) conditional jump
1077 and nothing more.
1079 Use this function is deprecated, since we need to support combined
1080 branch and compare insns. Use any_condjump_p instead whenever possible. */
1083 condjump_p (insn)
1084 rtx insn;
1086 rtx x = PATTERN (insn);
1088 if (GET_CODE (x) != SET
1089 || GET_CODE (SET_DEST (x)) != PC)
1090 return 0;
1092 x = SET_SRC (x);
1093 if (GET_CODE (x) == LABEL_REF)
1094 return 1;
1095 else
1096 return (GET_CODE (x) == IF_THEN_ELSE
1097 && ((GET_CODE (XEXP (x, 2)) == PC
1098 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1099 || GET_CODE (XEXP (x, 1)) == RETURN))
1100 || (GET_CODE (XEXP (x, 1)) == PC
1101 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1102 || GET_CODE (XEXP (x, 2)) == RETURN))));
1104 return 0;
1107 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1108 PARALLEL.
1110 Use this function is deprecated, since we need to support combined
1111 branch and compare insns. Use any_condjump_p instead whenever possible. */
1114 condjump_in_parallel_p (insn)
1115 rtx insn;
1117 rtx x = PATTERN (insn);
1119 if (GET_CODE (x) != PARALLEL)
1120 return 0;
1121 else
1122 x = XVECEXP (x, 0, 0);
1124 if (GET_CODE (x) != SET)
1125 return 0;
1126 if (GET_CODE (SET_DEST (x)) != PC)
1127 return 0;
1128 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1129 return 1;
1130 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1131 return 0;
1132 if (XEXP (SET_SRC (x), 2) == pc_rtx
1133 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1134 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1135 return 1;
1136 if (XEXP (SET_SRC (x), 1) == pc_rtx
1137 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1138 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1139 return 1;
1140 return 0;
1143 /* Return set of PC, otherwise NULL. */
1146 pc_set (insn)
1147 rtx insn;
1149 rtx pat;
1150 if (GET_CODE (insn) != JUMP_INSN)
1151 return NULL_RTX;
1152 pat = PATTERN (insn);
1154 /* The set is allowed to appear either as the insn pattern or
1155 the first set in a PARALLEL. */
1156 if (GET_CODE (pat) == PARALLEL)
1157 pat = XVECEXP (pat, 0, 0);
1158 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1159 return pat;
1161 return NULL_RTX;
1164 /* Return true when insn is an unconditional direct jump,
1165 possibly bundled inside a PARALLEL. */
1168 any_uncondjump_p (insn)
1169 rtx insn;
1171 rtx x = pc_set (insn);
1172 if (!x)
1173 return 0;
1174 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1175 return 0;
1176 return 1;
1179 /* Return true when insn is a conditional jump. This function works for
1180 instructions containing PC sets in PARALLELs. The instruction may have
1181 various other effects so before removing the jump you must verify
1182 onlyjump_p.
1184 Note that unlike condjump_p it returns false for unconditional jumps. */
1187 any_condjump_p (insn)
1188 rtx insn;
1190 rtx x = pc_set (insn);
1191 enum rtx_code a, b;
1193 if (!x)
1194 return 0;
1195 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1196 return 0;
1198 a = GET_CODE (XEXP (SET_SRC (x), 1));
1199 b = GET_CODE (XEXP (SET_SRC (x), 2));
1201 return ((b == PC && (a == LABEL_REF || a == RETURN))
1202 || (a == PC && (b == LABEL_REF || b == RETURN)));
1205 /* Return the label of a conditional jump. */
1208 condjump_label (insn)
1209 rtx insn;
1211 rtx x = pc_set (insn);
1213 if (!x)
1214 return NULL_RTX;
1215 x = SET_SRC (x);
1216 if (GET_CODE (x) == LABEL_REF)
1217 return x;
1218 if (GET_CODE (x) != IF_THEN_ELSE)
1219 return NULL_RTX;
1220 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1221 return XEXP (x, 1);
1222 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1223 return XEXP (x, 2);
1224 return NULL_RTX;
1227 /* Return true if INSN is a (possibly conditional) return insn. */
1229 static int
1230 returnjump_p_1 (loc, data)
1231 rtx *loc;
1232 void *data ATTRIBUTE_UNUSED;
1234 rtx x = *loc;
1236 return x && (GET_CODE (x) == RETURN
1237 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
1241 returnjump_p (insn)
1242 rtx insn;
1244 if (GET_CODE (insn) != JUMP_INSN)
1245 return 0;
1246 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1249 /* Return true if INSN is a jump that only transfers control and
1250 nothing more. */
1253 onlyjump_p (insn)
1254 rtx insn;
1256 rtx set;
1258 if (GET_CODE (insn) != JUMP_INSN)
1259 return 0;
1261 set = single_set (insn);
1262 if (set == NULL)
1263 return 0;
1264 if (GET_CODE (SET_DEST (set)) != PC)
1265 return 0;
1266 if (side_effects_p (SET_SRC (set)))
1267 return 0;
1269 return 1;
#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (x)
     rtx x;
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (x)
     rtx x;
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;

      /* Look at every SET in the PARALLEL: note whether cc0 is set
	 and whether anything else is set.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
1328 /* Follow any unconditional jump at LABEL;
1329 return the ultimate label reached by any such chain of jumps.
1330 If LABEL is not followed by a jump, return LABEL.
1331 If the chain loops or we can't find end, return LABEL,
1332 since that tells caller to avoid changing the insn.
1334 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1335 a USE or CLOBBER. */
1338 follow_jumps (label)
1339 rtx label;
1341 rtx insn;
1342 rtx next;
1343 rtx value = label;
1344 int depth;
1346 for (depth = 0;
1347 (depth < 10
1348 && (insn = next_active_insn (value)) != 0
1349 && GET_CODE (insn) == JUMP_INSN
1350 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1351 && onlyjump_p (insn))
1352 || GET_CODE (PATTERN (insn)) == RETURN)
1353 && (next = NEXT_INSN (insn))
1354 && GET_CODE (next) == BARRIER);
1355 depth++)
1357 /* Don't chain through the insn that jumps into a loop
1358 from outside the loop,
1359 since that would create multiple loop entry jumps
1360 and prevent loop optimization. */
1361 rtx tem;
1362 if (!reload_completed)
1363 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1364 if (GET_CODE (tem) == NOTE
1365 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1366 /* ??? Optional. Disables some optimizations, but makes
1367 gcov output more accurate with -O. */
1368 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1369 return value;
1371 /* If we have found a cycle, make the insn jump to itself. */
1372 if (JUMP_LABEL (insn) == label)
1373 return label;
1375 tem = next_active_insn (JUMP_LABEL (insn));
1376 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1377 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1378 break;
1380 value = JUMP_LABEL (insn);
1382 if (depth == 10)
1383 return label;
1384 return value;
1388 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1389 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1390 in INSN, then store one of them in JUMP_LABEL (INSN).
1391 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1392 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1393 Also, when there are consecutive labels, canonicalize on the last of them.
1395 Note that two labels separated by a loop-beginning note
1396 must be kept distinct if we have not yet done loop-optimization,
1397 because the gap between them is where loop-optimize
1398 will want to move invariant code to. CROSS_JUMP tells us
1399 that loop-optimization is done with. */
1401 void
1402 mark_jump_label (x, insn, in_mem)
1403 rtx x;
1404 rtx insn;
1405 int in_mem;
1407 RTX_CODE code = GET_CODE (x);
1408 int i;
1409 const char *fmt;
1411 switch (code)
1413 case PC:
1414 case CC0:
1415 case REG:
1416 case CONST_INT:
1417 case CONST_DOUBLE:
1418 case CLOBBER:
1419 case CALL:
1420 return;
1422 case MEM:
1423 in_mem = 1;
1424 break;
1426 case SYMBOL_REF:
1427 if (!in_mem)
1428 return;
1430 /* If this is a constant-pool reference, see if it is a label. */
1431 if (CONSTANT_POOL_ADDRESS_P (x))
1432 mark_jump_label (get_pool_constant (x), insn, in_mem);
1433 break;
1435 case LABEL_REF:
1437 rtx label = XEXP (x, 0);
1439 /* Ignore remaining references to unreachable labels that
1440 have been deleted. */
1441 if (GET_CODE (label) == NOTE
1442 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1443 break;
1445 if (GET_CODE (label) != CODE_LABEL)
1446 abort ();
1448 /* Ignore references to labels of containing functions. */
1449 if (LABEL_REF_NONLOCAL_P (x))
1450 break;
1452 XEXP (x, 0) = label;
1453 if (! insn || ! INSN_DELETED_P (insn))
1454 ++LABEL_NUSES (label);
1456 if (insn)
1458 if (GET_CODE (insn) == JUMP_INSN)
1459 JUMP_LABEL (insn) = label;
1460 else
1462 /* Add a REG_LABEL note for LABEL unless there already
1463 is one. All uses of a label, except for labels
1464 that are the targets of jumps, must have a
1465 REG_LABEL note. */
1466 if (! find_reg_note (insn, REG_LABEL, label))
1467 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1468 REG_NOTES (insn));
1471 return;
1474 /* Do walk the labels in a vector, but not the first operand of an
1475 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1476 case ADDR_VEC:
1477 case ADDR_DIFF_VEC:
1478 if (! INSN_DELETED_P (insn))
1480 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1482 for (i = 0; i < XVECLEN (x, eltnum); i++)
1483 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1485 return;
1487 default:
1488 break;
1491 fmt = GET_RTX_FORMAT (code);
1492 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1494 if (fmt[i] == 'e')
1495 mark_jump_label (XEXP (x, i), insn, in_mem);
1496 else if (fmt[i] == 'E')
1498 int j;
1499 for (j = 0; j < XVECLEN (x, i); j++)
1500 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1505 /* If all INSN does is set the pc, delete it,
1506 and delete the insn that set the condition codes for it
1507 if that's what the previous thing was. */
1509 void
1510 delete_jump (insn)
1511 rtx insn;
1513 rtx set = single_set (insn);
1515 if (set && GET_CODE (SET_DEST (set)) == PC)
1516 delete_computation (insn);
1519 /* Verify INSN is a BARRIER and delete it. */
1521 void
1522 delete_barrier (insn)
1523 rtx insn;
1525 if (GET_CODE (insn) != BARRIER)
1526 abort ();
1528 delete_insn (insn);
1531 /* Recursively delete prior insns that compute the value (used only by INSN
1532 which the caller is deleting) stored in the register mentioned by NOTE
1533 which is a REG_DEAD note associated with INSN. */
1535 static void
1536 delete_prior_computation (note, insn)
1537 rtx note;
1538 rtx insn;
1540 rtx our_prev;
1541 rtx reg = XEXP (note, 0);
1543 for (our_prev = prev_nonnote_insn (insn);
1544 our_prev && (GET_CODE (our_prev) == INSN
1545 || GET_CODE (our_prev) == CALL_INSN);
1546 our_prev = prev_nonnote_insn (our_prev))
1548 rtx pat = PATTERN (our_prev);
1550 /* If we reach a CALL which is not calling a const function
1551 or the callee pops the arguments, then give up. */
1552 if (GET_CODE (our_prev) == CALL_INSN
1553 && (! CONST_OR_PURE_CALL_P (our_prev)
1554 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1555 break;
1557 /* If we reach a SEQUENCE, it is too complex to try to
1558 do anything with it, so give up. We can be run during
1559 and after reorg, so SEQUENCE rtl can legitimately show
1560 up here. */
1561 if (GET_CODE (pat) == SEQUENCE)
1562 break;
1564 if (GET_CODE (pat) == USE
1565 && GET_CODE (XEXP (pat, 0)) == INSN)
1566 /* reorg creates USEs that look like this. We leave them
1567 alone because reorg needs them for its own purposes. */
1568 break;
1570 if (reg_set_p (reg, pat))
1572 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1573 break;
1575 if (GET_CODE (pat) == PARALLEL)
1577 /* If we find a SET of something else, we can't
1578 delete the insn. */
1580 int i;
1582 for (i = 0; i < XVECLEN (pat, 0); i++)
1584 rtx part = XVECEXP (pat, 0, i);
1586 if (GET_CODE (part) == SET
1587 && SET_DEST (part) != reg)
1588 break;
1591 if (i == XVECLEN (pat, 0))
1592 delete_computation (our_prev);
1594 else if (GET_CODE (pat) == SET
1595 && GET_CODE (SET_DEST (pat)) == REG)
1597 int dest_regno = REGNO (SET_DEST (pat));
1598 int dest_endregno
1599 = (dest_regno
1600 + (dest_regno < FIRST_PSEUDO_REGISTER
1601 ? HARD_REGNO_NREGS (dest_regno,
1602 GET_MODE (SET_DEST (pat))) : 1));
1603 int regno = REGNO (reg);
1604 int endregno
1605 = (regno
1606 + (regno < FIRST_PSEUDO_REGISTER
1607 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1609 if (dest_regno >= regno
1610 && dest_endregno <= endregno)
1611 delete_computation (our_prev);
1613 /* We may have a multi-word hard register and some, but not
1614 all, of the words of the register are needed in subsequent
1615 insns. Write REG_UNUSED notes for those parts that were not
1616 needed. */
1617 else if (dest_regno <= regno
1618 && dest_endregno >= endregno)
1620 int i;
1622 REG_NOTES (our_prev)
1623 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1624 REG_NOTES (our_prev));
1626 for (i = dest_regno; i < dest_endregno; i++)
1627 if (! find_regno_note (our_prev, REG_UNUSED, i))
1628 break;
1630 if (i == dest_endregno)
1631 delete_computation (our_prev);
1635 break;
1638 /* If PAT references the register that dies here, it is an
1639 additional use. Hence any prior SET isn't dead. However, this
1640 insn becomes the new place for the REG_DEAD note. */
1641 if (reg_overlap_mentioned_p (reg, pat))
1643 XEXP (note, 1) = REG_NOTES (our_prev);
1644 REG_NOTES (our_prev) = note;
1645 break;
1650 /* Delete INSN and recursively delete insns that compute values used only
1651 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1652 If we are running before flow.c, we need do nothing since flow.c will
1653 delete dead code. We also can't know if the registers being used are
1654 dead or not at this point.
1656 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1657 nothing other than set a register that dies in this insn, we can delete
1658 that insn as well.
1660 On machines with CC0, if CC0 is used in this insn, we may be able to
1661 delete the insn that set it. */
1663 static void
1664 delete_computation (insn)
1665 rtx insn;
1667 rtx note, next;
1669 #ifdef HAVE_cc0
1670 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1672 rtx prev = prev_nonnote_insn (insn);
1673 /* We assume that at this stage
1674 CC's are always set explicitly
1675 and always immediately before the jump that
1676 will use them. So if the previous insn
1677 exists to set the CC's, delete it
1678 (unless it performs auto-increments, etc.). */
1679 if (prev && GET_CODE (prev) == INSN
1680 && sets_cc0_p (PATTERN (prev)))
1682 if (sets_cc0_p (PATTERN (prev)) > 0
1683 && ! side_effects_p (PATTERN (prev)))
1684 delete_computation (prev);
1685 else
1686 /* Otherwise, show that cc0 won't be used. */
1687 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1688 cc0_rtx, REG_NOTES (prev));
1691 #endif
1693 for (note = REG_NOTES (insn); note; note = next)
1695 next = XEXP (note, 1);
1697 if (REG_NOTE_KIND (note) != REG_DEAD
1698 /* Verify that the REG_NOTE is legitimate. */
1699 || GET_CODE (XEXP (note, 0)) != REG)
1700 continue;
1702 delete_prior_computation (note, insn);
1705 delete_related_insns (insn);
1708 /* Delete insn INSN from the chain of insns and update label ref counts
1709 and delete insns now unreachable.
1711 Returns the first insn after INSN that was not deleted.
1713 Usage of this instruction is deprecated. Use delete_insn instead and
1714 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1717 delete_related_insns (insn)
1718 rtx insn;
1720 int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1721 rtx note;
1722 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1724 while (next && INSN_DELETED_P (next))
1725 next = NEXT_INSN (next);
1727 /* This insn is already deleted => return first following nondeleted. */
1728 if (INSN_DELETED_P (insn))
1729 return next;
1731 delete_insn (insn);
1733 /* If instruction is followed by a barrier,
1734 delete the barrier too. */
1736 if (next != 0 && GET_CODE (next) == BARRIER)
1737 delete_insn (next);
1739 /* If deleting a jump, decrement the count of the label,
1740 and delete the label if it is now unused. */
1742 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1744 rtx lab = JUMP_LABEL (insn), lab_next;
1746 if (LABEL_NUSES (lab) == 0)
1748 /* This can delete NEXT or PREV,
1749 either directly if NEXT is JUMP_LABEL (INSN),
1750 or indirectly through more levels of jumps. */
1751 delete_related_insns (lab);
1753 /* I feel a little doubtful about this loop,
1754 but I see no clean and sure alternative way
1755 to find the first insn after INSN that is not now deleted.
1756 I hope this works. */
1757 while (next && INSN_DELETED_P (next))
1758 next = NEXT_INSN (next);
1759 return next;
1761 else if ((lab_next = next_nonnote_insn (lab)) != NULL
1762 && GET_CODE (lab_next) == JUMP_INSN
1763 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1764 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1766 /* If we're deleting the tablejump, delete the dispatch table.
1767 We may not be able to kill the label immediately preceding
1768 just yet, as it might be referenced in code leading up to
1769 the tablejump. */
1770 delete_related_insns (lab_next);
1774 /* Likewise if we're deleting a dispatch table. */
1776 if (GET_CODE (insn) == JUMP_INSN
1777 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1778 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1780 rtx pat = PATTERN (insn);
1781 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1782 int len = XVECLEN (pat, diff_vec_p);
1784 for (i = 0; i < len; i++)
1785 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1786 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1787 while (next && INSN_DELETED_P (next))
1788 next = NEXT_INSN (next);
1789 return next;
1792 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1793 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1794 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1795 if (REG_NOTE_KIND (note) == REG_LABEL
1796 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1797 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1798 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1799 delete_related_insns (XEXP (note, 0));
1801 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1802 prev = PREV_INSN (prev);
1804 /* If INSN was a label and a dispatch table follows it,
1805 delete the dispatch table. The tablejump must have gone already.
1806 It isn't useful to fall through into a table. */
1808 if (was_code_label
1809 && NEXT_INSN (insn) != 0
1810 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1811 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1812 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1813 next = delete_related_insns (NEXT_INSN (insn));
1815 /* If INSN was a label, delete insns following it if now unreachable. */
1817 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1819 RTX_CODE code;
1820 while (next != 0
1821 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1822 || code == NOTE || code == BARRIER
1823 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1825 if (code == NOTE
1826 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1827 next = NEXT_INSN (next);
1828 /* Keep going past other deleted labels to delete what follows. */
1829 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1830 next = NEXT_INSN (next);
1831 else
1832 /* Note: if this deletes a jump, it can cause more
1833 deletion of unreachable code, after a different label.
1834 As long as the value from this recursive call is correct,
1835 this invocation functions correctly. */
1836 next = delete_related_insns (next);
1840 return next;
1843 /* Advance from INSN till reaching something not deleted
1844 then return that. May return INSN itself. */
1847 next_nondeleted_insn (insn)
1848 rtx insn;
1850 while (INSN_DELETED_P (insn))
1851 insn = NEXT_INSN (insn);
1852 return insn;
1855 /* Delete a range of insns from FROM to TO, inclusive.
1856 This is for the sake of peephole optimization, so assume
1857 that whatever these insns do will still be done by a new
1858 peephole insn that will replace them. */
1860 void
1861 delete_for_peephole (from, to)
1862 rtx from, to;
1864 rtx insn = from;
1866 while (1)
1868 rtx next = NEXT_INSN (insn);
1869 rtx prev = PREV_INSN (insn);
1871 if (GET_CODE (insn) != NOTE)
1873 INSN_DELETED_P (insn) = 1;
1875 /* Patch this insn out of the chain. */
1876 /* We don't do this all at once, because we
1877 must preserve all NOTEs. */
1878 if (prev)
1879 NEXT_INSN (prev) = next;
1881 if (next)
1882 PREV_INSN (next) = prev;
1885 if (insn == to)
1886 break;
1887 insn = next;
1890 /* Note that if TO is an unconditional jump
1891 we *do not* delete the BARRIER that follows,
1892 since the peephole that replaces this sequence
1893 is also an unconditional jump in that case. */
1896 /* We have determined that INSN is never reached, and are about to
1897 delete it. Print a warning if the user asked for one.
1899 To try to make this warning more useful, this should only be called
1900 once per basic block not reached, and it only warns when the basic
1901 block contains more than one line from the current function, and
1902 contains at least one operation. CSE and inlining can duplicate insns,
1903 so it's possible to get spurious warnings from this. */
1905 void
1906 never_reached_warning (avoided_insn, finish)
1907 rtx avoided_insn, finish;
1909 rtx insn;
1910 rtx a_line_note = NULL;
1911 int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;
1913 if (! warn_notreached)
1914 return;
1916 /* Scan forwards, looking at LINE_NUMBER notes, until
1917 we hit a LABEL or we run out of insns. */
1919 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
1921 if (finish == NULL && GET_CODE (insn) == CODE_LABEL)
1922 break;
1924 if (GET_CODE (insn) == NOTE /* A line number note? */
1925 && NOTE_LINE_NUMBER (insn) >= 0)
1927 if (a_line_note == NULL)
1928 a_line_note = insn;
1929 else
1930 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1931 != NOTE_LINE_NUMBER (insn));
1933 else if (INSN_P (insn))
1935 if (reached_end || a_line_note == NULL)
1936 break;
1937 contains_insn = 1;
1940 if (insn == finish)
1941 reached_end = 1;
1943 if (two_avoided_lines && contains_insn)
1944 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1945 NOTE_LINE_NUMBER (a_line_note),
1946 "will never be executed");
1949 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1950 NLABEL as a return. Accrue modifications into the change group. */
1952 static void
1953 redirect_exp_1 (loc, olabel, nlabel, insn)
1954 rtx *loc;
1955 rtx olabel, nlabel;
1956 rtx insn;
1958 rtx x = *loc;
1959 RTX_CODE code = GET_CODE (x);
1960 int i;
1961 const char *fmt;
1963 if (code == LABEL_REF)
1965 if (XEXP (x, 0) == olabel)
1967 rtx n;
1968 if (nlabel)
1969 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1970 else
1971 n = gen_rtx_RETURN (VOIDmode);
1973 validate_change (insn, loc, n, 1);
1974 return;
1977 else if (code == RETURN && olabel == 0)
1979 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1980 if (loc == &PATTERN (insn))
1981 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1982 validate_change (insn, loc, x, 1);
1983 return;
1986 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1987 && GET_CODE (SET_SRC (x)) == LABEL_REF
1988 && XEXP (SET_SRC (x), 0) == olabel)
1990 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1991 return;
1994 fmt = GET_RTX_FORMAT (code);
1995 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1997 if (fmt[i] == 'e')
1998 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1999 else if (fmt[i] == 'E')
2001 int j;
2002 for (j = 0; j < XVECLEN (x, i); j++)
2003 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
2008 /* Similar, but apply the change group and report success or failure. */
2010 static int
2011 redirect_exp (olabel, nlabel, insn)
2012 rtx olabel, nlabel;
2013 rtx insn;
2015 rtx *loc;
2017 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2018 loc = &XVECEXP (PATTERN (insn), 0, 0);
2019 else
2020 loc = &PATTERN (insn);
2022 redirect_exp_1 (loc, olabel, nlabel, insn);
2023 if (num_validated_changes () == 0)
2024 return 0;
2026 return apply_change_group ();
2029 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2030 the modifications into the change group. Return false if we did
2031 not see how to do that. */
2034 redirect_jump_1 (jump, nlabel)
2035 rtx jump, nlabel;
2037 int ochanges = num_validated_changes ();
2038 rtx *loc;
2040 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2041 loc = &XVECEXP (PATTERN (jump), 0, 0);
2042 else
2043 loc = &PATTERN (jump);
2045 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2046 return num_validated_changes () > ochanges;
2049 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2050 jump target label is unused as a result, it and the code following
2051 it may be deleted.
2053 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2054 RETURN insn.
2056 The return value will be 1 if the change was made, 0 if it wasn't
2057 (this can only occur for NLABEL == 0). */
2060 redirect_jump (jump, nlabel, delete_unused)
2061 rtx jump, nlabel;
2062 int delete_unused;
2064 rtx olabel = JUMP_LABEL (jump);
2066 if (nlabel == olabel)
2067 return 1;
2069 if (! redirect_exp (olabel, nlabel, jump))
2070 return 0;
2072 JUMP_LABEL (jump) = nlabel;
2073 if (nlabel)
2074 ++LABEL_NUSES (nlabel);
2076 /* If we're eliding the jump over exception cleanups at the end of a
2077 function, move the function end note so that -Wreturn-type works. */
2078 if (olabel && nlabel
2079 && NEXT_INSN (olabel)
2080 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2081 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2082 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2084 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
2085 /* Undefined labels will remain outside the insn stream. */
2086 && INSN_UID (olabel))
2087 delete_related_insns (olabel);
2089 return 1;
2092 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2093 Accrue the modifications into the change group. */
2095 static void
2096 invert_exp_1 (insn)
2097 rtx insn;
2099 RTX_CODE code;
2100 rtx x = pc_set (insn);
2102 if (!x)
2103 abort ();
2104 x = SET_SRC (x);
2106 code = GET_CODE (x);
2108 if (code == IF_THEN_ELSE)
2110 rtx comp = XEXP (x, 0);
2111 rtx tem;
2112 enum rtx_code reversed_code;
2114 /* We can do this in two ways: The preferable way, which can only
2115 be done if this is not an integer comparison, is to reverse
2116 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2117 of the IF_THEN_ELSE. If we can't do either, fail. */
2119 reversed_code = reversed_comparison_code (comp, insn);
2121 if (reversed_code != UNKNOWN)
2123 validate_change (insn, &XEXP (x, 0),
2124 gen_rtx_fmt_ee (reversed_code,
2125 GET_MODE (comp), XEXP (comp, 0),
2126 XEXP (comp, 1)),
2128 return;
2131 tem = XEXP (x, 1);
2132 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2133 validate_change (insn, &XEXP (x, 2), tem, 1);
2135 else
2136 abort ();
2139 /* Invert the jump condition of conditional jump insn, INSN.
2141 Return 1 if we can do so, 0 if we cannot find a way to do so that
2142 matches a pattern. */
2144 static int
2145 invert_exp (insn)
2146 rtx insn;
2148 invert_exp_1 (insn);
2149 if (num_validated_changes () == 0)
2150 return 0;
2152 return apply_change_group ();
2155 /* Invert the condition of the jump JUMP, and make it jump to label
2156 NLABEL instead of where it jumps now. Accrue changes into the
2157 change group. Return false if we didn't see how to perform the
2158 inversion and redirection. */
2161 invert_jump_1 (jump, nlabel)
2162 rtx jump, nlabel;
2164 int ochanges;
2166 ochanges = num_validated_changes ();
2167 invert_exp_1 (jump);
2168 if (num_validated_changes () == ochanges)
2169 return 0;
2171 return redirect_jump_1 (jump, nlabel);
2174 /* Invert the condition of the jump JUMP, and make it jump to label
2175 NLABEL instead of where it jumps now. Return true if successful. */
2178 invert_jump (jump, nlabel, delete_unused)
2179 rtx jump, nlabel;
2180 int delete_unused;
2182 /* We have to either invert the condition and change the label or
2183 do neither. Either operation could fail. We first try to invert
2184 the jump. If that succeeds, we try changing the label. If that fails,
2185 we invert the jump back to what it was. */
2187 if (! invert_exp (jump))
2188 return 0;
2190 if (redirect_jump (jump, nlabel, delete_unused))
2192 invert_br_probabilities (jump);
2194 return 1;
2197 if (! invert_exp (jump))
2198 /* This should just be putting it back the way it was. */
2199 abort ();
2201 return 0;
2205 /* Like rtx_equal_p except that it considers two REGs as equal
2206 if they renumber to the same value and considers two commutative
2207 operations to be the same if the order of the operands has been
2208 reversed.
2210 ??? Addition is not commutative on the PA due to the weird implicit
2211 space register selection rules for memory addresses. Therefore, we
2212 don't consider a + b == b + a.
2214 We could/should make this test a little tighter. Possibly only
2215 disabling it on the PA via some backend macro or only disabling this
2216 case when the PLUS is inside a MEM. */
2219 rtx_renumbered_equal_p (x, y)
2220 rtx x, y;
2222 int i;
2223 RTX_CODE code = GET_CODE (x);
2224 const char *fmt;
2226 if (x == y)
2227 return 1;
2229 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2230 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2231 && GET_CODE (SUBREG_REG (y)) == REG)))
2233 int reg_x = -1, reg_y = -1;
2234 int byte_x = 0, byte_y = 0;
2236 if (GET_MODE (x) != GET_MODE (y))
2237 return 0;
2239 /* If we haven't done any renumbering, don't
2240 make any assumptions. */
2241 if (reg_renumber == 0)
2242 return rtx_equal_p (x, y);
2244 if (code == SUBREG)
2246 reg_x = REGNO (SUBREG_REG (x));
2247 byte_x = SUBREG_BYTE (x);
2249 if (reg_renumber[reg_x] >= 0)
2251 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2252 GET_MODE (SUBREG_REG (x)),
2253 byte_x,
2254 GET_MODE (x));
2255 byte_x = 0;
2258 else
2260 reg_x = REGNO (x);
2261 if (reg_renumber[reg_x] >= 0)
2262 reg_x = reg_renumber[reg_x];
2265 if (GET_CODE (y) == SUBREG)
2267 reg_y = REGNO (SUBREG_REG (y));
2268 byte_y = SUBREG_BYTE (y);
2270 if (reg_renumber[reg_y] >= 0)
2272 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2273 GET_MODE (SUBREG_REG (y)),
2274 byte_y,
2275 GET_MODE (y));
2276 byte_y = 0;
2279 else
2281 reg_y = REGNO (y);
2282 if (reg_renumber[reg_y] >= 0)
2283 reg_y = reg_renumber[reg_y];
2286 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2289 /* Now we have disposed of all the cases
2290 in which different rtx codes can match. */
2291 if (code != GET_CODE (y))
2292 return 0;
2294 switch (code)
2296 case PC:
2297 case CC0:
2298 case ADDR_VEC:
2299 case ADDR_DIFF_VEC:
2300 return 0;
2302 case CONST_INT:
2303 return INTVAL (x) == INTVAL (y);
2305 case LABEL_REF:
2306 /* We can't assume nonlocal labels have their following insns yet. */
2307 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2308 return XEXP (x, 0) == XEXP (y, 0);
2310 /* Two label-refs are equivalent if they point at labels
2311 in the same position in the instruction stream. */
2312 return (next_real_insn (XEXP (x, 0))
2313 == next_real_insn (XEXP (y, 0)));
2315 case SYMBOL_REF:
2316 return XSTR (x, 0) == XSTR (y, 0);
2318 case CODE_LABEL:
2319 /* If we didn't match EQ equality above, they aren't the same. */
2320 return 0;
2322 default:
2323 break;
2326 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2328 if (GET_MODE (x) != GET_MODE (y))
2329 return 0;
2331 /* For commutative operations, the RTX match if the operand match in any
2332 order. Also handle the simple binary and unary cases without a loop.
2334 ??? Don't consider PLUS a commutative operator; see comments above. */
2335 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2336 && code != PLUS)
2337 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2338 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2339 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2340 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2341 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2342 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2343 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2344 else if (GET_RTX_CLASS (code) == '1')
2345 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2347 /* Compare the elements. If any pair of corresponding elements
2348 fail to match, return 0 for the whole things. */
2350 fmt = GET_RTX_FORMAT (code);
2351 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2353 int j;
2354 switch (fmt[i])
2356 case 'w':
2357 if (XWINT (x, i) != XWINT (y, i))
2358 return 0;
2359 break;
2361 case 'i':
2362 if (XINT (x, i) != XINT (y, i))
2363 return 0;
2364 break;
2366 case 't':
2367 if (XTREE (x, i) != XTREE (y, i))
2368 return 0;
2369 break;
2371 case 's':
2372 if (strcmp (XSTR (x, i), XSTR (y, i)))
2373 return 0;
2374 break;
2376 case 'e':
2377 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2378 return 0;
2379 break;
2381 case 'u':
2382 if (XEXP (x, i) != XEXP (y, i))
2383 return 0;
2384 /* fall through. */
2385 case '0':
2386 break;
2388 case 'E':
2389 if (XVECLEN (x, i) != XVECLEN (y, i))
2390 return 0;
2391 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2392 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2393 return 0;
2394 break;
2396 default:
2397 abort ();
2400 return 1;
2403 /* If X is a hard register or equivalent to one or a subregister of one,
2404 return the hard register number. If X is a pseudo register that was not
2405 assigned a hard register, return the pseudo register number. Otherwise,
2406 return -1. Any rtx is valid for X. */
2409 true_regnum (x)
2410 rtx x;
2412 if (GET_CODE (x) == REG)
2414 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2415 return reg_renumber[REGNO (x)];
2416 return REGNO (x);
2418 if (GET_CODE (x) == SUBREG)
2420 int base = true_regnum (SUBREG_REG (x));
2421 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2422 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2423 GET_MODE (SUBREG_REG (x)),
2424 SUBREG_BYTE (x), GET_MODE (x));
2426 return -1;
2429 /* Return regno of the register REG and handle subregs too. */
2430 unsigned int
2431 reg_or_subregno (reg)
2432 rtx reg;
2434 if (REG_P (reg))
2435 return REGNO (reg);
2436 if (GET_CODE (reg) == SUBREG)
2437 return REGNO (SUBREG_REG (reg));
2438 abort ();