1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This is the pathetic reminder of the old fame of the jump-optimization pass
23 of the compiler. Now it basically contains a set of utility functions to
24 operate on jumps.
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
37 #include "config.h"
38 #include "system.h"
39 #include "rtl.h"
40 #include "tm_p.h"
41 #include "flags.h"
42 #include "hard-reg-set.h"
43 #include "regs.h"
44 #include "insn-config.h"
45 #include "insn-attr.h"
46 #include "recog.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "real.h"
50 #include "except.h"
51 #include "toplev.h"
52 #include "reload.h"
53 #include "predict.h"
55 /* Optimize jump y; x: ... y: jumpif... x?
56 Don't know if it is worth bothering with. */
57 /* Optimize two cases of conditional jump to conditional jump?
58 This can never delete any instruction or make anything dead,
59 or even change what is live at any point.
60 So perhaps let combiner do it. */
62 static int init_label_info PARAMS ((rtx));
63 static void mark_all_labels PARAMS ((rtx));
64 static int duplicate_loop_exit_test PARAMS ((rtx));
65 static void delete_computation PARAMS ((rtx));
66 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
67 static int redirect_exp PARAMS ((rtx, rtx, rtx));
68 static void invert_exp_1 PARAMS ((rtx));
69 static int invert_exp PARAMS ((rtx));
70 static int returnjump_p_1 PARAMS ((rtx *, void *));
71 static void delete_prior_computation PARAMS ((rtx, rtx));
73 /* Alternate entry into the jump optimizer. This entry point only rebuilds
74 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
75 instructions. */
76 void
77 rebuild_jump_labels (f)
78 rtx f;
80 rtx insn;
81 int max_uid = 0;
83 max_uid = init_label_info (f) + 1;
85 mark_all_labels (f);
87 /* Keep track of labels used from static data; we don't track them
88 closely enough to delete them here, so make sure their reference
89 count doesn't drop to zero. */
91 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
92 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
93 LABEL_NUSES (XEXP (insn, 0))++;
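/* Editorial example -- an illustrative sketch, not part of the original
   jump.c.  A pass that has rewritten the insn chain would typically refresh
   the label bookkeeping described above like this (get_insns, from
   emit-rtl.c, returns the head of the current insn chain).  */
#if 0
  /* Recompute JUMP_LABEL fields and LABEL_NUSES counts after editing.  */
  rebuild_jump_labels (get_insns ());
#endif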
96 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
97 non-fallthru insn. This is not generally true, as multiple barriers
98 may have crept in, or the BARRIER may be separated from the last
99 real insn by one or more NOTEs.
101 This simple pass moves barriers and removes duplicates so that the
102 old code is happy. */
104 void
105 cleanup_barriers ()
107 rtx insn, next, prev;
108 for (insn = get_insns (); insn; insn = next)
110 next = NEXT_INSN (insn);
111 if (GET_CODE (insn) == BARRIER)
113 prev = prev_nonnote_insn (insn);
114 if (GET_CODE (prev) == BARRIER)
115 delete_barrier (insn);
116 else if (prev != PREV_INSN (insn))
117 reorder_insns (insn, insn, prev);
122 void
123 copy_loop_headers (f)
124 rtx f;
126 rtx insn, next;
127 /* Now iterate optimizing jumps until nothing changes over one pass. */
128 for (insn = f; insn; insn = next)
130 rtx temp, temp1;
132 next = NEXT_INSN (insn);
134 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
135 jump. Try to optimize by duplicating the loop exit test if so.
136 This is only safe immediately after regscan, because it uses
137 the values of regno_first_uid and regno_last_uid. */
138 if (GET_CODE (insn) == NOTE
139 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
140 && (temp1 = next_nonnote_insn (insn)) != 0
141 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
143 temp = PREV_INSN (insn);
144 if (duplicate_loop_exit_test (insn))
146 next = NEXT_INSN (temp);
152 void
153 purge_line_number_notes (f)
154 rtx f;
156 rtx last_note = 0;
157 rtx insn;
158 /* Delete extraneous line number notes.
159 Note that two consecutive notes for different lines are not really
160 extraneous. There should be some indication where that line belonged,
161 even if it became empty. */
163 for (insn = f; insn; insn = NEXT_INSN (insn))
164 if (GET_CODE (insn) == NOTE)
166 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
167 /* Any previous line note was for the prologue; gdb wants a new
168 note after the prologue even if it is for the same line. */
169 last_note = NULL_RTX;
170 else if (NOTE_LINE_NUMBER (insn) >= 0)
172 /* Delete this note if it is identical to previous note. */
173 if (last_note
174 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
175 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
177 delete_related_insns (insn);
178 continue;
181 last_note = insn;
186 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
187 notes whose labels don't occur in the insn any more. Returns the
188 largest INSN_UID found. */
189 static int
190 init_label_info (f)
191 rtx f;
193 int largest_uid = 0;
194 rtx insn;
196 for (insn = f; insn; insn = NEXT_INSN (insn))
198 if (GET_CODE (insn) == CODE_LABEL)
199 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
200 else if (GET_CODE (insn) == JUMP_INSN)
201 JUMP_LABEL (insn) = 0;
202 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
204 rtx note, next;
206 for (note = REG_NOTES (insn); note; note = next)
208 next = XEXP (note, 1);
209 if (REG_NOTE_KIND (note) == REG_LABEL
210 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
211 remove_note (insn, note);
214 if (INSN_UID (insn) > largest_uid)
215 largest_uid = INSN_UID (insn);
218 return largest_uid;
221 /* Mark the label each jump jumps to.
222 Combine consecutive labels, and count uses of labels. */
224 static void
225 mark_all_labels (f)
226 rtx f;
228 rtx insn;
230 for (insn = f; insn; insn = NEXT_INSN (insn))
231 if (INSN_P (insn))
233 if (GET_CODE (insn) == CALL_INSN
234 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
236 mark_all_labels (XEXP (PATTERN (insn), 0));
237 mark_all_labels (XEXP (PATTERN (insn), 1));
238 mark_all_labels (XEXP (PATTERN (insn), 2));
240 /* Canonicalize the tail recursion label attached to the
241 CALL_PLACEHOLDER insn. */
242 if (XEXP (PATTERN (insn), 3))
244 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
245 XEXP (PATTERN (insn), 3));
246 mark_jump_label (label_ref, insn, 0);
247 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
250 continue;
253 mark_jump_label (PATTERN (insn), insn, 0);
254 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
256 /* When we know the LABEL_REF contained in a REG used in
257 an indirect jump, we'll have a REG_LABEL note so that
258 flow can tell where it's going. */
259 if (JUMP_LABEL (insn) == 0)
261 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
262 if (label_note)
264 /* Put a LABEL_REF around the REG_LABEL note, so
265 that we can canonicalize it. */
266 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
267 XEXP (label_note, 0));
269 mark_jump_label (label_ref, insn, 0);
270 XEXP (label_note, 0) = XEXP (label_ref, 0);
271 JUMP_LABEL (insn) = XEXP (label_note, 0);
278 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
279 jump. Assume that this unconditional jump is to the exit test code. If
280 the code is sufficiently simple, make a copy of it before INSN,
281 followed by a jump to the exit of the loop. Then delete the unconditional
282 jump after INSN.
284 Return 1 if we made the change, else 0.
286 This is only safe immediately after a regscan pass because it uses the
287 values of regno_first_uid and regno_last_uid. */
289 static int
290 duplicate_loop_exit_test (loop_start)
291 rtx loop_start;
293 rtx insn, set, reg, p, link;
294 rtx copy = 0, first_copy = 0;
295 int num_insns = 0;
296 rtx exitcode = NEXT_INSN (JUMP_LABEL (next_nonnote_insn (loop_start)));
297 rtx lastexit;
298 int max_reg = max_reg_num ();
299 rtx *reg_map = 0;
300 rtx loop_pre_header_label;
302 /* Scan the exit code. We do not perform this optimization if any insn:
304 is a CALL_INSN
305 is a CODE_LABEL
306 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
307 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
309 We also do not do this if we find an insn with ASM_OPERANDS. While
310 this restriction should not be necessary, copying an insn with
311 ASM_OPERANDS can confuse asm_noperands in some cases.
313 Also, don't do this if the exit code is more than 20 insns. */
315 for (insn = exitcode;
316 insn
317 && ! (GET_CODE (insn) == NOTE
318 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
319 insn = NEXT_INSN (insn))
321 switch (GET_CODE (insn))
323 case CODE_LABEL:
324 case CALL_INSN:
325 return 0;
326 case NOTE:
328 if (optimize < 2
329 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
330 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
331 /* If we were to duplicate this code, we would not move
332 the BLOCK notes, and so debugging the moved code would
333 be difficult. Thus, we only move the code with -O2 or
334 higher. */
335 return 0;
337 break;
338 case JUMP_INSN:
339 case INSN:
340 /* The code below would grossly mishandle REG_WAS_0 notes,
341 so get rid of them here. */
342 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
343 remove_note (insn, p);
344 if (++num_insns > 20
345 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
346 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
347 return 0;
348 break;
349 default:
350 break;
354 /* Unless INSN is zero, we can do the optimization. */
355 if (insn == 0)
356 return 0;
358 lastexit = insn;
360 /* See if any insn sets a register only used in the loop exit code and
361 not a user variable. If so, replace it with a new register. */
362 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
363 if (GET_CODE (insn) == INSN
364 && (set = single_set (insn)) != 0
365 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
366 || (GET_CODE (reg) == SUBREG
367 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
368 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
369 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
371 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
372 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
373 break;
375 if (p != lastexit)
377 /* We can do the replacement. Allocate reg_map if this is the
378 first replacement we found. */
379 if (reg_map == 0)
380 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
382 REG_LOOP_TEST_P (reg) = 1;
384 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
387 loop_pre_header_label = gen_label_rtx ();
389 /* Now copy each insn. */
390 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
392 switch (GET_CODE (insn))
394 case BARRIER:
395 copy = emit_barrier_before (loop_start);
396 break;
397 case NOTE:
398 /* Only copy line-number notes. */
399 if (NOTE_LINE_NUMBER (insn) >= 0)
401 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
402 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
404 break;
406 case INSN:
407 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
408 if (reg_map)
409 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
411 mark_jump_label (PATTERN (copy), copy, 0);
412 INSN_SCOPE (copy) = INSN_SCOPE (insn);
414 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
415 make them. */
416 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
417 if (REG_NOTE_KIND (link) != REG_LABEL)
419 if (GET_CODE (link) == EXPR_LIST)
420 REG_NOTES (copy)
421 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
422 XEXP (link, 0),
423 REG_NOTES (copy)));
424 else
425 REG_NOTES (copy)
426 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
427 XEXP (link, 0),
428 REG_NOTES (copy)));
431 if (reg_map && REG_NOTES (copy))
432 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
433 break;
435 case JUMP_INSN:
436 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
437 loop_start);
438 INSN_SCOPE (copy) = INSN_SCOPE (insn);
439 if (reg_map)
440 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
441 mark_jump_label (PATTERN (copy), copy, 0);
442 if (REG_NOTES (insn))
444 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
445 if (reg_map)
446 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
449 /* Predict conditional jumps that make the loop loop back as taken.
450 Other jumps are probably exit conditions, so predict
451 them as not taken. */
452 if (any_condjump_p (copy))
454 rtx label = JUMP_LABEL (copy);
455 if (label)
457 /* The jump_insn after loop_start should be followed
458 by a barrier and the loopback label. */
459 if (prev_nonnote_insn (label)
460 && (prev_nonnote_insn (prev_nonnote_insn (label))
461 == next_nonnote_insn (loop_start)))
463 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
464 /* To keep the pre-header, we need to redirect all loop
465 entrances to the pre-header label emitted before the LOOP_BEG note. */
466 redirect_jump (copy, loop_pre_header_label, 0);
468 else
469 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
472 break;
474 default:
475 abort ();
478 /* Record the first insn we copied. We need it so that we can
479 scan the copied insns for new pseudo registers. */
480 if (! first_copy)
481 first_copy = copy;
484 /* Now clean up by emitting a jump to the end label and deleting the jump
485 at the start of the loop. */
486 if (! copy || GET_CODE (copy) != BARRIER)
488 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
489 loop_start);
491 /* Record the first insn we copied. We need it so that we can
492 scan the copied insns for new pseudo registers. This may not
493 be strictly necessary since we should have copied at least one
494 insn above. But I am going to be safe. */
495 if (! first_copy)
496 first_copy = copy;
498 mark_jump_label (PATTERN (copy), copy, 0);
499 emit_barrier_before (loop_start);
502 emit_label_before (loop_pre_header_label, loop_start);
504 /* Now scan from the first insn we copied to the last insn we copied
505 (copy) for new pseudo registers. Do this after the code to jump to
506 the end label since that might create a new pseudo too. */
507 reg_scan_update (first_copy, copy, max_reg);
509 /* Mark the exit code as the virtual top of the converted loop. */
510 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
512 delete_related_insns (next_nonnote_insn (loop_start));
514 /* Clean up. */
515 if (reg_map)
516 free (reg_map);
518 return 1;
521 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
522 notes between START and END out before START. START and END may be such
523 notes. Returns the values of the new starting and ending insns, which
524 may be different if the original ones were such notes.
525 Return true if there were only such notes and no real instructions. */
527 bool
528 squeeze_notes (startp, endp)
529 rtx* startp;
530 rtx* endp;
532 rtx start = *startp;
533 rtx end = *endp;
535 rtx insn;
536 rtx next;
537 rtx last = NULL;
538 rtx past_end = NEXT_INSN (end);
540 for (insn = start; insn != past_end; insn = next)
542 next = NEXT_INSN (insn);
543 if (GET_CODE (insn) == NOTE
544 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
545 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
546 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
547 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
548 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
549 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
551 if (insn == start)
552 start = next;
553 else
555 rtx prev = PREV_INSN (insn);
556 PREV_INSN (insn) = PREV_INSN (start);
557 NEXT_INSN (insn) = start;
558 NEXT_INSN (PREV_INSN (insn)) = insn;
559 PREV_INSN (NEXT_INSN (insn)) = insn;
560 NEXT_INSN (prev) = next;
561 PREV_INSN (next) = prev;
564 else
565 last = insn;
568 /* There were no real instructions. */
569 if (start == past_end)
570 return true;
572 end = last;
574 *startp = start;
575 *endp = end;
576 return false;
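/* Editorial example -- an illustrative sketch, not part of the original
   file.  Callers pass the region boundaries by address so squeeze_notes can
   adjust them in place; example_move_block and its arguments are
   hypothetical.  */
#if 0
static void
example_move_block (first, last, after)
     rtx first, last, after;
{
  /* FIRST..LAST is the candidate region; AFTER is its new location.  */
  if (squeeze_notes (&first, &last))
    return;                     /* Only notes -- nothing real to move.  */

  /* FIRST and LAST now delimit real insns; the block/loop notes that were
     inside the region have been moved out in front of the old FIRST.  */
  reorder_insns (first, last, after);
}
#endif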
579 /* Return the label before INSN, or put a new label there. */
582 get_label_before (insn)
583 rtx insn;
585 rtx label;
587 /* Find an existing label at this point
588 or make a new one if there is none. */
589 label = prev_nonnote_insn (insn);
591 if (label == 0 || GET_CODE (label) != CODE_LABEL)
593 rtx prev = PREV_INSN (insn);
595 label = gen_label_rtx ();
596 emit_label_after (label, prev);
597 LABEL_NUSES (label) = 0;
599 return label;
602 /* Return the label after INSN, or put a new label there. */
605 get_label_after (insn)
606 rtx insn;
608 rtx label;
610 /* Find an existing label at this point
611 or make a new one if there is none. */
612 label = next_nonnote_insn (insn);
614 if (label == 0 || GET_CODE (label) != CODE_LABEL)
616 label = gen_label_rtx ();
617 emit_label_after (label, insn);
618 LABEL_NUSES (label) = 0;
620 return label;
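/* Editorial example -- an illustrative sketch, not part of the original
   file.  get_label_after is typically used to manufacture a jump target, as
   duplicate_loop_exit_test does above; example_jump_past and its arguments
   are hypothetical.  */
#if 0
static void
example_jump_past (insn, before)
     rtx insn, before;
{
  /* Emit "jump to the label after INSN" in front of BEFORE, then seal the
     fallthrough with a barrier.  */
  rtx copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
                                    before);
  mark_jump_label (PATTERN (copy), copy, 0);
  emit_barrier_before (before);
}
#endif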
623 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
624 of the reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
625 UNKNOWN may be returned when we have a CC_MODE compare and we don't
626 know whether its source is a floating point or integer comparison. The machine
627 description should define the REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
628 to help this function avoid overhead in these cases. */
629 enum rtx_code
630 reversed_comparison_code_parts (code, arg0, arg1, insn)
631 rtx insn, arg0, arg1;
632 enum rtx_code code;
634 enum machine_mode mode;
636 /* If this is not actually a comparison, we can't reverse it. */
637 if (GET_RTX_CLASS (code) != '<')
638 return UNKNOWN;
640 mode = GET_MODE (arg0);
641 if (mode == VOIDmode)
642 mode = GET_MODE (arg1);
644 /* First see if the machine description supplies us a way to reverse the
645 comparison. Give it priority over everything else to allow the machine
646 description to do tricks. */
647 #ifdef REVERSIBLE_CC_MODE
648 if (GET_MODE_CLASS (mode) == MODE_CC
649 && REVERSIBLE_CC_MODE (mode))
651 #ifdef REVERSE_CONDITION
652 return REVERSE_CONDITION (code, mode);
653 #endif
654 return reverse_condition (code);
656 #endif
658 /* Try a few special cases based on the comparison code. */
659 switch (code)
661 case GEU:
662 case GTU:
663 case LEU:
664 case LTU:
665 case NE:
666 case EQ:
667 /* It is always safe to reverse EQ and NE, even for floating
668 point. Similarly, the unsigned comparisons are never used for
669 floating point, so we can reverse them in the default way. */
670 return reverse_condition (code);
671 case ORDERED:
672 case UNORDERED:
673 case LTGT:
674 case UNEQ:
675 /* If we already see an unordered comparison, we can be sure we are
676 dealing with floating point, so we don't need any more tests. */
677 return reverse_condition_maybe_unordered (code);
678 case UNLT:
679 case UNLE:
680 case UNGT:
681 case UNGE:
682 /* We don't have a safe way to reverse these yet. */
683 return UNKNOWN;
684 default:
685 break;
688 if (GET_MODE_CLASS (mode) == MODE_CC
689 #ifdef HAVE_cc0
690 || arg0 == cc0_rtx
691 #endif
694 rtx prev;
695 /* Try to search for the comparison to determine the real mode.
696 This code is expensive, but with a sane machine description it
697 will never be used, since REVERSIBLE_CC_MODE will return true
698 in all cases. */
699 if (! insn)
700 return UNKNOWN;
702 for (prev = prev_nonnote_insn (insn);
703 prev != 0 && GET_CODE (prev) != CODE_LABEL;
704 prev = prev_nonnote_insn (prev))
706 rtx set = set_of (arg0, prev);
707 if (set && GET_CODE (set) == SET
708 && rtx_equal_p (SET_DEST (set), arg0))
710 rtx src = SET_SRC (set);
712 if (GET_CODE (src) == COMPARE)
714 rtx comparison = src;
715 arg0 = XEXP (src, 0);
716 mode = GET_MODE (arg0);
717 if (mode == VOIDmode)
718 mode = GET_MODE (XEXP (comparison, 1));
719 break;
721 /* We can get past reg-reg moves. This may be useful for models
722 of i387 comparisons that first move flag registers around. */
723 if (REG_P (src))
725 arg0 = src;
726 continue;
729 /* If the register is clobbered in some way we do not understand,
730 give up. */
731 if (set)
732 return UNKNOWN;
736 /* Test for an integer condition, or a floating-point comparison
737 in which NaNs can be ignored. */
738 if (GET_CODE (arg0) == CONST_INT
739 || (GET_MODE (arg0) != VOIDmode
740 && GET_MODE_CLASS (mode) != MODE_CC
741 && !HONOR_NANS (mode)))
742 return reverse_condition (code);
744 return UNKNOWN;
747 /* A wrapper around the previous function that takes COMPARISON as an rtx
748 expression. This simplifies many callers. */
749 enum rtx_code
750 reversed_comparison_code (comparison, insn)
751 rtx comparison, insn;
753 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
754 return UNKNOWN;
755 return reversed_comparison_code_parts (GET_CODE (comparison),
756 XEXP (comparison, 0),
757 XEXP (comparison, 1), insn);
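/* Editorial example -- an illustrative sketch, not part of the original
   file.  This is the pattern invert_exp_1 below follows: fetch the condition
   of a conditional jump, ask for its reversed code, and only queue the
   rewrite when the reversal is known to be safe.  JUMP is assumed to satisfy
   any_condjump_p; the caller would finish with apply_change_group.  */
#if 0
static void
example_queue_reversal (jump)
     rtx jump;
{
  rtx cond = XEXP (SET_SRC (pc_set (jump)), 0);
  enum rtx_code rev = reversed_comparison_code (cond, jump);

  if (rev != UNKNOWN)
    validate_change (jump, &XEXP (SET_SRC (pc_set (jump)), 0),
                     gen_rtx_fmt_ee (rev, GET_MODE (cond),
                                     XEXP (cond, 0), XEXP (cond, 1)),
                     1);
}
#endif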
760 /* Given an rtx-code for a comparison, return the code for the negated
761 comparison. If no such code exists, return UNKNOWN.
763 WATCH OUT! reverse_condition is not safe to use on a jump that might
764 be acting on the results of an IEEE floating point comparison, because
765 of the special treatment of non-signaling NaNs in comparisons.
766 Use reversed_comparison_code instead. */
768 enum rtx_code
769 reverse_condition (code)
770 enum rtx_code code;
772 switch (code)
774 case EQ:
775 return NE;
776 case NE:
777 return EQ;
778 case GT:
779 return LE;
780 case GE:
781 return LT;
782 case LT:
783 return GE;
784 case LE:
785 return GT;
786 case GTU:
787 return LEU;
788 case GEU:
789 return LTU;
790 case LTU:
791 return GEU;
792 case LEU:
793 return GTU;
794 case UNORDERED:
795 return ORDERED;
796 case ORDERED:
797 return UNORDERED;
799 case UNLT:
800 case UNLE:
801 case UNGT:
802 case UNGE:
803 case UNEQ:
804 case LTGT:
805 return UNKNOWN;
807 default:
808 abort ();
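/* Editorial example -- an illustrative sketch, not part of the original
   file.  A few concrete mappings of reverse_condition, written as a sanity
   check.  */
#if 0
static void
example_check_reverse_condition ()
{
  if (reverse_condition (GT) != LE
      || reverse_condition (GEU) != LTU
      || reverse_condition (ORDERED) != UNORDERED
      /* The UN* codes and LTGT cannot be reversed without allowing
         unordered results, so UNKNOWN is returned for them.  */
      || reverse_condition (UNLE) != UNKNOWN)
    abort ();
}
#endif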
812 /* Similar, but we're allowed to generate unordered comparisons, which
813 makes it safe for IEEE floating-point. Of course, we have to recognize
814 that the target will support them too... */
816 enum rtx_code
817 reverse_condition_maybe_unordered (code)
818 enum rtx_code code;
820 switch (code)
822 case EQ:
823 return NE;
824 case NE:
825 return EQ;
826 case GT:
827 return UNLE;
828 case GE:
829 return UNLT;
830 case LT:
831 return UNGE;
832 case LE:
833 return UNGT;
834 case LTGT:
835 return UNEQ;
836 case UNORDERED:
837 return ORDERED;
838 case ORDERED:
839 return UNORDERED;
840 case UNLT:
841 return GE;
842 case UNLE:
843 return GT;
844 case UNGT:
845 return LE;
846 case UNGE:
847 return LT;
848 case UNEQ:
849 return LTGT;
851 default:
852 abort ();
856 /* Similar, but return the code when two operands of a comparison are swapped.
857 This IS safe for IEEE floating-point. */
859 enum rtx_code
860 swap_condition (code)
861 enum rtx_code code;
863 switch (code)
865 case EQ:
866 case NE:
867 case UNORDERED:
868 case ORDERED:
869 case UNEQ:
870 case LTGT:
871 return code;
873 case GT:
874 return LT;
875 case GE:
876 return LE;
877 case LT:
878 return GT;
879 case LE:
880 return GE;
881 case GTU:
882 return LTU;
883 case GEU:
884 return LEU;
885 case LTU:
886 return GTU;
887 case LEU:
888 return GEU;
889 case UNLT:
890 return UNGT;
891 case UNLE:
892 return UNGE;
893 case UNGT:
894 return UNLT;
895 case UNGE:
896 return UNLE;
898 default:
899 abort ();
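/* Editorial example -- an illustrative sketch, not part of the original
   file.  swap_condition answers "which code do I use after exchanging the
   operands?": (gt a b) is equivalent to (lt b a), while symmetric codes map
   to themselves.  */
#if 0
static void
example_check_swap_condition ()
{
  if (swap_condition (GT) != LT
      || swap_condition (LEU) != GEU
      || swap_condition (EQ) != EQ
      || swap_condition (UNORDERED) != UNORDERED)
    abort ();
}
#endif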
903 /* Given a comparison CODE, return the corresponding unsigned comparison.
904 If CODE is an equality comparison or already an unsigned comparison,
905 CODE is returned. */
907 enum rtx_code
908 unsigned_condition (code)
909 enum rtx_code code;
911 switch (code)
913 case EQ:
914 case NE:
915 case GTU:
916 case GEU:
917 case LTU:
918 case LEU:
919 return code;
921 case GT:
922 return GTU;
923 case GE:
924 return GEU;
925 case LT:
926 return LTU;
927 case LE:
928 return LEU;
930 default:
931 abort ();
935 /* Similarly, return the signed version of a comparison. */
937 enum rtx_code
938 signed_condition (code)
939 enum rtx_code code;
941 switch (code)
943 case EQ:
944 case NE:
945 case GT:
946 case GE:
947 case LT:
948 case LE:
949 return code;
951 case GTU:
952 return GT;
953 case GEU:
954 return GE;
955 case LTU:
956 return LT;
957 case LEU:
958 return LE;
960 default:
961 abort ();
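/* Editorial example -- an illustrative sketch, not part of the original
   file.  unsigned_condition and signed_condition undo each other on the
   strict orderings and leave equality and already-converted codes alone.  */
#if 0
static void
example_check_signedness_conversion ()
{
  if (unsigned_condition (LT) != LTU
      || signed_condition (LTU) != LT
      || unsigned_condition (EQ) != EQ
      || signed_condition (GE) != GE)
    abort ();
}
#endif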
965 /* Return non-zero if CODE1 is more strict than CODE2, i.e., if the
966 truth of CODE1 implies the truth of CODE2. */
969 comparison_dominates_p (code1, code2)
970 enum rtx_code code1, code2;
972 /* UNKNOWN comparison codes can happen as a result of trying to reverse
973 comparison codes.
974 They can't match anything, so we have to reject them here. */
975 if (code1 == UNKNOWN || code2 == UNKNOWN)
976 return 0;
978 if (code1 == code2)
979 return 1;
981 switch (code1)
983 case UNEQ:
984 if (code2 == UNLE || code2 == UNGE)
985 return 1;
986 break;
988 case EQ:
989 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
990 || code2 == ORDERED)
991 return 1;
992 break;
994 case UNLT:
995 if (code2 == UNLE || code2 == NE)
996 return 1;
997 break;
999 case LT:
1000 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1001 return 1;
1002 break;
1004 case UNGT:
1005 if (code2 == UNGE || code2 == NE)
1006 return 1;
1007 break;
1009 case GT:
1010 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1011 return 1;
1012 break;
1014 case GE:
1015 case LE:
1016 if (code2 == ORDERED)
1017 return 1;
1018 break;
1020 case LTGT:
1021 if (code2 == NE || code2 == ORDERED)
1022 return 1;
1023 break;
1025 case LTU:
1026 if (code2 == LEU || code2 == NE)
1027 return 1;
1028 break;
1030 case GTU:
1031 if (code2 == GEU || code2 == NE)
1032 return 1;
1033 break;
1035 case UNORDERED:
1036 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1037 || code2 == UNGE || code2 == UNGT)
1038 return 1;
1039 break;
1041 default:
1042 break;
1045 return 0;
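/* Editorial example -- an illustrative sketch, not part of the original
   file.  "Dominates" means logical implication, so a later test whose code
   is dominated by a known-true one can be folded to true.  */
#if 0
static void
example_check_dominates ()
{
  if (! comparison_dominates_p (EQ, GE)       /* a == b implies a >= b */
      || ! comparison_dominates_p (LT, NE)    /* a < b implies a != b */
      || comparison_dominates_p (LE, LT))     /* a <= b does NOT imply a < b */
    abort ();
}
#endif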
1048 /* Return 1 if INSN is an unconditional jump and nothing else. */
1051 simplejump_p (insn)
1052 rtx insn;
1054 return (GET_CODE (insn) == JUMP_INSN
1055 && GET_CODE (PATTERN (insn)) == SET
1056 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1057 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1060 /* Return nonzero if INSN is a (possibly) conditional jump
1061 and nothing more.
1063 Use of this function is deprecated, since we need to support combined
1064 branch and compare insns. Use any_condjump_p instead whenever possible. */
1067 condjump_p (insn)
1068 rtx insn;
1070 rtx x = PATTERN (insn);
1072 if (GET_CODE (x) != SET
1073 || GET_CODE (SET_DEST (x)) != PC)
1074 return 0;
1076 x = SET_SRC (x);
1077 if (GET_CODE (x) == LABEL_REF)
1078 return 1;
1079 else
1080 return (GET_CODE (x) == IF_THEN_ELSE
1081 && ((GET_CODE (XEXP (x, 2)) == PC
1082 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1083 || GET_CODE (XEXP (x, 1)) == RETURN))
1084 || (GET_CODE (XEXP (x, 1)) == PC
1085 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1086 || GET_CODE (XEXP (x, 2)) == RETURN))));
1088 return 0;
1091 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1092 PARALLEL.
1094 Use of this function is deprecated, since we need to support combined
1095 branch and compare insns. Use any_condjump_p instead whenever possible. */
1098 condjump_in_parallel_p (insn)
1099 rtx insn;
1101 rtx x = PATTERN (insn);
1103 if (GET_CODE (x) != PARALLEL)
1104 return 0;
1105 else
1106 x = XVECEXP (x, 0, 0);
1108 if (GET_CODE (x) != SET)
1109 return 0;
1110 if (GET_CODE (SET_DEST (x)) != PC)
1111 return 0;
1112 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1113 return 1;
1114 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1115 return 0;
1116 if (XEXP (SET_SRC (x), 2) == pc_rtx
1117 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1118 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1119 return 1;
1120 if (XEXP (SET_SRC (x), 1) == pc_rtx
1121 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1122 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1123 return 1;
1124 return 0;
1127 /* Return the SET of PC in INSN, otherwise NULL. */
1130 pc_set (insn)
1131 rtx insn;
1133 rtx pat;
1134 if (GET_CODE (insn) != JUMP_INSN)
1135 return NULL_RTX;
1136 pat = PATTERN (insn);
1138 /* The set is allowed to appear either as the insn pattern or
1139 the first set in a PARALLEL. */
1140 if (GET_CODE (pat) == PARALLEL)
1141 pat = XVECEXP (pat, 0, 0);
1142 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1143 return pat;
1145 return NULL_RTX;
1148 /* Return true when insn is an unconditional direct jump,
1149 possibly bundled inside a PARALLEL. */
1152 any_uncondjump_p (insn)
1153 rtx insn;
1155 rtx x = pc_set (insn);
1156 if (!x)
1157 return 0;
1158 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1159 return 0;
1160 return 1;
1163 /* Return true when insn is a conditional jump. This function works for
1164 instructions containing PC sets in PARALLELs. The instruction may have
1165 various other effects, so before removing the jump you must verify
1166 onlyjump_p.
1168 Note that unlike condjump_p it returns false for unconditional jumps. */
1171 any_condjump_p (insn)
1172 rtx insn;
1174 rtx x = pc_set (insn);
1175 enum rtx_code a, b;
1177 if (!x)
1178 return 0;
1179 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1180 return 0;
1182 a = GET_CODE (XEXP (SET_SRC (x), 1));
1183 b = GET_CODE (XEXP (SET_SRC (x), 2));
1185 return ((b == PC && (a == LABEL_REF || a == RETURN))
1186 || (a == PC && (b == LABEL_REF || b == RETURN)));
1189 /* Return the label of a conditional jump. */
1192 condjump_label (insn)
1193 rtx insn;
1195 rtx x = pc_set (insn);
1197 if (!x)
1198 return NULL_RTX;
1199 x = SET_SRC (x);
1200 if (GET_CODE (x) == LABEL_REF)
1201 return x;
1202 if (GET_CODE (x) != IF_THEN_ELSE)
1203 return NULL_RTX;
1204 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1205 return XEXP (x, 1);
1206 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1207 return XEXP (x, 2);
1208 return NULL_RTX;
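/* Editorial example -- an illustrative sketch, not part of the original
   file.  The usual way a pass classifies a jump with the predicates above:
   pc_set finds the (set (pc) ...) even inside a PARALLEL, and onlyjump_p
   rules out side effects besides the control transfer.  The helper name is
   hypothetical.  */
#if 0
static rtx
example_jump_target (jump)
     rtx jump;
{
  if (GET_CODE (jump) != JUMP_INSN || ! onlyjump_p (jump))
    return NULL_RTX;
  if (any_uncondjump_p (jump))
    return JUMP_LABEL (jump);         /* direct unconditional jump */
  if (any_condjump_p (jump))
    return condjump_label (jump);     /* label arm of the IF_THEN_ELSE */
  return NULL_RTX;                    /* tablejump, return, etc.  */
}
#endif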
1211 /* Return true if INSN is a (possibly conditional) return insn. */
1213 static int
1214 returnjump_p_1 (loc, data)
1215 rtx *loc;
1216 void *data ATTRIBUTE_UNUSED;
1218 rtx x = *loc;
1220 return x && (GET_CODE (x) == RETURN
1221 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
1225 returnjump_p (insn)
1226 rtx insn;
1228 if (GET_CODE (insn) != JUMP_INSN)
1229 return 0;
1230 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1233 /* Return true if INSN is a jump that only transfers control and
1234 nothing more. */
1237 onlyjump_p (insn)
1238 rtx insn;
1240 rtx set;
1242 if (GET_CODE (insn) != JUMP_INSN)
1243 return 0;
1245 set = single_set (insn);
1246 if (set == NULL)
1247 return 0;
1248 if (GET_CODE (SET_DEST (set)) != PC)
1249 return 0;
1250 if (side_effects_p (SET_SRC (set)))
1251 return 0;
1253 return 1;
1256 #ifdef HAVE_cc0
1258 /* Return non-zero if X is an RTX that only sets the condition codes
1259 and has no side effects. */
1262 only_sets_cc0_p (x)
1263 rtx x;
1266 if (! x)
1267 return 0;
1269 if (INSN_P (x))
1270 x = PATTERN (x);
1272 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
1275 /* Return 1 if X is an RTX that does nothing but set the condition codes
1276 and CLOBBER or USE registers.
1277 Return -1 if X does explicitly set the condition codes,
1278 but also does other things. */
1281 sets_cc0_p (x)
1282 rtx x;
1285 if (! x)
1286 return 0;
1288 if (INSN_P (x))
1289 x = PATTERN (x);
1291 if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
1292 return 1;
1293 if (GET_CODE (x) == PARALLEL)
1295 int i;
1296 int sets_cc0 = 0;
1297 int other_things = 0;
1298 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1300 if (GET_CODE (XVECEXP (x, 0, i)) == SET
1301 && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
1302 sets_cc0 = 1;
1303 else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
1304 other_things = 1;
1306 return ! sets_cc0 ? 0 : other_things ? -1 : 1;
1308 return 0;
1310 #endif
1312 /* Follow any unconditional jump at LABEL;
1313 return the ultimate label reached by any such chain of jumps.
1314 If LABEL is not followed by a jump, return LABEL.
1315 If the chain loops or we can't find end, return LABEL,
1316 since that tells caller to avoid changing the insn.
1318 If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
1319 a USE or CLOBBER. */
1322 follow_jumps (label)
1323 rtx label;
1325 rtx insn;
1326 rtx next;
1327 rtx value = label;
1328 int depth;
1330 for (depth = 0;
1331 (depth < 10
1332 && (insn = next_active_insn (value)) != 0
1333 && GET_CODE (insn) == JUMP_INSN
1334 && ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
1335 && onlyjump_p (insn))
1336 || GET_CODE (PATTERN (insn)) == RETURN)
1337 && (next = NEXT_INSN (insn))
1338 && GET_CODE (next) == BARRIER);
1339 depth++)
1341 /* Don't chain through the insn that jumps into a loop
1342 from outside the loop,
1343 since that would create multiple loop entry jumps
1344 and prevent loop optimization. */
1345 rtx tem;
1346 if (!reload_completed)
1347 for (tem = value; tem != insn; tem = NEXT_INSN (tem))
1348 if (GET_CODE (tem) == NOTE
1349 && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
1350 /* ??? Optional. Disables some optimizations, but makes
1351 gcov output more accurate with -O. */
1352 || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
1353 return value;
1355 /* If we have found a cycle, make the insn jump to itself. */
1356 if (JUMP_LABEL (insn) == label)
1357 return label;
1359 tem = next_active_insn (JUMP_LABEL (insn));
1360 if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
1361 || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
1362 break;
1364 value = JUMP_LABEL (insn);
1366 if (depth == 10)
1367 return label;
1368 return value;
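/* Editorial example -- an illustrative sketch, not part of the original
   file.  The classic jump-threading use of follow_jumps: if the current
   target label merely jumps somewhere else, retarget JUMP to the end of the
   chain.  JUMP is assumed to be a jump insn whose JUMP_LABEL has been set
   (see rebuild_jump_labels).  */
#if 0
static void
example_thread_jump (jump)
     rtx jump;
{
  rtx target = follow_jumps (JUMP_LABEL (jump));

  /* follow_jumps returns the original label when it finds a cycle or
     cannot chain safely, so inequality means a real shortcut exists.  */
  if (target != JUMP_LABEL (jump))
    redirect_jump (jump, target, 1);
}
#endif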
1372 /* Find all CODE_LABELs referred to in X, and increment their use counts.
1373 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
1374 in INSN, then store one of them in JUMP_LABEL (INSN).
1375 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
1376 referenced in INSN, add a REG_LABEL note containing that label to INSN.
1377 Also, when there are consecutive labels, canonicalize on the last of them.
1379 Note that two labels separated by a loop-beginning note
1380 must be kept distinct if we have not yet done loop-optimization,
1381 because the gap between them is where loop-optimize
1382 will want to move invariant code to. CROSS_JUMP tells us
1383 that loop-optimization is done with. */
1385 void
1386 mark_jump_label (x, insn, in_mem)
1387 rtx x;
1388 rtx insn;
1389 int in_mem;
1391 RTX_CODE code = GET_CODE (x);
1392 int i;
1393 const char *fmt;
1395 switch (code)
1397 case PC:
1398 case CC0:
1399 case REG:
1400 case SUBREG:
1401 case CONST_INT:
1402 case CONST_DOUBLE:
1403 case CLOBBER:
1404 case CALL:
1405 return;
1407 case MEM:
1408 in_mem = 1;
1409 break;
1411 case SYMBOL_REF:
1412 if (!in_mem)
1413 return;
1415 /* If this is a constant-pool reference, see if it is a label. */
1416 if (CONSTANT_POOL_ADDRESS_P (x))
1417 mark_jump_label (get_pool_constant (x), insn, in_mem);
1418 break;
1420 case LABEL_REF:
1422 rtx label = XEXP (x, 0);
1424 /* Ignore remaining references to unreachable labels that
1425 have been deleted. */
1426 if (GET_CODE (label) == NOTE
1427 && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
1428 break;
1430 if (GET_CODE (label) != CODE_LABEL)
1431 abort ();
1433 /* Ignore references to labels of containing functions. */
1434 if (LABEL_REF_NONLOCAL_P (x))
1435 break;
1437 XEXP (x, 0) = label;
1438 if (! insn || ! INSN_DELETED_P (insn))
1439 ++LABEL_NUSES (label);
1441 if (insn)
1443 if (GET_CODE (insn) == JUMP_INSN)
1444 JUMP_LABEL (insn) = label;
1445 else
1447 /* Add a REG_LABEL note for LABEL unless there already
1448 is one. All uses of a label, except for labels
1449 that are the targets of jumps, must have a
1450 REG_LABEL note. */
1451 if (! find_reg_note (insn, REG_LABEL, label))
1452 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
1453 REG_NOTES (insn));
1456 return;
1459 /* Do walk the labels in a vector, but not the first operand of an
1460 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
1461 case ADDR_VEC:
1462 case ADDR_DIFF_VEC:
1463 if (! INSN_DELETED_P (insn))
1465 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1467 for (i = 0; i < XVECLEN (x, eltnum); i++)
1468 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1470 return;
1472 default:
1473 break;
1476 fmt = GET_RTX_FORMAT (code);
1477 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1479 if (fmt[i] == 'e')
1480 mark_jump_label (XEXP (x, i), insn, in_mem);
1481 else if (fmt[i] == 'E')
1483 int j;
1484 for (j = 0; j < XVECLEN (x, i); j++)
1485 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1490 /* If all INSN does is set the pc, delete it,
1491 and delete the insn that set the condition codes for it
1492 if that's what the previous thing was. */
1494 void
1495 delete_jump (insn)
1496 rtx insn;
1498 rtx set = single_set (insn);
1500 if (set && GET_CODE (SET_DEST (set)) == PC)
1501 delete_computation (insn);
1504 /* Verify INSN is a BARRIER and delete it. */
1506 void
1507 delete_barrier (insn)
1508 rtx insn;
1510 if (GET_CODE (insn) != BARRIER)
1511 abort ();
1513 delete_insn (insn);
1516 /* Recursively delete prior insns that compute the value (used only by INSN
1517 which the caller is deleting) stored in the register mentioned by NOTE
1518 which is a REG_DEAD note associated with INSN. */
1520 static void
1521 delete_prior_computation (note, insn)
1522 rtx note;
1523 rtx insn;
1525 rtx our_prev;
1526 rtx reg = XEXP (note, 0);
1528 for (our_prev = prev_nonnote_insn (insn);
1529 our_prev && (GET_CODE (our_prev) == INSN
1530 || GET_CODE (our_prev) == CALL_INSN);
1531 our_prev = prev_nonnote_insn (our_prev))
1533 rtx pat = PATTERN (our_prev);
1535 /* If we reach a CALL which is not calling a const function
1536 or the callee pops the arguments, then give up. */
1537 if (GET_CODE (our_prev) == CALL_INSN
1538 && (! CONST_OR_PURE_CALL_P (our_prev)
1539 || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
1540 break;
1542 /* If we reach a SEQUENCE, it is too complex to try to
1543 do anything with it, so give up. We can be run during
1544 and after reorg, so SEQUENCE rtl can legitimately show
1545 up here. */
1546 if (GET_CODE (pat) == SEQUENCE)
1547 break;
1549 if (GET_CODE (pat) == USE
1550 && GET_CODE (XEXP (pat, 0)) == INSN)
1551 /* reorg creates USEs that look like this. We leave them
1552 alone because reorg needs them for its own purposes. */
1553 break;
1555 if (reg_set_p (reg, pat))
1557 if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
1558 break;
1560 if (GET_CODE (pat) == PARALLEL)
1562 /* If we find a SET of something else, we can't
1563 delete the insn. */
1565 int i;
1567 for (i = 0; i < XVECLEN (pat, 0); i++)
1569 rtx part = XVECEXP (pat, 0, i);
1571 if (GET_CODE (part) == SET
1572 && SET_DEST (part) != reg)
1573 break;
1576 if (i == XVECLEN (pat, 0))
1577 delete_computation (our_prev);
1579 else if (GET_CODE (pat) == SET
1580 && GET_CODE (SET_DEST (pat)) == REG)
1582 int dest_regno = REGNO (SET_DEST (pat));
1583 int dest_endregno
1584 = (dest_regno
1585 + (dest_regno < FIRST_PSEUDO_REGISTER
1586 ? HARD_REGNO_NREGS (dest_regno,
1587 GET_MODE (SET_DEST (pat))) : 1));
1588 int regno = REGNO (reg);
1589 int endregno
1590 = (regno
1591 + (regno < FIRST_PSEUDO_REGISTER
1592 ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));
1594 if (dest_regno >= regno
1595 && dest_endregno <= endregno)
1596 delete_computation (our_prev);
1598 /* We may have a multi-word hard register and some, but not
1599 all, of the words of the register are needed in subsequent
1600 insns. Write REG_UNUSED notes for those parts that were not
1601 needed. */
1602 else if (dest_regno <= regno
1603 && dest_endregno >= endregno)
1605 int i;
1607 REG_NOTES (our_prev)
1608 = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
1609 REG_NOTES (our_prev));
1611 for (i = dest_regno; i < dest_endregno; i++)
1612 if (! find_regno_note (our_prev, REG_UNUSED, i))
1613 break;
1615 if (i == dest_endregno)
1616 delete_computation (our_prev);
1620 break;
1623 /* If PAT references the register that dies here, it is an
1624 additional use. Hence any prior SET isn't dead. However, this
1625 insn becomes the new place for the REG_DEAD note. */
1626 if (reg_overlap_mentioned_p (reg, pat))
1628 XEXP (note, 1) = REG_NOTES (our_prev);
1629 REG_NOTES (our_prev) = note;
1630 break;
1635 /* Delete INSN and recursively delete insns that compute values used only
1636 by INSN. This uses the REG_DEAD notes computed during flow analysis.
1637 If we are running before flow.c, we need do nothing since flow.c will
1638 delete dead code. We also can't know if the registers being used are
1639 dead or not at this point.
1641 Otherwise, look at all our REG_DEAD notes. If a previous insn does
1642 nothing other than set a register that dies in this insn, we can delete
1643 that insn as well.
1645 On machines with CC0, if CC0 is used in this insn, we may be able to
1646 delete the insn that set it. */
1648 static void
1649 delete_computation (insn)
1650 rtx insn;
1652 rtx note, next;
1654 #ifdef HAVE_cc0
1655 if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
1657 rtx prev = prev_nonnote_insn (insn);
1658 /* We assume that at this stage
1659 CC's are always set explicitly
1660 and always immediately before the jump that
1661 will use them. So if the previous insn
1662 exists to set the CC's, delete it
1663 (unless it performs auto-increments, etc.). */
1664 if (prev && GET_CODE (prev) == INSN
1665 && sets_cc0_p (PATTERN (prev)))
1667 if (sets_cc0_p (PATTERN (prev)) > 0
1668 && ! side_effects_p (PATTERN (prev)))
1669 delete_computation (prev);
1670 else
1671 /* Otherwise, show that cc0 won't be used. */
1672 REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
1673 cc0_rtx, REG_NOTES (prev));
1676 #endif
1678 for (note = REG_NOTES (insn); note; note = next)
1680 next = XEXP (note, 1);
1682 if (REG_NOTE_KIND (note) != REG_DEAD
1683 /* Verify that the REG_NOTE is legitimate. */
1684 || GET_CODE (XEXP (note, 0)) != REG)
1685 continue;
1687 delete_prior_computation (note, insn);
1690 delete_related_insns (insn);
1693 /* Delete insn INSN from the chain of insns and update label ref counts
1694 and delete insns now unreachable.
1696 Returns the first insn after INSN that was not deleted.
1698 Usage of this function is deprecated. Use delete_insn instead, and a
1699 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1702 delete_related_insns (insn)
1703 rtx insn;
1705 int was_code_label = (GET_CODE (insn) == CODE_LABEL);
1706 rtx note;
1707 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1709 while (next && INSN_DELETED_P (next))
1710 next = NEXT_INSN (next);
1712 /* This insn is already deleted => return first following nondeleted. */
1713 if (INSN_DELETED_P (insn))
1714 return next;
1716 delete_insn (insn);
1718 /* If instruction is followed by a barrier,
1719 delete the barrier too. */
1721 if (next != 0 && GET_CODE (next) == BARRIER)
1722 delete_insn (next);
1724 /* If deleting a jump, decrement the count of the label,
1725 and delete the label if it is now unused. */
1727 if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
1729 rtx lab = JUMP_LABEL (insn), lab_next;
1731 if (LABEL_NUSES (lab) == 0)
1733 /* This can delete NEXT or PREV,
1734 either directly if NEXT is JUMP_LABEL (INSN),
1735 or indirectly through more levels of jumps. */
1736 delete_related_insns (lab);
1738 /* I feel a little doubtful about this loop,
1739 but I see no clean and sure alternative way
1740 to find the first insn after INSN that is not now deleted.
1741 I hope this works. */
1742 while (next && INSN_DELETED_P (next))
1743 next = NEXT_INSN (next);
1744 return next;
1746 else if ((lab_next = next_nonnote_insn (lab)) != NULL
1747 && GET_CODE (lab_next) == JUMP_INSN
1748 && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
1749 || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
1751 /* If we're deleting the tablejump, delete the dispatch table.
1752 We may not be able to kill the label immediately preceding
1753 just yet, as it might be referenced in code leading up to
1754 the tablejump. */
1755 delete_related_insns (lab_next);
1759 /* Likewise if we're deleting a dispatch table. */
1761 if (GET_CODE (insn) == JUMP_INSN
1762 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1763 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1765 rtx pat = PATTERN (insn);
1766 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1767 int len = XVECLEN (pat, diff_vec_p);
1769 for (i = 0; i < len; i++)
1770 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1771 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1772 while (next && INSN_DELETED_P (next))
1773 next = NEXT_INSN (next);
1774 return next;
1777 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1778 if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
1779 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1780 if (REG_NOTE_KIND (note) == REG_LABEL
1781 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1782 && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
1783 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1784 delete_related_insns (XEXP (note, 0));
1786 while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
1787 prev = PREV_INSN (prev);
1789 /* If INSN was a label and a dispatch table follows it,
1790 delete the dispatch table. The tablejump must have gone already.
1791 It isn't useful to fall through into a table. */
1793 if (was_code_label
1794 && NEXT_INSN (insn) != 0
1795 && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
1796 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1797 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1798 next = delete_related_insns (NEXT_INSN (insn));
1800 /* If INSN was a label, delete insns following it if now unreachable. */
1802 if (was_code_label && prev && GET_CODE (prev) == BARRIER)
1804 RTX_CODE code;
1805 while (next != 0
1806 && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
1807 || code == NOTE || code == BARRIER
1808 || (code == CODE_LABEL && INSN_DELETED_P (next))))
1810 if (code == NOTE
1811 && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
1812 next = NEXT_INSN (next);
1813 /* Keep going past other deleted labels to delete what follows. */
1814 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1815 next = NEXT_INSN (next);
1816 else
1817 /* Note: if this deletes a jump, it can cause more
1818 deletion of unreachable code, after a different label.
1819 As long as the value from this recursive call is correct,
1820 this invocation functions correctly. */
1821 next = delete_related_insns (next);
1825 return next;
1828 /* Advance from INSN till reaching something not deleted
1829 then return that. May return INSN itself. */
1832 next_nondeleted_insn (insn)
1833 rtx insn;
1835 while (INSN_DELETED_P (insn))
1836 insn = NEXT_INSN (insn);
1837 return insn;
1840 /* Delete a range of insns from FROM to TO, inclusive.
1841 This is for the sake of peephole optimization, so assume
1842 that whatever these insns do will still be done by a new
1843 peephole insn that will replace them. */
1845 void
1846 delete_for_peephole (from, to)
1847 rtx from, to;
1849 rtx insn = from;
1851 while (1)
1853 rtx next = NEXT_INSN (insn);
1854 rtx prev = PREV_INSN (insn);
1856 if (GET_CODE (insn) != NOTE)
1858 INSN_DELETED_P (insn) = 1;
1860 /* Patch this insn out of the chain. */
1861 /* We don't do this all at once, because we
1862 must preserve all NOTEs. */
1863 if (prev)
1864 NEXT_INSN (prev) = next;
1866 if (next)
1867 PREV_INSN (next) = prev;
1870 if (insn == to)
1871 break;
1872 insn = next;
1875 /* Note that if TO is an unconditional jump
1876 we *do not* delete the BARRIER that follows,
1877 since the peephole that replaces this sequence
1878 is also an unconditional jump in that case. */
1881 /* We have determined that INSN is never reached, and are about to
1882 delete it. Print a warning if the user asked for one.
1884 To try to make this warning more useful, this should only be called
1885 once per basic block not reached, and it only warns when the basic
1886 block contains more than one line from the current function, and
1887 contains at least one operation. CSE and inlining can duplicate insns,
1888 so it's possible to get spurious warnings from this. */
1890 void
1891 never_reached_warning (avoided_insn, finish)
1892 rtx avoided_insn, finish;
1894 rtx insn;
1895 rtx a_line_note = NULL;
1896 int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;
1898 if (! warn_notreached)
1899 return;
1901 /* Scan forwards, looking at LINE_NUMBER notes, until
1902 we hit a LABEL or we run out of insns. */
1904 for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
1906 if (finish == NULL && GET_CODE (insn) == CODE_LABEL)
1907 break;
1909 if (GET_CODE (insn) == NOTE /* A line number note? */
1910 && NOTE_LINE_NUMBER (insn) >= 0)
1912 if (a_line_note == NULL)
1913 a_line_note = insn;
1914 else
1915 two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
1916 != NOTE_LINE_NUMBER (insn));
1918 else if (INSN_P (insn))
1920 if (reached_end)
1921 break;
1922 contains_insn = 1;
1925 if (insn == finish)
1926 reached_end = 1;
1928 if (two_avoided_lines && contains_insn)
1929 warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
1930 NOTE_LINE_NUMBER (a_line_note),
1931 "will never be executed");
1934 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1935 NLABEL as a return. Accrue modifications into the change group. */
1937 static void
1938 redirect_exp_1 (loc, olabel, nlabel, insn)
1939 rtx *loc;
1940 rtx olabel, nlabel;
1941 rtx insn;
1943 rtx x = *loc;
1944 RTX_CODE code = GET_CODE (x);
1945 int i;
1946 const char *fmt;
1948 if (code == LABEL_REF)
1950 if (XEXP (x, 0) == olabel)
1952 rtx n;
1953 if (nlabel)
1954 n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1955 else
1956 n = gen_rtx_RETURN (VOIDmode);
1958 validate_change (insn, loc, n, 1);
1959 return;
1962 else if (code == RETURN && olabel == 0)
1964 x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
1965 if (loc == &PATTERN (insn))
1966 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1967 validate_change (insn, loc, x, 1);
1968 return;
1971 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1972 && GET_CODE (SET_SRC (x)) == LABEL_REF
1973 && XEXP (SET_SRC (x), 0) == olabel)
1975 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1976 return;
1979 fmt = GET_RTX_FORMAT (code);
1980 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1982 if (fmt[i] == 'e')
1983 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1984 else if (fmt[i] == 'E')
1986 int j;
1987 for (j = 0; j < XVECLEN (x, i); j++)
1988 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1993 /* Similar, but apply the change group and report success or failure. */
1995 static int
1996 redirect_exp (olabel, nlabel, insn)
1997 rtx olabel, nlabel;
1998 rtx insn;
2000 rtx *loc;
2002 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2003 loc = &XVECEXP (PATTERN (insn), 0, 0);
2004 else
2005 loc = &PATTERN (insn);
2007 redirect_exp_1 (loc, olabel, nlabel, insn);
2008 if (num_validated_changes () == 0)
2009 return 0;
2011 return apply_change_group ();
2014 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2015 the modifications into the change group. Return false if we did
2016 not see how to do that. */
2019 redirect_jump_1 (jump, nlabel)
2020 rtx jump, nlabel;
2022 int ochanges = num_validated_changes ();
2023 rtx *loc;
2025 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2026 loc = &XVECEXP (PATTERN (jump), 0, 0);
2027 else
2028 loc = &PATTERN (jump);
2030 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2031 return num_validated_changes () > ochanges;
2034 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
2035 jump target label is unused as a result, it and the code following
2036 it may be deleted.
2038 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
2039 RETURN insn.
2041 The return value will be 1 if the change was made, 0 if it wasn't
2042 (this can only occur for NLABEL == 0). */
2045 redirect_jump (jump, nlabel, delete_unused)
2046 rtx jump, nlabel;
2047 int delete_unused;
2049 rtx olabel = JUMP_LABEL (jump);
2051 if (nlabel == olabel)
2052 return 1;
2054 if (! redirect_exp (olabel, nlabel, jump))
2055 return 0;
2057 JUMP_LABEL (jump) = nlabel;
2058 if (nlabel)
2059 ++LABEL_NUSES (nlabel);
2061 /* If we're eliding the jump over exception cleanups at the end of a
2062 function, move the function end note so that -Wreturn-type works. */
2063 if (olabel && nlabel
2064 && NEXT_INSN (olabel)
2065 && GET_CODE (NEXT_INSN (olabel)) == NOTE
2066 && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
2067 emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);
2069 if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
2070 /* Undefined labels will remain outside the insn stream. */
2071 && INSN_UID (olabel))
2072 delete_related_insns (olabel);
2074 return 1;
2077 /* Invert the jump condition of rtx X contained in jump insn, INSN.
2078 Accrue the modifications into the change group. */
2080 static void
2081 invert_exp_1 (insn)
2082 rtx insn;
2084 RTX_CODE code;
2085 rtx x = pc_set (insn);
2087 if (!x)
2088 abort ();
2089 x = SET_SRC (x);
2091 code = GET_CODE (x);
2093 if (code == IF_THEN_ELSE)
2095 rtx comp = XEXP (x, 0);
2096 rtx tem;
2097 enum rtx_code reversed_code;
2099 /* We can do this in two ways: The preferable way, which can only
2100 be done if this is not an integer comparison, is to reverse
2101 the comparison code. Otherwise, swap the THEN-part and ELSE-part
2102 of the IF_THEN_ELSE. If we can't do either, fail. */
2104 reversed_code = reversed_comparison_code (comp, insn);
2106 if (reversed_code != UNKNOWN)
2108 validate_change (insn, &XEXP (x, 0),
2109 gen_rtx_fmt_ee (reversed_code,
2110 GET_MODE (comp), XEXP (comp, 0),
2111 XEXP (comp, 1)),
2113 return;
2116 tem = XEXP (x, 1);
2117 validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
2118 validate_change (insn, &XEXP (x, 2), tem, 1);
2120 else
2121 abort ();
2124 /* Invert the jump condition of conditional jump insn, INSN.
2126 Return 1 if we can do so, 0 if we cannot find a way to do so that
2127 matches a pattern. */
2129 static int
2130 invert_exp (insn)
2131 rtx insn;
2133 invert_exp_1 (insn);
2134 if (num_validated_changes () == 0)
2135 return 0;
2137 return apply_change_group ();
2140 /* Invert the condition of the jump JUMP, and make it jump to label
2141 NLABEL instead of where it jumps now. Accrue changes into the
2142 change group. Return false if we didn't see how to perform the
2143 inversion and redirection. */
2146 invert_jump_1 (jump, nlabel)
2147 rtx jump, nlabel;
2149 int ochanges;
2151 ochanges = num_validated_changes ();
2152 invert_exp_1 (jump);
2153 if (num_validated_changes () == ochanges)
2154 return 0;
2156 return redirect_jump_1 (jump, nlabel);
2159 /* Invert the condition of the jump JUMP, and make it jump to label
2160 NLABEL instead of where it jumps now. Return true if successful. */
2163 invert_jump (jump, nlabel, delete_unused)
2164 rtx jump, nlabel;
2165 int delete_unused;
2167 /* We have to either invert the condition and change the label or
2168 do neither. Either operation could fail. We first try to invert
2169 the jump. If that succeeds, we try changing the label. If that fails,
2170 we invert the jump back to what it was. */
2172 if (! invert_exp (jump))
2173 return 0;
2175 if (redirect_jump (jump, nlabel, delete_unused))
2177 invert_br_probabilities (jump);
2179 return 1;
2182 if (! invert_exp (jump))
2183 /* This should just be putting it back the way it was. */
2184 abort ();
2186 return 0;
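/* Editorial example -- an illustrative sketch, not part of the original
   file.  A typical simplification: a conditional branch over an
   unconditional jump can be collapsed into a single inverted branch.  INSN
   is the conditional jump, UNCOND the jump it skips over; the helper name is
   hypothetical.  */
#if 0
static int
example_collapse_branch_over_jump (insn, uncond)
     rtx insn, uncond;
{
  if (! any_condjump_p (insn) || ! onlyjump_p (insn)
      || ! any_uncondjump_p (uncond) || ! onlyjump_p (uncond))
    return 0;

  /* invert_jump only commits when both the inversion and the retargeting
     succeed, so there is nothing to undo on failure.  */
  if (! invert_jump (insn, JUMP_LABEL (uncond), 1))
    return 0;

  delete_related_insns (uncond);
  return 1;
}
#endif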
2190 /* Like rtx_equal_p except that it considers two REGs as equal
2191 if they renumber to the same value and considers two commutative
2192 operations to be the same if the order of the operands has been
2193 reversed.
2195 ??? Addition is not commutative on the PA due to the weird implicit
2196 space register selection rules for memory addresses. Therefore, we
2197 don't consider a + b == b + a.
2199 We could/should make this test a little tighter. Possibly only
2200 disabling it on the PA via some backend macro or only disabling this
2201 case when the PLUS is inside a MEM. */
2204 rtx_renumbered_equal_p (x, y)
2205 rtx x, y;
2207 int i;
2208 RTX_CODE code = GET_CODE (x);
2209 const char *fmt;
2211 if (x == y)
2212 return 1;
2214 if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
2215 && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
2216 && GET_CODE (SUBREG_REG (y)) == REG)))
2218 int reg_x = -1, reg_y = -1;
2219 int byte_x = 0, byte_y = 0;
2221 if (GET_MODE (x) != GET_MODE (y))
2222 return 0;
2224 /* If we haven't done any renumbering, don't
2225 make any assumptions. */
2226 if (reg_renumber == 0)
2227 return rtx_equal_p (x, y);
2229 if (code == SUBREG)
2231 reg_x = REGNO (SUBREG_REG (x));
2232 byte_x = SUBREG_BYTE (x);
2234 if (reg_renumber[reg_x] >= 0)
2236 reg_x = subreg_regno_offset (reg_renumber[reg_x],
2237 GET_MODE (SUBREG_REG (x)),
2238 byte_x,
2239 GET_MODE (x));
2240 byte_x = 0;
2243 else
2245 reg_x = REGNO (x);
2246 if (reg_renumber[reg_x] >= 0)
2247 reg_x = reg_renumber[reg_x];
2250 if (GET_CODE (y) == SUBREG)
2252 reg_y = REGNO (SUBREG_REG (y));
2253 byte_y = SUBREG_BYTE (y);
2255 if (reg_renumber[reg_y] >= 0)
2257 reg_y = subreg_regno_offset (reg_renumber[reg_y],
2258 GET_MODE (SUBREG_REG (y)),
2259 byte_y,
2260 GET_MODE (y));
2261 byte_y = 0;
2264 else
2266 reg_y = REGNO (y);
2267 if (reg_renumber[reg_y] >= 0)
2268 reg_y = reg_renumber[reg_y];
2271 return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
2274 /* Now we have disposed of all the cases
2275 in which different rtx codes can match. */
2276 if (code != GET_CODE (y))
2277 return 0;
2279 switch (code)
2281 case PC:
2282 case CC0:
2283 case ADDR_VEC:
2284 case ADDR_DIFF_VEC:
2285 return 0;
2287 case CONST_INT:
2288 return INTVAL (x) == INTVAL (y);
2290 case LABEL_REF:
2291 /* We can't assume nonlocal labels have their following insns yet. */
2292 if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
2293 return XEXP (x, 0) == XEXP (y, 0);
2295 /* Two label-refs are equivalent if they point at labels
2296 in the same position in the instruction stream. */
2297 return (next_real_insn (XEXP (x, 0))
2298 == next_real_insn (XEXP (y, 0)));
2300 case SYMBOL_REF:
2301 return XSTR (x, 0) == XSTR (y, 0);
2303 case CODE_LABEL:
2304 /* If we didn't match EQ equality above, they aren't the same. */
2305 return 0;
2307 default:
2308 break;
2311 /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent. */
2313 if (GET_MODE (x) != GET_MODE (y))
2314 return 0;
2316 /* For commutative operations, the RTXs match if the operands match in any
2317 order. Also handle the simple binary and unary cases without a loop.
2319 ??? Don't consider PLUS a commutative operator; see comments above. */
2320 if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
2321 && code != PLUS)
2322 return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2323 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
2324 || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
2325 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
2326 else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
2327 return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
2328 && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
2329 else if (GET_RTX_CLASS (code) == '1')
2330 return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));
2332 /* Compare the elements. If any pair of corresponding elements
2333 fails to match, return 0 for the whole thing. */
2335 fmt = GET_RTX_FORMAT (code);
2336 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2338 int j;
2339 switch (fmt[i])
2341 case 'w':
2342 if (XWINT (x, i) != XWINT (y, i))
2343 return 0;
2344 break;
2346 case 'i':
2347 if (XINT (x, i) != XINT (y, i))
2348 return 0;
2349 break;
2351 case 't':
2352 if (XTREE (x, i) != XTREE (y, i))
2353 return 0;
2354 break;
2356 case 's':
2357 if (strcmp (XSTR (x, i), XSTR (y, i)))
2358 return 0;
2359 break;
2361 case 'e':
2362 if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
2363 return 0;
2364 break;
2366 case 'u':
2367 if (XEXP (x, i) != XEXP (y, i))
2368 return 0;
2369 /* fall through. */
2370 case '0':
2371 break;
2373 case 'E':
2374 if (XVECLEN (x, i) != XVECLEN (y, i))
2375 return 0;
2376 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2377 if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
2378 return 0;
2379 break;
2381 default:
2382 abort ();
2385 return 1;
2388 /* If X is a hard register or equivalent to one or a subregister of one,
2389 return the hard register number. If X is a pseudo register that was not
2390 assigned a hard register, return the pseudo register number. Otherwise,
2391 return -1. Any rtx is valid for X. */
2394 true_regnum (x)
2395 rtx x;
2397 if (GET_CODE (x) == REG)
2399 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2400 return reg_renumber[REGNO (x)];
2401 return REGNO (x);
2403 if (GET_CODE (x) == SUBREG)
2405 int base = true_regnum (SUBREG_REG (x));
2406 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2407 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2408 GET_MODE (SUBREG_REG (x)),
2409 SUBREG_BYTE (x), GET_MODE (x));
2411 return -1;
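/* Editorial example -- an illustrative sketch, not part of the original
   file.  After register allocation, two register operands normally refer to
   the same register when their true_regnum values are equal and
   non-negative; this also sees through SUBREGs of hard registers.  The
   helper name is hypothetical.  */
#if 0
static int
example_same_register_p (x, y)
     rtx x, y;
{
  int rx = true_regnum (x);
  int ry = true_regnum (y);

  return rx >= 0 && rx == ry;
}
#endif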