Update copyright.
[official-gcc.git] / gcc / jump.c
blob15c9d36618f53dc09cb30a7d752d0425208d03c6
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically set of utility function to
24 operate with jumps.
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
34 The subroutines delete_insn, redirect_jump, and invert_jump are used
35 from other passes as well. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tm_p.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "regs.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
48 #include "recog.h"
49 #include "function.h"
50 #include "expr.h"
51 #include "real.h"
52 #include "except.h"
53 #include "toplev.h"
54 #include "reload.h"
55 #include "predict.h"
56 #include "timevar.h"
58 /* Optimize jump y; x: ... y: jumpif... x?
59 Don't know if it is worth bothering with. */
60 /* Optimize two cases of conditional jump to conditional jump?
61 This can never delete any instruction or make anything dead,
62 or even change what is live at any point.
63 So perhaps let combiner do it. */
65 static rtx next_nonnote_insn_in_loop PARAMS ((rtx));
66 static void init_label_info PARAMS ((rtx));
67 static void mark_all_labels PARAMS ((rtx));
68 static int duplicate_loop_exit_test PARAMS ((rtx));
69 static void delete_computation PARAMS ((rtx));
70 static void redirect_exp_1 PARAMS ((rtx *, rtx, rtx, rtx));
71 static int redirect_exp PARAMS ((rtx, rtx, rtx));
72 static void invert_exp_1 PARAMS ((rtx));
73 static int invert_exp PARAMS ((rtx));
74 static int returnjump_p_1 PARAMS ((rtx *, void *));
75 static void delete_prior_computation PARAMS ((rtx, rtx));
77 /* Alternate entry into the jump optimizer. This entry point only rebuilds
78 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
79 instructions. */
80 void
81 rebuild_jump_labels (f)
82 rtx f;
84 rtx insn;
86 timevar_push (TV_REBUILD_JUMP);
87 init_label_info (f);
88 mark_all_labels (f);
90 /* Keep track of labels used from static data; we don't track them
91 closely enough to delete them here, so make sure their reference
92 count doesn't drop to zero. */
94 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
95 if (GET_CODE (XEXP (insn, 0)) == CODE_LABEL)
96 LABEL_NUSES (XEXP (insn, 0))++;
97 timevar_pop (TV_REBUILD_JUMP);
100 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
101 non-fallthru insn. This is not generally true, as multiple barriers
102 may have crept in, or the BARRIER may be separated from the last
103 real insn by one or more NOTEs.
105 This simple pass moves barriers and removes duplicates so that the
106 old code is happy.
108 void
109 cleanup_barriers ()
111 rtx insn, next, prev;
112 for (insn = get_insns (); insn; insn = next)
114 next = NEXT_INSN (insn);
115 if (GET_CODE (insn) == BARRIER)
117 prev = prev_nonnote_insn (insn);
118 if (GET_CODE (prev) == BARRIER)
119 delete_barrier (insn);
120 else if (prev != PREV_INSN (insn))
121 reorder_insns (insn, insn, prev);
126 /* Return the next insn after INSN that is not a NOTE and is in the loop,
127 i.e. when there is no such INSN before NOTE_INSN_LOOP_END return NULL_RTX.
128 This routine does not look inside SEQUENCEs. */
130 static rtx
131 next_nonnote_insn_in_loop (insn)
132 rtx insn;
134 while (insn)
136 insn = NEXT_INSN (insn);
137 if (insn == 0 || GET_CODE (insn) != NOTE)
138 break;
139 if (GET_CODE (insn) == NOTE
140 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END)
141 return NULL_RTX;
144 return insn;
/* Scan the whole insn chain F for NOTE_INSN_LOOP_BEG notes followed by
   an unconditional jump (assumed to target the loop's exit test), and
   duplicate each such exit test before its loop via
   duplicate_loop_exit_test.  */
147 void
148 copy_loop_headers (f)
149 rtx f;
151 rtx insn, next;
152 /* Now iterate optimizing jumps until nothing changes over one pass. */
153 for (insn = f; insn; insn = next)
155 rtx temp, temp1;
157 next = NEXT_INSN (insn);
159 /* See if this is a NOTE_INSN_LOOP_BEG followed by an unconditional
160 jump. Try to optimize by duplicating the loop exit test if so.
161 This is only safe immediately after regscan, because it uses
162 the values of regno_first_uid and regno_last_uid. */
163 if (GET_CODE (insn) == NOTE
164 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
165 && (temp1 = next_nonnote_insn_in_loop (insn)) != 0
166 && any_uncondjump_p (temp1) && onlyjump_p (temp1))
168 temp = PREV_INSN (insn);
169 if (duplicate_loop_exit_test (insn))
/* Resume scanning right after the insn that preceded the note, so the
   newly emitted copy of the exit test is processed as well.  */
171 next = NEXT_INSN (temp);
177 void
178 purge_line_number_notes (f)
179 rtx f;
181 rtx last_note = 0;
182 rtx insn;
183 /* Delete extraneous line number notes.
184 Note that two consecutive notes for different lines are not really
185 extraneous. There should be some indication where that line belonged,
186 even if it became empty. */
188 for (insn = f; insn; insn = NEXT_INSN (insn))
189 if (GET_CODE (insn) == NOTE)
191 if (NOTE_LINE_NUMBER (insn) == NOTE_INSN_FUNCTION_BEG)
192 /* Any previous line note was for the prologue; gdb wants a new
193 note after the prologue even if it is for the same line. */
194 last_note = NULL_RTX;
195 else if (NOTE_LINE_NUMBER (insn) >= 0)
197 /* Delete this note if it is identical to previous note. */
198 if (last_note
199 && NOTE_SOURCE_FILE (insn) == NOTE_SOURCE_FILE (last_note)
200 && NOTE_LINE_NUMBER (insn) == NOTE_LINE_NUMBER (last_note))
202 delete_related_insns (insn);
203 continue;
206 last_note = insn;
211 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
212 notes whose labels don't occur in the insn any more. Returns the
213 largest INSN_UID found. */
214 static void
215 init_label_info (f)
216 rtx f;
218 rtx insn;
220 for (insn = f; insn; insn = NEXT_INSN (insn))
221 if (GET_CODE (insn) == CODE_LABEL)
222 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
223 else if (GET_CODE (insn) == JUMP_INSN)
224 JUMP_LABEL (insn) = 0;
225 else if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
227 rtx note, next;
229 for (note = REG_NOTES (insn); note; note = next)
231 next = XEXP (note, 1);
232 if (REG_NOTE_KIND (note) == REG_LABEL
233 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
234 remove_note (insn, note);
239 /* Mark the label each jump jumps to.
240 Combine consecutive labels, and count uses of labels. */
242 static void
243 mark_all_labels (f)
244 rtx f;
246 rtx insn;
248 for (insn = f; insn; insn = NEXT_INSN (insn))
249 if (INSN_P (insn))
251 if (GET_CODE (insn) == CALL_INSN
252 && GET_CODE (PATTERN (insn)) == CALL_PLACEHOLDER)
/* A CALL_PLACEHOLDER carries alternative insn sequences in its first
   three operands; recurse to mark labels inside each of them.  */
254 mark_all_labels (XEXP (PATTERN (insn), 0));
255 mark_all_labels (XEXP (PATTERN (insn), 1));
256 mark_all_labels (XEXP (PATTERN (insn), 2));
258 /* Canonicalize the tail recursion label attached to the
259 CALL_PLACEHOLDER insn. */
260 if (XEXP (PATTERN (insn), 3))
262 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
263 XEXP (PATTERN (insn), 3));
264 mark_jump_label (label_ref, insn, 0);
265 XEXP (PATTERN (insn), 3) = XEXP (label_ref, 0);
/* Placeholder fully handled; skip the generic marking below.  */
268 continue;
271 mark_jump_label (PATTERN (insn), insn, 0);
272 if (! INSN_DELETED_P (insn) && GET_CODE (insn) == JUMP_INSN)
274 /* When we know the LABEL_REF contained in a REG used in
275 an indirect jump, we'll have a REG_LABEL note so that
276 flow can tell where it's going. */
277 if (JUMP_LABEL (insn) == 0)
279 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
280 if (label_note)
282 /* But a LABEL_REF around the REG_LABEL note, so
283 that we can canonicalize it. */
284 rtx label_ref = gen_rtx_LABEL_REF (VOIDmode,
285 XEXP (label_note, 0));
/* mark_jump_label may replace the label inside LABEL_REF with its
   canonical equivalent; copy the result back into the note and into
   JUMP_LABEL.  */
287 mark_jump_label (label_ref, insn, 0);
288 XEXP (label_note, 0) = XEXP (label_ref, 0);
289 JUMP_LABEL (insn) = XEXP (label_note, 0);
296 /* LOOP_START is a NOTE_INSN_LOOP_BEG note that is followed by an unconditional
297 jump. Assume that this unconditional jump is to the exit test code. If
298 the code is sufficiently simple, make a copy of it before INSN,
299 followed by a jump to the exit of the loop. Then delete the unconditional
300 jump after INSN.
302 Return 1 if we made the change, else 0.
304 This is only safe immediately after a regscan pass because it uses the
305 values of regno_first_uid and regno_last_uid. */
307 static int
308 duplicate_loop_exit_test (loop_start)
309 rtx loop_start;
311 rtx insn, set, reg, p, link;
312 rtx copy = 0, first_copy = 0;
313 int num_insns = 0;
314 rtx exitcode
315 = NEXT_INSN (JUMP_LABEL (next_nonnote_insn_in_loop (loop_start)));
316 rtx lastexit;
317 int max_reg = max_reg_num ();
318 rtx *reg_map = 0;
319 rtx loop_pre_header_label;
321 /* Scan the exit code. We do not perform this optimization if any insn:
323 is a CALL_INSN
324 is a CODE_LABEL
325 has a REG_RETVAL or REG_LIBCALL note (hard to adjust)
326 is a NOTE_INSN_LOOP_BEG because this means we have a nested loop
328 We also do not do this if we find an insn with ASM_OPERANDS. While
329 this restriction should not be necessary, copying an insn with
330 ASM_OPERANDS can confuse asm_noperands in some cases.
332 Also, don't do this if the exit code is more than 20 insns. */
334 for (insn = exitcode;
335 insn
336 && ! (GET_CODE (insn) == NOTE
337 && NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END);
338 insn = NEXT_INSN (insn))
340 switch (GET_CODE (insn))
342 case CODE_LABEL:
343 case CALL_INSN:
344 return 0;
345 case NOTE:
347 if (optimize < 2
348 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
349 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END))
350 /* If we were to duplicate this code, we would not move
351 the BLOCK notes, and so debugging the moved code would
352 be difficult. Thus, we only move the code with -O2 or
353 higher. */
354 return 0;
356 break;
357 case JUMP_INSN:
358 case INSN:
359 /* The code below would grossly mishandle REG_WAS_0 notes,
360 so get rid of them here. */
361 while ((p = find_reg_note (insn, REG_WAS_0, NULL_RTX)) != 0)
362 remove_note (insn, p);
363 if (++num_insns > 20
364 || find_reg_note (insn, REG_RETVAL, NULL_RTX)
365 || find_reg_note (insn, REG_LIBCALL, NULL_RTX))
366 return 0;
367 break;
368 default:
369 break;
373 /* Unless INSN is zero, we can do the optimization. */
374 if (insn == 0)
375 return 0;
377 lastexit = insn;
379 /* See if any insn sets a register only used in the loop exit code and
380 not a user variable. If so, replace it with a new register. */
381 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
382 if (GET_CODE (insn) == INSN
383 && (set = single_set (insn)) != 0
384 && ((reg = SET_DEST (set), GET_CODE (reg) == REG)
385 || (GET_CODE (reg) == SUBREG
386 && (reg = SUBREG_REG (reg), GET_CODE (reg) == REG)))
387 && REGNO (reg) >= FIRST_PSEUDO_REGISTER
388 && REGNO_FIRST_UID (REGNO (reg)) == INSN_UID (insn))
390 for (p = NEXT_INSN (insn); p != lastexit; p = NEXT_INSN (p))
391 if (REGNO_LAST_UID (REGNO (reg)) == INSN_UID (p))
392 break;
394 if (p != lastexit)
396 /* We can do the replacement. Allocate reg_map if this is the
397 first replacement we found. */
398 if (reg_map == 0)
399 reg_map = (rtx *) xcalloc (max_reg, sizeof (rtx));
401 REG_LOOP_TEST_P (reg) = 1;
403 reg_map[REGNO (reg)] = gen_reg_rtx (GET_MODE (reg));
/* Label that will be emitted immediately before the old LOOP_BEG note;
   loopback jumps in the copied exit test are redirected to it so the
   loop keeps a pre-header.  */
406 loop_pre_header_label = gen_label_rtx ();
408 /* Now copy each insn. */
409 for (insn = exitcode; insn != lastexit; insn = NEXT_INSN (insn))
411 switch (GET_CODE (insn))
413 case BARRIER:
414 copy = emit_barrier_before (loop_start);
415 break;
416 case NOTE:
417 /* Only copy line-number notes. */
418 if (NOTE_LINE_NUMBER (insn) >= 0)
420 copy = emit_note_before (NOTE_LINE_NUMBER (insn), loop_start);
421 NOTE_SOURCE_FILE (copy) = NOTE_SOURCE_FILE (insn);
423 break;
425 case INSN:
426 copy = emit_insn_before (copy_insn (PATTERN (insn)), loop_start);
427 if (reg_map)
428 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
430 mark_jump_label (PATTERN (copy), copy, 0);
431 INSN_SCOPE (copy) = INSN_SCOPE (insn);
433 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
434 make them. */
435 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
436 if (REG_NOTE_KIND (link) != REG_LABEL)
438 if (GET_CODE (link) == EXPR_LIST)
439 REG_NOTES (copy)
440 = copy_insn_1 (gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
441 XEXP (link, 0),
442 REG_NOTES (copy)));
443 else
444 REG_NOTES (copy)
445 = copy_insn_1 (gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
446 XEXP (link, 0),
447 REG_NOTES (copy)));
450 if (reg_map && REG_NOTES (copy))
451 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
452 break;
454 case JUMP_INSN:
455 copy = emit_jump_insn_before (copy_insn (PATTERN (insn)),
456 loop_start);
457 INSN_SCOPE (copy) = INSN_SCOPE (insn);
458 if (reg_map)
459 replace_regs (PATTERN (copy), reg_map, max_reg, 1);
460 mark_jump_label (PATTERN (copy), copy, 0);
461 if (REG_NOTES (insn))
463 REG_NOTES (copy) = copy_insn_1 (REG_NOTES (insn));
464 if (reg_map)
465 replace_regs (REG_NOTES (copy), reg_map, max_reg, 1);
468 /* Predict conditional jump that do make loop looping as taken.
469 Other jumps are probably exit conditions, so predict
470 them as untaken. */
471 if (any_condjump_p (copy))
473 rtx label = JUMP_LABEL (copy);
474 if (label)
476 /* The jump_insn after loop_start should be followed
477 by barrier and loopback label. */
478 if (prev_nonnote_insn (label)
479 && (prev_nonnote_insn (prev_nonnote_insn (label))
480 == next_nonnote_insn (loop_start)))
482 predict_insn_def (copy, PRED_LOOP_HEADER, TAKEN);
483 /* To keep pre-header, we need to redirect all loop
484 entrances before the LOOP_BEG note. */
485 redirect_jump (copy, loop_pre_header_label, 0);
487 else
488 predict_insn_def (copy, PRED_LOOP_HEADER, NOT_TAKEN);
491 break;
493 default:
494 abort ();
497 /* Record the first insn we copied. We need it so that we can
498 scan the copied insns for new pseudo registers. */
499 if (! first_copy)
500 first_copy = copy;
503 /* Now clean up by emitting a jump to the end label and deleting the jump
504 at the start of the loop. */
505 if (! copy || GET_CODE (copy) != BARRIER)
/* INSN is lastexit here; get_label_after finds or creates the label
   at the loop exit so the copied test can fall out of the loop.  */
507 copy = emit_jump_insn_before (gen_jump (get_label_after (insn)),
508 loop_start);
510 /* Record the first insn we copied. We need it so that we can
511 scan the copied insns for new pseudo registers. This may not
512 be strictly necessary since we should have copied at least one
513 insn above. But I am going to be safe. */
514 if (! first_copy)
515 first_copy = copy;
517 mark_jump_label (PATTERN (copy), copy, 0);
518 emit_barrier_before (loop_start);
521 emit_label_before (loop_pre_header_label, loop_start);
523 /* Now scan from the first insn we copied to the last insn we copied
524 (copy) for new pseudo registers. Do this after the code to jump to
525 the end label since that might create a new pseudo too. */
526 reg_scan_update (first_copy, copy, max_reg);
528 /* Mark the exit code as the virtual top of the converted loop. */
529 emit_note_before (NOTE_INSN_LOOP_VTOP, exitcode);
531 delete_related_insns (next_nonnote_insn (loop_start));
533 /* Clean up. */
534 if (reg_map)
535 free (reg_map);
537 return 1;
540 /* Move all block-beg, block-end, loop-beg, loop-cont, loop-vtop, loop-end,
541 notes between START and END out before START. START and END may be such
542 notes. Returns the values of the new starting and ending insns, which
543 may be different if the original ones were such notes.
544 Return true if there were only such notes and no real instructions. */
546 bool
547 squeeze_notes (startp, endp)
548 rtx* startp;
549 rtx* endp;
551 rtx start = *startp;
552 rtx end = *endp;
554 rtx insn;
555 rtx next;
556 rtx last = NULL;
557 rtx past_end = NEXT_INSN (end);
559 for (insn = start; insn != past_end; insn = next)
561 next = NEXT_INSN (insn);
562 if (GET_CODE (insn) == NOTE
563 && (NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_END
564 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_BLOCK_BEG
565 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_BEG
566 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_END
567 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_CONT
568 || NOTE_LINE_NUMBER (insn) == NOTE_INSN_LOOP_VTOP))
570 if (insn == start)
/* The note is at the front of the span: just shrink the span.  */
571 start = next;
572 else
/* Unlink INSN from its current position and splice it back in
   immediately before START, adjusting the four affected links.  */
574 rtx prev = PREV_INSN (insn);
575 PREV_INSN (insn) = PREV_INSN (start);
576 NEXT_INSN (insn) = start;
577 NEXT_INSN (PREV_INSN (insn)) = insn;
578 PREV_INSN (NEXT_INSN (insn)) = insn;
579 NEXT_INSN (prev) = next;
580 PREV_INSN (next) = prev;
583 else
584 last = insn;
587 /* There were no real instructions. */
588 if (start == past_end)
589 return true;
/* LAST is the last non-note insn seen; it becomes the new END.  */
591 end = last;
593 *startp = start;
594 *endp = end;
595 return false;
598 /* Return the label before INSN, or put a new label there. */
601 get_label_before (insn)
602 rtx insn;
604 rtx label;
606 /* Find an existing label at this point
607 or make a new one if there is none. */
608 label = prev_nonnote_insn (insn);
610 if (label == 0 || GET_CODE (label) != CODE_LABEL)
612 rtx prev = PREV_INSN (insn);
614 label = gen_label_rtx ();
615 emit_label_after (label, prev);
616 LABEL_NUSES (label) = 0;
618 return label;
621 /* Return the label after INSN, or put a new label there. */
624 get_label_after (insn)
625 rtx insn;
627 rtx label;
629 /* Find an existing label at this point
630 or make a new one if there is none. */
631 label = next_nonnote_insn (insn);
633 if (label == 0 || GET_CODE (label) != CODE_LABEL)
635 label = gen_label_rtx ();
636 emit_label_after (label, insn);
637 LABEL_NUSES (label) = 0;
639 return label;
642 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
643 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
644 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
645 know whether it's source is floating point or integer comparison. Machine
646 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
647 to help this function avoid overhead in these cases. */
648 enum rtx_code
649 reversed_comparison_code_parts (code, arg0, arg1, insn)
650 rtx insn, arg0, arg1;
651 enum rtx_code code;
653 enum machine_mode mode;
655 /* If this is not actually a comparison, we can't reverse it. */
656 if (GET_RTX_CLASS (code) != '<')
657 return UNKNOWN;
659 mode = GET_MODE (arg0);
660 if (mode == VOIDmode)
661 mode = GET_MODE (arg1);
663 /* First see if machine description supply us way to reverse the comparison.
664 Give it priority over everything else to allow machine description to do
665 tricks. */
666 #ifdef REVERSIBLE_CC_MODE
667 if (GET_MODE_CLASS (mode) == MODE_CC
668 && REVERSIBLE_CC_MODE (mode))
670 #ifdef REVERSE_CONDITION
671 return REVERSE_CONDITION (code, mode);
672 #endif
673 return reverse_condition (code);
675 #endif
677 /* Try a few special cases based on the comparison code. */
678 switch (code)
680 case GEU:
681 case GTU:
682 case LEU:
683 case LTU:
684 case NE:
685 case EQ:
686 /* It is always safe to reverse EQ and NE, even for the floating
687 point. Similary the unsigned comparisons are never used for
688 floating point so we can reverse them in the default way. */
689 return reverse_condition (code);
690 case ORDERED:
691 case UNORDERED:
692 case LTGT:
693 case UNEQ:
694 /* In case we already see unordered comparison, we can be sure to
695 be dealing with floating point so we don't need any more tests. */
696 return reverse_condition_maybe_unordered (code);
697 case UNLT:
698 case UNLE:
699 case UNGT:
700 case UNGE:
701 /* We don't have safe way to reverse these yet. */
702 return UNKNOWN;
703 default:
704 break;
/* The mode alone did not decide; for CC-mode or cc0 comparisons, walk
   backwards to the insn that computed the flags to find out what kind
   of comparison produced them.  */
707 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
709 rtx prev;
710 /* Try to search for the comparison to determine the real mode.
711 This code is expensive, but with sane machine description it
712 will be never used, since REVERSIBLE_CC_MODE will return true
713 in all cases. */
714 if (! insn)
715 return UNKNOWN;
/* Stop the backward scan at a CODE_LABEL: control flow may join there
   and the setter would be ambiguous.  */
717 for (prev = prev_nonnote_insn (insn);
718 prev != 0 && GET_CODE (prev) != CODE_LABEL;
719 prev = prev_nonnote_insn (prev))
721 rtx set = set_of (arg0, prev);
722 if (set && GET_CODE (set) == SET
723 && rtx_equal_p (SET_DEST (set), arg0))
725 rtx src = SET_SRC (set);
727 if (GET_CODE (src) == COMPARE)
729 rtx comparison = src;
730 arg0 = XEXP (src, 0);
731 mode = GET_MODE (arg0);
732 if (mode == VOIDmode)
733 mode = GET_MODE (XEXP (comparison, 1));
734 break;
736 /* We can get past reg-reg moves. This may be useful for model
737 of i387 comparisons that first move flag registers around. */
738 if (REG_P (src))
740 arg0 = src;
741 continue;
744 /* If register is clobbered in some ununderstandable way,
745 give up. */
746 if (set)
747 return UNKNOWN;
751 /* Test for an integer condition, or a floating-point comparison
752 in which NaNs can be ignored. */
753 if (GET_CODE (arg0) == CONST_INT
754 || (GET_MODE (arg0) != VOIDmode
755 && GET_MODE_CLASS (mode) != MODE_CC
756 && !HONOR_NANS (mode)))
757 return reverse_condition (code);
759 return UNKNOWN;
762 /* An wrapper around the previous function to take COMPARISON as rtx
763 expression. This simplifies many callers. */
764 enum rtx_code
765 reversed_comparison_code (comparison, insn)
766 rtx comparison, insn;
768 if (GET_RTX_CLASS (GET_CODE (comparison)) != '<')
769 return UNKNOWN;
770 return reversed_comparison_code_parts (GET_CODE (comparison),
771 XEXP (comparison, 0),
772 XEXP (comparison, 1), insn);
775 /* Given an rtx-code for a comparison, return the code for the negated
776 comparison. If no such code exists, return UNKNOWN.
778 WATCH OUT! reverse_condition is not safe to use on a jump that might
779 be acting on the results of an IEEE floating point comparison, because
780 of the special treatment of non-signaling nans in comparisons.
781 Use reversed_comparison_code instead. */
783 enum rtx_code
784 reverse_condition (code)
785 enum rtx_code code;
787 switch (code)
789 case EQ:
790 return NE;
791 case NE:
792 return EQ;
793 case GT:
794 return LE;
795 case GE:
796 return LT;
797 case LT:
798 return GE;
799 case LE:
800 return GT;
801 case GTU:
802 return LEU;
803 case GEU:
804 return LTU;
805 case LTU:
806 return GEU;
807 case LEU:
808 return GTU;
809 case UNORDERED:
810 return ORDERED;
811 case ORDERED:
812 return UNORDERED;
814 case UNLT:
815 case UNLE:
816 case UNGT:
817 case UNGE:
818 case UNEQ:
819 case LTGT:
820 return UNKNOWN;
822 default:
823 abort ();
827 /* Similar, but we're allowed to generate unordered comparisons, which
828 makes it safe for IEEE floating-point. Of course, we have to recognize
829 that the target will support them too... */
831 enum rtx_code
832 reverse_condition_maybe_unordered (code)
833 enum rtx_code code;
835 switch (code)
837 case EQ:
838 return NE;
839 case NE:
840 return EQ;
841 case GT:
842 return UNLE;
843 case GE:
844 return UNLT;
845 case LT:
846 return UNGE;
847 case LE:
848 return UNGT;
849 case LTGT:
850 return UNEQ;
851 case UNORDERED:
852 return ORDERED;
853 case ORDERED:
854 return UNORDERED;
855 case UNLT:
856 return GE;
857 case UNLE:
858 return GT;
859 case UNGT:
860 return LE;
861 case UNGE:
862 return LT;
863 case UNEQ:
864 return LTGT;
866 default:
867 abort ();
871 /* Similar, but return the code when two operands of a comparison are swapped.
872 This IS safe for IEEE floating-point. */
874 enum rtx_code
875 swap_condition (code)
876 enum rtx_code code;
878 switch (code)
880 case EQ:
881 case NE:
882 case UNORDERED:
883 case ORDERED:
884 case UNEQ:
885 case LTGT:
886 return code;
888 case GT:
889 return LT;
890 case GE:
891 return LE;
892 case LT:
893 return GT;
894 case LE:
895 return GE;
896 case GTU:
897 return LTU;
898 case GEU:
899 return LEU;
900 case LTU:
901 return GTU;
902 case LEU:
903 return GEU;
904 case UNLT:
905 return UNGT;
906 case UNLE:
907 return UNGE;
908 case UNGT:
909 return UNLT;
910 case UNGE:
911 return UNLE;
913 default:
914 abort ();
918 /* Given a comparison CODE, return the corresponding unsigned comparison.
919 If CODE is an equality comparison or already an unsigned comparison,
920 CODE is returned. */
922 enum rtx_code
923 unsigned_condition (code)
924 enum rtx_code code;
926 switch (code)
928 case EQ:
929 case NE:
930 case GTU:
931 case GEU:
932 case LTU:
933 case LEU:
934 return code;
936 case GT:
937 return GTU;
938 case GE:
939 return GEU;
940 case LT:
941 return LTU;
942 case LE:
943 return LEU;
945 default:
946 abort ();
950 /* Similarly, return the signed version of a comparison. */
952 enum rtx_code
953 signed_condition (code)
954 enum rtx_code code;
956 switch (code)
958 case EQ:
959 case NE:
960 case GT:
961 case GE:
962 case LT:
963 case LE:
964 return code;
966 case GTU:
967 return GT;
968 case GEU:
969 return GE;
970 case LTU:
971 return LT;
972 case LEU:
973 return LE;
975 default:
976 abort ();
980 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
981 truth of CODE1 implies the truth of CODE2. */
984 comparison_dominates_p (code1, code2)
985 enum rtx_code code1, code2;
987 /* UNKNOWN comparison codes can happen as a result of trying to revert
988 comparison codes.
989 They can't match anything, so we have to reject them here. */
990 if (code1 == UNKNOWN || code2 == UNKNOWN)
991 return 0;
993 if (code1 == code2)
994 return 1;
996 switch (code1)
998 case UNEQ:
999 if (code2 == UNLE || code2 == UNGE)
1000 return 1;
1001 break;
1003 case EQ:
1004 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
1005 || code2 == ORDERED)
1006 return 1;
1007 break;
1009 case UNLT:
1010 if (code2 == UNLE || code2 == NE)
1011 return 1;
1012 break;
1014 case LT:
1015 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1016 return 1;
1017 break;
1019 case UNGT:
1020 if (code2 == UNGE || code2 == NE)
1021 return 1;
1022 break;
1024 case GT:
1025 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
1026 return 1;
1027 break;
1029 case GE:
1030 case LE:
1031 if (code2 == ORDERED)
1032 return 1;
1033 break;
1035 case LTGT:
1036 if (code2 == NE || code2 == ORDERED)
1037 return 1;
1038 break;
1040 case LTU:
1041 if (code2 == LEU || code2 == NE)
1042 return 1;
1043 break;
1045 case GTU:
1046 if (code2 == GEU || code2 == NE)
1047 return 1;
1048 break;
1050 case UNORDERED:
1051 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
1052 || code2 == UNGE || code2 == UNGT)
1053 return 1;
1054 break;
1056 default:
1057 break;
1060 return 0;
1063 /* Return 1 if INSN is an unconditional jump and nothing else. */
1066 simplejump_p (insn)
1067 rtx insn;
1069 return (GET_CODE (insn) == JUMP_INSN
1070 && GET_CODE (PATTERN (insn)) == SET
1071 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
1072 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
1075 /* Return nonzero if INSN is a (possibly) conditional jump
1076 and nothing more.
1078 Use this function is deprecated, since we need to support combined
1079 branch and compare insns. Use any_condjump_p instead whenever possible. */
1082 condjump_p (insn)
1083 rtx insn;
1085 rtx x = PATTERN (insn);
1087 if (GET_CODE (x) != SET
1088 || GET_CODE (SET_DEST (x)) != PC)
1089 return 0;
1091 x = SET_SRC (x);
1092 if (GET_CODE (x) == LABEL_REF)
1093 return 1;
1094 else
1095 return (GET_CODE (x) == IF_THEN_ELSE
1096 && ((GET_CODE (XEXP (x, 2)) == PC
1097 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
1098 || GET_CODE (XEXP (x, 1)) == RETURN))
1099 || (GET_CODE (XEXP (x, 1)) == PC
1100 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
1101 || GET_CODE (XEXP (x, 2)) == RETURN))));
1103 return 0;
1106 /* Return nonzero if INSN is a (possibly) conditional jump inside a
1107 PARALLEL.
1109 Use this function is deprecated, since we need to support combined
1110 branch and compare insns. Use any_condjump_p instead whenever possible. */
1113 condjump_in_parallel_p (insn)
1114 rtx insn;
1116 rtx x = PATTERN (insn);
1118 if (GET_CODE (x) != PARALLEL)
1119 return 0;
1120 else
1121 x = XVECEXP (x, 0, 0);
1123 if (GET_CODE (x) != SET)
1124 return 0;
1125 if (GET_CODE (SET_DEST (x)) != PC)
1126 return 0;
1127 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
1128 return 1;
1129 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1130 return 0;
1131 if (XEXP (SET_SRC (x), 2) == pc_rtx
1132 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
1133 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
1134 return 1;
1135 if (XEXP (SET_SRC (x), 1) == pc_rtx
1136 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
1137 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
1138 return 1;
1139 return 0;
1142 /* Return set of PC, otherwise NULL. */
1145 pc_set (insn)
1146 rtx insn;
1148 rtx pat;
1149 if (GET_CODE (insn) != JUMP_INSN)
1150 return NULL_RTX;
1151 pat = PATTERN (insn);
1153 /* The set is allowed to appear either as the insn pattern or
1154 the first set in a PARALLEL. */
1155 if (GET_CODE (pat) == PARALLEL)
1156 pat = XVECEXP (pat, 0, 0);
1157 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
1158 return pat;
1160 return NULL_RTX;
1163 /* Return true when insn is an unconditional direct jump,
1164 possibly bundled inside a PARALLEL. */
1167 any_uncondjump_p (insn)
1168 rtx insn;
1170 rtx x = pc_set (insn);
1171 if (!x)
1172 return 0;
1173 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
1174 return 0;
1175 return 1;
1178 /* Return true when insn is a conditional jump. This function works for
1179 instructions containing PC sets in PARALLELs. The instruction may have
1180 various other effects so before removing the jump you must verify
1181 onlyjump_p.
1183 Note that unlike condjump_p it returns false for unconditional jumps. */
1186 any_condjump_p (insn)
1187 rtx insn;
1189 rtx x = pc_set (insn);
1190 enum rtx_code a, b;
1192 if (!x)
1193 return 0;
1194 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
1195 return 0;
1197 a = GET_CODE (XEXP (SET_SRC (x), 1));
1198 b = GET_CODE (XEXP (SET_SRC (x), 2));
1200 return ((b == PC && (a == LABEL_REF || a == RETURN))
1201 || (a == PC && (b == LABEL_REF || b == RETURN)));
1204 /* Return the label of a conditional jump. */
1207 condjump_label (insn)
1208 rtx insn;
1210 rtx x = pc_set (insn);
1212 if (!x)
1213 return NULL_RTX;
1214 x = SET_SRC (x);
1215 if (GET_CODE (x) == LABEL_REF)
1216 return x;
1217 if (GET_CODE (x) != IF_THEN_ELSE)
1218 return NULL_RTX;
1219 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
1220 return XEXP (x, 1);
1221 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
1222 return XEXP (x, 2);
1223 return NULL_RTX;
1226 /* Return true if INSN is a (possibly conditional) return insn. */
1228 static int
1229 returnjump_p_1 (loc, data)
1230 rtx *loc;
1231 void *data ATTRIBUTE_UNUSED;
1233 rtx x = *loc;
1235 return x && (GET_CODE (x) == RETURN
1236 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
1240 returnjump_p (insn)
1241 rtx insn;
1243 if (GET_CODE (insn) != JUMP_INSN)
1244 return 0;
1245 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
1248 /* Return true if INSN is a jump that only transfers control and
1249 nothing more. */
1252 onlyjump_p (insn)
1253 rtx insn;
1255 rtx set;
1257 if (GET_CODE (insn) != JUMP_INSN)
1258 return 0;
1260 set = single_set (insn);
1261 if (set == NULL)
1262 return 0;
1263 if (GET_CODE (SET_DEST (set)) != PC)
1264 return 0;
1265 if (side_effects_p (SET_SRC (set)))
1266 return 0;
1268 return 1;
1271 #ifdef HAVE_cc0
1273 /* Return nonzero if X is an RTX that only sets the condition codes
1274 and has no side effects. */
1277 only_sets_cc0_p (x)
1278 rtx x;
1281 if (! x)
1282 return 0;
1284 if (INSN_P (x))
1285 x = PATTERN (x);
1287 return sets_cc0_p (x) == 1 && ! side_effects_p (x);
/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.
   Return 0 if X does not set cc0 at all.  */

int
sets_cc0_p (x)
     rtx x;
{
  if (! x)
    return 0;

  /* Accept either an insn or a bare pattern.  */
  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      /* Scan every element: note whether cc0 is set and whether any
	 other SET is present (USEs and CLOBBERs don't count).  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
/* Follow any unconditional jump at LABEL;
   return the ultimate label reached by any such chain of jumps.
   If LABEL is not followed by a jump, return LABEL.
   If the chain loops or we can't find end, return LABEL,
   since that tells caller to avoid changing the insn.

   If RELOAD_COMPLETED is 0, we do not chain across a NOTE_INSN_LOOP_BEG or
   a USE or CLOBBER.  */

rtx
follow_jumps (label)
     rtx label;
{
  rtx insn;
  rtx next;
  rtx value = label;
  int depth;

  /* The loop condition both advances and filters: the next active insn
     after VALUE must be a simple unconditional jump (or a RETURN)
     followed by a BARRIER.  Chains are bounded at 10 hops so we never
     loop forever on malformed rtl.  */
  for (depth = 0;
       (depth < 10
	&& (insn = next_active_insn (value)) != 0
	&& GET_CODE (insn) == JUMP_INSN
	&& ((JUMP_LABEL (insn) != 0 && any_uncondjump_p (insn)
	     && onlyjump_p (insn))
	    || GET_CODE (PATTERN (insn)) == RETURN)
	&& (next = NEXT_INSN (insn))
	&& GET_CODE (next) == BARRIER);
       depth++)
    {
      /* Don't chain through the insn that jumps into a loop
	 from outside the loop,
	 since that would create multiple loop entry jumps
	 and prevent loop optimization.  */
      rtx tem;
      if (!reload_completed)
	for (tem = value; tem != insn; tem = NEXT_INSN (tem))
	  if (GET_CODE (tem) == NOTE
	      && (NOTE_LINE_NUMBER (tem) == NOTE_INSN_LOOP_BEG
		  /* ??? Optional.  Disables some optimizations, but makes
		     gcov output more accurate with -O.  */
		  || (flag_test_coverage && NOTE_LINE_NUMBER (tem) > 0)))
	    return value;

      /* If we have found a cycle, make the insn jump to itself.  */
      if (JUMP_LABEL (insn) == label)
	return label;

      /* Don't chain into the target of a tablejump: a dispatch table is
	 not a normal fall-through destination.  */
      tem = next_active_insn (JUMP_LABEL (insn));
      if (tem && (GET_CODE (PATTERN (tem)) == ADDR_VEC
		  || GET_CODE (PATTERN (tem)) == ADDR_DIFF_VEC))
	break;

      value = JUMP_LABEL (insn);
    }
  if (depth == 10)
    return label;
  return value;
}
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  CROSS_JUMP tells us
   that loop-optimization is done with.  */

void
mark_jump_label (x, insn, in_mem)
     rtx x;
     rtx insn;
     int in_mem;  /* Nonzero while walking inside a MEM.  */
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    /* Codes that cannot contain a label: stop here.  */
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SYMBOL_REF:
      if (!in_mem)
	return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
	mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
	rtx label = XEXP (x, 0);

	/* Ignore remaining references to unreachable labels that
	   have been deleted.  */
	if (GET_CODE (label) == NOTE
	    && NOTE_LINE_NUMBER (label) == NOTE_INSN_DELETED_LABEL)
	  break;

	if (GET_CODE (label) != CODE_LABEL)
	  abort ();

	/* Ignore references to labels of containing functions.  */
	if (LABEL_REF_NONLOCAL_P (x))
	  break;

	XEXP (x, 0) = label;
	/* Don't count references from insns that are themselves dead.  */
	if (! insn || ! INSN_DELETED_P (insn))
	  ++LABEL_NUSES (label);

	if (insn)
	  {
	    if (GET_CODE (insn) == JUMP_INSN)
	      JUMP_LABEL (insn) = label;
	    else
	      {
		/* Add a REG_LABEL note for LABEL unless there already
		   is one.  All uses of a label, except for labels
		   that are the targets of jumps, must have a
		   REG_LABEL note.  */
		if (! find_reg_note (insn, REG_LABEL, label))
		  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
							REG_NOTES (insn));
	      }
	  }
	return;
      }

  /* Do walk the labels in a vector, but not the first operand of an
     ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
	{
	  int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

	  for (i = 0; i < XVECLEN (x, eltnum); i++)
	    mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
	}
      return;

    default:
      break;
    }

  /* Generic recursion over the remaining operands of X.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
	}
    }
}
1504 /* If all INSN does is set the pc, delete it,
1505 and delete the insn that set the condition codes for it
1506 if that's what the previous thing was. */
1508 void
1509 delete_jump (insn)
1510 rtx insn;
1512 rtx set = single_set (insn);
1514 if (set && GET_CODE (SET_DEST (set)) == PC)
1515 delete_computation (insn);
1518 /* Verify INSN is a BARRIER and delete it. */
1520 void
1521 delete_barrier (insn)
1522 rtx insn;
1524 if (GET_CODE (insn) != BARRIER)
1525 abort ();
1527 delete_insn (insn);
/* Recursively delete prior insns that compute the value (used only by INSN
   which the caller is deleting) stored in the register mentioned by NOTE
   which is a REG_DEAD note associated with INSN.

   Walks backwards over ordinary INSNs and CALL_INSNs; stops at anything
   it cannot reason about (labels, jumps, complex calls, SEQUENCEs).  */

static void
delete_prior_computation (note, insn)
     rtx note;
     rtx insn;
{
  rtx our_prev;
  rtx reg = XEXP (note, 0);  /* The register that dies in INSN.  */

  for (our_prev = prev_nonnote_insn (insn);
       our_prev && (GET_CODE (our_prev) == INSN
		    || GET_CODE (our_prev) == CALL_INSN);
       our_prev = prev_nonnote_insn (our_prev))
    {
      rtx pat = PATTERN (our_prev);

      /* If we reach a CALL which is not calling a const function
	 or the callee pops the arguments, then give up.  */
      if (GET_CODE (our_prev) == CALL_INSN
	  && (! CONST_OR_PURE_CALL_P (our_prev)
	      || GET_CODE (pat) != SET || GET_CODE (SET_SRC (pat)) != CALL))
	break;

      /* If we reach a SEQUENCE, it is too complex to try to
	 do anything with it, so give up.  We can be run during
	 and after reorg, so SEQUENCE rtl can legitimately show
	 up here.  */
      if (GET_CODE (pat) == SEQUENCE)
	break;

      if (GET_CODE (pat) == USE
	  && GET_CODE (XEXP (pat, 0)) == INSN)
	/* reorg creates USEs that look like this.  We leave them
	   alone because reorg needs them for its own purposes.  */
	break;

      if (reg_set_p (reg, pat))
	{
	  /* A non-call insn with side effects (e.g. auto-increment)
	     cannot be deleted safely.  */
	  if (side_effects_p (pat) && GET_CODE (our_prev) != CALL_INSN)
	    break;

	  if (GET_CODE (pat) == PARALLEL)
	    {
	      /* If we find a SET of something else, we can't
		 delete the insn.  */

	      int i;

	      for (i = 0; i < XVECLEN (pat, 0); i++)
		{
		  rtx part = XVECEXP (pat, 0, i);

		  if (GET_CODE (part) == SET
		      && SET_DEST (part) != reg)
		    break;
		}

	      if (i == XVECLEN (pat, 0))
		delete_computation (our_prev);
	    }
	  else if (GET_CODE (pat) == SET
		   && GET_CODE (SET_DEST (pat)) == REG)
	    {
	      /* Compute the hard-register ranges covered by the SET
		 destination and by the dying register; pseudos count
		 as a single register.  */
	      int dest_regno = REGNO (SET_DEST (pat));
	      int dest_endregno
		= (dest_regno
		   + (dest_regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (dest_regno,
					  GET_MODE (SET_DEST (pat))) : 1));
	      int regno = REGNO (reg);
	      int endregno
		= (regno
		   + (regno < FIRST_PSEUDO_REGISTER
		      ? HARD_REGNO_NREGS (regno, GET_MODE (reg)) : 1));

	      /* The SET is entirely within the dying range: the whole
		 computation is dead.  */
	      if (dest_regno >= regno
		  && dest_endregno <= endregno)
		delete_computation (our_prev);

	      /* We may have a multi-word hard register and some, but not
		 all, of the words of the register are needed in subsequent
		 insns.  Write REG_UNUSED notes for those parts that were not
		 needed.  */
	      else if (dest_regno <= regno
		       && dest_endregno >= endregno)
		{
		  int i;

		  REG_NOTES (our_prev)
		    = gen_rtx_EXPR_LIST (REG_UNUSED, reg,
					 REG_NOTES (our_prev));

		  /* If every word of the destination is now unused, the
		     insn can go too.  */
		  for (i = dest_regno; i < dest_endregno; i++)
		    if (! find_regno_note (our_prev, REG_UNUSED, i))
		      break;

		  if (i == dest_endregno)
		    delete_computation (our_prev);
		}
	    }

	  break;
	}

      /* If PAT references the register that dies here, it is an
	 additional use.  Hence any prior SET isn't dead.  However, this
	 insn becomes the new place for the REG_DEAD note.  */
      if (reg_overlap_mentioned_p (reg, pat))
	{
	  XEXP (note, 1) = REG_NOTES (our_prev);
	  REG_NOTES (our_prev) = note;
	  break;
	}
    }
}
/* Delete INSN and recursively delete insns that compute values used only
   by INSN.  This uses the REG_DEAD notes computed during flow analysis.
   If we are running before flow.c, we need do nothing since flow.c will
   delete dead code.  We also can't know if the registers being used are
   dead or not at this point.

   Otherwise, look at all our REG_DEAD notes.  If a previous insn does
   nothing other than set a register that dies in this insn, we can delete
   that insn as well.

   On machines with CC0, if CC0 is used in this insn, we may be able to
   delete the insn that set it.  */

static void
delete_computation (insn)
     rtx insn;
{
  rtx note, next;

#ifdef HAVE_cc0
  if (reg_referenced_p (cc0_rtx, PATTERN (insn)))
    {
      rtx prev = prev_nonnote_insn (insn);
      /* We assume that at this stage
	 CC's are always set explicitly
	 and always immediately before the jump that
	 will use them.  So if the previous insn
	 exists to set the CC's, delete it
	 (unless it performs auto-increments, etc.).  */
      if (prev && GET_CODE (prev) == INSN
	  && sets_cc0_p (PATTERN (prev)))
	{
	  /* sets_cc0_p > 0 means it sets cc0 and nothing else.  */
	  if (sets_cc0_p (PATTERN (prev)) > 0
	      && ! side_effects_p (PATTERN (prev)))
	    delete_computation (prev);
	  else
	    /* Otherwise, show that cc0 won't be used.  */
	    REG_NOTES (prev) = gen_rtx_EXPR_LIST (REG_UNUSED,
						  cc0_rtx, REG_NOTES (prev));
	}
    }
#endif

  /* NEXT is captured before delete_prior_computation may move the
     note to another insn's note list.  */
  for (note = REG_NOTES (insn); note; note = next)
    {
      next = XEXP (note, 1);

      if (REG_NOTE_KIND (note) != REG_DEAD
	  /* Verify that the REG_NOTE is legitimate.  */
	  || GET_CODE (XEXP (note, 0)) != REG)
	continue;

      delete_prior_computation (note, insn);
    }

  delete_related_insns (insn);
}
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Usage of this instruction is deprecated.  Use delete_insn instead and
   subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (insn)
     rtx insn;
{
  int was_code_label = (GET_CODE (insn) == CODE_LABEL);
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && GET_CODE (next) == BARRIER)
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (GET_CODE (insn) == JUMP_INSN && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
	{
	  /* This can delete NEXT or PREV,
	     either directly if NEXT is JUMP_LABEL (INSN),
	     or indirectly through more levels of jumps.  */
	  delete_related_insns (lab);

	  /* I feel a little doubtful about this loop,
	     but I see no clean and sure alternative way
	     to find the first insn after INSN that is not now deleted.
	     I hope this works.  */
	  while (next && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  return next;
	}
      else if ((lab_next = next_nonnote_insn (lab)) != NULL
	       && GET_CODE (lab_next) == JUMP_INSN
	       && (GET_CODE (PATTERN (lab_next)) == ADDR_VEC
		   || GET_CODE (PATTERN (lab_next)) == ADDR_DIFF_VEC))
	{
	  /* If we're deleting the tablejump, delete the dispatch table.
	     We may not be able to kill the label immediately preceding
	     just yet, as it might be referenced in code leading up to
	     the tablejump.  */
	  delete_related_insns (lab_next);
	}
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (GET_CODE (insn) == JUMP_INSN
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
	  || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      /* Delete every table entry label that has no remaining uses.  */
      for (i = 0; i < len; i++)
	if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
	  delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
	next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (GET_CODE (insn) == INSN || GET_CODE (insn) == CALL_INSN)
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
	  /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
	  && GET_CODE (XEXP (note, 0)) == CODE_LABEL)
	if (LABEL_NUSES (XEXP (note, 0)) == 0)
	  delete_related_insns (XEXP (note, 0));

  /* Skip back over deleted insns and notes to find the live
     predecessor of INSN.  */
  while (prev && (INSN_DELETED_P (prev) || GET_CODE (prev) == NOTE))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && GET_CODE (NEXT_INSN (insn)) == JUMP_INSN
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
	  || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && GET_CODE (prev) == BARRIER)
    {
      RTX_CODE code;
      while (next != 0
	     && (GET_RTX_CLASS (code = GET_CODE (next)) == 'i'
		 || code == NOTE || code == BARRIER
		 || (code == CODE_LABEL && INSN_DELETED_P (next))))
	{
	  if (code == NOTE
	      && NOTE_LINE_NUMBER (next) != NOTE_INSN_FUNCTION_END)
	    next = NEXT_INSN (next);
	  /* Keep going past other deleted labels to delete what follows.  */
	  else if (code == CODE_LABEL && INSN_DELETED_P (next))
	    next = NEXT_INSN (next);
	  else
	    /* Note: if this deletes a jump, it can cause more
	       deletion of unreachable code, after a different label.
	       As long as the value from this recursive call is correct,
	       this invocation functions correctly.  */
	    next = delete_related_insns (next);
	}
    }

  return next;
}
1842 /* Advance from INSN till reaching something not deleted
1843 then return that. May return INSN itself. */
1846 next_nondeleted_insn (insn)
1847 rtx insn;
1849 while (INSN_DELETED_P (insn))
1850 insn = NEXT_INSN (insn);
1851 return insn;
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (from, to)
     rtx from, to;
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      /* NOTEs are kept in the chain; only real insns are unlinked.  */
      if (GET_CODE (insn) != NOTE)
	{
	  INSN_DELETED_P (insn) = 1;

	  /* Patch this insn out of the chain.  */
	  /* We don't do this all at once, because we
	     must preserve all NOTEs.  */
	  if (prev)
	    NEXT_INSN (prev) = next;

	  if (next)
	    PREV_INSN (next) = prev;
	}

      if (insn == to)
	break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
/* We have determined that AVOIDED_INSN is never reached, and are
   about to delete it.  If the insn chain between AVOIDED_INSN and
   FINISH contains more than one line from the current function, and
   contains at least one operation, print a warning if the user asked
   for it.  If FINISH is NULL, look between AVOIDED_INSN and a LABEL.

   CSE and inlining can duplicate insns, so it's possible to get
   spurious warnings from this.  */

void
never_reached_warning (avoided_insn, finish)
     rtx avoided_insn, finish;
{
  rtx insn;
  rtx a_line_note = NULL;	/* First line-number note in the region.  */
  int two_avoided_lines = 0, contains_insn = 0, reached_end = 0;

  if (!warn_notreached)
    return;

  /* Scan forwards, looking at LINE_NUMBER notes, until we hit a LABEL
     in case FINISH is NULL, otherwise until we run out of insns.  */

  for (insn = avoided_insn; insn != NULL; insn = NEXT_INSN (insn))
    {
      if ((finish == NULL && GET_CODE (insn) == CODE_LABEL)
	  || GET_CODE (insn) == BARRIER)
	break;

      if (GET_CODE (insn) == NOTE		/* A line number note?  */
	  && NOTE_LINE_NUMBER (insn) >= 0)
	{
	  if (a_line_note == NULL)
	    a_line_note = insn;
	  else
	    /* Remember whether a second, different source line shows up.  */
	    two_avoided_lines |= (NOTE_LINE_NUMBER (a_line_note)
				  != NOTE_LINE_NUMBER (insn));
	}
      else if (INSN_P (insn))
	{
	  /* Stop on the first insn past FINISH, or on any insn that
	     precedes the first line note.  */
	  if (reached_end || a_line_note == NULL)
	    break;
	  contains_insn = 1;
	}

      if (insn == finish)
	reached_end = 1;
    }
  if (two_avoided_lines && contains_insn)
    warning_with_file_and_line (NOTE_SOURCE_FILE (a_line_note),
				NOTE_LINE_NUMBER (a_line_note),
				"will never be executed");
}
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group
   (validate_change with in_group = 1); the caller applies or cancels
   the group.  */

static void
redirect_exp_1 (loc, olabel, nlabel, insn)
     rtx *loc;
     rtx olabel, nlabel;
     rtx insn;
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
	{
	  rtx n;
	  if (nlabel)
	    n = gen_rtx_LABEL_REF (VOIDmode, nlabel);
	  else
	    /* Null NLABEL means turn the jump into a return.  */
	    n = gen_rtx_RETURN (VOIDmode);

	  validate_change (insn, loc, n, 1);
	  return;
	}
    }
  else if (code == RETURN && olabel == 0)
    {
      /* Convert a return back into a jump to NLABEL.  At the top level
	 of the pattern we must produce a complete SET of the pc.  */
      x = gen_rtx_LABEL_REF (VOIDmode, nlabel);
      if (loc == &PATTERN (insn))
	x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  /* (set (pc) (label_ref OLABEL)) with null NLABEL becomes a bare
     RETURN pattern.  */
  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  /* Recurse over the remaining operands.  */
  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
	{
	  int j;
	  for (j = 0; j < XVECLEN (x, i); j++)
	    redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
	}
    }
}
2008 /* Similar, but apply the change group and report success or failure. */
2010 static int
2011 redirect_exp (olabel, nlabel, insn)
2012 rtx olabel, nlabel;
2013 rtx insn;
2015 rtx *loc;
2017 if (GET_CODE (PATTERN (insn)) == PARALLEL)
2018 loc = &XVECEXP (PATTERN (insn), 0, 0);
2019 else
2020 loc = &PATTERN (insn);
2022 redirect_exp_1 (loc, olabel, nlabel, insn);
2023 if (num_validated_changes () == 0)
2024 return 0;
2026 return apply_change_group ();
2029 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
2030 the modifications into the change group. Return false if we did
2031 not see how to do that. */
2034 redirect_jump_1 (jump, nlabel)
2035 rtx jump, nlabel;
2037 int ochanges = num_validated_changes ();
2038 rtx *loc;
2040 if (GET_CODE (PATTERN (jump)) == PARALLEL)
2041 loc = &XVECEXP (PATTERN (jump), 0, 0);
2042 else
2043 loc = &PATTERN (jump);
2045 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
2046 return num_validated_changes () > ochanges;
/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (jump, nlabel, delete_unused)
     rtx jump, nlabel;
     int delete_unused;
{
  rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_exp (olabel, nlabel, jump))
    return 0;

  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* If we're eliding the jump over exception cleanups at the end of a
     function, move the function end note so that -Wreturn-type works.  */
  if (olabel && nlabel
      && NEXT_INSN (olabel)
      && GET_CODE (NEXT_INSN (olabel)) == NOTE
      && NOTE_LINE_NUMBER (NEXT_INSN (olabel)) == NOTE_INSN_FUNCTION_END)
    emit_note_after (NOTE_INSN_FUNCTION_END, nlabel);

  /* Decrement the old label's use count; only delete it when requested
     and actually present in the insn stream.  */
  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);

  return 1;
}
/* Invert the jump condition of rtx X contained in jump insn, INSN.
   Accrue the modifications into the change group; the caller applies
   or cancels them.  Aborts if INSN has no PC set or the source is not
   an IF_THEN_ELSE.  */

static void
invert_exp_1 (insn)
     rtx insn;
{
  RTX_CODE code;
  rtx x = pc_set (insn);

  if (!x)
    abort ();
  x = SET_SRC (x);

  code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return;
	}

      /* Fall back to swapping the two arms.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
    }
  else
    abort ();
}
2139 /* Invert the jump condition of conditional jump insn, INSN.
2141 Return 1 if we can do so, 0 if we cannot find a way to do so that
2142 matches a pattern. */
2144 static int
2145 invert_exp (insn)
2146 rtx insn;
2148 invert_exp_1 (insn);
2149 if (num_validated_changes () == 0)
2150 return 0;
2152 return apply_change_group ();
2155 /* Invert the condition of the jump JUMP, and make it jump to label
2156 NLABEL instead of where it jumps now. Accrue changes into the
2157 change group. Return false if we didn't see how to perform the
2158 inversion and redirection. */
2161 invert_jump_1 (jump, nlabel)
2162 rtx jump, nlabel;
2164 int ochanges;
2166 ochanges = num_validated_changes ();
2167 invert_exp_1 (jump);
2168 if (num_validated_changes () == ochanges)
2169 return 0;
2171 return redirect_jump_1 (jump, nlabel);
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (jump, nlabel, delete_unused)
     rtx jump, nlabel;
     int delete_unused;
{
  /* We have to either invert the condition and change the label or
     do neither.  Either operation could fail.  We first try to invert
     the jump.  If that succeeds, we try changing the label.  If that fails,
     we invert the jump back to what it was.  */

  if (! invert_exp (jump))
    return 0;

  if (redirect_jump (jump, nlabel, delete_unused))
    {
      /* The branch was inverted, so its probability notes must be
	 flipped as well.  */
      invert_br_probabilities (jump);

      return 1;
    }

  /* Redirection failed: undo the inversion.  */
  if (! invert_exp (jump))
    /* This should just be putting it back the way it was.  */
    abort ();

  return 0;
}
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.

   ??? Addition is not commutative on the PA due to the weird implicit
   space register selection rules for memory addresses.  Therefore, we
   don't consider a + b == b + a.

   We could/should make this test a little tighter.  Possibly only
   disabling it on the PA via some backend macro or only disabling this
   case when the PLUS is inside a MEM.  */

int
rtx_renumbered_equal_p (x, y)
     rtx x, y;
{
  int i;
  RTX_CODE code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* REG/SUBREG-of-REG pairs: reduce both sides to a (renumbered hard
     register, byte offset) pair and compare those.  */
  if ((code == REG || (code == SUBREG && GET_CODE (SUBREG_REG (x)) == REG))
      && (GET_CODE (y) == REG || (GET_CODE (y) == SUBREG
				  && GET_CODE (SUBREG_REG (y)) == REG)))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      /* Fold the subreg byte offset into the hard regno.  */
	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
					   GET_MODE (SUBREG_REG (x)),
					   byte_x,
					   GET_MODE (x));
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
					   GET_MODE (SUBREG_REG (y)),
					   byte_y,
					   GET_MODE (y));
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CONST_INT:
      return INTVAL (x) == INTVAL (y);

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      /* Symbol strings are shared, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.

     ??? Don't consider PLUS a commutative operator; see comments above.  */
  if ((code == EQ || code == NE || GET_RTX_CLASS (code) == 'c')
      && code != PLUS)
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (GET_RTX_CLASS (code) == '<' || GET_RTX_CLASS (code) == '2')
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (GET_RTX_CLASS (code) == '1')
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  abort ();
	}
    }
  return 1;
}
2403 /* If X is a hard register or equivalent to one or a subregister of one,
2404 return the hard register number. If X is a pseudo register that was not
2405 assigned a hard register, return the pseudo register number. Otherwise,
2406 return -1. Any rtx is valid for X. */
2409 true_regnum (x)
2410 rtx x;
2412 if (GET_CODE (x) == REG)
2414 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
2415 return reg_renumber[REGNO (x)];
2416 return REGNO (x);
2418 if (GET_CODE (x) == SUBREG)
2420 int base = true_regnum (SUBREG_REG (x));
2421 if (base >= 0 && base < FIRST_PSEUDO_REGISTER)
2422 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
2423 GET_MODE (SUBREG_REG (x)),
2424 SUBREG_BYTE (x), GET_MODE (x));
2426 return -1;
2429 /* Return regno of the register REG and handle subregs too. */
2430 unsigned int
2431 reg_or_subregno (reg)
2432 rtx reg;
2434 if (REG_P (reg))
2435 return REGNO (reg);
2436 if (GET_CODE (reg) == SUBREG)
2437 return REGNO (SUBREG_REG (reg));
2438 abort ();