EnumSet*.class: Regenerate
[official-gcc.git] / gcc / jump.c
blob959b79af54b5866a7a313b3e46beaec9052245d5
1 /* Optimize jump instructions, for GNU compiler.
2 Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997
3 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
22 /* This is the pathetic reminder of old fame of the jump-optimization pass
23 of the compiler. Now it contains basically a set of utility functions to
24 operate with jumps.
26 Each CODE_LABEL has a count of the times it is used
27 stored in the LABEL_NUSES internal field, and each JUMP_INSN
28 has one label that it refers to stored in the
29 JUMP_LABEL internal field. With this we can detect labels that
30 become unused because of the deletion of all the jumps that
31 formerly used them. The JUMP_LABEL info is sometimes looked
32 at by later passes.
34 The subroutines redirect_jump and invert_jump are used
35 from other passes as well. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "rtl.h"
42 #include "tm_p.h"
43 #include "flags.h"
44 #include "hard-reg-set.h"
45 #include "regs.h"
46 #include "insn-config.h"
47 #include "insn-attr.h"
48 #include "recog.h"
49 #include "function.h"
50 #include "expr.h"
51 #include "real.h"
52 #include "except.h"
53 #include "diagnostic.h"
54 #include "toplev.h"
55 #include "reload.h"
56 #include "predict.h"
57 #include "timevar.h"
58 #include "tree-pass.h"
59 #include "target.h"
61 /* Optimize jump y; x: ... y: jumpif... x?
62 Don't know if it is worth bothering with. */
63 /* Optimize two cases of conditional jump to conditional jump?
64 This can never delete any instruction or make anything dead,
65 or even change what is live at any point.
66 So perhaps let combiner do it. */
68 static void init_label_info (rtx);
69 static void mark_all_labels (rtx);
70 static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
71 static int invert_exp_1 (rtx, rtx);
72 static int returnjump_p_1 (rtx *, void *);
74 /* Alternate entry into the jump optimizer. This entry point only rebuilds
75 the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
76 instructions. */
77 void
78 rebuild_jump_labels (rtx f)
80 rtx insn;
82 timevar_push (TV_REBUILD_JUMP);
83 init_label_info (f);
84 mark_all_labels (f);
86 /* Keep track of labels used from static data; we don't track them
87 closely enough to delete them here, so make sure their reference
88 count doesn't drop to zero. */
90 for (insn = forced_labels; insn; insn = XEXP (insn, 1))
91 if (LABEL_P (XEXP (insn, 0)))
92 LABEL_NUSES (XEXP (insn, 0))++;
93 timevar_pop (TV_REBUILD_JUMP);
96 /* Some old code expects exactly one BARRIER as the NEXT_INSN of a
97 non-fallthru insn. This is not generally true, as multiple barriers
98 may have crept in, or the BARRIER may be separated from the last
99 real insn by one or more NOTEs.
101 This simple pass moves barriers and removes duplicates so that the
102 old code is happy.
104 unsigned int
105 cleanup_barriers (void)
107 rtx insn, next, prev;
108 for (insn = get_insns (); insn; insn = next)
110 next = NEXT_INSN (insn);
111 if (BARRIER_P (insn))
113 prev = prev_nonnote_insn (insn);
114 if (BARRIER_P (prev))
115 delete_insn (insn);
116 else if (prev != PREV_INSN (insn))
117 reorder_insns (insn, insn, prev);
120 return 0;
123 struct tree_opt_pass pass_cleanup_barriers =
125 "barriers", /* name */
126 NULL, /* gate */
127 cleanup_barriers, /* execute */
128 NULL, /* sub */
129 NULL, /* next */
130 0, /* static_pass_number */
131 0, /* tv_id */
132 0, /* properties_required */
133 0, /* properties_provided */
134 0, /* properties_destroyed */
135 0, /* todo_flags_start */
136 TODO_dump_func, /* todo_flags_finish */
137 0 /* letter */
141 /* Initialize LABEL_NUSES and JUMP_LABEL fields. Delete any REG_LABEL
142 notes whose labels don't occur in the insn any more. Returns the
143 largest INSN_UID found. */
144 static void
145 init_label_info (rtx f)
147 rtx insn;
149 for (insn = f; insn; insn = NEXT_INSN (insn))
150 if (LABEL_P (insn))
151 LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
152 else if (JUMP_P (insn))
153 JUMP_LABEL (insn) = 0;
154 else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
156 rtx note, next;
158 for (note = REG_NOTES (insn); note; note = next)
160 next = XEXP (note, 1);
161 if (REG_NOTE_KIND (note) == REG_LABEL
162 && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
163 remove_note (insn, note);
168 /* Mark the label each jump jumps to.
169 Combine consecutive labels, and count uses of labels. */
171 static void
172 mark_all_labels (rtx f)
174 rtx insn;
176 for (insn = f; insn; insn = NEXT_INSN (insn))
177 if (INSN_P (insn))
179 mark_jump_label (PATTERN (insn), insn, 0);
180 if (! INSN_DELETED_P (insn) && JUMP_P (insn))
182 /* When we know the LABEL_REF contained in a REG used in
183 an indirect jump, we'll have a REG_LABEL note so that
184 flow can tell where it's going. */
185 if (JUMP_LABEL (insn) == 0)
187 rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
188 if (label_note)
190 /* But a LABEL_REF around the REG_LABEL note, so
191 that we can canonicalize it. */
192 rtx label_ref = gen_rtx_LABEL_REF (Pmode,
193 XEXP (label_note, 0));
195 mark_jump_label (label_ref, insn, 0);
196 XEXP (label_note, 0) = XEXP (label_ref, 0);
197 JUMP_LABEL (insn) = XEXP (label_note, 0);
203 /* If we are in cfglayout mode, there may be non-insns between the
204 basic blocks. If those non-insns represent tablejump data, they
205 contain label references that we must record. */
206 if (current_ir_type () == IR_RTL_CFGLAYOUT)
208 basic_block bb;
209 rtx insn;
210 FOR_EACH_BB (bb)
212 for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
213 if (INSN_P (insn))
215 gcc_assert (JUMP_TABLE_DATA_P (insn));
216 mark_jump_label (PATTERN (insn), insn, 0);
219 for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
220 if (INSN_P (insn))
222 gcc_assert (JUMP_TABLE_DATA_P (insn));
223 mark_jump_label (PATTERN (insn), insn, 0);
229 /* Given a comparison (CODE ARG0 ARG1), inside an insn, INSN, return a code
230 of reversed comparison if it is possible to do so. Otherwise return UNKNOWN.
231 UNKNOWN may be returned in case we are having CC_MODE compare and we don't
232 know whether it's source is floating point or integer comparison. Machine
233 description should define REVERSIBLE_CC_MODE and REVERSE_CONDITION macros
234 to help this function avoid overhead in these cases. */
235 enum rtx_code
236 reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
237 const_rtx arg1, const_rtx insn)
239 enum machine_mode mode;
241 /* If this is not actually a comparison, we can't reverse it. */
242 if (GET_RTX_CLASS (code) != RTX_COMPARE
243 && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
244 return UNKNOWN;
246 mode = GET_MODE (arg0);
247 if (mode == VOIDmode)
248 mode = GET_MODE (arg1);
250 /* First see if machine description supplies us way to reverse the
251 comparison. Give it priority over everything else to allow
252 machine description to do tricks. */
253 if (GET_MODE_CLASS (mode) == MODE_CC
254 && REVERSIBLE_CC_MODE (mode))
256 #ifdef REVERSE_CONDITION
257 return REVERSE_CONDITION (code, mode);
258 #endif
259 return reverse_condition (code);
262 /* Try a few special cases based on the comparison code. */
263 switch (code)
265 case GEU:
266 case GTU:
267 case LEU:
268 case LTU:
269 case NE:
270 case EQ:
271 /* It is always safe to reverse EQ and NE, even for the floating
272 point. Similarly the unsigned comparisons are never used for
273 floating point so we can reverse them in the default way. */
274 return reverse_condition (code);
275 case ORDERED:
276 case UNORDERED:
277 case LTGT:
278 case UNEQ:
279 /* In case we already see unordered comparison, we can be sure to
280 be dealing with floating point so we don't need any more tests. */
281 return reverse_condition_maybe_unordered (code);
282 case UNLT:
283 case UNLE:
284 case UNGT:
285 case UNGE:
286 /* We don't have safe way to reverse these yet. */
287 return UNKNOWN;
288 default:
289 break;
292 if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
294 const_rtx prev;
295 /* Try to search for the comparison to determine the real mode.
296 This code is expensive, but with sane machine description it
297 will be never used, since REVERSIBLE_CC_MODE will return true
298 in all cases. */
299 if (! insn)
300 return UNKNOWN;
302 for (prev = const_prev_nonnote_insn (insn);
303 prev != 0 && !LABEL_P (prev);
304 prev = const_prev_nonnote_insn (prev))
306 const_rtx set = set_of (arg0, prev);
307 if (set && GET_CODE (set) == SET
308 && rtx_equal_p (SET_DEST (set), arg0))
310 rtx src = SET_SRC (set);
312 if (GET_CODE (src) == COMPARE)
314 rtx comparison = src;
315 arg0 = XEXP (src, 0);
316 mode = GET_MODE (arg0);
317 if (mode == VOIDmode)
318 mode = GET_MODE (XEXP (comparison, 1));
319 break;
321 /* We can get past reg-reg moves. This may be useful for model
322 of i387 comparisons that first move flag registers around. */
323 if (REG_P (src))
325 arg0 = src;
326 continue;
329 /* If register is clobbered in some ununderstandable way,
330 give up. */
331 if (set)
332 return UNKNOWN;
336 /* Test for an integer condition, or a floating-point comparison
337 in which NaNs can be ignored. */
338 if (GET_CODE (arg0) == CONST_INT
339 || (GET_MODE (arg0) != VOIDmode
340 && GET_MODE_CLASS (mode) != MODE_CC
341 && !HONOR_NANS (mode)))
342 return reverse_condition (code);
344 return UNKNOWN;
347 /* A wrapper around the previous function to take COMPARISON as rtx
348 expression. This simplifies many callers. */
349 enum rtx_code
350 reversed_comparison_code (const_rtx comparison, const_rtx insn)
352 if (!COMPARISON_P (comparison))
353 return UNKNOWN;
354 return reversed_comparison_code_parts (GET_CODE (comparison),
355 XEXP (comparison, 0),
356 XEXP (comparison, 1), insn);
359 /* Return comparison with reversed code of EXP.
360 Return NULL_RTX in case we fail to do the reversal. */
362 reversed_comparison (const_rtx exp, enum machine_mode mode)
364 enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
365 if (reversed_code == UNKNOWN)
366 return NULL_RTX;
367 else
368 return simplify_gen_relational (reversed_code, mode, VOIDmode,
369 XEXP (exp, 0), XEXP (exp, 1));
373 /* Given an rtx-code for a comparison, return the code for the negated
374 comparison. If no such code exists, return UNKNOWN.
376 WATCH OUT! reverse_condition is not safe to use on a jump that might
377 be acting on the results of an IEEE floating point comparison, because
378 of the special treatment of non-signaling nans in comparisons.
379 Use reversed_comparison_code instead. */
381 enum rtx_code
382 reverse_condition (enum rtx_code code)
384 switch (code)
386 case EQ:
387 return NE;
388 case NE:
389 return EQ;
390 case GT:
391 return LE;
392 case GE:
393 return LT;
394 case LT:
395 return GE;
396 case LE:
397 return GT;
398 case GTU:
399 return LEU;
400 case GEU:
401 return LTU;
402 case LTU:
403 return GEU;
404 case LEU:
405 return GTU;
406 case UNORDERED:
407 return ORDERED;
408 case ORDERED:
409 return UNORDERED;
411 case UNLT:
412 case UNLE:
413 case UNGT:
414 case UNGE:
415 case UNEQ:
416 case LTGT:
417 return UNKNOWN;
419 default:
420 gcc_unreachable ();
424 /* Similar, but we're allowed to generate unordered comparisons, which
425 makes it safe for IEEE floating-point. Of course, we have to recognize
426 that the target will support them too... */
428 enum rtx_code
429 reverse_condition_maybe_unordered (enum rtx_code code)
431 switch (code)
433 case EQ:
434 return NE;
435 case NE:
436 return EQ;
437 case GT:
438 return UNLE;
439 case GE:
440 return UNLT;
441 case LT:
442 return UNGE;
443 case LE:
444 return UNGT;
445 case LTGT:
446 return UNEQ;
447 case UNORDERED:
448 return ORDERED;
449 case ORDERED:
450 return UNORDERED;
451 case UNLT:
452 return GE;
453 case UNLE:
454 return GT;
455 case UNGT:
456 return LE;
457 case UNGE:
458 return LT;
459 case UNEQ:
460 return LTGT;
462 default:
463 gcc_unreachable ();
467 /* Similar, but return the code when two operands of a comparison are swapped.
468 This IS safe for IEEE floating-point. */
470 enum rtx_code
471 swap_condition (enum rtx_code code)
473 switch (code)
475 case EQ:
476 case NE:
477 case UNORDERED:
478 case ORDERED:
479 case UNEQ:
480 case LTGT:
481 return code;
483 case GT:
484 return LT;
485 case GE:
486 return LE;
487 case LT:
488 return GT;
489 case LE:
490 return GE;
491 case GTU:
492 return LTU;
493 case GEU:
494 return LEU;
495 case LTU:
496 return GTU;
497 case LEU:
498 return GEU;
499 case UNLT:
500 return UNGT;
501 case UNLE:
502 return UNGE;
503 case UNGT:
504 return UNLT;
505 case UNGE:
506 return UNLE;
508 default:
509 gcc_unreachable ();
513 /* Given a comparison CODE, return the corresponding unsigned comparison.
514 If CODE is an equality comparison or already an unsigned comparison,
515 CODE is returned. */
517 enum rtx_code
518 unsigned_condition (enum rtx_code code)
520 switch (code)
522 case EQ:
523 case NE:
524 case GTU:
525 case GEU:
526 case LTU:
527 case LEU:
528 return code;
530 case GT:
531 return GTU;
532 case GE:
533 return GEU;
534 case LT:
535 return LTU;
536 case LE:
537 return LEU;
539 default:
540 gcc_unreachable ();
544 /* Similarly, return the signed version of a comparison. */
546 enum rtx_code
547 signed_condition (enum rtx_code code)
549 switch (code)
551 case EQ:
552 case NE:
553 case GT:
554 case GE:
555 case LT:
556 case LE:
557 return code;
559 case GTU:
560 return GT;
561 case GEU:
562 return GE;
563 case LTU:
564 return LT;
565 case LEU:
566 return LE;
568 default:
569 gcc_unreachable ();
573 /* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
574 truth of CODE1 implies the truth of CODE2. */
577 comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
579 /* UNKNOWN comparison codes can happen as a result of trying to revert
580 comparison codes.
581 They can't match anything, so we have to reject them here. */
582 if (code1 == UNKNOWN || code2 == UNKNOWN)
583 return 0;
585 if (code1 == code2)
586 return 1;
588 switch (code1)
590 case UNEQ:
591 if (code2 == UNLE || code2 == UNGE)
592 return 1;
593 break;
595 case EQ:
596 if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
597 || code2 == ORDERED)
598 return 1;
599 break;
601 case UNLT:
602 if (code2 == UNLE || code2 == NE)
603 return 1;
604 break;
606 case LT:
607 if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
608 return 1;
609 break;
611 case UNGT:
612 if (code2 == UNGE || code2 == NE)
613 return 1;
614 break;
616 case GT:
617 if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
618 return 1;
619 break;
621 case GE:
622 case LE:
623 if (code2 == ORDERED)
624 return 1;
625 break;
627 case LTGT:
628 if (code2 == NE || code2 == ORDERED)
629 return 1;
630 break;
632 case LTU:
633 if (code2 == LEU || code2 == NE)
634 return 1;
635 break;
637 case GTU:
638 if (code2 == GEU || code2 == NE)
639 return 1;
640 break;
642 case UNORDERED:
643 if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
644 || code2 == UNGE || code2 == UNGT)
645 return 1;
646 break;
648 default:
649 break;
652 return 0;
655 /* Return 1 if INSN is an unconditional jump and nothing else. */
658 simplejump_p (const_rtx insn)
660 return (JUMP_P (insn)
661 && GET_CODE (PATTERN (insn)) == SET
662 && GET_CODE (SET_DEST (PATTERN (insn))) == PC
663 && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
666 /* Return nonzero if INSN is a (possibly) conditional jump
667 and nothing more.
669 Use of this function is deprecated, since we need to support combined
670 branch and compare insns. Use any_condjump_p instead whenever possible. */
673 condjump_p (const_rtx insn)
675 const_rtx x = PATTERN (insn);
677 if (GET_CODE (x) != SET
678 || GET_CODE (SET_DEST (x)) != PC)
679 return 0;
681 x = SET_SRC (x);
682 if (GET_CODE (x) == LABEL_REF)
683 return 1;
684 else
685 return (GET_CODE (x) == IF_THEN_ELSE
686 && ((GET_CODE (XEXP (x, 2)) == PC
687 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
688 || GET_CODE (XEXP (x, 1)) == RETURN))
689 || (GET_CODE (XEXP (x, 1)) == PC
690 && (GET_CODE (XEXP (x, 2)) == LABEL_REF
691 || GET_CODE (XEXP (x, 2)) == RETURN))));
694 /* Return nonzero if INSN is a (possibly) conditional jump inside a
695 PARALLEL.
697 Use this function is deprecated, since we need to support combined
698 branch and compare insns. Use any_condjump_p instead whenever possible. */
701 condjump_in_parallel_p (const_rtx insn)
703 const_rtx x = PATTERN (insn);
705 if (GET_CODE (x) != PARALLEL)
706 return 0;
707 else
708 x = XVECEXP (x, 0, 0);
710 if (GET_CODE (x) != SET)
711 return 0;
712 if (GET_CODE (SET_DEST (x)) != PC)
713 return 0;
714 if (GET_CODE (SET_SRC (x)) == LABEL_REF)
715 return 1;
716 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
717 return 0;
718 if (XEXP (SET_SRC (x), 2) == pc_rtx
719 && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
720 || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
721 return 1;
722 if (XEXP (SET_SRC (x), 1) == pc_rtx
723 && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
724 || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
725 return 1;
726 return 0;
729 /* Return set of PC, otherwise NULL. */
732 pc_set (const_rtx insn)
734 rtx pat;
735 if (!JUMP_P (insn))
736 return NULL_RTX;
737 pat = PATTERN (insn);
739 /* The set is allowed to appear either as the insn pattern or
740 the first set in a PARALLEL. */
741 if (GET_CODE (pat) == PARALLEL)
742 pat = XVECEXP (pat, 0, 0);
743 if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
744 return pat;
746 return NULL_RTX;
749 /* Return true when insn is an unconditional direct jump,
750 possibly bundled inside a PARALLEL. */
753 any_uncondjump_p (const_rtx insn)
755 const_rtx x = pc_set (insn);
756 if (!x)
757 return 0;
758 if (GET_CODE (SET_SRC (x)) != LABEL_REF)
759 return 0;
760 if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
761 return 0;
762 return 1;
765 /* Return true when insn is a conditional jump. This function works for
766 instructions containing PC sets in PARALLELs. The instruction may have
767 various other effects so before removing the jump you must verify
768 onlyjump_p.
770 Note that unlike condjump_p it returns false for unconditional jumps. */
773 any_condjump_p (const_rtx insn)
775 const_rtx x = pc_set (insn);
776 enum rtx_code a, b;
778 if (!x)
779 return 0;
780 if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
781 return 0;
783 a = GET_CODE (XEXP (SET_SRC (x), 1));
784 b = GET_CODE (XEXP (SET_SRC (x), 2));
786 return ((b == PC && (a == LABEL_REF || a == RETURN))
787 || (a == PC && (b == LABEL_REF || b == RETURN)));
790 /* Return the label of a conditional jump. */
793 condjump_label (const_rtx insn)
795 rtx x = pc_set (insn);
797 if (!x)
798 return NULL_RTX;
799 x = SET_SRC (x);
800 if (GET_CODE (x) == LABEL_REF)
801 return x;
802 if (GET_CODE (x) != IF_THEN_ELSE)
803 return NULL_RTX;
804 if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
805 return XEXP (x, 1);
806 if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
807 return XEXP (x, 2);
808 return NULL_RTX;
811 /* Return true if INSN is a (possibly conditional) return insn. */
813 static int
814 returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
816 rtx x = *loc;
818 return x && (GET_CODE (x) == RETURN
819 || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
823 returnjump_p (rtx insn)
825 if (!JUMP_P (insn))
826 return 0;
827 return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
830 /* Return true if INSN is a jump that only transfers control and
831 nothing more. */
834 onlyjump_p (const_rtx insn)
836 rtx set;
838 if (!JUMP_P (insn))
839 return 0;
841 set = single_set (insn);
842 if (set == NULL)
843 return 0;
844 if (GET_CODE (SET_DEST (set)) != PC)
845 return 0;
846 if (side_effects_p (SET_SRC (set)))
847 return 0;
849 return 1;
#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;

      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	{
	  if (GET_CODE (XVECEXP (x, 0, i)) == SET
	      && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
	    sets_cc0 = 1;
	  else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
	    other_things = 1;
	}
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
904 /* Find all CODE_LABELs referred to in X, and increment their use counts.
905 If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
906 in INSN, then store one of them in JUMP_LABEL (INSN).
907 If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
908 referenced in INSN, add a REG_LABEL note containing that label to INSN.
909 Also, when there are consecutive labels, canonicalize on the last of them.
911 Note that two labels separated by a loop-beginning note
912 must be kept distinct if we have not yet done loop-optimization,
913 because the gap between them is where loop-optimize
914 will want to move invariant code to. CROSS_JUMP tells us
915 that loop-optimization is done with. */
917 void
918 mark_jump_label (rtx x, rtx insn, int in_mem)
920 RTX_CODE code = GET_CODE (x);
921 int i;
922 const char *fmt;
924 switch (code)
926 case PC:
927 case CC0:
928 case REG:
929 case CONST_INT:
930 case CONST_DOUBLE:
931 case CLOBBER:
932 case CALL:
933 return;
935 case MEM:
936 in_mem = 1;
937 break;
939 case SEQUENCE:
940 for (i = 0; i < XVECLEN (x, 0); i++)
941 mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
942 XVECEXP (x, 0, i), 0);
943 return;
945 case SYMBOL_REF:
946 if (!in_mem)
947 return;
949 /* If this is a constant-pool reference, see if it is a label. */
950 if (CONSTANT_POOL_ADDRESS_P (x))
951 mark_jump_label (get_pool_constant (x), insn, in_mem);
952 break;
954 case LABEL_REF:
956 rtx label = XEXP (x, 0);
958 /* Ignore remaining references to unreachable labels that
959 have been deleted. */
960 if (NOTE_P (label)
961 && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
962 break;
964 gcc_assert (LABEL_P (label));
966 /* Ignore references to labels of containing functions. */
967 if (LABEL_REF_NONLOCAL_P (x))
968 break;
970 XEXP (x, 0) = label;
971 if (! insn || ! INSN_DELETED_P (insn))
972 ++LABEL_NUSES (label);
974 if (insn)
976 if (JUMP_P (insn))
977 JUMP_LABEL (insn) = label;
978 else
980 /* Add a REG_LABEL note for LABEL unless there already
981 is one. All uses of a label, except for labels
982 that are the targets of jumps, must have a
983 REG_LABEL note. */
984 if (! find_reg_note (insn, REG_LABEL, label))
985 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
986 REG_NOTES (insn));
989 return;
992 /* Do walk the labels in a vector, but not the first operand of an
993 ADDR_DIFF_VEC. Don't set the JUMP_LABEL of a vector. */
994 case ADDR_VEC:
995 case ADDR_DIFF_VEC:
996 if (! INSN_DELETED_P (insn))
998 int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;
1000 for (i = 0; i < XVECLEN (x, eltnum); i++)
1001 mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
1003 return;
1005 default:
1006 break;
1009 fmt = GET_RTX_FORMAT (code);
1010 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1012 if (fmt[i] == 'e')
1013 mark_jump_label (XEXP (x, i), insn, in_mem);
1014 else if (fmt[i] == 'E')
1016 int j;
1017 for (j = 0; j < XVECLEN (x, i); j++)
1018 mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
1024 /* Delete insn INSN from the chain of insns and update label ref counts
1025 and delete insns now unreachable.
1027 Returns the first insn after INSN that was not deleted.
1029 Usage of this instruction is deprecated. Use delete_insn instead and
1030 subsequent cfg_cleanup pass to delete unreachable code if needed. */
1033 delete_related_insns (rtx insn)
1035 int was_code_label = (LABEL_P (insn));
1036 rtx note;
1037 rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);
1039 while (next && INSN_DELETED_P (next))
1040 next = NEXT_INSN (next);
1042 /* This insn is already deleted => return first following nondeleted. */
1043 if (INSN_DELETED_P (insn))
1044 return next;
1046 delete_insn (insn);
1048 /* If instruction is followed by a barrier,
1049 delete the barrier too. */
1051 if (next != 0 && BARRIER_P (next))
1052 delete_insn (next);
1054 /* If deleting a jump, decrement the count of the label,
1055 and delete the label if it is now unused. */
1057 if (JUMP_P (insn) && JUMP_LABEL (insn))
1059 rtx lab = JUMP_LABEL (insn), lab_next;
1061 if (LABEL_NUSES (lab) == 0)
1063 /* This can delete NEXT or PREV,
1064 either directly if NEXT is JUMP_LABEL (INSN),
1065 or indirectly through more levels of jumps. */
1066 delete_related_insns (lab);
1068 /* I feel a little doubtful about this loop,
1069 but I see no clean and sure alternative way
1070 to find the first insn after INSN that is not now deleted.
1071 I hope this works. */
1072 while (next && INSN_DELETED_P (next))
1073 next = NEXT_INSN (next);
1074 return next;
1076 else if (tablejump_p (insn, NULL, &lab_next))
1078 /* If we're deleting the tablejump, delete the dispatch table.
1079 We may not be able to kill the label immediately preceding
1080 just yet, as it might be referenced in code leading up to
1081 the tablejump. */
1082 delete_related_insns (lab_next);
1086 /* Likewise if we're deleting a dispatch table. */
1088 if (JUMP_P (insn)
1089 && (GET_CODE (PATTERN (insn)) == ADDR_VEC
1090 || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
1092 rtx pat = PATTERN (insn);
1093 int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
1094 int len = XVECLEN (pat, diff_vec_p);
1096 for (i = 0; i < len; i++)
1097 if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
1098 delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
1099 while (next && INSN_DELETED_P (next))
1100 next = NEXT_INSN (next);
1101 return next;
1104 /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note. */
1105 if (NONJUMP_INSN_P (insn) || CALL_P (insn))
1106 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1107 if (REG_NOTE_KIND (note) == REG_LABEL
1108 /* This could also be a NOTE_INSN_DELETED_LABEL note. */
1109 && LABEL_P (XEXP (note, 0)))
1110 if (LABEL_NUSES (XEXP (note, 0)) == 0)
1111 delete_related_insns (XEXP (note, 0));
1113 while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
1114 prev = PREV_INSN (prev);
1116 /* If INSN was a label and a dispatch table follows it,
1117 delete the dispatch table. The tablejump must have gone already.
1118 It isn't useful to fall through into a table. */
1120 if (was_code_label
1121 && NEXT_INSN (insn) != 0
1122 && JUMP_P (NEXT_INSN (insn))
1123 && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
1124 || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
1125 next = delete_related_insns (NEXT_INSN (insn));
1127 /* If INSN was a label, delete insns following it if now unreachable. */
1129 if (was_code_label && prev && BARRIER_P (prev))
1131 enum rtx_code code;
1132 while (next)
1134 code = GET_CODE (next);
1135 if (code == NOTE)
1136 next = NEXT_INSN (next);
1137 /* Keep going past other deleted labels to delete what follows. */
1138 else if (code == CODE_LABEL && INSN_DELETED_P (next))
1139 next = NEXT_INSN (next);
1140 else if (code == BARRIER || INSN_P (next))
1141 /* Note: if this deletes a jump, it can cause more
1142 deletion of unreachable code, after a different label.
1143 As long as the value from this recursive call is correct,
1144 this invocation functions correctly. */
1145 next = delete_related_insns (next);
1146 else
1147 break;
1151 return next;
1154 /* Delete a range of insns from FROM to TO, inclusive.
1155 This is for the sake of peephole optimization, so assume
1156 that whatever these insns do will still be done by a new
1157 peephole insn that will replace them. */
1159 void
1160 delete_for_peephole (rtx from, rtx to)
1162 rtx insn = from;
1164 while (1)
1166 rtx next = NEXT_INSN (insn);
1167 rtx prev = PREV_INSN (insn);
1169 if (!NOTE_P (insn))
1171 INSN_DELETED_P (insn) = 1;
1173 /* Patch this insn out of the chain. */
1174 /* We don't do this all at once, because we
1175 must preserve all NOTEs. */
1176 if (prev)
1177 NEXT_INSN (prev) = next;
1179 if (next)
1180 PREV_INSN (next) = prev;
1183 if (insn == to)
1184 break;
1185 insn = next;
1188 /* Note that if TO is an unconditional jump
1189 we *do not* delete the BARRIER that follows,
1190 since the peephole that replaces this sequence
1191 is also an unconditional jump in that case. */
1194 /* Throughout LOC, redirect OLABEL to NLABEL. Treat null OLABEL or
1195 NLABEL as a return. Accrue modifications into the change group. */
1197 static void
1198 redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
1200 rtx x = *loc;
1201 RTX_CODE code = GET_CODE (x);
1202 int i;
1203 const char *fmt;
1205 if (code == LABEL_REF)
1207 if (XEXP (x, 0) == olabel)
1209 rtx n;
1210 if (nlabel)
1211 n = gen_rtx_LABEL_REF (Pmode, nlabel);
1212 else
1213 n = gen_rtx_RETURN (VOIDmode);
1215 validate_change (insn, loc, n, 1);
1216 return;
1219 else if (code == RETURN && olabel == 0)
1221 if (nlabel)
1222 x = gen_rtx_LABEL_REF (Pmode, nlabel);
1223 else
1224 x = gen_rtx_RETURN (VOIDmode);
1225 if (loc == &PATTERN (insn))
1226 x = gen_rtx_SET (VOIDmode, pc_rtx, x);
1227 validate_change (insn, loc, x, 1);
1228 return;
1231 if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
1232 && GET_CODE (SET_SRC (x)) == LABEL_REF
1233 && XEXP (SET_SRC (x), 0) == olabel)
1235 validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
1236 return;
1239 fmt = GET_RTX_FORMAT (code);
1240 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1242 if (fmt[i] == 'e')
1243 redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
1244 else if (fmt[i] == 'E')
1246 int j;
1247 for (j = 0; j < XVECLEN (x, i); j++)
1248 redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
1253 /* Make JUMP go to NLABEL instead of where it jumps now. Accrue
1254 the modifications into the change group. Return false if we did
1255 not see how to do that. */
1258 redirect_jump_1 (rtx jump, rtx nlabel)
1260 int ochanges = num_validated_changes ();
1261 rtx *loc;
1263 if (GET_CODE (PATTERN (jump)) == PARALLEL)
1264 loc = &XVECEXP (PATTERN (jump), 0, 0);
1265 else
1266 loc = &PATTERN (jump);
1268 redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
1269 return num_validated_changes () > ochanges;
1272 /* Make JUMP go to NLABEL instead of where it jumps now. If the old
1273 jump target label is unused as a result, it and the code following
1274 it may be deleted.
1276 If NLABEL is zero, we are to turn the jump into a (possibly conditional)
1277 RETURN insn.
1279 The return value will be 1 if the change was made, 0 if it wasn't
1280 (this can only occur for NLABEL == 0). */
1283 redirect_jump (rtx jump, rtx nlabel, int delete_unused)
1285 rtx olabel = JUMP_LABEL (jump);
1287 if (nlabel == olabel)
1288 return 1;
1290 if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
1291 return 0;
1293 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
1294 return 1;
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete related insn to OLABEL if its ref
   count has dropped to zero.
   If INVERT is nonzero, the branch was inverted as well; update any
   REG_EQUAL note to match and adjust the branch probabilities.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
		 int invert)
{
  rtx note;

  /* Negative DELETE_UNUSED used to be used to signal behavior on
     moving the FUNCTION_END note.  Just sanity check that no user
     still relies on this.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      /* Drop the note if the jump becomes a RETURN, or if inverting
	 the note's condition is not possible.  */
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
	remove_note (jump, note);
      else
	{
	  /* Rewrite the label inside the note, then commit the queued
	     changes (including any made by invert_exp_1 above).  */
	  redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
	  confirm_change_group ();
	}
    }

  /* Drop the old label's reference count and delete it if now unused.
     Undefined labels will remain outside the insn stream.  */
  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways: The preferable way, which can only
	 be done if this is not an integer comparison, is to reverse
	 the comparison code.  Otherwise, swap the THEN-part and ELSE-part
	 of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
	{
	  /* Queue replacement of the condition with its reversal;
	     the final argument 1 defers the change into the group.  */
	  validate_change (insn, &XEXP (x, 0),
			   gen_rtx_fmt_ee (reversed_code,
					   GET_MODE (comp), XEXP (comp, 0),
					   XEXP (comp, 1)),
			   1);
	  return 1;
	}

      /* Fall back to exchanging the two arms of the IF_THEN_ELSE.  */
      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  /* JUMP must have a (set (pc) ...) pattern for inversion to make sense.  */
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  /* If invert_exp_1 queued nothing, the inversion did not happen.  */
  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}
1399 /* Invert the condition of the jump JUMP, and make it jump to label
1400 NLABEL instead of where it jumps now. Return true if successful. */
1403 invert_jump (rtx jump, rtx nlabel, int delete_unused)
1405 rtx olabel = JUMP_LABEL (jump);
1407 if (invert_jump_1 (jump, nlabel) && apply_change_group ())
1409 redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
1410 return 1;
1412 cancel_changes (0);
1413 return 0;
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  /* Registers (and subregs of registers) compare by the hard register
     they were assigned after renumbering, plus the subword offset.  */
  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
			&& REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
	return 0;

      /* If we haven't done any renumbering, don't
	 make any assumptions.  */
      if (reg_renumber == 0)
	return rtx_equal_p (x, y);

      if (code == SUBREG)
	{
	  reg_x = REGNO (SUBREG_REG (x));
	  byte_x = SUBREG_BYTE (x);

	  if (reg_renumber[reg_x] >= 0)
	    {
	      /* Fold the subreg offset into the renumbered hard regno
	         so a renumbered SUBREG can match a plain REG.  */
	      reg_x = subreg_regno_offset (reg_renumber[reg_x],
					   GET_MODE (SUBREG_REG (x)),
					   byte_x,
					   GET_MODE (x));
	      byte_x = 0;
	    }
	}
      else
	{
	  reg_x = REGNO (x);
	  if (reg_renumber[reg_x] >= 0)
	    reg_x = reg_renumber[reg_x];
	}

      if (GET_CODE (y) == SUBREG)
	{
	  reg_y = REGNO (SUBREG_REG (y));
	  byte_y = SUBREG_BYTE (y);

	  if (reg_renumber[reg_y] >= 0)
	    {
	      reg_y = subreg_regno_offset (reg_renumber[reg_y],
					   GET_MODE (SUBREG_REG (y)),
					   byte_y,
					   GET_MODE (y));
	      byte_y = 0;
	    }
	}
      else
	{
	  reg_y = REGNO (y);
	  if (reg_renumber[reg_y] >= 0)
	    reg_y = reg_renumber[reg_y];
	}

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }

  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      /* Pointer equality was checked above; these only match via EQ.  */
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
	return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
	 in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
	      == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      /* Symbol names are shared, so pointer comparison suffices.  */
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTX match if the operand match in any
     order.  Also handle the simple binary and unary cases without a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	     && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
	    || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
		&& rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
	    && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fail to match, return 0 for the whole things.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
	{
	case 'w':
	  if (XWINT (x, i) != XWINT (y, i))
	    return 0;
	  break;

	case 'i':
	  if (XINT (x, i) != XINT (y, i))
	    return 0;
	  break;

	case 't':
	  if (XTREE (x, i) != XTREE (y, i))
	    return 0;
	  break;

	case 's':
	  if (strcmp (XSTR (x, i), XSTR (y, i)))
	    return 0;
	  break;

	case 'e':
	  if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
	    return 0;
	  break;

	case 'u':
	  /* 'u' fields (insn/label references) match by identity only.  */
	  if (XEXP (x, i) != XEXP (y, i))
	    return 0;
	  /* Fall through.  */
	case '0':
	  break;

	case 'E':
	  if (XVECLEN (x, i) != XVECLEN (y, i))
	    return 0;
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
	      return 0;
	  break;

	default:
	  gcc_unreachable ();
	}
    }
  return 1;
}
1602 /* If X is a hard register or equivalent to one or a subregister of one,
1603 return the hard register number. If X is a pseudo register that was not
1604 assigned a hard register, return the pseudo register number. Otherwise,
1605 return -1. Any rtx is valid for X. */
1608 true_regnum (const_rtx x)
1610 if (REG_P (x))
1612 if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
1613 return reg_renumber[REGNO (x)];
1614 return REGNO (x);
1616 if (GET_CODE (x) == SUBREG)
1618 int base = true_regnum (SUBREG_REG (x));
1619 if (base >= 0
1620 && base < FIRST_PSEUDO_REGISTER
1621 && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
1622 GET_MODE (SUBREG_REG (x)),
1623 SUBREG_BYTE (x), GET_MODE (x)))
1624 return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
1625 GET_MODE (SUBREG_REG (x)),
1626 SUBREG_BYTE (x), GET_MODE (x));
1628 return -1;
1631 /* Return regno of the register REG and handle subregs too. */
1632 unsigned int
1633 reg_or_subregno (const_rtx reg)
1635 if (GET_CODE (reg) == SUBREG)
1636 reg = SUBREG_REG (reg);
1637 gcc_assert (REG_P (reg));
1638 return REGNO (reg);