/* Optimize jump instructions, for GNU compiler.
   Copyright (C) 1987, 1988, 1989, 1991, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2007
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* This is the pathetic reminder of old fame of the jump-optimization pass
   of the compiler.  Now it contains basically a set of utility functions to
   operate with jumps.

   Each CODE_LABEL has a count of the times it is used
   stored in the LABEL_NUSES internal field, and each JUMP_INSN
   has one label that it refers to stored in the
   JUMP_LABEL internal field.  With this we can detect labels that
   become unused because of the deletion of all the jumps that
   formerly used them.  The JUMP_LABEL info is sometimes looked
   at by later passes.

   The subroutines redirect_jump and invert_jump are used
   from other passes as well.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "flags.h"
#include "hard-reg-set.h"
#include "regs.h"
#include "insn-config.h"
#include "insn-attr.h"
#include "recog.h"
#include "function.h"
#include "expr.h"
#include "real.h"
#include "except.h"
#include "diagnostic.h"
#include "toplev.h"
#include "reload.h"
#include "predict.h"
#include "timevar.h"
#include "tree-pass.h"
#include "target.h"
/* Optimize jump y; x: ... y: jumpif... x?
   Don't know if it is worth bothering with.  */
/* Optimize two cases of conditional jump to conditional jump?
   This can never delete any instruction or make anything dead,
   or even change what is live at any point.
   So perhaps let combiner do it.  */

static void init_label_info (rtx);
static void mark_all_labels (rtx);
static void redirect_exp_1 (rtx *, rtx, rtx, rtx);
static int invert_exp_1 (rtx, rtx);
static int returnjump_p_1 (rtx *, void *);
/* Alternate entry into the jump optimizer.  This entry point only rebuilds
   the JUMP_LABEL field in jumping insns and REG_LABEL notes in non-jumping
   instructions.  */
void
rebuild_jump_labels (rtx f)
{
  rtx insn;

  timevar_push (TV_REBUILD_JUMP);
  init_label_info (f);
  mark_all_labels (f);

  /* Keep track of labels used from static data; we don't track them
     closely enough to delete them here, so make sure their reference
     count doesn't drop to zero.  */

  for (insn = forced_labels; insn; insn = XEXP (insn, 1))
    if (LABEL_P (XEXP (insn, 0)))
      LABEL_NUSES (XEXP (insn, 0))++;
  timevar_pop (TV_REBUILD_JUMP);
}
/* Some old code expects exactly one BARRIER as the NEXT_INSN of a
   non-fallthru insn.  This is not generally true, as multiple barriers
   may have crept in, or the BARRIER may be separated from the last
   real insn by one or more NOTEs.

   This simple pass moves barriers and removes duplicates so that the
   old code is happy.  */

unsigned int
cleanup_barriers (void)
{
  rtx insn, next, prev;
  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (BARRIER_P (insn))
        {
          prev = prev_nonnote_insn (insn);
          if (BARRIER_P (prev))
            delete_insn (insn);
          else if (prev != PREV_INSN (insn))
            reorder_insns (insn, insn, prev);
        }
    }
  return 0;
}
struct tree_opt_pass pass_cleanup_barriers =
{
  "barriers",                           /* name */
  NULL,                                 /* gate */
  cleanup_barriers,                     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  0                                     /* letter */
};
/* Initialize LABEL_NUSES and JUMP_LABEL fields.  Delete any REG_LABEL
   notes whose labels don't occur in the insn any more.  */

static void
init_label_info (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      LABEL_NUSES (insn) = (LABEL_PRESERVE_P (insn) != 0);
    else if (JUMP_P (insn))
      JUMP_LABEL (insn) = 0;
    else if (NONJUMP_INSN_P (insn) || CALL_P (insn))
      {
        rtx note, next;

        for (note = REG_NOTES (insn); note; note = next)
          {
            next = XEXP (note, 1);
            if (REG_NOTE_KIND (note) == REG_LABEL
                && ! reg_mentioned_p (XEXP (note, 0), PATTERN (insn)))
              remove_note (insn, note);
          }
      }
}
/* Mark the label each jump jumps to.
   Combine consecutive labels, and count uses of labels.  */

static void
mark_all_labels (rtx f)
{
  rtx insn;

  for (insn = f; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        mark_jump_label (PATTERN (insn), insn, 0);
        if (! INSN_DELETED_P (insn) && JUMP_P (insn))
          {
            /* When we know the LABEL_REF contained in a REG used in
               an indirect jump, we'll have a REG_LABEL note so that
               flow can tell where it's going.  */
            if (JUMP_LABEL (insn) == 0)
              {
                rtx label_note = find_reg_note (insn, REG_LABEL, NULL_RTX);
                if (label_note)
                  {
                    /* Put a LABEL_REF around the REG_LABEL note, so
                       that we can canonicalize it.  */
                    rtx label_ref = gen_rtx_LABEL_REF (Pmode,
                                                       XEXP (label_note, 0));

                    mark_jump_label (label_ref, insn, 0);
                    XEXP (label_note, 0) = XEXP (label_ref, 0);
                    JUMP_LABEL (insn) = XEXP (label_note, 0);
                  }
              }
          }
      }

  /* If we are in cfglayout mode, there may be non-insns between the
     basic blocks.  If those non-insns represent tablejump data, they
     contain label references that we must record.  */
  if (current_ir_type () == IR_RTL_CFGLAYOUT)
    {
      basic_block bb;
      rtx insn;
      FOR_EACH_BB (bb)
        {
          for (insn = bb->il.rtl->header; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }

          for (insn = bb->il.rtl->footer; insn; insn = NEXT_INSN (insn))
            if (INSN_P (insn))
              {
                gcc_assert (JUMP_TABLE_DATA_P (insn));
                mark_jump_label (PATTERN (insn), insn, 0);
              }
        }
    }
}
/* Given a comparison (CODE ARG0 ARG1), inside an insn INSN, return a code
   of the reversed comparison if it is possible to do so.  Otherwise return
   UNKNOWN.  UNKNOWN may also be returned when we have a CC_MODE compare
   and don't know whether its source is a floating point or an integer
   comparison.  The machine description should define the REVERSIBLE_CC_MODE
   and REVERSE_CONDITION macros to help this function avoid overhead in
   these cases.  */
enum rtx_code
reversed_comparison_code_parts (enum rtx_code code, const_rtx arg0,
                                const_rtx arg1, const_rtx insn)
{
  enum machine_mode mode;

  /* If this is not actually a comparison, we can't reverse it.  */
  if (GET_RTX_CLASS (code) != RTX_COMPARE
      && GET_RTX_CLASS (code) != RTX_COMM_COMPARE)
    return UNKNOWN;

  mode = GET_MODE (arg0);
  if (mode == VOIDmode)
    mode = GET_MODE (arg1);

  /* First see if the machine description supplies us a way to reverse the
     comparison.  Give it priority over everything else to allow the
     machine description to do tricks.  */
  if (GET_MODE_CLASS (mode) == MODE_CC
      && REVERSIBLE_CC_MODE (mode))
    {
#ifdef REVERSE_CONDITION
      return REVERSE_CONDITION (code, mode);
#endif
      return reverse_condition (code);
    }

  /* Try a few special cases based on the comparison code.  */
  switch (code)
    {
    case GEU:
    case GTU:
    case LEU:
    case LTU:
    case NE:
    case EQ:
      /* It is always safe to reverse EQ and NE, even for floating
         point.  Similarly the unsigned comparisons are never used for
         floating point, so we can reverse them in the default way.  */
      return reverse_condition (code);
    case ORDERED:
    case UNORDERED:
    case LTGT:
    case UNEQ:
      /* If we already see an unordered comparison, we can be sure to
         be dealing with floating point, so we don't need any more
         tests.  */
      return reverse_condition_maybe_unordered (code);
    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
      /* We don't have a safe way to reverse these yet.  */
      return UNKNOWN;
    default:
      break;
    }

  if (GET_MODE_CLASS (mode) == MODE_CC || CC0_P (arg0))
    {
      const_rtx prev;
      /* Try to search for the comparison to determine the real mode.
         This code is expensive, but with a sane machine description it
         will never be used, since REVERSIBLE_CC_MODE will return true
         in all cases.  */
      if (! insn)
        return UNKNOWN;

      /* These CONST_CASTs are okay because prev_nonnote_insn just
         returns its argument and we assign it to a const_rtx
         variable.  */
      for (prev = prev_nonnote_insn (CONST_CAST_RTX (insn));
           prev != 0 && !LABEL_P (prev);
           prev = prev_nonnote_insn (CONST_CAST_RTX (prev)))
        {
          const_rtx set = set_of (arg0, prev);
          if (set && GET_CODE (set) == SET
              && rtx_equal_p (SET_DEST (set), arg0))
            {
              rtx src = SET_SRC (set);

              if (GET_CODE (src) == COMPARE)
                {
                  rtx comparison = src;
                  arg0 = XEXP (src, 0);
                  mode = GET_MODE (arg0);
                  if (mode == VOIDmode)
                    mode = GET_MODE (XEXP (comparison, 1));
                  break;
                }
              /* We can get past reg-reg moves.  This may be useful for
                 models of i387 comparisons that first move flag registers
                 around.  */
              if (REG_P (src))
                {
                  arg0 = src;
                  continue;
                }
            }
          /* If the register is clobbered in some way we don't
             understand, give up.  */
          if (set)
            return UNKNOWN;
        }
    }

  /* Test for an integer condition, or a floating-point comparison
     in which NaNs can be ignored.  */
  if (GET_CODE (arg0) == CONST_INT
      || (GET_MODE (arg0) != VOIDmode
          && GET_MODE_CLASS (mode) != MODE_CC
          && !HONOR_NANS (mode)))
    return reverse_condition (code);

  return UNKNOWN;
}
/* A wrapper around the previous function to take COMPARISON as rtx
   expression.  This simplifies many callers.  */
enum rtx_code
reversed_comparison_code (const_rtx comparison, const_rtx insn)
{
  if (!COMPARISON_P (comparison))
    return UNKNOWN;
  return reversed_comparison_code_parts (GET_CODE (comparison),
                                         XEXP (comparison, 0),
                                         XEXP (comparison, 1), insn);
}
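/* Illustrative sketch (a hypothetical helper, not used anywhere in this
   file): a typical caller pulls the IF_THEN_ELSE condition out of a
   conditional jump and asks for its reversal, treating UNKNOWN as
   "cannot safely reverse".  */
#if 0
static enum rtx_code
example_reversed_branch_code (rtx jump)
{
  rtx set = pc_set (jump);              /* The SET of PC; see pc_set below.  */
  rtx cond = XEXP (SET_SRC (set), 0);   /* The IF_THEN_ELSE condition.  */

  /* UNKNOWN is returned e.g. for NaN-sensitive floating point tests
     whose reversal the target cannot express.  */
  return reversed_comparison_code (cond, jump);
}
#endif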
/* Return comparison with reversed code of EXP.
   Return NULL_RTX in case we fail to do the reversal.  */

rtx
reversed_comparison (const_rtx exp, enum machine_mode mode)
{
  enum rtx_code reversed_code = reversed_comparison_code (exp, NULL_RTX);
  if (reversed_code == UNKNOWN)
    return NULL_RTX;
  else
    return simplify_gen_relational (reversed_code, mode, VOIDmode,
                                    XEXP (exp, 0), XEXP (exp, 1));
}
/* Given an rtx-code for a comparison, return the code for the negated
   comparison.  If no such code exists, return UNKNOWN.

   WATCH OUT!  reverse_condition is not safe to use on a jump that might
   be acting on the results of an IEEE floating point comparison, because
   of the special treatment of non-signaling nans in comparisons.
   Use reversed_comparison_code instead.  */

enum rtx_code
reverse_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return LE;
    case GE:
      return LT;
    case LT:
      return GE;
    case LE:
      return GT;
    case GTU:
      return LEU;
    case GEU:
      return LTU;
    case LTU:
      return GEU;
    case LEU:
      return GTU;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;

    case UNLT:
    case UNLE:
    case UNGT:
    case UNGE:
    case UNEQ:
    case LTGT:
      return UNKNOWN;

    default:
      gcc_unreachable ();
    }
}
/* Similar, but we're allowed to generate unordered comparisons, which
   makes it safe for IEEE floating-point.  Of course, we have to recognize
   that the target will support them too...  */

enum rtx_code
reverse_condition_maybe_unordered (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
      return NE;
    case NE:
      return EQ;
    case GT:
      return UNLE;
    case GE:
      return UNLT;
    case LT:
      return UNGE;
    case LE:
      return UNGT;
    case LTGT:
      return UNEQ;
    case UNORDERED:
      return ORDERED;
    case ORDERED:
      return UNORDERED;
    case UNLT:
      return GE;
    case UNLE:
      return GT;
    case UNGT:
      return LE;
    case UNGE:
      return LT;
    case UNEQ:
      return LTGT;

    default:
      gcc_unreachable ();
    }
}
/* Similar, but return the code when two operands of a comparison are swapped.
   This IS safe for IEEE floating-point.  */

enum rtx_code
swap_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case UNORDERED:
    case ORDERED:
    case UNEQ:
    case LTGT:
      return code;

    case GT:
      return LT;
    case GE:
      return LE;
    case LT:
      return GT;
    case LE:
      return GE;
    case GTU:
      return LTU;
    case GEU:
      return LEU;
    case LTU:
      return GTU;
    case LEU:
      return GEU;
    case UNLT:
      return UNGT;
    case UNLE:
      return UNGE;
    case UNGT:
      return UNLT;
    case UNGE:
      return UNLE;

    default:
      gcc_unreachable ();
    }
}
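/* Worked example of the distinction (illustrative only, not referenced
   by any code): reversing negates a test, swapping renames its operands.
   For (lt x y):

     reverse_condition (LT) == GE    since !(x < y)  is  x >= y
     swap_condition (LT)    == GT    since  (x < y)  is  y > x

   Both identities hold for integers; with IEEE NaNs only the swap is
   unconditionally safe, which is why swap_condition needs no
   *_maybe_unordered variant.  */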
/* Given a comparison CODE, return the corresponding unsigned comparison.
   If CODE is an equality comparison or already an unsigned comparison,
   CODE is returned.  */

enum rtx_code
unsigned_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GTU:
    case GEU:
    case LTU:
    case LEU:
      return code;

    case GT:
      return GTU;
    case GE:
      return GEU;
    case LT:
      return LTU;
    case LE:
      return LEU;

    default:
      gcc_unreachable ();
    }
}

/* Similarly, return the signed version of a comparison.  */

enum rtx_code
signed_condition (enum rtx_code code)
{
  switch (code)
    {
    case EQ:
    case NE:
    case GT:
    case GE:
    case LT:
    case LE:
      return code;

    case GTU:
      return GT;
    case GEU:
      return GE;
    case LTU:
      return LT;
    case LEU:
      return LE;

    default:
      gcc_unreachable ();
    }
}
/* Return nonzero if CODE1 is more strict than CODE2, i.e., if the
   truth of CODE1 implies the truth of CODE2.  */

int
comparison_dominates_p (enum rtx_code code1, enum rtx_code code2)
{
  /* UNKNOWN comparison codes can happen as a result of trying to reverse
     comparison codes.
     They can't match anything, so we have to reject them here.  */
  if (code1 == UNKNOWN || code2 == UNKNOWN)
    return 0;

  if (code1 == code2)
    return 1;

  switch (code1)
    {
    case UNEQ:
      if (code2 == UNLE || code2 == UNGE)
        return 1;
      break;

    case EQ:
      if (code2 == LE || code2 == LEU || code2 == GE || code2 == GEU
          || code2 == ORDERED)
        return 1;
      break;

    case UNLT:
      if (code2 == UNLE || code2 == NE)
        return 1;
      break;

    case LT:
      if (code2 == LE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case UNGT:
      if (code2 == UNGE || code2 == NE)
        return 1;
      break;

    case GT:
      if (code2 == GE || code2 == NE || code2 == ORDERED || code2 == LTGT)
        return 1;
      break;

    case GE:
    case LE:
      if (code2 == ORDERED)
        return 1;
      break;

    case LTGT:
      if (code2 == NE || code2 == ORDERED)
        return 1;
      break;

    case LTU:
      if (code2 == LEU || code2 == NE)
        return 1;
      break;

    case GTU:
      if (code2 == GEU || code2 == NE)
        return 1;
      break;

    case UNORDERED:
      if (code2 == NE || code2 == UNEQ || code2 == UNLE || code2 == UNLT
          || code2 == UNGE || code2 == UNGT)
        return 1;
      break;

    default:
      break;
    }

  return 0;
}
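/* Illustrative example (hypothetical values, not referenced by any code):
   dominance lets a pass drop a test that is already implied.  On the path
   where (lt x y) was taken, a later (le x y) on the same operands is
   necessarily true:

     comparison_dominates_p (LT, LE) == 1
     comparison_dominates_p (LE, LT) == 0   since x <= y allows x == y.  */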
/* Return 1 if INSN is an unconditional jump and nothing else.  */

int
simplejump_p (const_rtx insn)
{
  return (JUMP_P (insn)
          && GET_CODE (PATTERN (insn)) == SET
          && GET_CODE (SET_DEST (PATTERN (insn))) == PC
          && GET_CODE (SET_SRC (PATTERN (insn))) == LABEL_REF);
}

/* Return nonzero if INSN is a (possibly) conditional jump
   and nothing more.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != SET
      || GET_CODE (SET_DEST (x)) != PC)
    return 0;

  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return 1;
  else
    return (GET_CODE (x) == IF_THEN_ELSE
            && ((GET_CODE (XEXP (x, 2)) == PC
                 && (GET_CODE (XEXP (x, 1)) == LABEL_REF
                     || GET_CODE (XEXP (x, 1)) == RETURN))
                || (GET_CODE (XEXP (x, 1)) == PC
                    && (GET_CODE (XEXP (x, 2)) == LABEL_REF
                        || GET_CODE (XEXP (x, 2)) == RETURN))));
}
/* Return nonzero if INSN is a (possibly) conditional jump inside a
   PARALLEL.

   Use of this function is deprecated, since we need to support combined
   branch and compare insns.  Use any_condjump_p instead whenever possible.  */

int
condjump_in_parallel_p (const_rtx insn)
{
  const_rtx x = PATTERN (insn);

  if (GET_CODE (x) != PARALLEL)
    return 0;
  else
    x = XVECEXP (x, 0, 0);

  if (GET_CODE (x) != SET)
    return 0;
  if (GET_CODE (SET_DEST (x)) != PC)
    return 0;
  if (GET_CODE (SET_SRC (x)) == LABEL_REF)
    return 1;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;
  if (XEXP (SET_SRC (x), 2) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 1)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 1)) == RETURN))
    return 1;
  if (XEXP (SET_SRC (x), 1) == pc_rtx
      && (GET_CODE (XEXP (SET_SRC (x), 2)) == LABEL_REF
          || GET_CODE (XEXP (SET_SRC (x), 2)) == RETURN))
    return 1;
  return 0;
}
/* Return set of PC, otherwise NULL.  */

rtx
pc_set (const_rtx insn)
{
  rtx pat;
  if (!JUMP_P (insn))
    return NULL_RTX;
  pat = PATTERN (insn);

  /* The set is allowed to appear either as the insn pattern or
     the first set in a PARALLEL.  */
  if (GET_CODE (pat) == PARALLEL)
    pat = XVECEXP (pat, 0, 0);
  if (GET_CODE (pat) == SET && GET_CODE (SET_DEST (pat)) == PC)
    return pat;

  return NULL_RTX;
}

/* Return true when insn is an unconditional direct jump,
   possibly bundled inside a PARALLEL.  */

int
any_uncondjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != LABEL_REF)
    return 0;
  if (find_reg_note (insn, REG_NON_LOCAL_GOTO, NULL_RTX))
    return 0;
  return 1;
}
/* Return true when insn is a conditional jump.  This function works for
   instructions containing PC sets in PARALLELs.  The instruction may have
   various other effects, so before removing the jump you must verify
   onlyjump_p.

   Note that unlike condjump_p it returns false for unconditional jumps.  */

int
any_condjump_p (const_rtx insn)
{
  const_rtx x = pc_set (insn);
  enum rtx_code a, b;

  if (!x)
    return 0;
  if (GET_CODE (SET_SRC (x)) != IF_THEN_ELSE)
    return 0;

  a = GET_CODE (XEXP (SET_SRC (x), 1));
  b = GET_CODE (XEXP (SET_SRC (x), 2));

  return ((b == PC && (a == LABEL_REF || a == RETURN))
          || (a == PC && (b == LABEL_REF || b == RETURN)));
}

/* Return the label of a conditional jump.  */

rtx
condjump_label (const_rtx insn)
{
  rtx x = pc_set (insn);

  if (!x)
    return NULL_RTX;
  x = SET_SRC (x);
  if (GET_CODE (x) == LABEL_REF)
    return x;
  if (GET_CODE (x) != IF_THEN_ELSE)
    return NULL_RTX;
  if (XEXP (x, 2) == pc_rtx && GET_CODE (XEXP (x, 1)) == LABEL_REF)
    return XEXP (x, 1);
  if (XEXP (x, 1) == pc_rtx && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    return XEXP (x, 2);
  return NULL_RTX;
}
/* Return true if INSN is a (possibly conditional) return insn.  */

static int
returnjump_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  rtx x = *loc;

  return x && (GET_CODE (x) == RETURN
               || (GET_CODE (x) == SET && SET_IS_RETURN_P (x)));
}

int
returnjump_p (rtx insn)
{
  if (!JUMP_P (insn))
    return 0;
  return for_each_rtx (&PATTERN (insn), returnjump_p_1, NULL);
}

/* Return true if INSN is a jump that only transfers control and
   nothing more.  */

int
onlyjump_p (const_rtx insn)
{
  rtx set;

  if (!JUMP_P (insn))
    return 0;

  set = single_set (insn);
  if (set == NULL)
    return 0;
  if (GET_CODE (SET_DEST (set)) != PC)
    return 0;
  if (side_effects_p (SET_SRC (set)))
    return 0;

  return 1;
}
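/* Illustrative sketch (a hypothetical helper, not used anywhere in this
   file): the predicates above are typically combined like this to pick
   apart a branch that is safe to rewrite.  */
#if 0
static rtx
example_safe_branch_target (rtx insn)
{
  /* onlyjump_p rejects jumps with extra side effects; without that
     check the caller could not simply delete or retarget the insn.  */
  if (any_condjump_p (insn) && onlyjump_p (insn))
    return condjump_label (insn);       /* LABEL_REF of the taken arm.  */
  return NULL_RTX;
}
#endif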
#ifdef HAVE_cc0

/* Return nonzero if X is an RTX that only sets the condition codes
   and has no side effects.  */

int
only_sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  return sets_cc0_p (x) == 1 && ! side_effects_p (x);
}

/* Return 1 if X is an RTX that does nothing but set the condition codes
   and CLOBBER or USE registers.
   Return -1 if X does explicitly set the condition codes,
   but also does other things.  */

int
sets_cc0_p (const_rtx x)
{
  if (! x)
    return 0;

  if (INSN_P (x))
    x = PATTERN (x);

  if (GET_CODE (x) == SET && SET_DEST (x) == cc0_rtx)
    return 1;
  if (GET_CODE (x) == PARALLEL)
    {
      int i;
      int sets_cc0 = 0;
      int other_things = 0;
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        {
          if (GET_CODE (XVECEXP (x, 0, i)) == SET
              && SET_DEST (XVECEXP (x, 0, i)) == cc0_rtx)
            sets_cc0 = 1;
          else if (GET_CODE (XVECEXP (x, 0, i)) == SET)
            other_things = 1;
        }
      return ! sets_cc0 ? 0 : other_things ? -1 : 1;
    }
  return 0;
}
#endif
/* Find all CODE_LABELs referred to in X, and increment their use counts.
   If INSN is a JUMP_INSN and there is at least one CODE_LABEL referenced
   in INSN, then store one of them in JUMP_LABEL (INSN).
   If INSN is an INSN or a CALL_INSN and there is at least one CODE_LABEL
   referenced in INSN, add a REG_LABEL note containing that label to INSN.
   Also, when there are consecutive labels, canonicalize on the last of them.
   IN_MEM is nonzero if X occurs inside a MEM.

   Note that two labels separated by a loop-beginning note
   must be kept distinct if we have not yet done loop-optimization,
   because the gap between them is where loop-optimize
   will want to move invariant code to.  */

void
mark_jump_label (rtx x, rtx insn, int in_mem)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case PC:
    case CC0:
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CLOBBER:
    case CALL:
      return;

    case MEM:
      in_mem = 1;
      break;

    case SEQUENCE:
      for (i = 0; i < XVECLEN (x, 0); i++)
        mark_jump_label (PATTERN (XVECEXP (x, 0, i)),
                         XVECEXP (x, 0, i), 0);
      return;

    case SYMBOL_REF:
      if (!in_mem)
        return;

      /* If this is a constant-pool reference, see if it is a label.  */
      if (CONSTANT_POOL_ADDRESS_P (x))
        mark_jump_label (get_pool_constant (x), insn, in_mem);
      break;

    case LABEL_REF:
      {
        rtx label = XEXP (x, 0);

        /* Ignore remaining references to unreachable labels that
           have been deleted.  */
        if (NOTE_P (label)
            && NOTE_KIND (label) == NOTE_INSN_DELETED_LABEL)
          break;

        gcc_assert (LABEL_P (label));

        /* Ignore references to labels of containing functions.  */
        if (LABEL_REF_NONLOCAL_P (x))
          break;

        XEXP (x, 0) = label;
        if (! insn || ! INSN_DELETED_P (insn))
          ++LABEL_NUSES (label);

        if (insn)
          {
            if (JUMP_P (insn))
              JUMP_LABEL (insn) = label;
            else
              {
                /* Add a REG_LABEL note for LABEL unless there already
                   is one.  All uses of a label, except for labels
                   that are the targets of jumps, must have a
                   REG_LABEL note.  */
                if (! find_reg_note (insn, REG_LABEL, label))
                  REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_LABEL, label,
                                                        REG_NOTES (insn));
              }
          }
        return;
      }

    /* Do walk the labels in a vector, but not the first operand of an
       ADDR_DIFF_VEC.  Don't set the JUMP_LABEL of a vector.  */
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      if (! INSN_DELETED_P (insn))
        {
          int eltnum = code == ADDR_DIFF_VEC ? 1 : 0;

          for (i = 0; i < XVECLEN (x, eltnum); i++)
            mark_jump_label (XVECEXP (x, eltnum, i), NULL_RTX, in_mem);
        }
      return;

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_jump_label (XEXP (x, i), insn, in_mem);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_jump_label (XVECEXP (x, i, j), insn, in_mem);
        }
    }
}
/* Delete insn INSN from the chain of insns and update label ref counts
   and delete insns now unreachable.

   Returns the first insn after INSN that was not deleted.

   Use of this function is deprecated.  Use delete_insn instead and
   a subsequent cfg_cleanup pass to delete unreachable code if needed.  */

rtx
delete_related_insns (rtx insn)
{
  int was_code_label = (LABEL_P (insn));
  rtx note;
  rtx next = NEXT_INSN (insn), prev = PREV_INSN (insn);

  while (next && INSN_DELETED_P (next))
    next = NEXT_INSN (next);

  /* This insn is already deleted => return first following nondeleted.  */
  if (INSN_DELETED_P (insn))
    return next;

  delete_insn (insn);

  /* If instruction is followed by a barrier,
     delete the barrier too.  */

  if (next != 0 && BARRIER_P (next))
    delete_insn (next);

  /* If deleting a jump, decrement the count of the label,
     and delete the label if it is now unused.  */

  if (JUMP_P (insn) && JUMP_LABEL (insn))
    {
      rtx lab = JUMP_LABEL (insn), lab_next;

      if (LABEL_NUSES (lab) == 0)
        {
          /* This can delete NEXT or PREV,
             either directly if NEXT is JUMP_LABEL (INSN),
             or indirectly through more levels of jumps.  */
          delete_related_insns (lab);

          /* I feel a little doubtful about this loop,
             but I see no clean and sure alternative way
             to find the first insn after INSN that is not now deleted.
             I hope this works.  */
          while (next && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          return next;
        }
      else if (tablejump_p (insn, NULL, &lab_next))
        {
          /* If we're deleting the tablejump, delete the dispatch table.
             We may not be able to kill the label immediately preceding
             just yet, as it might be referenced in code leading up to
             the tablejump.  */
          delete_related_insns (lab_next);
        }
    }

  /* Likewise if we're deleting a dispatch table.  */

  if (JUMP_P (insn)
      && (GET_CODE (PATTERN (insn)) == ADDR_VEC
          || GET_CODE (PATTERN (insn)) == ADDR_DIFF_VEC))
    {
      rtx pat = PATTERN (insn);
      int i, diff_vec_p = GET_CODE (pat) == ADDR_DIFF_VEC;
      int len = XVECLEN (pat, diff_vec_p);

      for (i = 0; i < len; i++)
        if (LABEL_NUSES (XEXP (XVECEXP (pat, diff_vec_p, i), 0)) == 0)
          delete_related_insns (XEXP (XVECEXP (pat, diff_vec_p, i), 0));
      while (next && INSN_DELETED_P (next))
        next = NEXT_INSN (next);
      return next;
    }

  /* Likewise for an ordinary INSN / CALL_INSN with a REG_LABEL note.  */
  if (NONJUMP_INSN_P (insn) || CALL_P (insn))
    for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
      if (REG_NOTE_KIND (note) == REG_LABEL
          /* This could also be a NOTE_INSN_DELETED_LABEL note.  */
          && LABEL_P (XEXP (note, 0)))
        if (LABEL_NUSES (XEXP (note, 0)) == 0)
          delete_related_insns (XEXP (note, 0));

  while (prev && (INSN_DELETED_P (prev) || NOTE_P (prev)))
    prev = PREV_INSN (prev);

  /* If INSN was a label and a dispatch table follows it,
     delete the dispatch table.  The tablejump must have gone already.
     It isn't useful to fall through into a table.  */

  if (was_code_label
      && NEXT_INSN (insn) != 0
      && JUMP_P (NEXT_INSN (insn))
      && (GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_VEC
          || GET_CODE (PATTERN (NEXT_INSN (insn))) == ADDR_DIFF_VEC))
    next = delete_related_insns (NEXT_INSN (insn));

  /* If INSN was a label, delete insns following it if now unreachable.  */

  if (was_code_label && prev && BARRIER_P (prev))
    {
      enum rtx_code code;
      while (next)
        {
          code = GET_CODE (next);
          if (code == NOTE)
            next = NEXT_INSN (next);
          /* Keep going past other deleted labels to delete what follows.  */
          else if (code == CODE_LABEL && INSN_DELETED_P (next))
            next = NEXT_INSN (next);
          else if (code == BARRIER || INSN_P (next))
            /* Note: if this deletes a jump, it can cause more
               deletion of unreachable code, after a different label.
               As long as the value from this recursive call is correct,
               this invocation functions correctly.  */
            next = delete_related_insns (next);
          else
            break;
        }
    }

  return next;
}
/* Delete a range of insns from FROM to TO, inclusive.
   This is for the sake of peephole optimization, so assume
   that whatever these insns do will still be done by a new
   peephole insn that will replace them.  */

void
delete_for_peephole (rtx from, rtx to)
{
  rtx insn = from;

  while (1)
    {
      rtx next = NEXT_INSN (insn);
      rtx prev = PREV_INSN (insn);

      if (!NOTE_P (insn))
        {
          INSN_DELETED_P (insn) = 1;

          /* Patch this insn out of the chain.  */
          /* We don't do this all at once, because we
             must preserve all NOTEs.  */
          if (prev)
            NEXT_INSN (prev) = next;

          if (next)
            PREV_INSN (next) = prev;
        }

      if (insn == to)
        break;
      insn = next;
    }

  /* Note that if TO is an unconditional jump
     we *do not* delete the BARRIER that follows,
     since the peephole that replaces this sequence
     is also an unconditional jump in that case.  */
}
/* Throughout LOC, redirect OLABEL to NLABEL.  Treat null OLABEL or
   NLABEL as a return.  Accrue modifications into the change group.  */

static void
redirect_exp_1 (rtx *loc, rtx olabel, rtx nlabel, rtx insn)
{
  rtx x = *loc;
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  if (code == LABEL_REF)
    {
      if (XEXP (x, 0) == olabel)
        {
          rtx n;
          if (nlabel)
            n = gen_rtx_LABEL_REF (Pmode, nlabel);
          else
            n = gen_rtx_RETURN (VOIDmode);

          validate_change (insn, loc, n, 1);
          return;
        }
    }
  else if (code == RETURN && olabel == 0)
    {
      if (nlabel)
        x = gen_rtx_LABEL_REF (Pmode, nlabel);
      else
        x = gen_rtx_RETURN (VOIDmode);
      if (loc == &PATTERN (insn))
        x = gen_rtx_SET (VOIDmode, pc_rtx, x);
      validate_change (insn, loc, x, 1);
      return;
    }

  if (code == SET && nlabel == 0 && SET_DEST (x) == pc_rtx
      && GET_CODE (SET_SRC (x)) == LABEL_REF
      && XEXP (SET_SRC (x), 0) == olabel)
    {
      validate_change (insn, loc, gen_rtx_RETURN (VOIDmode), 1);
      return;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        redirect_exp_1 (&XEXP (x, i), olabel, nlabel, insn);
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            redirect_exp_1 (&XVECEXP (x, i, j), olabel, nlabel, insn);
        }
    }
}
/* Make JUMP go to NLABEL instead of where it jumps now.  Accrue
   the modifications into the change group.  Return false if we did
   not see how to do that.  */

int
redirect_jump_1 (rtx jump, rtx nlabel)
{
  int ochanges = num_validated_changes ();
  rtx *loc;

  if (GET_CODE (PATTERN (jump)) == PARALLEL)
    loc = &XVECEXP (PATTERN (jump), 0, 0);
  else
    loc = &PATTERN (jump);

  redirect_exp_1 (loc, JUMP_LABEL (jump), nlabel, jump);
  return num_validated_changes () > ochanges;
}

/* Make JUMP go to NLABEL instead of where it jumps now.  If the old
   jump target label is unused as a result, it and the code following
   it may be deleted.

   If NLABEL is zero, we are to turn the jump into a (possibly conditional)
   RETURN insn.

   The return value will be 1 if the change was made, 0 if it wasn't
   (this can only occur for NLABEL == 0).  */

int
redirect_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (nlabel == olabel)
    return 1;

  if (! redirect_jump_1 (jump, nlabel) || ! apply_change_group ())
    return 0;

  redirect_jump_2 (jump, olabel, nlabel, delete_unused, 0);
  return 1;
}
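/* Illustrative sketch (a hypothetical helper, not used anywhere in this
   file): retargeting a branch in one call.  A nonzero third argument
   also deletes the old label, and whatever became unreachable with it,
   once the label's use count drops to zero.  */
#if 0
static int
example_retarget_branch (rtx jump, rtx new_label)
{
  /* Per the comment above, this returns 0 only when asked to turn JUMP
     into a RETURN (new_label == NULL) and that change does not
     validate.  */
  return redirect_jump (jump, new_label, 1);
}
#endif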
/* Fix up JUMP_LABEL and label ref counts after OLABEL has been replaced with
   NLABEL in JUMP.
   If DELETE_UNUSED is positive, delete insns related to OLABEL if its ref
   count has dropped to zero.  */
void
redirect_jump_2 (rtx jump, rtx olabel, rtx nlabel, int delete_unused,
                 int invert)
{
  rtx note;

  /* A negative DELETE_UNUSED was once used to signal behavior when
     moving the FUNCTION_END note.  Just sanity-check that no caller
     still relies on it.  */
  gcc_assert (delete_unused >= 0);
  JUMP_LABEL (jump) = nlabel;
  if (nlabel)
    ++LABEL_NUSES (nlabel);

  /* Update labels in any REG_EQUAL note.  */
  if ((note = find_reg_note (jump, REG_EQUAL, NULL_RTX)) != NULL_RTX)
    {
      if (!nlabel || (invert && !invert_exp_1 (XEXP (note, 0), jump)))
        remove_note (jump, note);
      else
        {
          redirect_exp_1 (&XEXP (note, 0), olabel, nlabel, jump);
          confirm_change_group ();
        }
    }

  if (olabel && --LABEL_NUSES (olabel) == 0 && delete_unused > 0
      /* Undefined labels will remain outside the insn stream.  */
      && INSN_UID (olabel))
    delete_related_insns (olabel);
  if (invert)
    invert_br_probabilities (jump);
}
/* Invert the jump condition X contained in jump insn INSN.  Accrue the
   modifications into the change group.  Return nonzero for success.  */
static int
invert_exp_1 (rtx x, rtx insn)
{
  RTX_CODE code = GET_CODE (x);

  if (code == IF_THEN_ELSE)
    {
      rtx comp = XEXP (x, 0);
      rtx tem;
      enum rtx_code reversed_code;

      /* We can do this in two ways:  The preferable way, which can only
         be done if this is not an integer comparison, is to reverse
         the comparison code.  Otherwise, swap the THEN-part and ELSE-part
         of the IF_THEN_ELSE.  If we can't do either, fail.  */

      reversed_code = reversed_comparison_code (comp, insn);

      if (reversed_code != UNKNOWN)
        {
          validate_change (insn, &XEXP (x, 0),
                           gen_rtx_fmt_ee (reversed_code,
                                           GET_MODE (comp), XEXP (comp, 0),
                                           XEXP (comp, 1)),
                           1);
          return 1;
        }

      tem = XEXP (x, 1);
      validate_change (insn, &XEXP (x, 1), XEXP (x, 2), 1);
      validate_change (insn, &XEXP (x, 2), tem, 1);
      return 1;
    }
  else
    return 0;
}
/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Accrue changes into the
   change group.  Return false if we didn't see how to perform the
   inversion and redirection.  */

int
invert_jump_1 (rtx jump, rtx nlabel)
{
  rtx x = pc_set (jump);
  int ochanges;
  int ok;

  ochanges = num_validated_changes ();
  gcc_assert (x);
  ok = invert_exp_1 (SET_SRC (x), jump);
  gcc_assert (ok);

  if (num_validated_changes () == ochanges)
    return 0;

  /* redirect_jump_1 will fail if nlabel == olabel, and the current use is
     in Pmode, so checking this is not merely an optimization.  */
  return nlabel == JUMP_LABEL (jump) || redirect_jump_1 (jump, nlabel);
}

/* Invert the condition of the jump JUMP, and make it jump to label
   NLABEL instead of where it jumps now.  Return true if successful.  */

int
invert_jump (rtx jump, rtx nlabel, int delete_unused)
{
  rtx olabel = JUMP_LABEL (jump);

  if (invert_jump_1 (jump, nlabel) && apply_change_group ())
    {
      redirect_jump_2 (jump, olabel, nlabel, delete_unused, 1);
      return 1;
    }
  cancel_changes (0);
  return 0;
}
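/* Illustrative sketch (a hypothetical helper, not used anywhere in this
   file): the common invert-and-retarget step used when threading a
   branch around a block.  Afterwards JUMP branches to NEW_LABEL exactly
   when it previously fell through.  */
#if 0
static int
example_exchange_branch_arms (rtx jump, rtx new_label)
{
  /* On failure nothing is changed: invert_jump cancels its own
     tentative changes via cancel_changes.  */
  return invert_jump (jump, new_label, 0);
}
#endif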
/* Like rtx_equal_p except that it considers two REGs as equal
   if they renumber to the same value and considers two commutative
   operations to be the same if the order of the operands has been
   reversed.  */

int
rtx_renumbered_equal_p (const_rtx x, const_rtx y)
{
  int i;
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;

  if (x == y)
    return 1;

  if ((code == REG || (code == SUBREG && REG_P (SUBREG_REG (x))))
      && (REG_P (y) || (GET_CODE (y) == SUBREG
                        && REG_P (SUBREG_REG (y)))))
    {
      int reg_x = -1, reg_y = -1;
      int byte_x = 0, byte_y = 0;

      if (GET_MODE (x) != GET_MODE (y))
        return 0;

      /* If we haven't done any renumbering, don't
         make any assumptions.  */
      if (reg_renumber == 0)
        return rtx_equal_p (x, y);

      if (code == SUBREG)
        {
          reg_x = REGNO (SUBREG_REG (x));
          byte_x = SUBREG_BYTE (x);

          if (reg_renumber[reg_x] >= 0)
            {
              reg_x = subreg_regno_offset (reg_renumber[reg_x],
                                           GET_MODE (SUBREG_REG (x)),
                                           byte_x,
                                           GET_MODE (x));
              byte_x = 0;
            }
        }
      else
        {
          reg_x = REGNO (x);
          if (reg_renumber[reg_x] >= 0)
            reg_x = reg_renumber[reg_x];
        }

      if (GET_CODE (y) == SUBREG)
        {
          reg_y = REGNO (SUBREG_REG (y));
          byte_y = SUBREG_BYTE (y);

          if (reg_renumber[reg_y] >= 0)
            {
              reg_y = subreg_regno_offset (reg_renumber[reg_y],
                                           GET_MODE (SUBREG_REG (y)),
                                           byte_y,
                                           GET_MODE (y));
              byte_y = 0;
            }
        }
      else
        {
          reg_y = REGNO (y);
          if (reg_renumber[reg_y] >= 0)
            reg_y = reg_renumber[reg_y];
        }

      return reg_x >= 0 && reg_x == reg_y && byte_x == byte_y;
    }
  /* Now we have disposed of all the cases
     in which different rtx codes can match.  */
  if (code != GET_CODE (y))
    return 0;

  switch (code)
    {
    case PC:
    case CC0:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CONST_INT:
    case CONST_DOUBLE:
      return 0;

    case LABEL_REF:
      /* We can't assume nonlocal labels have their following insns yet.  */
      if (LABEL_REF_NONLOCAL_P (x) || LABEL_REF_NONLOCAL_P (y))
        return XEXP (x, 0) == XEXP (y, 0);

      /* Two label-refs are equivalent if they point at labels
         in the same position in the instruction stream.  */
      return (next_real_insn (XEXP (x, 0))
              == next_real_insn (XEXP (y, 0)));

    case SYMBOL_REF:
      return XSTR (x, 0) == XSTR (y, 0);

    case CODE_LABEL:
      /* If we didn't match EQ equality above, they aren't the same.  */
      return 0;

    default:
      break;
    }

  /* (MULT:SI x y) and (MULT:HI x y) are NOT equivalent.  */

  if (GET_MODE (x) != GET_MODE (y))
    return 0;

  /* For commutative operations, the RTXs match if the operands match in
     either order.  Also handle the simple binary and unary cases without
     a loop.  */
  if (targetm.commutative_p (x, UNKNOWN))
    return ((rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
             && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)))
            || (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 1))
                && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 0))));
  else if (NON_COMMUTATIVE_P (x))
    return (rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0))
            && rtx_renumbered_equal_p (XEXP (x, 1), XEXP (y, 1)));
  else if (UNARY_P (x))
    return rtx_renumbered_equal_p (XEXP (x, 0), XEXP (y, 0));

  /* Compare the elements.  If any pair of corresponding elements
     fails to match, return 0 for the whole thing.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      int j;
      switch (fmt[i])
        {
        case 'w':
          if (XWINT (x, i) != XWINT (y, i))
            return 0;
          break;

        case 'i':
          if (XINT (x, i) != XINT (y, i))
            return 0;
          break;

        case 't':
          if (XTREE (x, i) != XTREE (y, i))
            return 0;
          break;

        case 's':
          if (strcmp (XSTR (x, i), XSTR (y, i)))
            return 0;
          break;

        case 'e':
          if (! rtx_renumbered_equal_p (XEXP (x, i), XEXP (y, i)))
            return 0;
          break;

        case 'u':
          if (XEXP (x, i) != XEXP (y, i))
            return 0;
          /* Fall through.  */
        case '0':
          break;

        case 'E':
          if (XVECLEN (x, i) != XVECLEN (y, i))
            return 0;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (!rtx_renumbered_equal_p (XVECEXP (x, i, j), XVECEXP (y, i, j)))
              return 0;
          break;

        default:
          gcc_unreachable ();
        }
    }
  return 1;
}
/* If X is a hard register or equivalent to one or a subregister of one,
   return the hard register number.  If X is a pseudo register that was not
   assigned a hard register, return the pseudo register number.  Otherwise,
   return -1.  Any rtx is valid for X.  */

int
true_regnum (const_rtx x)
{
  if (REG_P (x))
    {
      if (REGNO (x) >= FIRST_PSEUDO_REGISTER && reg_renumber[REGNO (x)] >= 0)
        return reg_renumber[REGNO (x)];
      return REGNO (x);
    }
  if (GET_CODE (x) == SUBREG)
    {
      int base = true_regnum (SUBREG_REG (x));
      if (base >= 0
          && base < FIRST_PSEUDO_REGISTER
          && subreg_offset_representable_p (REGNO (SUBREG_REG (x)),
                                            GET_MODE (SUBREG_REG (x)),
                                            SUBREG_BYTE (x), GET_MODE (x)))
        return base + subreg_regno_offset (REGNO (SUBREG_REG (x)),
                                           GET_MODE (SUBREG_REG (x)),
                                           SUBREG_BYTE (x), GET_MODE (x));
    }
  return -1;
}
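/* Illustrative sketch (a hypothetical helper, not used anywhere in this
   file): after register allocation, two rtx name the same hard register
   when their true_regnum values agree and denote a hard register;
   mode/size questions are deliberately ignored in this sketch.  */
#if 0
static int
example_same_hard_reg_p (const_rtx x, const_rtx y)
{
  int rx = true_regnum (x);
  int ry = true_regnum (y);
  return rx >= 0 && rx < FIRST_PSEUDO_REGISTER && rx == ry;
}
#endif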
/* Return regno of the register REG and handle subregs too.  */
unsigned int
reg_or_subregno (const_rtx reg)
{
  if (GET_CODE (reg) == SUBREG)
    reg = SUBREG_REG (reg);
  gcc_assert (REG_P (reg));
  return REGNO (reg);
}