1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
34 #include "basic-block.h"
38 /* Forward declarations */
/* Callback for for_each_rtx; used by global_reg_mentioned_p below.  */
39 static int global_reg_mentioned_p_1 (rtx
*, void *);
/* Callback for note_stores; used by set_of below.  */
40 static void set_of_1 (rtx
, rtx
, void *);
/* Callback for note_stores; used by insn_dependent_p below.  */
41 static void insn_dependent_p_1 (rtx
, rtx
, void *);
/* Helper; definition not visible in this chunk.  */
42 static int rtx_referenced_p_1 (rtx
*, void *);
/* Helper; definition not visible in this chunk.  */
43 static int computed_jump_p_1 (rtx
);
/* Callback for note_stores; definition not visible in this chunk.  */
44 static void parms_set (rtx
, rtx
, void *);
/* Hoisting helpers; definitions not visible in this chunk.  */
45 static bool hoist_test_store (rtx
, rtx
, regset
);
46 static void hoist_update_store (rtx
, rtx
*, rtx
, rtx
);
48 /* Bit flags that specify the machine subtype we are compiling for.
49 Bits are tested using macros TARGET_... defined in the tm.h file
50 and set by `-m...' switches. Must be defined in rtlanal.c. */
54 /* Return 1 if the value of X is unstable
55 (would be different at a different point in the program).
56 The frame pointer, arg pointer, etc. are considered stable
57 (within one function) and so is anything marked `unchanging'. */
60 rtx_unstable_p (rtx x
)
62 RTX_CODE code
= GET_CODE (x
);
69 return ! RTX_UNCHANGING_P (x
) || rtx_unstable_p (XEXP (x
, 0));
84 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
85 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
86 /* The arg pointer varies if it is not a fixed register. */
87 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
])
88 || RTX_UNCHANGING_P (x
))
90 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
91 /* ??? When call-clobbered, the value is stable modulo the restore
92 that must happen after a call. This currently screws up local-alloc
93 into believing that the restore is not needed. */
94 if (x
== pic_offset_table_rtx
)
100 if (MEM_VOLATILE_P (x
))
109 fmt
= GET_RTX_FORMAT (code
);
110 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
113 if (rtx_unstable_p (XEXP (x
, i
)))
116 else if (fmt
[i
] == 'E')
119 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
120 if (rtx_unstable_p (XVECEXP (x
, i
, j
)))
127 /* Return 1 if X has a value that can vary even between two
128 executions of the program. 0 means X can be compared reliably
129 against certain constants or near-constants.
130 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
131 zero, we are slightly more conservative.
132 The frame pointer and the arg pointer are considered constant. */
135 rtx_varies_p (rtx x
, int for_alias
)
137 RTX_CODE code
= GET_CODE (x
);
144 return ! RTX_UNCHANGING_P (x
) || rtx_varies_p (XEXP (x
, 0), for_alias
);
158 /* This will resolve to some offset from the frame pointer. */
162 /* Note that we have to test for the actual rtx used for the frame
163 and arg pointers and not just the register number in case we have
164 eliminated the frame and/or arg pointer and are using it
166 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
167 /* The arg pointer varies if it is not a fixed register. */
168 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
170 if (x
== pic_offset_table_rtx
171 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
172 /* ??? When call-clobbered, the value is stable modulo the restore
173 that must happen after a call. This currently screws up
174 local-alloc into believing that the restore is not needed, so we
175 must return 0 only if we are called from alias analysis. */
183 /* The operand 0 of a LO_SUM is considered constant
184 (in fact it is related specifically to operand 1)
185 during alias analysis. */
186 return (! for_alias
&& rtx_varies_p (XEXP (x
, 0), for_alias
))
187 || rtx_varies_p (XEXP (x
, 1), for_alias
);
190 if (MEM_VOLATILE_P (x
))
199 fmt
= GET_RTX_FORMAT (code
);
200 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
203 if (rtx_varies_p (XEXP (x
, i
), for_alias
))
206 else if (fmt
[i
] == 'E')
209 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
210 if (rtx_varies_p (XVECEXP (x
, i
, j
), for_alias
))
217 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
220 rtx_addr_can_trap_p (rtx x
)
222 enum rtx_code code
= GET_CODE (x
);
227 return SYMBOL_REF_WEAK (x
);
233 /* This will resolve to some offset from the frame pointer. */
237 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
238 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
239 || x
== stack_pointer_rtx
240 /* The arg pointer varies if it is not a fixed register. */
241 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
243 /* All of the virtual frame registers are stack references. */
244 if (REGNO (x
) >= FIRST_VIRTUAL_REGISTER
245 && REGNO (x
) <= LAST_VIRTUAL_REGISTER
)
250 return rtx_addr_can_trap_p (XEXP (x
, 0));
253 /* An address is assumed not to trap if it is an address that can't
254 trap plus a constant integer or it is the pic register plus a
256 return ! ((! rtx_addr_can_trap_p (XEXP (x
, 0))
257 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
258 || (XEXP (x
, 0) == pic_offset_table_rtx
259 && CONSTANT_P (XEXP (x
, 1))));
263 return rtx_addr_can_trap_p (XEXP (x
, 1));
270 return rtx_addr_can_trap_p (XEXP (x
, 0));
276 /* If it isn't one of the cases above, it can cause a trap.  */
280 /* Return true if X is an address that is known to not be zero. */
283 nonzero_address_p (rtx x
)
285 enum rtx_code code
= GET_CODE (x
);
290 return !SYMBOL_REF_WEAK (x
);
296 /* This will resolve to some offset from the frame pointer. */
300 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
301 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
302 || x
== stack_pointer_rtx
303 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
305 /* All of the virtual frame registers are stack references. */
306 if (REGNO (x
) >= FIRST_VIRTUAL_REGISTER
307 && REGNO (x
) <= LAST_VIRTUAL_REGISTER
)
312 return nonzero_address_p (XEXP (x
, 0));
315 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
317 /* Pointers aren't allowed to wrap. If we've got a register
318 that is known to be a pointer, and a positive offset, then
319 the composite can't be zero. */
320 if (INTVAL (XEXP (x
, 1)) > 0
321 && REG_P (XEXP (x
, 0))
322 && REG_POINTER (XEXP (x
, 0)))
325 return nonzero_address_p (XEXP (x
, 0));
327 /* Handle PIC references. */
328 else if (XEXP (x
, 0) == pic_offset_table_rtx
329 && CONSTANT_P (XEXP (x
, 1)))
334 /* Similar to the above; allow positive offsets. Further, since
335 auto-inc is only allowed in memories, the register must be a
337 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
338 && INTVAL (XEXP (x
, 1)) > 0)
340 return nonzero_address_p (XEXP (x
, 0));
343 /* Similarly. Further, the offset is always positive. */
350 return nonzero_address_p (XEXP (x
, 0));
353 return nonzero_address_p (XEXP (x
, 1));
359 /* If it isn't one of the cases above, it might be zero.  */
363 /* Return 1 if X refers to a memory location whose address
364 cannot be compared reliably with constant addresses,
365 or if X refers to a BLKmode memory object.
366 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
367 zero, we are slightly more conservative. */
370 rtx_addr_varies_p (rtx x
, int for_alias
)
381 return GET_MODE (x
) == BLKmode
|| rtx_varies_p (XEXP (x
, 0), for_alias
);
383 fmt
= GET_RTX_FORMAT (code
);
384 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
387 if (rtx_addr_varies_p (XEXP (x
, i
), for_alias
))
390 else if (fmt
[i
] == 'E')
393 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
394 if (rtx_addr_varies_p (XVECEXP (x
, i
, j
), for_alias
))
400 /* Return the value of the integer term in X, if one is apparent;
402 Only obvious integer terms are detected.
403 This is used in cse.c with the `related_value' field. */
406 get_integer_term (rtx x
)
408 if (GET_CODE (x
) == CONST
)
411 if (GET_CODE (x
) == MINUS
412 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
413 return - INTVAL (XEXP (x
, 1));
414 if (GET_CODE (x
) == PLUS
415 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
416 return INTVAL (XEXP (x
, 1));
420 /* If X is a constant, return the value sans apparent integer term;
422 Only obvious integer terms are detected. */
425 get_related_value (rtx x
)
427 if (GET_CODE (x
) != CONST
)
430 if (GET_CODE (x
) == PLUS
431 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
433 else if (GET_CODE (x
) == MINUS
434 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
439 /* Given a tablejump insn INSN, return the RTL expression for the offset
440 into the jump table. If the offset cannot be determined, then return
443 If EARLIEST is nonzero, it is a pointer to a place where the earliest
444 insn used in locating the offset was found. */
447 get_jump_table_offset (rtx insn
, rtx
*earliest
)
459 if (!tablejump_p (insn
, &label
, &table
) || !(set
= single_set (insn
)))
464 /* Some targets (eg, ARM) emit a tablejump that also
465 contains the out-of-range target. */
466 if (GET_CODE (x
) == IF_THEN_ELSE
467 && GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
470 /* Search backwards and locate the expression stored in X. */
471 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
472 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
475 /* If X is an expression using a relative address then strip
476 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
477 or the jump table label. */
478 if (GET_CODE (PATTERN (table
)) == ADDR_DIFF_VEC
479 && (GET_CODE (x
) == PLUS
|| GET_CODE (x
) == MINUS
))
481 for (i
= 0; i
< 2; i
++)
486 if (y
== pc_rtx
|| y
== pic_offset_table_rtx
)
489 for (old_y
= NULL_RTX
; GET_CODE (y
) == REG
&& y
!= old_y
;
490 old_y
= y
, y
= find_last_value (y
, &old_insn
, NULL_RTX
, 0))
493 if ((GET_CODE (y
) == LABEL_REF
&& XEXP (y
, 0) == label
))
502 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
503 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
507 /* Strip off any sign or zero extension. */
508 if (GET_CODE (x
) == SIGN_EXTEND
|| GET_CODE (x
) == ZERO_EXTEND
)
512 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
513 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
517 /* If X isn't a MEM then this isn't a tablejump we understand. */
518 if (GET_CODE (x
) != MEM
)
521 /* Strip off the MEM. */
524 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
525 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
528 /* If X isn't a PLUS then this isn't a tablejump we understand.  */
529 if (GET_CODE (x
) != PLUS
)
532 /* At this point we should have an expression representing the jump table
533 plus an offset. Examine each operand in order to determine which one
534 represents the jump table. Knowing that tells us that the other operand
535 must represent the offset. */
536 for (i
= 0; i
< 2; i
++)
541 for (old_y
= NULL_RTX
; GET_CODE (y
) == REG
&& y
!= old_y
;
542 old_y
= y
, y
= find_last_value (y
, &old_insn
, NULL_RTX
, 0))
545 if ((GET_CODE (y
) == CONST
|| GET_CODE (y
) == LABEL_REF
)
546 && reg_mentioned_p (label
, y
))
555 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
556 if (GET_CODE (x
) == PLUS
|| GET_CODE (x
) == MINUS
)
557 for (i
= 0; i
< 2; i
++)
558 if (XEXP (x
, i
) == pic_offset_table_rtx
)
567 /* Return the RTL expression representing the offset. */
571 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
572 a global register. */
575 global_reg_mentioned_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
583 switch (GET_CODE (x
))
586 if (GET_CODE (SUBREG_REG (x
)) == REG
)
588 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
589 && global_regs
[subreg_regno (x
)])
597 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
611 /* A non-constant call might use a global register. */
621 /* Returns nonzero if X mentions a global register. */
624 global_reg_mentioned_p (rtx x
)
628 if (GET_CODE (x
) == CALL_INSN
)
630 if (! CONST_OR_PURE_CALL_P (x
))
632 x
= CALL_INSN_FUNCTION_USAGE (x
);
640 return for_each_rtx (&x
, global_reg_mentioned_p_1
, NULL
);
643 /* Return the number of places FIND appears within X. If COUNT_DEST is
644 zero, we do not count occurrences inside the destination of a SET. */
647 count_occurrences (rtx x
, rtx find
, int count_dest
)
651 const char *format_ptr
;
672 if (GET_CODE (find
) == MEM
&& rtx_equal_p (x
, find
))
677 if (SET_DEST (x
) == find
&& ! count_dest
)
678 return count_occurrences (SET_SRC (x
), find
, count_dest
);
685 format_ptr
= GET_RTX_FORMAT (code
);
688 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
690 switch (*format_ptr
++)
693 count
+= count_occurrences (XEXP (x
, i
), find
, count_dest
);
697 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
698 count
+= count_occurrences (XVECEXP (x
, i
, j
), find
, count_dest
);
705 /* Nonzero if register REG appears somewhere within IN.
706 Also works if REG is not a register; in this case it checks
707 for a subexpression of IN that is Lisp "equal" to REG. */
710 reg_mentioned_p (rtx reg
, rtx in
)
722 if (GET_CODE (in
) == LABEL_REF
)
723 return reg
== XEXP (in
, 0);
725 code
= GET_CODE (in
);
729 /* Compare registers by number. */
731 return GET_CODE (reg
) == REG
&& REGNO (in
) == REGNO (reg
);
733 /* These codes have no constituent expressions
743 /* These are kept unique for a given value. */
750 if (GET_CODE (reg
) == code
&& rtx_equal_p (reg
, in
))
753 fmt
= GET_RTX_FORMAT (code
);
755 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
760 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
761 if (reg_mentioned_p (reg
, XVECEXP (in
, i
, j
)))
764 else if (fmt
[i
] == 'e'
765 && reg_mentioned_p (reg
, XEXP (in
, i
)))
771 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
772 no CODE_LABEL insn. */
775 no_labels_between_p (rtx beg
, rtx end
)
780 for (p
= NEXT_INSN (beg
); p
!= end
; p
= NEXT_INSN (p
))
781 if (GET_CODE (p
) == CODE_LABEL
)
786 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
787 no JUMP_INSN insn. */
790 no_jumps_between_p (rtx beg
, rtx end
)
793 for (p
= NEXT_INSN (beg
); p
!= end
; p
= NEXT_INSN (p
))
794 if (GET_CODE (p
) == JUMP_INSN
)
799 /* Nonzero if register REG is used in an insn between
800 FROM_INSN and TO_INSN (exclusive of those two). */
803 reg_used_between_p (rtx reg
, rtx from_insn
, rtx to_insn
)
807 if (from_insn
== to_insn
)
810 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
812 && (reg_overlap_mentioned_p (reg
, PATTERN (insn
))
813 || (GET_CODE (insn
) == CALL_INSN
814 && (find_reg_fusage (insn
, USE
, reg
)
815 || find_reg_fusage (insn
, CLOBBER
, reg
)))))
820 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
821 is entirely replaced by a new value and the only use is as a SET_DEST,
822 we do not consider it a reference. */
825 reg_referenced_p (rtx x
, rtx body
)
829 switch (GET_CODE (body
))
832 if (reg_overlap_mentioned_p (x
, SET_SRC (body
)))
835 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
836 of a REG that occupies all of the REG, the insn references X if
837 it is mentioned in the destination. */
838 if (GET_CODE (SET_DEST (body
)) != CC0
839 && GET_CODE (SET_DEST (body
)) != PC
840 && GET_CODE (SET_DEST (body
)) != REG
841 && ! (GET_CODE (SET_DEST (body
)) == SUBREG
842 && GET_CODE (SUBREG_REG (SET_DEST (body
))) == REG
843 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body
))))
844 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
845 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body
)))
846 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)))
847 && reg_overlap_mentioned_p (x
, SET_DEST (body
)))
852 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
853 if (reg_overlap_mentioned_p (x
, ASM_OPERANDS_INPUT (body
, i
)))
860 return reg_overlap_mentioned_p (x
, body
);
863 return reg_overlap_mentioned_p (x
, TRAP_CONDITION (body
));
866 return reg_overlap_mentioned_p (x
, XEXP (body
, 0));
869 case UNSPEC_VOLATILE
:
870 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
871 if (reg_overlap_mentioned_p (x
, XVECEXP (body
, 0, i
)))
876 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
877 if (reg_referenced_p (x
, XVECEXP (body
, 0, i
)))
882 if (GET_CODE (XEXP (body
, 0)) == MEM
)
883 if (reg_overlap_mentioned_p (x
, XEXP (XEXP (body
, 0), 0)))
888 if (reg_overlap_mentioned_p (x
, COND_EXEC_TEST (body
)))
890 return reg_referenced_p (x
, COND_EXEC_CODE (body
));
897 /* Nonzero if register REG is referenced in an insn between
898 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
902 reg_referenced_between_p (rtx reg
, rtx from_insn
, rtx to_insn
)
906 if (from_insn
== to_insn
)
909 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
911 && (reg_referenced_p (reg
, PATTERN (insn
))
912 || (GET_CODE (insn
) == CALL_INSN
913 && find_reg_fusage (insn
, USE
, reg
))))
918 /* Nonzero if register REG is set or clobbered in an insn between
919 FROM_INSN and TO_INSN (exclusive of those two). */
922 reg_set_between_p (rtx reg
, rtx from_insn
, rtx to_insn
)
926 if (from_insn
== to_insn
)
929 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
930 if (INSN_P (insn
) && reg_set_p (reg
, insn
))
935 /* Internals of reg_set_between_p. */
937 reg_set_p (rtx reg
, rtx insn
)
939 /* We can be passed an insn or part of one. If we are passed an insn,
940 check if a side-effect of the insn clobbers REG. */
942 && (FIND_REG_INC_NOTE (insn
, reg
)
943 || (GET_CODE (insn
) == CALL_INSN
944 /* We'd like to test call_used_regs here, but rtlanal.c can't
945 reference that variable due to its use in genattrtab. So
946 we'll just be more conservative.
948 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
949 information holds all clobbered registers. */
950 && ((GET_CODE (reg
) == REG
951 && REGNO (reg
) < FIRST_PSEUDO_REGISTER
)
952 || GET_CODE (reg
) == MEM
953 || find_reg_fusage (insn
, CLOBBER
, reg
)))))
956 return set_of (reg
, insn
) != NULL_RTX
;
959 /* Similar to reg_set_between_p, but check all registers in X. Return 0
960 only if none of them are modified between START and END. Do not
961 consider non-registers one way or the other. */
964 regs_set_between_p (rtx x
, rtx start
, rtx end
)
966 enum rtx_code code
= GET_CODE (x
);
983 return reg_set_between_p (x
, start
, end
);
989 fmt
= GET_RTX_FORMAT (code
);
990 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
992 if (fmt
[i
] == 'e' && regs_set_between_p (XEXP (x
, i
), start
, end
))
995 else if (fmt
[i
] == 'E')
996 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
997 if (regs_set_between_p (XVECEXP (x
, i
, j
), start
, end
))
1004 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1005 only if none of them are modified between START and END. Return 1 if
1006 X contains a MEM; this routine does use memory aliasing.  */
1009 modified_between_p (rtx x
, rtx start
, rtx end
)
1011 enum rtx_code code
= GET_CODE (x
);
1034 if (RTX_UNCHANGING_P (x
))
1036 if (modified_between_p (XEXP (x
, 0), start
, end
))
1038 for (insn
= NEXT_INSN (start
); insn
!= end
; insn
= NEXT_INSN (insn
))
1039 if (memory_modified_in_insn_p (x
, insn
))
1045 return reg_set_between_p (x
, start
, end
);
1051 fmt
= GET_RTX_FORMAT (code
);
1052 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1054 if (fmt
[i
] == 'e' && modified_between_p (XEXP (x
, i
), start
, end
))
1057 else if (fmt
[i
] == 'E')
1058 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1059 if (modified_between_p (XVECEXP (x
, i
, j
), start
, end
))
1066 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1067 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1068 does use memory aliasing. */
1071 modified_in_p (rtx x
, rtx insn
)
1073 enum rtx_code code
= GET_CODE (x
);
1092 if (RTX_UNCHANGING_P (x
))
1094 if (modified_in_p (XEXP (x
, 0), insn
))
1096 if (memory_modified_in_insn_p (x
, insn
))
1102 return reg_set_p (x
, insn
);
1108 fmt
= GET_RTX_FORMAT (code
);
1109 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1111 if (fmt
[i
] == 'e' && modified_in_p (XEXP (x
, i
), insn
))
1114 else if (fmt
[i
] == 'E')
1115 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1116 if (modified_in_p (XVECEXP (x
, i
, j
), insn
))
1123 /* Return true if anything in insn X is (anti,output,true) dependent on
1124 anything in insn Y. */
1127 insn_dependent_p (rtx x
, rtx y
)
1131 if (! INSN_P (x
) || ! INSN_P (y
))
1135 note_stores (PATTERN (x
), insn_dependent_p_1
, &tmp
);
1136 if (tmp
== NULL_RTX
)
1140 note_stores (PATTERN (y
), insn_dependent_p_1
, &tmp
);
1141 if (tmp
== NULL_RTX
)
1147 /* A helper routine for insn_dependent_p called through note_stores. */
1150 insn_dependent_p_1 (rtx x
, rtx pat ATTRIBUTE_UNUSED
, void *data
)
1152 rtx
* pinsn
= (rtx
*) data
;
1154 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
1158 /* Helper function for set_of. */
1166 set_of_1 (rtx x
, rtx pat
, void *data1
)
1168 struct set_of_data
*data
= (struct set_of_data
*) (data1
);
1169 if (rtx_equal_p (x
, data
->pat
)
1170 || (GET_CODE (x
) != MEM
&& reg_overlap_mentioned_p (data
->pat
, x
)))
1174 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1175 (either directly or via STRICT_LOW_PART and similar modifiers).  */
1177 set_of (rtx pat
, rtx insn
)
1179 struct set_of_data data
;
1180 data
.found
= NULL_RTX
;
1182 note_stores (INSN_P (insn
) ? PATTERN (insn
) : insn
, set_of_1
, &data
);
1186 /* Given an INSN, return a SET expression if this insn has only a single SET.
1187 It may also have CLOBBERs, USEs, or SET whose output
1188 will not be used, which we ignore. */
1191 single_set_2 (rtx insn
, rtx pat
)
1194 int set_verified
= 1;
1197 if (GET_CODE (pat
) == PARALLEL
)
1199 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1201 rtx sub
= XVECEXP (pat
, 0, i
);
1202 switch (GET_CODE (sub
))
1209 /* We can consider insns having multiple sets, where all
1210 but one are dead as single set insns. In common case
1211 only single set is present in the pattern so we want
1212 to avoid checking for REG_UNUSED notes unless necessary.
1214 When we reach set first time, we just expect this is
1215 the single set we are looking for and only when more
1216 sets are found in the insn, we check them. */
1219 if (find_reg_note (insn
, REG_UNUSED
, SET_DEST (set
))
1220 && !side_effects_p (set
))
1226 set
= sub
, set_verified
= 0;
1227 else if (!find_reg_note (insn
, REG_UNUSED
, SET_DEST (sub
))
1228 || side_effects_p (sub
))
1240 /* Given an INSN, return nonzero if it has more than one SET, else return
1244 multiple_sets (rtx insn
)
1249 /* INSN must be an insn. */
1250 if (! INSN_P (insn
))
1253 /* Only a PARALLEL can have multiple SETs. */
1254 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
1256 for (i
= 0, found
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1257 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == SET
)
1259 /* If we have already found a SET, then return now. */
1267 /* Either zero or one SET. */
1271 /* Return nonzero if the destination of SET equals the source
1272 and there are no side effects. */
1275 set_noop_p (rtx set
)
1277 rtx src
= SET_SRC (set
);
1278 rtx dst
= SET_DEST (set
);
1280 if (dst
== pc_rtx
&& src
== pc_rtx
)
1283 if (GET_CODE (dst
) == MEM
&& GET_CODE (src
) == MEM
)
1284 return rtx_equal_p (dst
, src
) && !side_effects_p (dst
);
1286 if (GET_CODE (dst
) == SIGN_EXTRACT
1287 || GET_CODE (dst
) == ZERO_EXTRACT
)
1288 return rtx_equal_p (XEXP (dst
, 0), src
)
1289 && ! BYTES_BIG_ENDIAN
&& XEXP (dst
, 2) == const0_rtx
1290 && !side_effects_p (src
);
1292 if (GET_CODE (dst
) == STRICT_LOW_PART
)
1293 dst
= XEXP (dst
, 0);
1295 if (GET_CODE (src
) == SUBREG
&& GET_CODE (dst
) == SUBREG
)
1297 if (SUBREG_BYTE (src
) != SUBREG_BYTE (dst
))
1299 src
= SUBREG_REG (src
);
1300 dst
= SUBREG_REG (dst
);
1303 return (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
1304 && REGNO (src
) == REGNO (dst
));
1307 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1311 noop_move_p (rtx insn
)
1313 rtx pat
= PATTERN (insn
);
1315 if (INSN_CODE (insn
) == NOOP_MOVE_INSN_CODE
)
1318 /* Insns carrying these notes are useful later on. */
1319 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
1322 /* For now treat an insn with a REG_RETVAL note as a
1323 special insn which should not be considered a no-op.  */
1324 if (find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
1327 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
1330 if (GET_CODE (pat
) == PARALLEL
)
1333 /* If nothing but SETs of registers to themselves,
1334 this insn can also be deleted. */
1335 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1337 rtx tem
= XVECEXP (pat
, 0, i
);
1339 if (GET_CODE (tem
) == USE
1340 || GET_CODE (tem
) == CLOBBER
)
1343 if (GET_CODE (tem
) != SET
|| ! set_noop_p (tem
))
1353 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1354 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1355 If the object was modified, if we hit a partial assignment to X, or hit a
1356 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1357 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1361 find_last_value (rtx x
, rtx
*pinsn
, rtx valid_to
, int allow_hwreg
)
1365 for (p
= PREV_INSN (*pinsn
); p
&& GET_CODE (p
) != CODE_LABEL
;
1369 rtx set
= single_set (p
);
1370 rtx note
= find_reg_note (p
, REG_EQUAL
, NULL_RTX
);
1372 if (set
&& rtx_equal_p (x
, SET_DEST (set
)))
1374 rtx src
= SET_SRC (set
);
1376 if (note
&& GET_CODE (XEXP (note
, 0)) != EXPR_LIST
)
1377 src
= XEXP (note
, 0);
1379 if ((valid_to
== NULL_RTX
1380 || ! modified_between_p (src
, PREV_INSN (p
), valid_to
))
1381 /* Reject hard registers because we don't usually want
1382 to use them; we'd rather use a pseudo. */
1383 && (! (GET_CODE (src
) == REG
1384 && REGNO (src
) < FIRST_PSEUDO_REGISTER
) || allow_hwreg
))
1391 /* If set in non-simple way, we don't have a value. */
1392 if (reg_set_p (x
, p
))
1399 /* Return nonzero if register in range [REGNO, ENDREGNO)
1400 appears either explicitly or implicitly in X
1401 other than being stored into.
1403 References contained within the substructure at LOC do not count.
1404 LOC may be zero, meaning don't ignore anything. */
1407 refers_to_regno_p (unsigned int regno
, unsigned int endregno
, rtx x
,
1411 unsigned int x_regno
;
1416 /* The contents of a REG_NONNEG note is always zero, so we must come here
1417 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1421 code
= GET_CODE (x
);
1426 x_regno
= REGNO (x
);
1428 /* If we modifying the stack, frame, or argument pointer, it will
1429 clobber a virtual register. In fact, we could be more precise,
1430 but it isn't worth it. */
1431 if ((x_regno
== STACK_POINTER_REGNUM
1432 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1433 || x_regno
== ARG_POINTER_REGNUM
1435 || x_regno
== FRAME_POINTER_REGNUM
)
1436 && regno
>= FIRST_VIRTUAL_REGISTER
&& regno
<= LAST_VIRTUAL_REGISTER
)
1439 return (endregno
> x_regno
1440 && regno
< x_regno
+ (x_regno
< FIRST_PSEUDO_REGISTER
1441 ? hard_regno_nregs
[x_regno
][GET_MODE (x
)]
1445 /* If this is a SUBREG of a hard reg, we can see exactly which
1446 registers are being modified. Otherwise, handle normally. */
1447 if (GET_CODE (SUBREG_REG (x
)) == REG
1448 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
1450 unsigned int inner_regno
= subreg_regno (x
);
1451 unsigned int inner_endregno
1452 = inner_regno
+ (inner_regno
< FIRST_PSEUDO_REGISTER
1453 ? hard_regno_nregs
[inner_regno
][GET_MODE (x
)] : 1);
1455 return endregno
> inner_regno
&& regno
< inner_endregno
;
1461 if (&SET_DEST (x
) != loc
1462 /* Note setting a SUBREG counts as referring to the REG it is in for
1463 a pseudo but not for hard registers since we can
1464 treat each word individually. */
1465 && ((GET_CODE (SET_DEST (x
)) == SUBREG
1466 && loc
!= &SUBREG_REG (SET_DEST (x
))
1467 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
1468 && REGNO (SUBREG_REG (SET_DEST (x
))) >= FIRST_PSEUDO_REGISTER
1469 && refers_to_regno_p (regno
, endregno
,
1470 SUBREG_REG (SET_DEST (x
)), loc
))
1471 || (GET_CODE (SET_DEST (x
)) != REG
1472 && refers_to_regno_p (regno
, endregno
, SET_DEST (x
), loc
))))
1475 if (code
== CLOBBER
|| loc
== &SET_SRC (x
))
1484 /* X does not match, so try its subexpressions. */
1486 fmt
= GET_RTX_FORMAT (code
);
1487 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1489 if (fmt
[i
] == 'e' && loc
!= &XEXP (x
, i
))
1497 if (refers_to_regno_p (regno
, endregno
, XEXP (x
, i
), loc
))
1500 else if (fmt
[i
] == 'E')
1503 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1504 if (loc
!= &XVECEXP (x
, i
, j
)
1505 && refers_to_regno_p (regno
, endregno
, XVECEXP (x
, i
, j
), loc
))
1512 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1513 we check if any register number in X conflicts with the relevant register
1514 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1515 contains a MEM (we don't bother checking for memory addresses that can't
1516 conflict because we expect this to be a rare case).  */
1519 reg_overlap_mentioned_p (rtx x
, rtx in
)
1521 unsigned int regno
, endregno
;
1523 /* If either argument is a constant, then modifying X can not
1524 affect IN. Here we look at IN, we can profitably combine
1525 CONSTANT_P (x) with the switch statement below. */
1526 if (CONSTANT_P (in
))
1530 switch (GET_CODE (x
))
1532 case STRICT_LOW_PART
:
1535 /* Overly conservative. */
1540 regno
= REGNO (SUBREG_REG (x
));
1541 if (regno
< FIRST_PSEUDO_REGISTER
)
1542 regno
= subreg_regno (x
);
1548 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
1549 ? hard_regno_nregs
[regno
][GET_MODE (x
)] : 1);
1550 return refers_to_regno_p (regno
, endregno
, in
, (rtx
*) 0);
1557 if (GET_CODE (in
) == MEM
)
1560 fmt
= GET_RTX_FORMAT (GET_CODE (in
));
1561 for (i
= GET_RTX_LENGTH (GET_CODE (in
)) - 1; i
>= 0; i
--)
1562 if (fmt
[i
] == 'e' && reg_overlap_mentioned_p (x
, XEXP (in
, i
)))
1571 return reg_mentioned_p (x
, in
);
1577 /* If any register in here refers to it we return true. */
1578 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1579 if (XEXP (XVECEXP (x
, 0, i
), 0) != 0
1580 && reg_overlap_mentioned_p (XEXP (XVECEXP (x
, 0, i
), 0), in
))
1586 #ifdef ENABLE_CHECKING
1587 if (!CONSTANT_P (x
))
1595 /* Return the last value to which REG was set prior to INSN. If we can't
1596 find it easily, return 0.
1598 We only return a REG, SUBREG, or constant because it is too hard to
1599 check if a MEM remains unchanged. */
1602 reg_set_last (rtx x
, rtx insn
)
1604 rtx orig_insn
= insn
;
1606 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1607 Stop when we reach a label or X is a hard reg and we reach a
1608 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1610 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1612 /* We compare with <= here, because reg_set_last_last_regno
1613 is actually the number of the first reg *not* in X. */
1615 insn
&& GET_CODE (insn
) != CODE_LABEL
1616 && ! (GET_CODE (insn
) == CALL_INSN
1617 && REGNO (x
) <= FIRST_PSEUDO_REGISTER
);
1618 insn
= PREV_INSN (insn
))
1621 rtx set
= set_of (x
, insn
);
1622 /* OK, this function modifies our register.  See if we understand it.  */
1626 if (GET_CODE (set
) != SET
|| SET_DEST (set
) != x
)
1628 last_value
= SET_SRC (x
);
1629 if (CONSTANT_P (last_value
)
1630 || ((GET_CODE (last_value
) == REG
1631 || GET_CODE (last_value
) == SUBREG
)
1632 && ! reg_set_between_p (last_value
,
1643 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1644 (X would be the pattern of an insn).
1645 FUN receives two arguments:
1646 the REG, MEM, CC0 or PC being stored in or clobbered,
1647 the SET or CLOBBER rtx that does the store.
1649 If the item being stored in or clobbered is a SUBREG of a hard register,
1650 the SUBREG will be passed. */
1653 note_stores (rtx x
, void (*fun
) (rtx
, rtx
, void *), void *data
)
1657 if (GET_CODE (x
) == COND_EXEC
)
1658 x
= COND_EXEC_CODE (x
);
1660 if (GET_CODE (x
) == SET
|| GET_CODE (x
) == CLOBBER
)
1662 rtx dest
= SET_DEST (x
);
1664 while ((GET_CODE (dest
) == SUBREG
1665 && (GET_CODE (SUBREG_REG (dest
)) != REG
1666 || REGNO (SUBREG_REG (dest
)) >= FIRST_PSEUDO_REGISTER
))
1667 || GET_CODE (dest
) == ZERO_EXTRACT
1668 || GET_CODE (dest
) == SIGN_EXTRACT
1669 || GET_CODE (dest
) == STRICT_LOW_PART
)
1670 dest
= XEXP (dest
, 0);
1672 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1673 each of whose first operand is a register. */
1674 if (GET_CODE (dest
) == PARALLEL
)
1676 for (i
= XVECLEN (dest
, 0) - 1; i
>= 0; i
--)
1677 if (XEXP (XVECEXP (dest
, 0, i
), 0) != 0)
1678 (*fun
) (XEXP (XVECEXP (dest
, 0, i
), 0), x
, data
);
1681 (*fun
) (dest
, x
, data
);
1684 else if (GET_CODE (x
) == PARALLEL
)
1685 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1686 note_stores (XVECEXP (x
, 0, i
), fun
, data
);
1689 /* Like notes_stores, but call FUN for each expression that is being
1690 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1691 FUN for each expression, not any interior subexpressions. FUN receives a
1692 pointer to the expression and the DATA passed to this function.
1694 Note that this is not quite the same test as that done in reg_referenced_p
1695 since that considers something as being referenced if it is being
1696 partially set, while we do not. */
1699 note_uses (rtx
*pbody
, void (*fun
) (rtx
*, void *), void *data
)
1704 switch (GET_CODE (body
))
1707 (*fun
) (&COND_EXEC_TEST (body
), data
);
1708 note_uses (&COND_EXEC_CODE (body
), fun
, data
);
1712 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1713 note_uses (&XVECEXP (body
, 0, i
), fun
, data
);
1717 (*fun
) (&XEXP (body
, 0), data
);
1721 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
1722 (*fun
) (&ASM_OPERANDS_INPUT (body
, i
), data
);
1726 (*fun
) (&TRAP_CONDITION (body
), data
);
1730 (*fun
) (&XEXP (body
, 0), data
);
1734 case UNSPEC_VOLATILE
:
1735 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1736 (*fun
) (&XVECEXP (body
, 0, i
), data
);
1740 if (GET_CODE (XEXP (body
, 0)) == MEM
)
1741 (*fun
) (&XEXP (XEXP (body
, 0), 0), data
);
1746 rtx dest
= SET_DEST (body
);
1748 /* For sets we replace everything in source plus registers in memory
1749 expression in store and operands of a ZERO_EXTRACT. */
1750 (*fun
) (&SET_SRC (body
), data
);
1752 if (GET_CODE (dest
) == ZERO_EXTRACT
)
1754 (*fun
) (&XEXP (dest
, 1), data
);
1755 (*fun
) (&XEXP (dest
, 2), data
);
1758 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
)
1759 dest
= XEXP (dest
, 0);
1761 if (GET_CODE (dest
) == MEM
)
1762 (*fun
) (&XEXP (dest
, 0), data
);
1767 /* All the other possibilities never store. */
1768 (*fun
) (pbody
, data
);
1773 /* Return nonzero if X's old contents don't survive after INSN.
1774 This will be true if X is (cc0) or if X is a register and
1775 X dies in INSN or because INSN entirely sets X.
1777 "Entirely set" means set directly and not through a SUBREG,
1778 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1779 Likewise, REG_INC does not count.
1781 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1782 but for this use that makes no difference, since regs don't overlap
1783 during their lifetimes. Therefore, this function may be used
1784 at any time after deaths have been computed (in flow.c).
1786 If REG is a hard reg that occupies multiple machine registers, this
1787 function will only return 1 if each of those registers will be replaced
1791 dead_or_set_p (rtx insn
, rtx x
)
1793 unsigned int regno
, last_regno
;
1796 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1797 if (GET_CODE (x
) == CC0
)
1800 if (GET_CODE (x
) != REG
)
1804 last_regno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
1805 : regno
+ hard_regno_nregs
[regno
][GET_MODE (x
)] - 1);
1807 for (i
= regno
; i
<= last_regno
; i
++)
1808 if (! dead_or_set_regno_p (insn
, i
))
1814 /* Utility function for dead_or_set_p to check an individual register. Also
1815 called from flow.c. */
1818 dead_or_set_regno_p (rtx insn
, unsigned int test_regno
)
1820 unsigned int regno
, endregno
;
1823 /* See if there is a death note for something that includes TEST_REGNO. */
1824 if (find_regno_note (insn
, REG_DEAD
, test_regno
))
1827 if (GET_CODE (insn
) == CALL_INSN
1828 && find_regno_fusage (insn
, CLOBBER
, test_regno
))
1831 pattern
= PATTERN (insn
);
1833 if (GET_CODE (pattern
) == COND_EXEC
)
1834 pattern
= COND_EXEC_CODE (pattern
);
1836 if (GET_CODE (pattern
) == SET
)
1838 rtx dest
= SET_DEST (pattern
);
1840 /* A value is totally replaced if it is the destination or the
1841 destination is a SUBREG of REGNO that does not change the number of
1843 if (GET_CODE (dest
) == SUBREG
1844 && (((GET_MODE_SIZE (GET_MODE (dest
))
1845 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
1846 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
1847 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)))
1848 dest
= SUBREG_REG (dest
);
1850 if (GET_CODE (dest
) != REG
)
1853 regno
= REGNO (dest
);
1854 endregno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
+ 1
1855 : regno
+ hard_regno_nregs
[regno
][GET_MODE (dest
)]);
1857 return (test_regno
>= regno
&& test_regno
< endregno
);
1859 else if (GET_CODE (pattern
) == PARALLEL
)
1863 for (i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
1865 rtx body
= XVECEXP (pattern
, 0, i
);
1867 if (GET_CODE (body
) == COND_EXEC
)
1868 body
= COND_EXEC_CODE (body
);
1870 if (GET_CODE (body
) == SET
|| GET_CODE (body
) == CLOBBER
)
1872 rtx dest
= SET_DEST (body
);
1874 if (GET_CODE (dest
) == SUBREG
1875 && (((GET_MODE_SIZE (GET_MODE (dest
))
1876 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
1877 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
1878 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)))
1879 dest
= SUBREG_REG (dest
);
1881 if (GET_CODE (dest
) != REG
)
1884 regno
= REGNO (dest
);
1885 endregno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
+ 1
1886 : regno
+ hard_regno_nregs
[regno
][GET_MODE (dest
)]);
1888 if (test_regno
>= regno
&& test_regno
< endregno
)
1897 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1898 If DATUM is nonzero, look for one whose datum is DATUM. */
1901 find_reg_note (rtx insn
, enum reg_note kind
, rtx datum
)
1905 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1906 if (! INSN_P (insn
))
1909 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1910 if (REG_NOTE_KIND (link
) == kind
1911 && (datum
== 0 || datum
== XEXP (link
, 0)))
1916 /* Return the reg-note of kind KIND in insn INSN which applies to register
1917 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1918 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1919 it might be the case that the note overlaps REGNO. */
1922 find_regno_note (rtx insn
, enum reg_note kind
, unsigned int regno
)
1926 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1927 if (! INSN_P (insn
))
1930 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1931 if (REG_NOTE_KIND (link
) == kind
1932 /* Verify that it is a register, so that scratch and MEM won't cause a
1934 && GET_CODE (XEXP (link
, 0)) == REG
1935 && REGNO (XEXP (link
, 0)) <= regno
1936 && ((REGNO (XEXP (link
, 0))
1937 + (REGNO (XEXP (link
, 0)) >= FIRST_PSEUDO_REGISTER
? 1
1938 : hard_regno_nregs
[REGNO (XEXP (link
, 0))]
1939 [GET_MODE (XEXP (link
, 0))]))
1945 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1949 find_reg_equal_equiv_note (rtx insn
)
1955 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1956 if (REG_NOTE_KIND (link
) == REG_EQUAL
1957 || REG_NOTE_KIND (link
) == REG_EQUIV
)
1959 if (single_set (insn
) == 0)
1966 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1967 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1970 find_reg_fusage (rtx insn
, enum rtx_code code
, rtx datum
)
1972 /* If it's not a CALL_INSN, it can't possibly have a
1973 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1974 if (GET_CODE (insn
) != CALL_INSN
)
1980 if (GET_CODE (datum
) != REG
)
1984 for (link
= CALL_INSN_FUNCTION_USAGE (insn
);
1986 link
= XEXP (link
, 1))
1987 if (GET_CODE (XEXP (link
, 0)) == code
1988 && rtx_equal_p (datum
, XEXP (XEXP (link
, 0), 0)))
1993 unsigned int regno
= REGNO (datum
);
1995 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1996 to pseudo registers, so don't bother checking. */
1998 if (regno
< FIRST_PSEUDO_REGISTER
)
2000 unsigned int end_regno
2001 = regno
+ hard_regno_nregs
[regno
][GET_MODE (datum
)];
2004 for (i
= regno
; i
< end_regno
; i
++)
2005 if (find_regno_fusage (insn
, code
, i
))
2013 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2014 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2017 find_regno_fusage (rtx insn
, enum rtx_code code
, unsigned int regno
)
2021 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2022 to pseudo registers, so don't bother checking. */
2024 if (regno
>= FIRST_PSEUDO_REGISTER
2025 || GET_CODE (insn
) != CALL_INSN
)
2028 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
2030 unsigned int regnote
;
2033 if (GET_CODE (op
= XEXP (link
, 0)) == code
2034 && GET_CODE (reg
= XEXP (op
, 0)) == REG
2035 && (regnote
= REGNO (reg
)) <= regno
2036 && regnote
+ hard_regno_nregs
[regnote
][GET_MODE (reg
)] > regno
)
2043 /* Return true if INSN is a call to a pure function. */
2046 pure_call_p (rtx insn
)
2050 if (GET_CODE (insn
) != CALL_INSN
|| ! CONST_OR_PURE_CALL_P (insn
))
2053 /* Look for the note that differentiates const and pure functions. */
2054 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
2058 if (GET_CODE (u
= XEXP (link
, 0)) == USE
2059 && GET_CODE (m
= XEXP (u
, 0)) == MEM
&& GET_MODE (m
) == BLKmode
2060 && GET_CODE (XEXP (m
, 0)) == SCRATCH
)
2067 /* Remove register note NOTE from the REG_NOTES of INSN. */
2070 remove_note (rtx insn
, rtx note
)
2074 if (note
== NULL_RTX
)
2077 if (REG_NOTES (insn
) == note
)
2079 REG_NOTES (insn
) = XEXP (note
, 1);
2083 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
2084 if (XEXP (link
, 1) == note
)
2086 XEXP (link
, 1) = XEXP (note
, 1);
2093 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2094 return 1 if it is found. A simple equality test is used to determine if
2098 in_expr_list_p (rtx listp
, rtx node
)
2102 for (x
= listp
; x
; x
= XEXP (x
, 1))
2103 if (node
== XEXP (x
, 0))
2109 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2110 remove that entry from the list if it is found.
2112 A simple equality test is used to determine if NODE matches. */
2115 remove_node_from_expr_list (rtx node
, rtx
*listp
)
2118 rtx prev
= NULL_RTX
;
2122 if (node
== XEXP (temp
, 0))
2124 /* Splice the node out of the list. */
2126 XEXP (prev
, 1) = XEXP (temp
, 1);
2128 *listp
= XEXP (temp
, 1);
2134 temp
= XEXP (temp
, 1);
2138 /* Nonzero if X contains any volatile instructions. These are instructions
2139 which may cause unpredictable machine state instructions, and thus no
2140 instructions should be moved or combined across them. This includes
2141 only volatile asms and UNSPEC_VOLATILE instructions. */
2144 volatile_insn_p (rtx x
)
2148 code
= GET_CODE (x
);
2168 case UNSPEC_VOLATILE
:
2169 /* case TRAP_IF: This isn't clear yet. */
2174 if (MEM_VOLATILE_P (x
))
2181 /* Recursively scan the operands of this expression. */
2184 const char *fmt
= GET_RTX_FORMAT (code
);
2187 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2191 if (volatile_insn_p (XEXP (x
, i
)))
2194 else if (fmt
[i
] == 'E')
2197 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2198 if (volatile_insn_p (XVECEXP (x
, i
, j
)))
2206 /* Nonzero if X contains any volatile memory references
2207 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2210 volatile_refs_p (rtx x
)
2214 code
= GET_CODE (x
);
2232 case UNSPEC_VOLATILE
:
2238 if (MEM_VOLATILE_P (x
))
2245 /* Recursively scan the operands of this expression. */
2248 const char *fmt
= GET_RTX_FORMAT (code
);
2251 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2255 if (volatile_refs_p (XEXP (x
, i
)))
2258 else if (fmt
[i
] == 'E')
2261 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2262 if (volatile_refs_p (XVECEXP (x
, i
, j
)))
2270 /* Similar to above, except that it also rejects register pre- and post-
2274 side_effects_p (rtx x
)
2278 code
= GET_CODE (x
);
2296 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2297 when some combination can't be done. If we see one, don't think
2298 that we can simplify the expression. */
2299 return (GET_MODE (x
) != VOIDmode
);
2308 case UNSPEC_VOLATILE
:
2309 /* case TRAP_IF: This isn't clear yet. */
2315 if (MEM_VOLATILE_P (x
))
2322 /* Recursively scan the operands of this expression. */
2325 const char *fmt
= GET_RTX_FORMAT (code
);
2328 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2332 if (side_effects_p (XEXP (x
, i
)))
2335 else if (fmt
[i
] == 'E')
2338 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2339 if (side_effects_p (XVECEXP (x
, i
, j
)))
2347 /* Return nonzero if evaluating rtx X might cause a trap. */
2358 code
= GET_CODE (x
);
2361 /* Handle these cases quickly. */
2375 case UNSPEC_VOLATILE
:
2380 return MEM_VOLATILE_P (x
);
2382 /* Memory ref can trap unless it's a static var or a stack slot. */
2384 if (MEM_NOTRAP_P (x
))
2386 return rtx_addr_can_trap_p (XEXP (x
, 0));
2388 /* Division by a non-constant might trap. */
2393 if (HONOR_SNANS (GET_MODE (x
)))
2395 if (! CONSTANT_P (XEXP (x
, 1))
2396 || (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2397 && flag_trapping_math
))
2399 if (XEXP (x
, 1) == const0_rtx
)
2404 /* An EXPR_LIST is used to represent a function call. This
2405 certainly may trap. */
2413 /* Some floating point comparisons may trap. */
2414 if (!flag_trapping_math
)
2416 /* ??? There is no machine independent way to check for tests that trap
2417 when COMPARE is used, though many targets do make this distinction.
2418 For instance, sparc uses CCFPE for compares which generate exceptions
2419 and CCFP for compares which do not generate exceptions. */
2420 if (HONOR_NANS (GET_MODE (x
)))
2422 /* But often the compare has some CC mode, so check operand
2424 if (HONOR_NANS (GET_MODE (XEXP (x
, 0)))
2425 || HONOR_NANS (GET_MODE (XEXP (x
, 1))))
2431 if (HONOR_SNANS (GET_MODE (x
)))
2433 /* Often comparison is CC mode, so check operand modes. */
2434 if (HONOR_SNANS (GET_MODE (XEXP (x
, 0)))
2435 || HONOR_SNANS (GET_MODE (XEXP (x
, 1))))
2440 /* Conversion of floating point might trap. */
2441 if (flag_trapping_math
&& HONOR_NANS (GET_MODE (XEXP (x
, 0))))
2447 /* These operations don't trap even with floating point. */
2451 /* Any floating arithmetic may trap. */
2452 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2453 && flag_trapping_math
)
2457 fmt
= GET_RTX_FORMAT (code
);
2458 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2462 if (may_trap_p (XEXP (x
, i
)))
2465 else if (fmt
[i
] == 'E')
2468 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2469 if (may_trap_p (XVECEXP (x
, i
, j
)))
2476 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2477 i.e., an inequality. */
2480 inequality_comparisons_p (rtx x
)
2484 enum rtx_code code
= GET_CODE (x
);
2514 len
= GET_RTX_LENGTH (code
);
2515 fmt
= GET_RTX_FORMAT (code
);
2517 for (i
= 0; i
< len
; i
++)
2521 if (inequality_comparisons_p (XEXP (x
, i
)))
2524 else if (fmt
[i
] == 'E')
2527 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2528 if (inequality_comparisons_p (XVECEXP (x
, i
, j
)))
2536 /* Replace any occurrence of FROM in X with TO. The function does
2537 not enter into CONST_DOUBLE for the replace.
2539 Note that copying is not done so X must not be shared unless all copies
2540 are to be modified. */
2543 replace_rtx (rtx x
, rtx from
, rtx to
)
2548 /* The following prevents loops occurrence when we change MEM in
2549 CONST_DOUBLE onto the same CONST_DOUBLE. */
2550 if (x
!= 0 && GET_CODE (x
) == CONST_DOUBLE
)
2556 /* Allow this function to make replacements in EXPR_LISTs. */
2560 if (GET_CODE (x
) == SUBREG
)
2562 rtx
new = replace_rtx (SUBREG_REG (x
), from
, to
);
2564 if (GET_CODE (new) == CONST_INT
)
2566 x
= simplify_subreg (GET_MODE (x
), new,
2567 GET_MODE (SUBREG_REG (x
)),
2573 SUBREG_REG (x
) = new;
2577 else if (GET_CODE (x
) == ZERO_EXTEND
)
2579 rtx
new = replace_rtx (XEXP (x
, 0), from
, to
);
2581 if (GET_CODE (new) == CONST_INT
)
2583 x
= simplify_unary_operation (ZERO_EXTEND
, GET_MODE (x
),
2584 new, GET_MODE (XEXP (x
, 0)));
2594 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2595 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2598 XEXP (x
, i
) = replace_rtx (XEXP (x
, i
), from
, to
);
2599 else if (fmt
[i
] == 'E')
2600 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2601 XVECEXP (x
, i
, j
) = replace_rtx (XVECEXP (x
, i
, j
), from
, to
);
2607 /* Throughout the rtx X, replace many registers according to REG_MAP.
2608 Return the replacement for X (which may be X with altered contents).
2609 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2610 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2612 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2613 should not be mapped to pseudos or vice versa since validate_change
2616 If REPLACE_DEST is 1, replacements are also done in destinations;
2617 otherwise, only sources are replaced. */
2620 replace_regs (rtx x
, rtx
*reg_map
, unsigned int nregs
, int replace_dest
)
2629 code
= GET_CODE (x
);
2644 /* Verify that the register has an entry before trying to access it. */
2645 if (REGNO (x
) < nregs
&& reg_map
[REGNO (x
)] != 0)
2647 /* SUBREGs can't be shared. Always return a copy to ensure that if
2648 this replacement occurs more than once then each instance will
2649 get distinct rtx. */
2650 if (GET_CODE (reg_map
[REGNO (x
)]) == SUBREG
)
2651 return copy_rtx (reg_map
[REGNO (x
)]);
2652 return reg_map
[REGNO (x
)];
2657 /* Prevent making nested SUBREGs. */
2658 if (GET_CODE (SUBREG_REG (x
)) == REG
&& REGNO (SUBREG_REG (x
)) < nregs
2659 && reg_map
[REGNO (SUBREG_REG (x
))] != 0
2660 && GET_CODE (reg_map
[REGNO (SUBREG_REG (x
))]) == SUBREG
)
2662 rtx map_val
= reg_map
[REGNO (SUBREG_REG (x
))];
2663 return simplify_gen_subreg (GET_MODE (x
), map_val
,
2664 GET_MODE (SUBREG_REG (x
)),
2671 SET_DEST (x
) = replace_regs (SET_DEST (x
), reg_map
, nregs
, 0);
2673 else if (GET_CODE (SET_DEST (x
)) == MEM
2674 || GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2675 /* Even if we are not to replace destinations, replace register if it
2676 is CONTAINED in destination (destination is memory or
2677 STRICT_LOW_PART). */
2678 XEXP (SET_DEST (x
), 0) = replace_regs (XEXP (SET_DEST (x
), 0),
2680 else if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
2681 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2684 SET_SRC (x
) = replace_regs (SET_SRC (x
), reg_map
, nregs
, 0);
2691 fmt
= GET_RTX_FORMAT (code
);
2692 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2695 XEXP (x
, i
) = replace_regs (XEXP (x
, i
), reg_map
, nregs
, replace_dest
);
2696 else if (fmt
[i
] == 'E')
2699 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2700 XVECEXP (x
, i
, j
) = replace_regs (XVECEXP (x
, i
, j
), reg_map
,
2701 nregs
, replace_dest
);
2707 /* Replace occurrences of the old label in *X with the new one.
2708 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2711 replace_label (rtx
*x
, void *data
)
2715 rtx old_label
= ((replace_label_data
*) data
)->r1
;
2716 rtx new_label
= ((replace_label_data
*) data
)->r2
;
2717 bool update_label_nuses
= ((replace_label_data
*) data
)->update_label_nuses
;
2722 if (GET_CODE (l
) == MEM
2723 && (tmp
= XEXP (l
, 0)) != NULL_RTX
2724 && GET_CODE (tmp
) == SYMBOL_REF
2725 && CONSTANT_POOL_ADDRESS_P (tmp
))
2727 rtx c
= get_pool_constant (tmp
);
2728 if (rtx_referenced_p (old_label
, c
))
2731 replace_label_data
*d
= (replace_label_data
*) data
;
2733 /* Create a copy of constant C; replace the label inside
2734 but do not update LABEL_NUSES because uses in constant pool
2736 new_c
= copy_rtx (c
);
2737 d
->update_label_nuses
= false;
2738 for_each_rtx (&new_c
, replace_label
, data
);
2739 d
->update_label_nuses
= update_label_nuses
;
2741 /* Add the new constant NEW_C to constant pool and replace
2742 the old reference to constant by new reference. */
2743 new_l
= force_const_mem (get_pool_mode (tmp
), new_c
);
2744 *x
= replace_rtx (l
, l
, new_l
);
2749 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2750 field. This is not handled by for_each_rtx because it doesn't
2751 handle unprinted ('0') fields. */
2752 if (GET_CODE (l
) == JUMP_INSN
&& JUMP_LABEL (l
) == old_label
)
2753 JUMP_LABEL (l
) = new_label
;
2755 if ((GET_CODE (l
) == LABEL_REF
2756 || GET_CODE (l
) == INSN_LIST
)
2757 && XEXP (l
, 0) == old_label
)
2759 XEXP (l
, 0) = new_label
;
2760 if (update_label_nuses
)
2762 ++LABEL_NUSES (new_label
);
2763 --LABEL_NUSES (old_label
);
2771 /* When *BODY is equal to X or X is directly referenced by *BODY
2772 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2773 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2776 rtx_referenced_p_1 (rtx
*body
, void *x
)
2780 if (*body
== NULL_RTX
)
2781 return y
== NULL_RTX
;
2783 /* Return true if a label_ref *BODY refers to label Y. */
2784 if (GET_CODE (*body
) == LABEL_REF
&& GET_CODE (y
) == CODE_LABEL
)
2785 return XEXP (*body
, 0) == y
;
2787 /* If *BODY is a reference to pool constant traverse the constant. */
2788 if (GET_CODE (*body
) == SYMBOL_REF
2789 && CONSTANT_POOL_ADDRESS_P (*body
))
2790 return rtx_referenced_p (y
, get_pool_constant (*body
));
2792 /* By default, compare the RTL expressions. */
2793 return rtx_equal_p (*body
, y
);
2796 /* Return true if X is referenced in BODY. */
2799 rtx_referenced_p (rtx x
, rtx body
)
2801 return for_each_rtx (&body
, rtx_referenced_p_1
, x
);
2804 /* If INSN is a tablejump return true and store the label (before jump table) to
2805 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2808 tablejump_p (rtx insn
, rtx
*labelp
, rtx
*tablep
)
2812 if (GET_CODE (insn
) == JUMP_INSN
2813 && (label
= JUMP_LABEL (insn
)) != NULL_RTX
2814 && (table
= next_active_insn (label
)) != NULL_RTX
2815 && GET_CODE (table
) == JUMP_INSN
2816 && (GET_CODE (PATTERN (table
)) == ADDR_VEC
2817 || GET_CODE (PATTERN (table
)) == ADDR_DIFF_VEC
))
2828 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2829 constant that is not in the constant pool and not in the condition
2830 of an IF_THEN_ELSE. */
2833 computed_jump_p_1 (rtx x
)
2835 enum rtx_code code
= GET_CODE (x
);
2854 return ! (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
2855 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)));
2858 return (computed_jump_p_1 (XEXP (x
, 1))
2859 || computed_jump_p_1 (XEXP (x
, 2)));
2865 fmt
= GET_RTX_FORMAT (code
);
2866 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2869 && computed_jump_p_1 (XEXP (x
, i
)))
2872 else if (fmt
[i
] == 'E')
2873 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2874 if (computed_jump_p_1 (XVECEXP (x
, i
, j
)))
2881 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2883 Tablejumps and casesi insns are not considered indirect jumps;
2884 we can recognize them by a (use (label_ref)). */
2887 computed_jump_p (rtx insn
)
2890 if (GET_CODE (insn
) == JUMP_INSN
)
2892 rtx pat
= PATTERN (insn
);
2894 if (find_reg_note (insn
, REG_LABEL
, NULL_RTX
))
2896 else if (GET_CODE (pat
) == PARALLEL
)
2898 int len
= XVECLEN (pat
, 0);
2899 int has_use_labelref
= 0;
2901 for (i
= len
- 1; i
>= 0; i
--)
2902 if (GET_CODE (XVECEXP (pat
, 0, i
)) == USE
2903 && (GET_CODE (XEXP (XVECEXP (pat
, 0, i
), 0))
2905 has_use_labelref
= 1;
2907 if (! has_use_labelref
)
2908 for (i
= len
- 1; i
>= 0; i
--)
2909 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
2910 && SET_DEST (XVECEXP (pat
, 0, i
)) == pc_rtx
2911 && computed_jump_p_1 (SET_SRC (XVECEXP (pat
, 0, i
))))
2914 else if (GET_CODE (pat
) == SET
2915 && SET_DEST (pat
) == pc_rtx
2916 && computed_jump_p_1 (SET_SRC (pat
)))
2922 /* Traverse X via depth-first search, calling F for each
2923 sub-expression (including X itself). F is also passed the DATA.
2924 If F returns -1, do not traverse sub-expressions, but continue
2925 traversing the rest of the tree. If F ever returns any other
2926 nonzero value, stop the traversal, and return the value returned
2927 by F. Otherwise, return 0. This function does not traverse inside
2928 tree structure that contains RTX_EXPRs, or into sub-expressions
2929 whose format code is `0' since it is not known whether or not those
2930 codes are actually RTL.
2932 This routine is very general, and could (should?) be used to
2933 implement many of the other routines in this file. */
2936 for_each_rtx (rtx
*x
, rtx_function f
, void *data
)
2944 result
= (*f
) (x
, data
);
2946 /* Do not traverse sub-expressions. */
2948 else if (result
!= 0)
2949 /* Stop the traversal. */
2953 /* There are no sub-expressions. */
2956 length
= GET_RTX_LENGTH (GET_CODE (*x
));
2957 format
= GET_RTX_FORMAT (GET_CODE (*x
));
2959 for (i
= 0; i
< length
; ++i
)
2964 result
= for_each_rtx (&XEXP (*x
, i
), f
, data
);
2971 if (XVEC (*x
, i
) != 0)
2974 for (j
= 0; j
< XVECLEN (*x
, i
); ++j
)
2976 result
= for_each_rtx (&XVECEXP (*x
, i
, j
), f
, data
);
2984 /* Nothing to do. */
2993 /* Searches X for any reference to REGNO, returning the rtx of the
2994 reference found if any. Otherwise, returns NULL_RTX. */
2997 regno_use_in (unsigned int regno
, rtx x
)
3003 if (GET_CODE (x
) == REG
&& REGNO (x
) == regno
)
3006 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
3007 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
3011 if ((tem
= regno_use_in (regno
, XEXP (x
, i
))))
3014 else if (fmt
[i
] == 'E')
3015 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
3016 if ((tem
= regno_use_in (regno
, XVECEXP (x
, i
, j
))))
3023 /* Return a value indicating whether OP, an operand of a commutative
3024 operation, is preferred as the first or second operand. The higher
3025 the value, the stronger the preference for being the first operand.
3026 We use negative values to indicate a preference for the first operand
3027 and positive values for the second operand. */
3030 commutative_operand_precedence (rtx op
)
3032 /* Constants always come the second operand. Prefer "nice" constants. */
3033 if (GET_CODE (op
) == CONST_INT
)
3035 if (GET_CODE (op
) == CONST_DOUBLE
)
3037 op
= avoid_constant_pool_reference (op
);
3038 if (GET_CODE (op
) == CONST_INT
)
3040 if (GET_CODE (op
) == CONST_DOUBLE
)
3042 if (CONSTANT_P (op
))
3045 /* SUBREGs of objects should come second. */
3046 if (GET_CODE (op
) == SUBREG
3047 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op
))) == 'o')
3050 /* If only one operand is a `neg', `not',
3051 `mult', `plus', or `minus' expression, it will be the first
3053 if (GET_CODE (op
) == NEG
|| GET_CODE (op
) == NOT
3054 || GET_CODE (op
) == MULT
|| GET_CODE (op
) == PLUS
3055 || GET_CODE (op
) == MINUS
)
3058 /* Complex expressions should be the first, so decrease priority
3060 if (GET_RTX_CLASS (GET_CODE (op
)) == 'o')
3065 /* Return 1 iff it is necessary to swap operands of commutative operation
3066 in order to canonicalize expression. */
3069 swap_commutative_operands_p (rtx x
, rtx y
)
3071 return (commutative_operand_precedence (x
)
3072 < commutative_operand_precedence (y
));
3075 /* Return 1 if X is an autoincrement side effect and the register is
3076 not the stack pointer. */
3080 switch (GET_CODE (x
))
3088 /* There are no REG_INC notes for SP. */
3089 if (XEXP (x
, 0) != stack_pointer_rtx
)
3097 /* Return 1 if the sequence of instructions beginning with FROM and up
3098 to and including TO is safe to move. If NEW_TO is non-NULL, and
3099 the sequence is not already safe to move, but can be easily
3100 extended to a sequence which is safe, then NEW_TO will point to the
3101 end of the extended sequence.
3103 For now, this function only checks that the region contains whole
3104 exception regions, but it could be extended to check additional
3105 conditions as well. */
3108 insns_safe_to_move_p (rtx from
, rtx to
, rtx
*new_to
)
3110 int eh_region_count
= 0;
3114 /* By default, assume the end of the region will be what was
3121 if (GET_CODE (r
) == NOTE
)
3123 switch (NOTE_LINE_NUMBER (r
))
3125 case NOTE_INSN_EH_REGION_BEG
:
3129 case NOTE_INSN_EH_REGION_END
:
3130 if (eh_region_count
== 0)
3131 /* This sequence of instructions contains the end of
3132 an exception region, but not he beginning. Moving
3133 it will cause chaos. */
3144 /* If we've passed TO, and we see a non-note instruction, we
3145 can't extend the sequence to a movable sequence. */
3151 /* It's OK to move the sequence if there were matched sets of
3152 exception region notes. */
3153 return eh_region_count
== 0;
3158 /* It's OK to move the sequence if there were matched sets of
3159 exception region notes. */
3160 if (past_to_p
&& eh_region_count
== 0)
3166 /* Go to the next instruction. */
3173 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3175 loc_mentioned_in_p (rtx
*loc
, rtx in
)
3177 enum rtx_code code
= GET_CODE (in
);
3178 const char *fmt
= GET_RTX_FORMAT (code
);
3181 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3183 if (loc
== &in
->u
.fld
[i
].rtx
)
3187 if (loc_mentioned_in_p (loc
, XEXP (in
, i
)))
3190 else if (fmt
[i
] == 'E')
3191 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
3192 if (loc_mentioned_in_p (loc
, XVECEXP (in
, i
, j
)))
3198 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3199 and SUBREG_BYTE, return the bit offset where the subreg begins
3200 (counting from the least significant bit of the operand). */
3203 subreg_lsb_1 (enum machine_mode outer_mode
,
3204 enum machine_mode inner_mode
,
3205 unsigned int subreg_byte
)
3207 unsigned int bitpos
;
3211 /* A paradoxical subreg begins at bit position 0. */
3212 if (GET_MODE_BITSIZE (outer_mode
) > GET_MODE_BITSIZE (inner_mode
))
3215 if (WORDS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
3216 /* If the subreg crosses a word boundary ensure that
3217 it also begins and ends on a word boundary. */
3218 if ((subreg_byte
% UNITS_PER_WORD
3219 + GET_MODE_SIZE (outer_mode
)) > UNITS_PER_WORD
3220 && (subreg_byte
% UNITS_PER_WORD
3221 || GET_MODE_SIZE (outer_mode
) % UNITS_PER_WORD
))
3224 if (WORDS_BIG_ENDIAN
)
3225 word
= (GET_MODE_SIZE (inner_mode
)
3226 - (subreg_byte
+ GET_MODE_SIZE (outer_mode
))) / UNITS_PER_WORD
;
3228 word
= subreg_byte
/ UNITS_PER_WORD
;
3229 bitpos
= word
* BITS_PER_WORD
;
3231 if (BYTES_BIG_ENDIAN
)
3232 byte
= (GET_MODE_SIZE (inner_mode
)
3233 - (subreg_byte
+ GET_MODE_SIZE (outer_mode
))) % UNITS_PER_WORD
;
3235 byte
= subreg_byte
% UNITS_PER_WORD
;
3236 bitpos
+= byte
* BITS_PER_UNIT
;
3241 /* Given a subreg X, return the bit offset where the subreg begins
3242 (counting from the least significant bit of the reg). */
3247 return subreg_lsb_1 (GET_MODE (x
), GET_MODE (SUBREG_REG (x
)),
3251 /* This function returns the regno offset of a subreg expression.
3252 xregno - A regno of an inner hard subreg_reg (or what will become one).
3253 xmode - The mode of xregno.
3254 offset - The byte offset.
3255 ymode - The mode of a top level SUBREG (or what may become one).
3256 RETURN - The regno offset which would be used. */
3258 subreg_regno_offset (unsigned int xregno
, enum machine_mode xmode
,
3259 unsigned int offset
, enum machine_mode ymode
)
3261 int nregs_xmode
, nregs_ymode
;
3262 int mode_multiple
, nregs_multiple
;
3265 if (xregno
>= FIRST_PSEUDO_REGISTER
)
3268 nregs_xmode
= hard_regno_nregs
[xregno
][xmode
];
3269 nregs_ymode
= hard_regno_nregs
[xregno
][ymode
];
3271 /* If this is a big endian paradoxical subreg, which uses more actual
3272 hard registers than the original register, we must return a negative
3273 offset so that we find the proper highpart of the register. */
3275 && nregs_ymode
> nregs_xmode
3276 && (GET_MODE_SIZE (ymode
) > UNITS_PER_WORD
3277 ? WORDS_BIG_ENDIAN
: BYTES_BIG_ENDIAN
))
3278 return nregs_xmode
- nregs_ymode
;
3280 if (offset
== 0 || nregs_xmode
== nregs_ymode
)
3283 /* size of ymode must not be greater than the size of xmode. */
3284 mode_multiple
= GET_MODE_SIZE (xmode
) / GET_MODE_SIZE (ymode
);
3285 if (mode_multiple
== 0)
3288 y_offset
= offset
/ GET_MODE_SIZE (ymode
);
3289 nregs_multiple
= nregs_xmode
/ nregs_ymode
;
3290 return (y_offset
/ (mode_multiple
/ nregs_multiple
)) * nregs_ymode
;
3293 /* This function returns true when the offset is representable via
3294 subreg_offset in the given regno.
3295 xregno - A regno of an inner hard subreg_reg (or what will become one).
3296 xmode - The mode of xregno.
3297 offset - The byte offset.
3298 ymode - The mode of a top level SUBREG (or what may become one).
3299 RETURN - The regno offset which would be used. */
3301 subreg_offset_representable_p (unsigned int xregno
, enum machine_mode xmode
,
3302 unsigned int offset
, enum machine_mode ymode
)
3304 int nregs_xmode
, nregs_ymode
;
3305 int mode_multiple
, nregs_multiple
;
3308 if (xregno
>= FIRST_PSEUDO_REGISTER
)
3311 nregs_xmode
= hard_regno_nregs
[xregno
][xmode
];
3312 nregs_ymode
= hard_regno_nregs
[xregno
][ymode
];
3314 /* paradoxical subregs are always valid. */
3316 && nregs_ymode
> nregs_xmode
3317 && (GET_MODE_SIZE (ymode
) > UNITS_PER_WORD
3318 ? WORDS_BIG_ENDIAN
: BYTES_BIG_ENDIAN
))
3321 /* Lowpart subregs are always valid. */
3322 if (offset
== subreg_lowpart_offset (ymode
, xmode
))
3325 #ifdef ENABLE_CHECKING
3326 /* This should always pass, otherwise we don't know how to verify the
3327 constraint. These conditions may be relaxed but subreg_offset would
3328 need to be redesigned. */
3329 if (GET_MODE_SIZE (xmode
) % GET_MODE_SIZE (ymode
)
3330 || GET_MODE_SIZE (ymode
) % nregs_ymode
3331 || nregs_xmode
% nregs_ymode
)
3335 /* The XMODE value can be seen as a vector of NREGS_XMODE
3336 values. The subreg must represent a lowpart of given field.
3337 Compute what field it is. */
3338 offset
-= subreg_lowpart_offset (ymode
,
3339 mode_for_size (GET_MODE_BITSIZE (xmode
)
3343 /* size of ymode must not be greater than the size of xmode. */
3344 mode_multiple
= GET_MODE_SIZE (xmode
) / GET_MODE_SIZE (ymode
);
3345 if (mode_multiple
== 0)
3348 y_offset
= offset
/ GET_MODE_SIZE (ymode
);
3349 nregs_multiple
= nregs_xmode
/ nregs_ymode
;
3350 #ifdef ENABLE_CHECKING
3351 if (offset
% GET_MODE_SIZE (ymode
)
3352 || mode_multiple
% nregs_multiple
)
3355 return (!(y_offset
% (mode_multiple
/ nregs_multiple
)));
3358 /* Return the final regno that a subreg expression refers to. */
3360 subreg_regno (rtx x
)
3363 rtx subreg
= SUBREG_REG (x
);
3364 int regno
= REGNO (subreg
);
3366 ret
= regno
+ subreg_regno_offset (regno
,
3373 struct parms_set_data
3379 /* Helper function for noticing stores to parameter registers. */
3381 parms_set (rtx x
, rtx pat ATTRIBUTE_UNUSED
, void *data
)
3383 struct parms_set_data
*d
= data
;
3384 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
3385 && TEST_HARD_REG_BIT (d
->regs
, REGNO (x
)))
3387 CLEAR_HARD_REG_BIT (d
->regs
, REGNO (x
));
3392 /* Look backward for first parameter to be loaded.
3393 Do not skip BOUNDARY. */
3395 find_first_parameter_load (rtx call_insn
, rtx boundary
)
3397 struct parms_set_data parm
;
3400 /* Since different machines initialize their parameter registers
3401 in different orders, assume nothing. Collect the set of all
3402 parameter registers. */
3403 CLEAR_HARD_REG_SET (parm
.regs
);
3405 for (p
= CALL_INSN_FUNCTION_USAGE (call_insn
); p
; p
= XEXP (p
, 1))
3406 if (GET_CODE (XEXP (p
, 0)) == USE
3407 && GET_CODE (XEXP (XEXP (p
, 0), 0)) == REG
)
3409 if (REGNO (XEXP (XEXP (p
, 0), 0)) >= FIRST_PSEUDO_REGISTER
)
3412 /* We only care about registers which can hold function
3414 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p
, 0), 0))))
3417 SET_HARD_REG_BIT (parm
.regs
, REGNO (XEXP (XEXP (p
, 0), 0)));
3422 /* Search backward for the first set of a register in this set. */
3423 while (parm
.nregs
&& before
!= boundary
)
3425 before
= PREV_INSN (before
);
3427 /* It is possible that some loads got CSEed from one call to
3428 another. Stop in that case. */
3429 if (GET_CODE (before
) == CALL_INSN
)
3432 /* Our caller needs either ensure that we will find all sets
3433 (in case code has not been optimized yet), or take care
3434 for possible labels in a way by setting boundary to preceding
3436 if (GET_CODE (before
) == CODE_LABEL
)
3438 if (before
!= boundary
)
3443 if (INSN_P (before
))
3444 note_stores (PATTERN (before
), parms_set
, &parm
);
3449 /* Return true if we should avoid inserting code between INSN and preceding
3450 call instruction. */
3453 keep_with_call_p (rtx insn
)
3457 if (INSN_P (insn
) && (set
= single_set (insn
)) != NULL
)
3459 if (GET_CODE (SET_DEST (set
)) == REG
3460 && REGNO (SET_DEST (set
)) < FIRST_PSEUDO_REGISTER
3461 && fixed_regs
[REGNO (SET_DEST (set
))]
3462 && general_operand (SET_SRC (set
), VOIDmode
))
3464 if (GET_CODE (SET_SRC (set
)) == REG
3465 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set
)))
3466 && GET_CODE (SET_DEST (set
)) == REG
3467 && REGNO (SET_DEST (set
)) >= FIRST_PSEUDO_REGISTER
)
3469 /* There may be a stack pop just after the call and before the store
3470 of the return register. Search for the actual store when deciding
3471 if we can break or not. */
3472 if (SET_DEST (set
) == stack_pointer_rtx
)
3474 rtx i2
= next_nonnote_insn (insn
);
3475 if (i2
&& keep_with_call_p (i2
))
3482 /* Return true when store to register X can be hoisted to the place
3483 with LIVE registers (can be NULL). Value VAL contains destination
3484 whose value will be used. */
3487 hoist_test_store (rtx x
, rtx val
, regset live
)
3489 if (GET_CODE (x
) == SCRATCH
)
3492 if (rtx_equal_p (x
, val
))
3495 /* Allow subreg of X in case it is not writing just part of multireg pseudo.
3496 Then we would need to update all users to care hoisting the store too.
3497 Caller may represent that by specifying whole subreg as val. */
3499 if (GET_CODE (x
) == SUBREG
&& rtx_equal_p (SUBREG_REG (x
), val
))
3501 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) > UNITS_PER_WORD
3502 && GET_MODE_BITSIZE (GET_MODE (x
)) <
3503 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))))
3507 if (GET_CODE (x
) == SUBREG
)
3510 /* Anything except register store is not hoistable. This includes the
3511 partial stores to registers. */
3516 /* Pseudo registers can be always replaced by another pseudo to avoid
3517 the side effect, for hard register we must ensure that they are dead.
3518 Eventually we may want to add code to try turn pseudos to hards, but it
3519 is unlikely useful. */
3521 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
)
3523 int regno
= REGNO (x
);
3524 int n
= hard_regno_nregs
[regno
][GET_MODE (x
)];
3528 if (REGNO_REG_SET_P (live
, regno
))
3531 if (REGNO_REG_SET_P (live
, regno
+ n
))
3538 /* Return true if INSN can be hoisted to place with LIVE hard registers
3539 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3540 and used by the hoisting pass. */
3543 can_hoist_insn_p (rtx insn
, rtx val
, regset live
)
3545 rtx pat
= PATTERN (insn
);
3548 /* It probably does not worth the complexity to handle multiple
3550 if (!single_set (insn
))
3552 /* We can move CALL_INSN, but we need to check that all caller clobbered
3554 if (GET_CODE (insn
) == CALL_INSN
)
3556 /* In future we will handle hoisting of libcall sequences, but
3558 if (find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
3560 switch (GET_CODE (pat
))
3563 if (!hoist_test_store (SET_DEST (pat
), val
, live
))
3567 /* USES do have sick semantics, so do not move them. */
3571 if (!hoist_test_store (XEXP (pat
, 0), val
, live
))
3575 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
3577 rtx x
= XVECEXP (pat
, 0, i
);
3578 switch (GET_CODE (x
))
3581 if (!hoist_test_store (SET_DEST (x
), val
, live
))
3585 /* We need to fix callers to really ensure availability
3586 of all values insn uses, but for now it is safe to prohibit
3587 hoisting of any insn having such a hidden uses. */
3591 if (!hoist_test_store (SET_DEST (x
), val
, live
))
3605 /* Update store after hoisting - replace all stores to pseudo registers
3606 by new ones to avoid clobbering of values except for store to VAL that will
3607 be updated to NEW. */
3610 hoist_update_store (rtx insn
, rtx
*xp
, rtx val
, rtx
new)
3614 if (GET_CODE (x
) == SCRATCH
)
3617 if (GET_CODE (x
) == SUBREG
&& SUBREG_REG (x
) == val
)
3618 validate_change (insn
, xp
,
3619 simplify_gen_subreg (GET_MODE (x
), new, GET_MODE (new),
3620 SUBREG_BYTE (x
)), 1);
3621 if (rtx_equal_p (x
, val
))
3623 validate_change (insn
, xp
, new, 1);
3626 if (GET_CODE (x
) == SUBREG
)
3628 xp
= &SUBREG_REG (x
);
3635 /* We've verified that hard registers are dead, so we may keep the side
3636 effect. Otherwise replace it by new pseudo. */
3637 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
)
3638 validate_change (insn
, xp
, gen_reg_rtx (GET_MODE (x
)), 1);
3640 = alloc_EXPR_LIST (REG_UNUSED
, *xp
, REG_NOTES (insn
));
3643 /* Create a copy of INSN after AFTER replacing store of VAL to NEW
3644 and each other side effect to pseudo register by new pseudo register. */
3647 hoist_insn_after (rtx insn
, rtx after
, rtx val
, rtx
new)
3653 insn
= emit_copy_of_insn_after (insn
, after
);
3654 pat
= PATTERN (insn
);
3656 /* Remove REG_UNUSED notes as we will re-emit them. */
3657 while ((note
= find_reg_note (insn
, REG_UNUSED
, NULL_RTX
)))
3658 remove_note (insn
, note
);
3660 /* To get this working callers must ensure to move everything referenced
3661 by REG_EQUAL/REG_EQUIV notes too. Lets remove them, it is probably
3663 while ((note
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)))
3664 remove_note (insn
, note
);
3665 while ((note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
)))
3666 remove_note (insn
, note
);
3668 /* Remove REG_DEAD notes as they might not be valid anymore in case
3669 we create redundancy. */
3670 while ((note
= find_reg_note (insn
, REG_DEAD
, NULL_RTX
)))
3671 remove_note (insn
, note
);
3672 switch (GET_CODE (pat
))
3675 hoist_update_store (insn
, &SET_DEST (pat
), val
, new);
3680 hoist_update_store (insn
, &XEXP (pat
, 0), val
, new);
3683 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
3685 rtx x
= XVECEXP (pat
, 0, i
);
3686 switch (GET_CODE (x
))
3689 hoist_update_store (insn
, &SET_DEST (x
), val
, new);
3694 hoist_update_store (insn
, &SET_DEST (x
), val
, new);
3704 if (!apply_change_group ())
3711 hoist_insn_to_edge (rtx insn
, edge e
, rtx val
, rtx
new)
3715 /* We cannot insert instructions on an abnormal critical edge.
3716 It will be easier to find the culprit if we die now. */
3717 if ((e
->flags
& EDGE_ABNORMAL
) && EDGE_CRITICAL_P (e
))
3720 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3721 stuff. We also emit CALL_INSNS and firends. */
3722 if (e
->insns
== NULL_RTX
)
3725 emit_note (NOTE_INSN_DELETED
);
3728 push_to_sequence (e
->insns
);
3730 new_insn
= hoist_insn_after (insn
, get_last_insn (), val
, new);
3732 e
->insns
= get_insns ();
3737 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3738 to non-complex jumps. That is, direct unconditional, conditional,
3739 and tablejumps, but not computed jumps or returns. It also does
3740 not apply to the fallthru case of a conditional jump. */
3743 label_is_jump_target_p (rtx label
, rtx jump_insn
)
3745 rtx tmp
= JUMP_LABEL (jump_insn
);
3750 if (tablejump_p (jump_insn
, NULL
, &tmp
))
3752 rtvec vec
= XVEC (PATTERN (tmp
),
3753 GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
);
3754 int i
, veclen
= GET_NUM_ELEM (vec
);
3756 for (i
= 0; i
< veclen
; ++i
)
3757 if (XEXP (RTVEC_ELT (vec
, i
), 0) == label
)