/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
34 #include "basic-block.h"
37 /* Forward declarations */
38 static int global_reg_mentioned_p_1 (rtx
*, void *);
39 static void set_of_1 (rtx
, rtx
, void *);
40 static void insn_dependent_p_1 (rtx
, rtx
, void *);
41 static int rtx_referenced_p_1 (rtx
*, void *);
42 static int computed_jump_p_1 (rtx
);
43 static void parms_set (rtx
, rtx
, void *);
44 static bool hoist_test_store (rtx
, rtx
, regset
);
45 static void hoist_update_store (rtx
, rtx
*, rtx
, rtx
);
47 /* Bit flags that specify the machine subtype we are compiling for.
48 Bits are tested using macros TARGET_... defined in the tm.h file
49 and set by `-m...' switches. Must be defined in rtlanal.c. */
53 /* Return 1 if the value of X is unstable
54 (would be different at a different point in the program).
55 The frame pointer, arg pointer, etc. are considered stable
56 (within one function) and so is anything marked `unchanging'. */
59 rtx_unstable_p (rtx x
)
61 RTX_CODE code
= GET_CODE (x
);
68 return ! RTX_UNCHANGING_P (x
) || rtx_unstable_p (XEXP (x
, 0));
83 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
84 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
85 /* The arg pointer varies if it is not a fixed register. */
86 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
])
87 || RTX_UNCHANGING_P (x
))
89 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
90 /* ??? When call-clobbered, the value is stable modulo the restore
91 that must happen after a call. This currently screws up local-alloc
92 into believing that the restore is not needed. */
93 if (x
== pic_offset_table_rtx
)
99 if (MEM_VOLATILE_P (x
))
108 fmt
= GET_RTX_FORMAT (code
);
109 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
112 if (rtx_unstable_p (XEXP (x
, i
)))
115 else if (fmt
[i
] == 'E')
118 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
119 if (rtx_unstable_p (XVECEXP (x
, i
, j
)))
126 /* Return 1 if X has a value that can vary even between two
127 executions of the program. 0 means X can be compared reliably
128 against certain constants or near-constants.
129 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
130 zero, we are slightly more conservative.
131 The frame pointer and the arg pointer are considered constant. */
134 rtx_varies_p (rtx x
, int for_alias
)
136 RTX_CODE code
= GET_CODE (x
);
143 return ! RTX_UNCHANGING_P (x
) || rtx_varies_p (XEXP (x
, 0), for_alias
);
157 /* This will resolve to some offset from the frame pointer. */
161 /* Note that we have to test for the actual rtx used for the frame
162 and arg pointers and not just the register number in case we have
163 eliminated the frame and/or arg pointer and are using it
165 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
166 /* The arg pointer varies if it is not a fixed register. */
167 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
169 if (x
== pic_offset_table_rtx
170 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
171 /* ??? When call-clobbered, the value is stable modulo the restore
172 that must happen after a call. This currently screws up
173 local-alloc into believing that the restore is not needed, so we
174 must return 0 only if we are called from alias analysis. */
182 /* The operand 0 of a LO_SUM is considered constant
183 (in fact it is related specifically to operand 1)
184 during alias analysis. */
185 return (! for_alias
&& rtx_varies_p (XEXP (x
, 0), for_alias
))
186 || rtx_varies_p (XEXP (x
, 1), for_alias
);
189 if (MEM_VOLATILE_P (x
))
198 fmt
= GET_RTX_FORMAT (code
);
199 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
202 if (rtx_varies_p (XEXP (x
, i
), for_alias
))
205 else if (fmt
[i
] == 'E')
208 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
209 if (rtx_varies_p (XVECEXP (x
, i
, j
), for_alias
))
216 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
219 rtx_addr_can_trap_p (rtx x
)
221 enum rtx_code code
= GET_CODE (x
);
226 return SYMBOL_REF_WEAK (x
);
232 /* This will resolve to some offset from the frame pointer. */
236 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
237 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
238 || x
== stack_pointer_rtx
239 /* The arg pointer varies if it is not a fixed register. */
240 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
242 /* All of the virtual frame registers are stack references. */
243 if (REGNO (x
) >= FIRST_VIRTUAL_REGISTER
244 && REGNO (x
) <= LAST_VIRTUAL_REGISTER
)
249 return rtx_addr_can_trap_p (XEXP (x
, 0));
252 /* An address is assumed not to trap if it is an address that can't
253 trap plus a constant integer or it is the pic register plus a
255 return ! ((! rtx_addr_can_trap_p (XEXP (x
, 0))
256 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
257 || (XEXP (x
, 0) == pic_offset_table_rtx
258 && CONSTANT_P (XEXP (x
, 1))));
262 return rtx_addr_can_trap_p (XEXP (x
, 1));
269 return rtx_addr_can_trap_p (XEXP (x
, 0));
275 /* If it isn't one of the case above, it can cause a trap. */
279 /* Return true if X is an address that is known to not be zero. */
282 nonzero_address_p (rtx x
)
284 enum rtx_code code
= GET_CODE (x
);
289 return !SYMBOL_REF_WEAK (x
);
295 /* This will resolve to some offset from the frame pointer. */
299 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
300 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
301 || x
== stack_pointer_rtx
302 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
304 /* All of the virtual frame registers are stack references. */
305 if (REGNO (x
) >= FIRST_VIRTUAL_REGISTER
306 && REGNO (x
) <= LAST_VIRTUAL_REGISTER
)
311 return nonzero_address_p (XEXP (x
, 0));
314 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
316 /* Pointers aren't allowed to wrap. If we've got a register
317 that is known to be a pointer, and a positive offset, then
318 the composite can't be zero. */
319 if (INTVAL (XEXP (x
, 1)) > 0
320 && REG_P (XEXP (x
, 0))
321 && REG_POINTER (XEXP (x
, 0)))
324 return nonzero_address_p (XEXP (x
, 0));
326 /* Handle PIC references. */
327 else if (XEXP (x
, 0) == pic_offset_table_rtx
328 && CONSTANT_P (XEXP (x
, 1)))
333 /* Similar to the above; allow positive offsets. Further, since
334 auto-inc is only allowed in memories, the register must be a
336 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
337 && INTVAL (XEXP (x
, 1)) > 0)
339 return nonzero_address_p (XEXP (x
, 0));
342 /* Similarly. Further, the offset is always positive. */
349 return nonzero_address_p (XEXP (x
, 0));
352 return nonzero_address_p (XEXP (x
, 1));
358 /* If it isn't one of the case above, might be zero. */
362 /* Return 1 if X refers to a memory location whose address
363 cannot be compared reliably with constant addresses,
364 or if X refers to a BLKmode memory object.
365 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
366 zero, we are slightly more conservative. */
369 rtx_addr_varies_p (rtx x
, int for_alias
)
380 return GET_MODE (x
) == BLKmode
|| rtx_varies_p (XEXP (x
, 0), for_alias
);
382 fmt
= GET_RTX_FORMAT (code
);
383 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
386 if (rtx_addr_varies_p (XEXP (x
, i
), for_alias
))
389 else if (fmt
[i
] == 'E')
392 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
393 if (rtx_addr_varies_p (XVECEXP (x
, i
, j
), for_alias
))
399 /* Return the value of the integer term in X, if one is apparent;
401 Only obvious integer terms are detected.
402 This is used in cse.c with the `related_value' field. */
405 get_integer_term (rtx x
)
407 if (GET_CODE (x
) == CONST
)
410 if (GET_CODE (x
) == MINUS
411 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
412 return - INTVAL (XEXP (x
, 1));
413 if (GET_CODE (x
) == PLUS
414 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
415 return INTVAL (XEXP (x
, 1));
419 /* If X is a constant, return the value sans apparent integer term;
421 Only obvious integer terms are detected. */
424 get_related_value (rtx x
)
426 if (GET_CODE (x
) != CONST
)
429 if (GET_CODE (x
) == PLUS
430 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
432 else if (GET_CODE (x
) == MINUS
433 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
438 /* Given a tablejump insn INSN, return the RTL expression for the offset
439 into the jump table. If the offset cannot be determined, then return
442 If EARLIEST is nonzero, it is a pointer to a place where the earliest
443 insn used in locating the offset was found. */
446 get_jump_table_offset (rtx insn
, rtx
*earliest
)
458 if (!tablejump_p (insn
, &label
, &table
) || !(set
= single_set (insn
)))
463 /* Some targets (eg, ARM) emit a tablejump that also
464 contains the out-of-range target. */
465 if (GET_CODE (x
) == IF_THEN_ELSE
466 && GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
469 /* Search backwards and locate the expression stored in X. */
470 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
471 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
474 /* If X is an expression using a relative address then strip
475 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
476 or the jump table label. */
477 if (GET_CODE (PATTERN (table
)) == ADDR_DIFF_VEC
478 && (GET_CODE (x
) == PLUS
|| GET_CODE (x
) == MINUS
))
480 for (i
= 0; i
< 2; i
++)
485 if (y
== pc_rtx
|| y
== pic_offset_table_rtx
)
488 for (old_y
= NULL_RTX
; GET_CODE (y
) == REG
&& y
!= old_y
;
489 old_y
= y
, y
= find_last_value (y
, &old_insn
, NULL_RTX
, 0))
492 if ((GET_CODE (y
) == LABEL_REF
&& XEXP (y
, 0) == label
))
501 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
502 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
506 /* Strip off any sign or zero extension. */
507 if (GET_CODE (x
) == SIGN_EXTEND
|| GET_CODE (x
) == ZERO_EXTEND
)
511 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
512 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
516 /* If X isn't a MEM then this isn't a tablejump we understand. */
517 if (GET_CODE (x
) != MEM
)
520 /* Strip off the MEM. */
523 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
524 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
527 /* If X isn't a PLUS than this isn't a tablejump we understand. */
528 if (GET_CODE (x
) != PLUS
)
531 /* At this point we should have an expression representing the jump table
532 plus an offset. Examine each operand in order to determine which one
533 represents the jump table. Knowing that tells us that the other operand
534 must represent the offset. */
535 for (i
= 0; i
< 2; i
++)
540 for (old_y
= NULL_RTX
; GET_CODE (y
) == REG
&& y
!= old_y
;
541 old_y
= y
, y
= find_last_value (y
, &old_insn
, NULL_RTX
, 0))
544 if ((GET_CODE (y
) == CONST
|| GET_CODE (y
) == LABEL_REF
)
545 && reg_mentioned_p (label
, y
))
554 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
555 if (GET_CODE (x
) == PLUS
|| GET_CODE (x
) == MINUS
)
556 for (i
= 0; i
< 2; i
++)
557 if (XEXP (x
, i
) == pic_offset_table_rtx
)
566 /* Return the RTL expression representing the offset. */
570 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
571 a global register. */
574 global_reg_mentioned_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
582 switch (GET_CODE (x
))
585 if (GET_CODE (SUBREG_REG (x
)) == REG
)
587 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
588 && global_regs
[subreg_regno (x
)])
596 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
610 /* A non-constant call might use a global register. */
620 /* Returns nonzero if X mentions a global register. */
623 global_reg_mentioned_p (rtx x
)
627 if (GET_CODE (x
) == CALL_INSN
)
629 if (! CONST_OR_PURE_CALL_P (x
))
631 x
= CALL_INSN_FUNCTION_USAGE (x
);
639 return for_each_rtx (&x
, global_reg_mentioned_p_1
, NULL
);
642 /* Return the number of places FIND appears within X. If COUNT_DEST is
643 zero, we do not count occurrences inside the destination of a SET. */
646 count_occurrences (rtx x
, rtx find
, int count_dest
)
650 const char *format_ptr
;
671 if (GET_CODE (find
) == MEM
&& rtx_equal_p (x
, find
))
676 if (SET_DEST (x
) == find
&& ! count_dest
)
677 return count_occurrences (SET_SRC (x
), find
, count_dest
);
684 format_ptr
= GET_RTX_FORMAT (code
);
687 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
689 switch (*format_ptr
++)
692 count
+= count_occurrences (XEXP (x
, i
), find
, count_dest
);
696 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
697 count
+= count_occurrences (XVECEXP (x
, i
, j
), find
, count_dest
);
704 /* Nonzero if register REG appears somewhere within IN.
705 Also works if REG is not a register; in this case it checks
706 for a subexpression of IN that is Lisp "equal" to REG. */
709 reg_mentioned_p (rtx reg
, rtx in
)
721 if (GET_CODE (in
) == LABEL_REF
)
722 return reg
== XEXP (in
, 0);
724 code
= GET_CODE (in
);
728 /* Compare registers by number. */
730 return GET_CODE (reg
) == REG
&& REGNO (in
) == REGNO (reg
);
732 /* These codes have no constituent expressions
742 /* These are kept unique for a given value. */
749 if (GET_CODE (reg
) == code
&& rtx_equal_p (reg
, in
))
752 fmt
= GET_RTX_FORMAT (code
);
754 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
759 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
760 if (reg_mentioned_p (reg
, XVECEXP (in
, i
, j
)))
763 else if (fmt
[i
] == 'e'
764 && reg_mentioned_p (reg
, XEXP (in
, i
)))
770 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
771 no CODE_LABEL insn. */
774 no_labels_between_p (rtx beg
, rtx end
)
779 for (p
= NEXT_INSN (beg
); p
!= end
; p
= NEXT_INSN (p
))
780 if (GET_CODE (p
) == CODE_LABEL
)
785 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
786 no JUMP_INSN insn. */
789 no_jumps_between_p (rtx beg
, rtx end
)
792 for (p
= NEXT_INSN (beg
); p
!= end
; p
= NEXT_INSN (p
))
793 if (GET_CODE (p
) == JUMP_INSN
)
798 /* Nonzero if register REG is used in an insn between
799 FROM_INSN and TO_INSN (exclusive of those two). */
802 reg_used_between_p (rtx reg
, rtx from_insn
, rtx to_insn
)
806 if (from_insn
== to_insn
)
809 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
811 && (reg_overlap_mentioned_p (reg
, PATTERN (insn
))
812 || (GET_CODE (insn
) == CALL_INSN
813 && (find_reg_fusage (insn
, USE
, reg
)
814 || find_reg_fusage (insn
, CLOBBER
, reg
)))))
819 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
820 is entirely replaced by a new value and the only use is as a SET_DEST,
821 we do not consider it a reference. */
824 reg_referenced_p (rtx x
, rtx body
)
828 switch (GET_CODE (body
))
831 if (reg_overlap_mentioned_p (x
, SET_SRC (body
)))
834 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
835 of a REG that occupies all of the REG, the insn references X if
836 it is mentioned in the destination. */
837 if (GET_CODE (SET_DEST (body
)) != CC0
838 && GET_CODE (SET_DEST (body
)) != PC
839 && GET_CODE (SET_DEST (body
)) != REG
840 && ! (GET_CODE (SET_DEST (body
)) == SUBREG
841 && GET_CODE (SUBREG_REG (SET_DEST (body
))) == REG
842 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body
))))
843 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
844 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body
)))
845 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)))
846 && reg_overlap_mentioned_p (x
, SET_DEST (body
)))
851 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
852 if (reg_overlap_mentioned_p (x
, ASM_OPERANDS_INPUT (body
, i
)))
859 return reg_overlap_mentioned_p (x
, body
);
862 return reg_overlap_mentioned_p (x
, TRAP_CONDITION (body
));
865 return reg_overlap_mentioned_p (x
, XEXP (body
, 0));
868 case UNSPEC_VOLATILE
:
869 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
870 if (reg_overlap_mentioned_p (x
, XVECEXP (body
, 0, i
)))
875 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
876 if (reg_referenced_p (x
, XVECEXP (body
, 0, i
)))
881 if (GET_CODE (XEXP (body
, 0)) == MEM
)
882 if (reg_overlap_mentioned_p (x
, XEXP (XEXP (body
, 0), 0)))
887 if (reg_overlap_mentioned_p (x
, COND_EXEC_TEST (body
)))
889 return reg_referenced_p (x
, COND_EXEC_CODE (body
));
896 /* Nonzero if register REG is referenced in an insn between
897 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
901 reg_referenced_between_p (rtx reg
, rtx from_insn
, rtx to_insn
)
905 if (from_insn
== to_insn
)
908 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
910 && (reg_referenced_p (reg
, PATTERN (insn
))
911 || (GET_CODE (insn
) == CALL_INSN
912 && find_reg_fusage (insn
, USE
, reg
))))
917 /* Nonzero if register REG is set or clobbered in an insn between
918 FROM_INSN and TO_INSN (exclusive of those two). */
921 reg_set_between_p (rtx reg
, rtx from_insn
, rtx to_insn
)
925 if (from_insn
== to_insn
)
928 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
929 if (INSN_P (insn
) && reg_set_p (reg
, insn
))
934 /* Internals of reg_set_between_p. */
936 reg_set_p (rtx reg
, rtx insn
)
938 /* We can be passed an insn or part of one. If we are passed an insn,
939 check if a side-effect of the insn clobbers REG. */
941 && (FIND_REG_INC_NOTE (insn
, reg
)
942 || (GET_CODE (insn
) == CALL_INSN
943 /* We'd like to test call_used_regs here, but rtlanal.c can't
944 reference that variable due to its use in genattrtab. So
945 we'll just be more conservative.
947 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
948 information holds all clobbered registers. */
949 && ((GET_CODE (reg
) == REG
950 && REGNO (reg
) < FIRST_PSEUDO_REGISTER
)
951 || GET_CODE (reg
) == MEM
952 || find_reg_fusage (insn
, CLOBBER
, reg
)))))
955 return set_of (reg
, insn
) != NULL_RTX
;
958 /* Similar to reg_set_between_p, but check all registers in X. Return 0
959 only if none of them are modified between START and END. Do not
960 consider non-registers one way or the other. */
963 regs_set_between_p (rtx x
, rtx start
, rtx end
)
965 enum rtx_code code
= GET_CODE (x
);
982 return reg_set_between_p (x
, start
, end
);
988 fmt
= GET_RTX_FORMAT (code
);
989 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
991 if (fmt
[i
] == 'e' && regs_set_between_p (XEXP (x
, i
), start
, end
))
994 else if (fmt
[i
] == 'E')
995 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
996 if (regs_set_between_p (XVECEXP (x
, i
, j
), start
, end
))
1003 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1004 only if none of them are modified between START and END. Return 1 if
1005 X contains a MEM; this routine does usememory aliasing. */
1008 modified_between_p (rtx x
, rtx start
, rtx end
)
1010 enum rtx_code code
= GET_CODE (x
);
1033 if (RTX_UNCHANGING_P (x
))
1035 if (modified_between_p (XEXP (x
, 0), start
, end
))
1037 for (insn
= NEXT_INSN (start
); insn
!= end
; insn
= NEXT_INSN (insn
))
1038 if (memory_modified_in_insn_p (x
, insn
))
1044 return reg_set_between_p (x
, start
, end
);
1050 fmt
= GET_RTX_FORMAT (code
);
1051 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1053 if (fmt
[i
] == 'e' && modified_between_p (XEXP (x
, i
), start
, end
))
1056 else if (fmt
[i
] == 'E')
1057 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1058 if (modified_between_p (XVECEXP (x
, i
, j
), start
, end
))
1065 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1066 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1067 does use memory aliasing. */
1070 modified_in_p (rtx x
, rtx insn
)
1072 enum rtx_code code
= GET_CODE (x
);
1091 if (RTX_UNCHANGING_P (x
))
1093 if (modified_in_p (XEXP (x
, 0), insn
))
1095 if (memory_modified_in_insn_p (x
, insn
))
1101 return reg_set_p (x
, insn
);
1107 fmt
= GET_RTX_FORMAT (code
);
1108 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1110 if (fmt
[i
] == 'e' && modified_in_p (XEXP (x
, i
), insn
))
1113 else if (fmt
[i
] == 'E')
1114 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1115 if (modified_in_p (XVECEXP (x
, i
, j
), insn
))
1122 /* Return true if anything in insn X is (anti,output,true) dependent on
1123 anything in insn Y. */
1126 insn_dependent_p (rtx x
, rtx y
)
1130 if (! INSN_P (x
) || ! INSN_P (y
))
1134 note_stores (PATTERN (x
), insn_dependent_p_1
, &tmp
);
1135 if (tmp
== NULL_RTX
)
1139 note_stores (PATTERN (y
), insn_dependent_p_1
, &tmp
);
1140 if (tmp
== NULL_RTX
)
1146 /* A helper routine for insn_dependent_p called through note_stores. */
1149 insn_dependent_p_1 (rtx x
, rtx pat ATTRIBUTE_UNUSED
, void *data
)
1151 rtx
* pinsn
= (rtx
*) data
;
1153 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
1157 /* Helper function for set_of. */
1165 set_of_1 (rtx x
, rtx pat
, void *data1
)
1167 struct set_of_data
*data
= (struct set_of_data
*) (data1
);
1168 if (rtx_equal_p (x
, data
->pat
)
1169 || (GET_CODE (x
) != MEM
&& reg_overlap_mentioned_p (data
->pat
, x
)))
1173 /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
1174 (either directly or via STRICT_LOW_PART and similar modifiers). */
1176 set_of (rtx pat
, rtx insn
)
1178 struct set_of_data data
;
1179 data
.found
= NULL_RTX
;
1181 note_stores (INSN_P (insn
) ? PATTERN (insn
) : insn
, set_of_1
, &data
);
1185 /* Given an INSN, return a SET expression if this insn has only a single SET.
1186 It may also have CLOBBERs, USEs, or SET whose output
1187 will not be used, which we ignore. */
1190 single_set_2 (rtx insn
, rtx pat
)
1193 int set_verified
= 1;
1196 if (GET_CODE (pat
) == PARALLEL
)
1198 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1200 rtx sub
= XVECEXP (pat
, 0, i
);
1201 switch (GET_CODE (sub
))
1208 /* We can consider insns having multiple sets, where all
1209 but one are dead as single set insns. In common case
1210 only single set is present in the pattern so we want
1211 to avoid checking for REG_UNUSED notes unless necessary.
1213 When we reach set first time, we just expect this is
1214 the single set we are looking for and only when more
1215 sets are found in the insn, we check them. */
1218 if (find_reg_note (insn
, REG_UNUSED
, SET_DEST (set
))
1219 && !side_effects_p (set
))
1225 set
= sub
, set_verified
= 0;
1226 else if (!find_reg_note (insn
, REG_UNUSED
, SET_DEST (sub
))
1227 || side_effects_p (sub
))
1239 /* Given an INSN, return nonzero if it has more than one SET, else return
1243 multiple_sets (rtx insn
)
1248 /* INSN must be an insn. */
1249 if (! INSN_P (insn
))
1252 /* Only a PARALLEL can have multiple SETs. */
1253 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
1255 for (i
= 0, found
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1256 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == SET
)
1258 /* If we have already found a SET, then return now. */
1266 /* Either zero or one SET. */
1270 /* Return nonzero if the destination of SET equals the source
1271 and there are no side effects. */
1274 set_noop_p (rtx set
)
1276 rtx src
= SET_SRC (set
);
1277 rtx dst
= SET_DEST (set
);
1279 if (dst
== pc_rtx
&& src
== pc_rtx
)
1282 if (GET_CODE (dst
) == MEM
&& GET_CODE (src
) == MEM
)
1283 return rtx_equal_p (dst
, src
) && !side_effects_p (dst
);
1285 if (GET_CODE (dst
) == SIGN_EXTRACT
1286 || GET_CODE (dst
) == ZERO_EXTRACT
)
1287 return rtx_equal_p (XEXP (dst
, 0), src
)
1288 && ! BYTES_BIG_ENDIAN
&& XEXP (dst
, 2) == const0_rtx
1289 && !side_effects_p (src
);
1291 if (GET_CODE (dst
) == STRICT_LOW_PART
)
1292 dst
= XEXP (dst
, 0);
1294 if (GET_CODE (src
) == SUBREG
&& GET_CODE (dst
) == SUBREG
)
1296 if (SUBREG_BYTE (src
) != SUBREG_BYTE (dst
))
1298 src
= SUBREG_REG (src
);
1299 dst
= SUBREG_REG (dst
);
1302 return (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
1303 && REGNO (src
) == REGNO (dst
));
1306 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1310 noop_move_p (rtx insn
)
1312 rtx pat
= PATTERN (insn
);
1314 if (INSN_CODE (insn
) == NOOP_MOVE_INSN_CODE
)
1317 /* Insns carrying these notes are useful later on. */
1318 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
1321 /* For now treat an insn with a REG_RETVAL note as a
1322 a special insn which should not be considered a no-op. */
1323 if (find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
1326 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
1329 if (GET_CODE (pat
) == PARALLEL
)
1332 /* If nothing but SETs of registers to themselves,
1333 this insn can also be deleted. */
1334 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1336 rtx tem
= XVECEXP (pat
, 0, i
);
1338 if (GET_CODE (tem
) == USE
1339 || GET_CODE (tem
) == CLOBBER
)
1342 if (GET_CODE (tem
) != SET
|| ! set_noop_p (tem
))
1352 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1353 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1354 If the object was modified, if we hit a partial assignment to X, or hit a
1355 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1356 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1360 find_last_value (rtx x
, rtx
*pinsn
, rtx valid_to
, int allow_hwreg
)
1364 for (p
= PREV_INSN (*pinsn
); p
&& GET_CODE (p
) != CODE_LABEL
;
1368 rtx set
= single_set (p
);
1369 rtx note
= find_reg_note (p
, REG_EQUAL
, NULL_RTX
);
1371 if (set
&& rtx_equal_p (x
, SET_DEST (set
)))
1373 rtx src
= SET_SRC (set
);
1375 if (note
&& GET_CODE (XEXP (note
, 0)) != EXPR_LIST
)
1376 src
= XEXP (note
, 0);
1378 if ((valid_to
== NULL_RTX
1379 || ! modified_between_p (src
, PREV_INSN (p
), valid_to
))
1380 /* Reject hard registers because we don't usually want
1381 to use them; we'd rather use a pseudo. */
1382 && (! (GET_CODE (src
) == REG
1383 && REGNO (src
) < FIRST_PSEUDO_REGISTER
) || allow_hwreg
))
1390 /* If set in non-simple way, we don't have a value. */
1391 if (reg_set_p (x
, p
))
1398 /* Return nonzero if register in range [REGNO, ENDREGNO)
1399 appears either explicitly or implicitly in X
1400 other than being stored into.
1402 References contained within the substructure at LOC do not count.
1403 LOC may be zero, meaning don't ignore anything. */
1406 refers_to_regno_p (unsigned int regno
, unsigned int endregno
, rtx x
,
1410 unsigned int x_regno
;
1415 /* The contents of a REG_NONNEG note is always zero, so we must come here
1416 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1420 code
= GET_CODE (x
);
1425 x_regno
= REGNO (x
);
1427 /* If we modifying the stack, frame, or argument pointer, it will
1428 clobber a virtual register. In fact, we could be more precise,
1429 but it isn't worth it. */
1430 if ((x_regno
== STACK_POINTER_REGNUM
1431 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1432 || x_regno
== ARG_POINTER_REGNUM
1434 || x_regno
== FRAME_POINTER_REGNUM
)
1435 && regno
>= FIRST_VIRTUAL_REGISTER
&& regno
<= LAST_VIRTUAL_REGISTER
)
1438 return (endregno
> x_regno
1439 && regno
< x_regno
+ (x_regno
< FIRST_PSEUDO_REGISTER
1440 ? HARD_REGNO_NREGS (x_regno
, GET_MODE (x
))
1444 /* If this is a SUBREG of a hard reg, we can see exactly which
1445 registers are being modified. Otherwise, handle normally. */
1446 if (GET_CODE (SUBREG_REG (x
)) == REG
1447 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
1449 unsigned int inner_regno
= subreg_regno (x
);
1450 unsigned int inner_endregno
1451 = inner_regno
+ (inner_regno
< FIRST_PSEUDO_REGISTER
1452 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
1454 return endregno
> inner_regno
&& regno
< inner_endregno
;
1460 if (&SET_DEST (x
) != loc
1461 /* Note setting a SUBREG counts as referring to the REG it is in for
1462 a pseudo but not for hard registers since we can
1463 treat each word individually. */
1464 && ((GET_CODE (SET_DEST (x
)) == SUBREG
1465 && loc
!= &SUBREG_REG (SET_DEST (x
))
1466 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
1467 && REGNO (SUBREG_REG (SET_DEST (x
))) >= FIRST_PSEUDO_REGISTER
1468 && refers_to_regno_p (regno
, endregno
,
1469 SUBREG_REG (SET_DEST (x
)), loc
))
1470 || (GET_CODE (SET_DEST (x
)) != REG
1471 && refers_to_regno_p (regno
, endregno
, SET_DEST (x
), loc
))))
1474 if (code
== CLOBBER
|| loc
== &SET_SRC (x
))
1483 /* X does not match, so try its subexpressions. */
1485 fmt
= GET_RTX_FORMAT (code
);
1486 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1488 if (fmt
[i
] == 'e' && loc
!= &XEXP (x
, i
))
1496 if (refers_to_regno_p (regno
, endregno
, XEXP (x
, i
), loc
))
1499 else if (fmt
[i
] == 'E')
1502 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1503 if (loc
!= &XVECEXP (x
, i
, j
)
1504 && refers_to_regno_p (regno
, endregno
, XVECEXP (x
, i
, j
), loc
))
1511 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1512 we check if any register number in X conflicts with the relevant register
1513 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1514 contains a MEM (we don't bother checking for memory addresses that can't
1515 conflict because we expect this to be a rare case. */
1518 reg_overlap_mentioned_p (rtx x
, rtx in
)
1520 unsigned int regno
, endregno
;
1522 /* Overly conservative. */
1523 if (GET_CODE (x
) == STRICT_LOW_PART
1524 || GET_CODE (x
) == ZERO_EXTRACT
1525 || GET_CODE (x
) == SIGN_EXTRACT
)
1528 /* If either argument is a constant, then modifying X can not affect IN. */
1529 if (CONSTANT_P (x
) || CONSTANT_P (in
))
1532 switch (GET_CODE (x
))
1535 regno
= REGNO (SUBREG_REG (x
));
1536 if (regno
< FIRST_PSEUDO_REGISTER
)
1537 regno
= subreg_regno (x
);
1543 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
1544 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
1545 return refers_to_regno_p (regno
, endregno
, in
, (rtx
*) 0);
1552 if (GET_CODE (in
) == MEM
)
1555 fmt
= GET_RTX_FORMAT (GET_CODE (in
));
1556 for (i
= GET_RTX_LENGTH (GET_CODE (in
)) - 1; i
>= 0; i
--)
1557 if (fmt
[i
] == 'e' && reg_overlap_mentioned_p (x
, XEXP (in
, i
)))
1566 return reg_mentioned_p (x
, in
);
1572 /* If any register in here refers to it we return true. */
1573 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1574 if (XEXP (XVECEXP (x
, 0, i
), 0) != 0
1575 && reg_overlap_mentioned_p (XEXP (XVECEXP (x
, 0, i
), 0), in
))
1587 /* Return the last value to which REG was set prior to INSN. If we can't
1588 find it easily, return 0.
1590 We only return a REG, SUBREG, or constant because it is too hard to
1591 check if a MEM remains unchanged. */
1594 reg_set_last (rtx x
, rtx insn
)
1596 rtx orig_insn
= insn
;
1598 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1599 Stop when we reach a label or X is a hard reg and we reach a
1600 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1602 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1604 /* We compare with <= here, because reg_set_last_last_regno
1605 is actually the number of the first reg *not* in X. */
1607 insn
&& GET_CODE (insn
) != CODE_LABEL
1608 && ! (GET_CODE (insn
) == CALL_INSN
1609 && REGNO (x
) <= FIRST_PSEUDO_REGISTER
);
1610 insn
= PREV_INSN (insn
))
1613 rtx set
= set_of (x
, insn
);
1614 /* OK, this function modify our register. See if we understand it. */
1618 if (GET_CODE (set
) != SET
|| SET_DEST (set
) != x
)
1620 last_value
= SET_SRC (x
);
1621 if (CONSTANT_P (last_value
)
1622 || ((GET_CODE (last_value
) == REG
1623 || GET_CODE (last_value
) == SUBREG
)
1624 && ! reg_set_between_p (last_value
,
1635 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1636 (X would be the pattern of an insn).
1637 FUN receives two arguments:
1638 the REG, MEM, CC0 or PC being stored in or clobbered,
1639 the SET or CLOBBER rtx that does the store.
1641 If the item being stored in or clobbered is a SUBREG of a hard register,
1642 the SUBREG will be passed. */
1645 note_stores (rtx x
, void (*fun
) (rtx
, rtx
, void *), void *data
)
1649 if (GET_CODE (x
) == COND_EXEC
)
1650 x
= COND_EXEC_CODE (x
);
1652 if (GET_CODE (x
) == SET
|| GET_CODE (x
) == CLOBBER
)
1654 rtx dest
= SET_DEST (x
);
1656 while ((GET_CODE (dest
) == SUBREG
1657 && (GET_CODE (SUBREG_REG (dest
)) != REG
1658 || REGNO (SUBREG_REG (dest
)) >= FIRST_PSEUDO_REGISTER
))
1659 || GET_CODE (dest
) == ZERO_EXTRACT
1660 || GET_CODE (dest
) == SIGN_EXTRACT
1661 || GET_CODE (dest
) == STRICT_LOW_PART
)
1662 dest
= XEXP (dest
, 0);
1664 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1665 each of whose first operand is a register. */
1666 if (GET_CODE (dest
) == PARALLEL
)
1668 for (i
= XVECLEN (dest
, 0) - 1; i
>= 0; i
--)
1669 if (XEXP (XVECEXP (dest
, 0, i
), 0) != 0)
1670 (*fun
) (XEXP (XVECEXP (dest
, 0, i
), 0), x
, data
);
1673 (*fun
) (dest
, x
, data
);
1676 else if (GET_CODE (x
) == PARALLEL
)
1677 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1678 note_stores (XVECEXP (x
, 0, i
), fun
, data
);
1681 /* Like notes_stores, but call FUN for each expression that is being
1682 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1683 FUN for each expression, not any interior subexpressions. FUN receives a
1684 pointer to the expression and the DATA passed to this function.
1686 Note that this is not quite the same test as that done in reg_referenced_p
1687 since that considers something as being referenced if it is being
1688 partially set, while we do not. */
1691 note_uses (rtx
*pbody
, void (*fun
) (rtx
*, void *), void *data
)
1696 switch (GET_CODE (body
))
1699 (*fun
) (&COND_EXEC_TEST (body
), data
);
1700 note_uses (&COND_EXEC_CODE (body
), fun
, data
);
1704 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1705 note_uses (&XVECEXP (body
, 0, i
), fun
, data
);
1709 (*fun
) (&XEXP (body
, 0), data
);
1713 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
1714 (*fun
) (&ASM_OPERANDS_INPUT (body
, i
), data
);
1718 (*fun
) (&TRAP_CONDITION (body
), data
);
1722 (*fun
) (&XEXP (body
, 0), data
);
1726 case UNSPEC_VOLATILE
:
1727 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1728 (*fun
) (&XVECEXP (body
, 0, i
), data
);
1732 if (GET_CODE (XEXP (body
, 0)) == MEM
)
1733 (*fun
) (&XEXP (XEXP (body
, 0), 0), data
);
1738 rtx dest
= SET_DEST (body
);
1740 /* For sets we replace everything in source plus registers in memory
1741 expression in store and operands of a ZERO_EXTRACT. */
1742 (*fun
) (&SET_SRC (body
), data
);
1744 if (GET_CODE (dest
) == ZERO_EXTRACT
)
1746 (*fun
) (&XEXP (dest
, 1), data
);
1747 (*fun
) (&XEXP (dest
, 2), data
);
1750 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
)
1751 dest
= XEXP (dest
, 0);
1753 if (GET_CODE (dest
) == MEM
)
1754 (*fun
) (&XEXP (dest
, 0), data
);
1759 /* All the other possibilities never store. */
1760 (*fun
) (pbody
, data
);
1765 /* Return nonzero if X's old contents don't survive after INSN.
1766 This will be true if X is (cc0) or if X is a register and
1767 X dies in INSN or because INSN entirely sets X.
1769 "Entirely set" means set directly and not through a SUBREG,
1770 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1771 Likewise, REG_INC does not count.
1773 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1774 but for this use that makes no difference, since regs don't overlap
1775 during their lifetimes. Therefore, this function may be used
1776 at any time after deaths have been computed (in flow.c).
1778 If REG is a hard reg that occupies multiple machine registers, this
1779 function will only return 1 if each of those registers will be replaced
1783 dead_or_set_p (rtx insn
, rtx x
)
1785 unsigned int regno
, last_regno
;
1788 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1789 if (GET_CODE (x
) == CC0
)
1792 if (GET_CODE (x
) != REG
)
1796 last_regno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
1797 : regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
)) - 1);
1799 for (i
= regno
; i
<= last_regno
; i
++)
1800 if (! dead_or_set_regno_p (insn
, i
))
1806 /* Utility function for dead_or_set_p to check an individual register. Also
1807 called from flow.c. */
1810 dead_or_set_regno_p (rtx insn
, unsigned int test_regno
)
1812 unsigned int regno
, endregno
;
1815 /* See if there is a death note for something that includes TEST_REGNO. */
1816 if (find_regno_note (insn
, REG_DEAD
, test_regno
))
1819 if (GET_CODE (insn
) == CALL_INSN
1820 && find_regno_fusage (insn
, CLOBBER
, test_regno
))
1823 pattern
= PATTERN (insn
);
1825 if (GET_CODE (pattern
) == COND_EXEC
)
1826 pattern
= COND_EXEC_CODE (pattern
);
1828 if (GET_CODE (pattern
) == SET
)
1830 rtx dest
= SET_DEST (pattern
);
1832 /* A value is totally replaced if it is the destination or the
1833 destination is a SUBREG of REGNO that does not change the number of
1835 if (GET_CODE (dest
) == SUBREG
1836 && (((GET_MODE_SIZE (GET_MODE (dest
))
1837 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
1838 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
1839 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)))
1840 dest
= SUBREG_REG (dest
);
1842 if (GET_CODE (dest
) != REG
)
1845 regno
= REGNO (dest
);
1846 endregno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
+ 1
1847 : regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (dest
)));
1849 return (test_regno
>= regno
&& test_regno
< endregno
);
1851 else if (GET_CODE (pattern
) == PARALLEL
)
1855 for (i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
1857 rtx body
= XVECEXP (pattern
, 0, i
);
1859 if (GET_CODE (body
) == COND_EXEC
)
1860 body
= COND_EXEC_CODE (body
);
1862 if (GET_CODE (body
) == SET
|| GET_CODE (body
) == CLOBBER
)
1864 rtx dest
= SET_DEST (body
);
1866 if (GET_CODE (dest
) == SUBREG
1867 && (((GET_MODE_SIZE (GET_MODE (dest
))
1868 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
1869 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
1870 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)))
1871 dest
= SUBREG_REG (dest
);
1873 if (GET_CODE (dest
) != REG
)
1876 regno
= REGNO (dest
);
1877 endregno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
+ 1
1878 : regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (dest
)));
1880 if (test_regno
>= regno
&& test_regno
< endregno
)
1889 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1890 If DATUM is nonzero, look for one whose datum is DATUM. */
1893 find_reg_note (rtx insn
, enum reg_note kind
, rtx datum
)
1897 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1898 if (! INSN_P (insn
))
1901 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1902 if (REG_NOTE_KIND (link
) == kind
1903 && (datum
== 0 || datum
== XEXP (link
, 0)))
1908 /* Return the reg-note of kind KIND in insn INSN which applies to register
1909 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1910 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1911 it might be the case that the note overlaps REGNO. */
1914 find_regno_note (rtx insn
, enum reg_note kind
, unsigned int regno
)
1918 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1919 if (! INSN_P (insn
))
1922 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1923 if (REG_NOTE_KIND (link
) == kind
1924 /* Verify that it is a register, so that scratch and MEM won't cause a
1926 && GET_CODE (XEXP (link
, 0)) == REG
1927 && REGNO (XEXP (link
, 0)) <= regno
1928 && ((REGNO (XEXP (link
, 0))
1929 + (REGNO (XEXP (link
, 0)) >= FIRST_PSEUDO_REGISTER
? 1
1930 : HARD_REGNO_NREGS (REGNO (XEXP (link
, 0)),
1931 GET_MODE (XEXP (link
, 0)))))
1937 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1941 find_reg_equal_equiv_note (rtx insn
)
1947 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1948 if (REG_NOTE_KIND (link
) == REG_EQUAL
1949 || REG_NOTE_KIND (link
) == REG_EQUIV
)
1951 if (single_set (insn
) == 0)
1958 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1959 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1962 find_reg_fusage (rtx insn
, enum rtx_code code
, rtx datum
)
1964 /* If it's not a CALL_INSN, it can't possibly have a
1965 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1966 if (GET_CODE (insn
) != CALL_INSN
)
1972 if (GET_CODE (datum
) != REG
)
1976 for (link
= CALL_INSN_FUNCTION_USAGE (insn
);
1978 link
= XEXP (link
, 1))
1979 if (GET_CODE (XEXP (link
, 0)) == code
1980 && rtx_equal_p (datum
, XEXP (XEXP (link
, 0), 0)))
1985 unsigned int regno
= REGNO (datum
);
1987 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1988 to pseudo registers, so don't bother checking. */
1990 if (regno
< FIRST_PSEUDO_REGISTER
)
1992 unsigned int end_regno
1993 = regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (datum
));
1996 for (i
= regno
; i
< end_regno
; i
++)
1997 if (find_regno_fusage (insn
, code
, i
))
2005 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2006 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2009 find_regno_fusage (rtx insn
, enum rtx_code code
, unsigned int regno
)
2013 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2014 to pseudo registers, so don't bother checking. */
2016 if (regno
>= FIRST_PSEUDO_REGISTER
2017 || GET_CODE (insn
) != CALL_INSN
)
2020 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
2022 unsigned int regnote
;
2025 if (GET_CODE (op
= XEXP (link
, 0)) == code
2026 && GET_CODE (reg
= XEXP (op
, 0)) == REG
2027 && (regnote
= REGNO (reg
)) <= regno
2028 && regnote
+ HARD_REGNO_NREGS (regnote
, GET_MODE (reg
)) > regno
)
2035 /* Return true if INSN is a call to a pure function. */
2038 pure_call_p (rtx insn
)
2042 if (GET_CODE (insn
) != CALL_INSN
|| ! CONST_OR_PURE_CALL_P (insn
))
2045 /* Look for the note that differentiates const and pure functions. */
2046 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
2050 if (GET_CODE (u
= XEXP (link
, 0)) == USE
2051 && GET_CODE (m
= XEXP (u
, 0)) == MEM
&& GET_MODE (m
) == BLKmode
2052 && GET_CODE (XEXP (m
, 0)) == SCRATCH
)
2059 /* Remove register note NOTE from the REG_NOTES of INSN. */
2062 remove_note (rtx insn
, rtx note
)
2066 if (note
== NULL_RTX
)
2069 if (REG_NOTES (insn
) == note
)
2071 REG_NOTES (insn
) = XEXP (note
, 1);
2075 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
2076 if (XEXP (link
, 1) == note
)
2078 XEXP (link
, 1) = XEXP (note
, 1);
2085 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2086 return 1 if it is found. A simple equality test is used to determine if
2090 in_expr_list_p (rtx listp
, rtx node
)
2094 for (x
= listp
; x
; x
= XEXP (x
, 1))
2095 if (node
== XEXP (x
, 0))
2101 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2102 remove that entry from the list if it is found.
2104 A simple equality test is used to determine if NODE matches. */
2107 remove_node_from_expr_list (rtx node
, rtx
*listp
)
2110 rtx prev
= NULL_RTX
;
2114 if (node
== XEXP (temp
, 0))
2116 /* Splice the node out of the list. */
2118 XEXP (prev
, 1) = XEXP (temp
, 1);
2120 *listp
= XEXP (temp
, 1);
2126 temp
= XEXP (temp
, 1);
2130 /* Nonzero if X contains any volatile instructions. These are instructions
2131 which may cause unpredictable machine state instructions, and thus no
2132 instructions should be moved or combined across them. This includes
2133 only volatile asms and UNSPEC_VOLATILE instructions. */
2136 volatile_insn_p (rtx x
)
2140 code
= GET_CODE (x
);
2160 case UNSPEC_VOLATILE
:
2161 /* case TRAP_IF: This isn't clear yet. */
2166 if (MEM_VOLATILE_P (x
))
2173 /* Recursively scan the operands of this expression. */
2176 const char *fmt
= GET_RTX_FORMAT (code
);
2179 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2183 if (volatile_insn_p (XEXP (x
, i
)))
2186 else if (fmt
[i
] == 'E')
2189 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2190 if (volatile_insn_p (XVECEXP (x
, i
, j
)))
2198 /* Nonzero if X contains any volatile memory references
2199 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2202 volatile_refs_p (rtx x
)
2206 code
= GET_CODE (x
);
2224 case UNSPEC_VOLATILE
:
2230 if (MEM_VOLATILE_P (x
))
2237 /* Recursively scan the operands of this expression. */
2240 const char *fmt
= GET_RTX_FORMAT (code
);
2243 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2247 if (volatile_refs_p (XEXP (x
, i
)))
2250 else if (fmt
[i
] == 'E')
2253 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2254 if (volatile_refs_p (XVECEXP (x
, i
, j
)))
2262 /* Similar to above, except that it also rejects register pre- and post-
2266 side_effects_p (rtx x
)
2270 code
= GET_CODE (x
);
2288 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2289 when some combination can't be done. If we see one, don't think
2290 that we can simplify the expression. */
2291 return (GET_MODE (x
) != VOIDmode
);
2300 case UNSPEC_VOLATILE
:
2301 /* case TRAP_IF: This isn't clear yet. */
2307 if (MEM_VOLATILE_P (x
))
2314 /* Recursively scan the operands of this expression. */
2317 const char *fmt
= GET_RTX_FORMAT (code
);
2320 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2324 if (side_effects_p (XEXP (x
, i
)))
2327 else if (fmt
[i
] == 'E')
2330 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2331 if (side_effects_p (XVECEXP (x
, i
, j
)))
2339 /* Return nonzero if evaluating rtx X might cause a trap. */
2350 code
= GET_CODE (x
);
2353 /* Handle these cases quickly. */
2367 case UNSPEC_VOLATILE
:
2372 return MEM_VOLATILE_P (x
);
2374 /* Memory ref can trap unless it's a static var or a stack slot. */
2376 if (MEM_NOTRAP_P (x
))
2378 return rtx_addr_can_trap_p (XEXP (x
, 0));
2380 /* Division by a non-constant might trap. */
2385 if (HONOR_SNANS (GET_MODE (x
)))
2387 if (! CONSTANT_P (XEXP (x
, 1))
2388 || (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2389 && flag_trapping_math
))
2391 if (XEXP (x
, 1) == const0_rtx
)
2396 /* An EXPR_LIST is used to represent a function call. This
2397 certainly may trap. */
2405 /* Some floating point comparisons may trap. */
2406 if (!flag_trapping_math
)
2408 /* ??? There is no machine independent way to check for tests that trap
2409 when COMPARE is used, though many targets do make this distinction.
2410 For instance, sparc uses CCFPE for compares which generate exceptions
2411 and CCFP for compares which do not generate exceptions. */
2412 if (HONOR_NANS (GET_MODE (x
)))
2414 /* But often the compare has some CC mode, so check operand
2416 if (HONOR_NANS (GET_MODE (XEXP (x
, 0)))
2417 || HONOR_NANS (GET_MODE (XEXP (x
, 1))))
2423 if (HONOR_SNANS (GET_MODE (x
)))
2425 /* Often comparison is CC mode, so check operand modes. */
2426 if (HONOR_SNANS (GET_MODE (XEXP (x
, 0)))
2427 || HONOR_SNANS (GET_MODE (XEXP (x
, 1))))
2432 /* Conversion of floating point might trap. */
2433 if (flag_trapping_math
&& HONOR_NANS (GET_MODE (XEXP (x
, 0))))
2439 /* These operations don't trap even with floating point. */
2443 /* Any floating arithmetic may trap. */
2444 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2445 && flag_trapping_math
)
2449 fmt
= GET_RTX_FORMAT (code
);
2450 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2454 if (may_trap_p (XEXP (x
, i
)))
2457 else if (fmt
[i
] == 'E')
2460 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2461 if (may_trap_p (XVECEXP (x
, i
, j
)))
2468 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2469 i.e., an inequality. */
2472 inequality_comparisons_p (rtx x
)
2476 enum rtx_code code
= GET_CODE (x
);
2506 len
= GET_RTX_LENGTH (code
);
2507 fmt
= GET_RTX_FORMAT (code
);
2509 for (i
= 0; i
< len
; i
++)
2513 if (inequality_comparisons_p (XEXP (x
, i
)))
2516 else if (fmt
[i
] == 'E')
2519 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2520 if (inequality_comparisons_p (XVECEXP (x
, i
, j
)))
2528 /* Replace any occurrence of FROM in X with TO. The function does
2529 not enter into CONST_DOUBLE for the replace.
2531 Note that copying is not done so X must not be shared unless all copies
2532 are to be modified. */
2535 replace_rtx (rtx x
, rtx from
, rtx to
)
2540 /* The following prevents loops occurrence when we change MEM in
2541 CONST_DOUBLE onto the same CONST_DOUBLE. */
2542 if (x
!= 0 && GET_CODE (x
) == CONST_DOUBLE
)
2548 /* Allow this function to make replacements in EXPR_LISTs. */
2552 if (GET_CODE (x
) == SUBREG
)
2554 rtx
new = replace_rtx (SUBREG_REG (x
), from
, to
);
2556 if (GET_CODE (new) == CONST_INT
)
2558 x
= simplify_subreg (GET_MODE (x
), new,
2559 GET_MODE (SUBREG_REG (x
)),
2565 SUBREG_REG (x
) = new;
2569 else if (GET_CODE (x
) == ZERO_EXTEND
)
2571 rtx
new = replace_rtx (XEXP (x
, 0), from
, to
);
2573 if (GET_CODE (new) == CONST_INT
)
2575 x
= simplify_unary_operation (ZERO_EXTEND
, GET_MODE (x
),
2576 new, GET_MODE (XEXP (x
, 0)));
2586 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2587 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2590 XEXP (x
, i
) = replace_rtx (XEXP (x
, i
), from
, to
);
2591 else if (fmt
[i
] == 'E')
2592 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2593 XVECEXP (x
, i
, j
) = replace_rtx (XVECEXP (x
, i
, j
), from
, to
);
2599 /* Throughout the rtx X, replace many registers according to REG_MAP.
2600 Return the replacement for X (which may be X with altered contents).
2601 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2602 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2604 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2605 should not be mapped to pseudos or vice versa since validate_change
2608 If REPLACE_DEST is 1, replacements are also done in destinations;
2609 otherwise, only sources are replaced. */
2612 replace_regs (rtx x
, rtx
*reg_map
, unsigned int nregs
, int replace_dest
)
2621 code
= GET_CODE (x
);
2636 /* Verify that the register has an entry before trying to access it. */
2637 if (REGNO (x
) < nregs
&& reg_map
[REGNO (x
)] != 0)
2639 /* SUBREGs can't be shared. Always return a copy to ensure that if
2640 this replacement occurs more than once then each instance will
2641 get distinct rtx. */
2642 if (GET_CODE (reg_map
[REGNO (x
)]) == SUBREG
)
2643 return copy_rtx (reg_map
[REGNO (x
)]);
2644 return reg_map
[REGNO (x
)];
2649 /* Prevent making nested SUBREGs. */
2650 if (GET_CODE (SUBREG_REG (x
)) == REG
&& REGNO (SUBREG_REG (x
)) < nregs
2651 && reg_map
[REGNO (SUBREG_REG (x
))] != 0
2652 && GET_CODE (reg_map
[REGNO (SUBREG_REG (x
))]) == SUBREG
)
2654 rtx map_val
= reg_map
[REGNO (SUBREG_REG (x
))];
2655 return simplify_gen_subreg (GET_MODE (x
), map_val
,
2656 GET_MODE (SUBREG_REG (x
)),
2663 SET_DEST (x
) = replace_regs (SET_DEST (x
), reg_map
, nregs
, 0);
2665 else if (GET_CODE (SET_DEST (x
)) == MEM
2666 || GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2667 /* Even if we are not to replace destinations, replace register if it
2668 is CONTAINED in destination (destination is memory or
2669 STRICT_LOW_PART). */
2670 XEXP (SET_DEST (x
), 0) = replace_regs (XEXP (SET_DEST (x
), 0),
2672 else if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
2673 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2676 SET_SRC (x
) = replace_regs (SET_SRC (x
), reg_map
, nregs
, 0);
2683 fmt
= GET_RTX_FORMAT (code
);
2684 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2687 XEXP (x
, i
) = replace_regs (XEXP (x
, i
), reg_map
, nregs
, replace_dest
);
2688 else if (fmt
[i
] == 'E')
2691 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2692 XVECEXP (x
, i
, j
) = replace_regs (XVECEXP (x
, i
, j
), reg_map
,
2693 nregs
, replace_dest
);
2699 /* Replace occurrences of the old label in *X with the new one.
2700 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2703 replace_label (rtx
*x
, void *data
)
2707 rtx old_label
= ((replace_label_data
*) data
)->r1
;
2708 rtx new_label
= ((replace_label_data
*) data
)->r2
;
2709 bool update_label_nuses
= ((replace_label_data
*) data
)->update_label_nuses
;
2714 if (GET_CODE (l
) == MEM
2715 && (tmp
= XEXP (l
, 0)) != NULL_RTX
2716 && GET_CODE (tmp
) == SYMBOL_REF
2717 && CONSTANT_POOL_ADDRESS_P (tmp
))
2719 rtx c
= get_pool_constant (tmp
);
2720 if (rtx_referenced_p (old_label
, c
))
2723 replace_label_data
*d
= (replace_label_data
*) data
;
2725 /* Create a copy of constant C; replace the label inside
2726 but do not update LABEL_NUSES because uses in constant pool
2728 new_c
= copy_rtx (c
);
2729 d
->update_label_nuses
= false;
2730 for_each_rtx (&new_c
, replace_label
, data
);
2731 d
->update_label_nuses
= update_label_nuses
;
2733 /* Add the new constant NEW_C to constant pool and replace
2734 the old reference to constant by new reference. */
2735 new_l
= force_const_mem (get_pool_mode (tmp
), new_c
);
2736 *x
= replace_rtx (l
, l
, new_l
);
2741 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2742 field. This is not handled by for_each_rtx because it doesn't
2743 handle unprinted ('0') fields. */
2744 if (GET_CODE (l
) == JUMP_INSN
&& JUMP_LABEL (l
) == old_label
)
2745 JUMP_LABEL (l
) = new_label
;
2747 if ((GET_CODE (l
) == LABEL_REF
2748 || GET_CODE (l
) == INSN_LIST
)
2749 && XEXP (l
, 0) == old_label
)
2751 XEXP (l
, 0) = new_label
;
2752 if (update_label_nuses
)
2754 ++LABEL_NUSES (new_label
);
2755 --LABEL_NUSES (old_label
);
2763 /* When *BODY is equal to X or X is directly referenced by *BODY
2764 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2765 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2768 rtx_referenced_p_1 (rtx
*body
, void *x
)
2772 if (*body
== NULL_RTX
)
2773 return y
== NULL_RTX
;
2775 /* Return true if a label_ref *BODY refers to label Y. */
2776 if (GET_CODE (*body
) == LABEL_REF
&& GET_CODE (y
) == CODE_LABEL
)
2777 return XEXP (*body
, 0) == y
;
2779 /* If *BODY is a reference to pool constant traverse the constant. */
2780 if (GET_CODE (*body
) == SYMBOL_REF
2781 && CONSTANT_POOL_ADDRESS_P (*body
))
2782 return rtx_referenced_p (y
, get_pool_constant (*body
));
2784 /* By default, compare the RTL expressions. */
2785 return rtx_equal_p (*body
, y
);
2788 /* Return true if X is referenced in BODY. */
2791 rtx_referenced_p (rtx x
, rtx body
)
2793 return for_each_rtx (&body
, rtx_referenced_p_1
, x
);
2796 /* If INSN is a tablejump return true and store the label (before jump table) to
2797 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2800 tablejump_p (rtx insn
, rtx
*labelp
, rtx
*tablep
)
2804 if (GET_CODE (insn
) == JUMP_INSN
2805 && (label
= JUMP_LABEL (insn
)) != NULL_RTX
2806 && (table
= next_active_insn (label
)) != NULL_RTX
2807 && GET_CODE (table
) == JUMP_INSN
2808 && (GET_CODE (PATTERN (table
)) == ADDR_VEC
2809 || GET_CODE (PATTERN (table
)) == ADDR_DIFF_VEC
))
2820 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2821 constant that is not in the constant pool and not in the condition
2822 of an IF_THEN_ELSE. */
2825 computed_jump_p_1 (rtx x
)
2827 enum rtx_code code
= GET_CODE (x
);
2846 return ! (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
2847 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)));
2850 return (computed_jump_p_1 (XEXP (x
, 1))
2851 || computed_jump_p_1 (XEXP (x
, 2)));
2857 fmt
= GET_RTX_FORMAT (code
);
2858 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2861 && computed_jump_p_1 (XEXP (x
, i
)))
2864 else if (fmt
[i
] == 'E')
2865 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2866 if (computed_jump_p_1 (XVECEXP (x
, i
, j
)))
2873 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2875 Tablejumps and casesi insns are not considered indirect jumps;
2876 we can recognize them by a (use (label_ref)). */
2879 computed_jump_p (rtx insn
)
2882 if (GET_CODE (insn
) == JUMP_INSN
)
2884 rtx pat
= PATTERN (insn
);
2886 if (find_reg_note (insn
, REG_LABEL
, NULL_RTX
))
2888 else if (GET_CODE (pat
) == PARALLEL
)
2890 int len
= XVECLEN (pat
, 0);
2891 int has_use_labelref
= 0;
2893 for (i
= len
- 1; i
>= 0; i
--)
2894 if (GET_CODE (XVECEXP (pat
, 0, i
)) == USE
2895 && (GET_CODE (XEXP (XVECEXP (pat
, 0, i
), 0))
2897 has_use_labelref
= 1;
2899 if (! has_use_labelref
)
2900 for (i
= len
- 1; i
>= 0; i
--)
2901 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
2902 && SET_DEST (XVECEXP (pat
, 0, i
)) == pc_rtx
2903 && computed_jump_p_1 (SET_SRC (XVECEXP (pat
, 0, i
))))
2906 else if (GET_CODE (pat
) == SET
2907 && SET_DEST (pat
) == pc_rtx
2908 && computed_jump_p_1 (SET_SRC (pat
)))
2914 /* Traverse X via depth-first search, calling F for each
2915 sub-expression (including X itself). F is also passed the DATA.
2916 If F returns -1, do not traverse sub-expressions, but continue
2917 traversing the rest of the tree. If F ever returns any other
2918 nonzero value, stop the traversal, and return the value returned
2919 by F. Otherwise, return 0. This function does not traverse inside
2920 tree structure that contains RTX_EXPRs, or into sub-expressions
2921 whose format code is `0' since it is not known whether or not those
2922 codes are actually RTL.
2924 This routine is very general, and could (should?) be used to
2925 implement many of the other routines in this file. */
2928 for_each_rtx (rtx
*x
, rtx_function f
, void *data
)
2936 result
= (*f
) (x
, data
);
2938 /* Do not traverse sub-expressions. */
2940 else if (result
!= 0)
2941 /* Stop the traversal. */
2945 /* There are no sub-expressions. */
2948 length
= GET_RTX_LENGTH (GET_CODE (*x
));
2949 format
= GET_RTX_FORMAT (GET_CODE (*x
));
2951 for (i
= 0; i
< length
; ++i
)
2956 result
= for_each_rtx (&XEXP (*x
, i
), f
, data
);
2963 if (XVEC (*x
, i
) != 0)
2966 for (j
= 0; j
< XVECLEN (*x
, i
); ++j
)
2968 result
= for_each_rtx (&XVECEXP (*x
, i
, j
), f
, data
);
2976 /* Nothing to do. */
2985 /* Searches X for any reference to REGNO, returning the rtx of the
2986 reference found if any. Otherwise, returns NULL_RTX. */
2989 regno_use_in (unsigned int regno
, rtx x
)
2995 if (GET_CODE (x
) == REG
&& REGNO (x
) == regno
)
2998 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2999 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
3003 if ((tem
= regno_use_in (regno
, XEXP (x
, i
))))
3006 else if (fmt
[i
] == 'E')
3007 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
3008 if ((tem
= regno_use_in (regno
, XVECEXP (x
, i
, j
))))
3015 /* Return a value indicating whether OP, an operand of a commutative
3016 operation, is preferred as the first or second operand. The higher
3017 the value, the stronger the preference for being the first operand.
3018 We use negative values to indicate a preference for the first operand
3019 and positive values for the second operand. */
3022 commutative_operand_precedence (rtx op
)
3024 /* Constants always come the second operand. Prefer "nice" constants. */
3025 if (GET_CODE (op
) == CONST_INT
)
3027 if (GET_CODE (op
) == CONST_DOUBLE
)
3029 op
= avoid_constant_pool_reference (op
);
3030 if (GET_CODE (op
) == CONST_INT
)
3032 if (GET_CODE (op
) == CONST_DOUBLE
)
3034 if (CONSTANT_P (op
))
3037 /* SUBREGs of objects should come second. */
3038 if (GET_CODE (op
) == SUBREG
3039 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op
))) == 'o')
3042 /* If only one operand is a `neg', `not',
3043 `mult', `plus', or `minus' expression, it will be the first
3045 if (GET_CODE (op
) == NEG
|| GET_CODE (op
) == NOT
3046 || GET_CODE (op
) == MULT
|| GET_CODE (op
) == PLUS
3047 || GET_CODE (op
) == MINUS
)
3050 /* Complex expressions should be the first, so decrease priority
3052 if (GET_RTX_CLASS (GET_CODE (op
)) == 'o')
3057 /* Return 1 iff it is necessary to swap operands of commutative operation
3058 in order to canonicalize expression. */
3061 swap_commutative_operands_p (rtx x
, rtx y
)
3063 return (commutative_operand_precedence (x
)
3064 < commutative_operand_precedence (y
));
3067 /* Return 1 if X is an autoincrement side effect and the register is
3068 not the stack pointer. */
3072 switch (GET_CODE (x
))
3080 /* There are no REG_INC notes for SP. */
3081 if (XEXP (x
, 0) != stack_pointer_rtx
)
3089 /* Return 1 if the sequence of instructions beginning with FROM and up
3090 to and including TO is safe to move. If NEW_TO is non-NULL, and
3091 the sequence is not already safe to move, but can be easily
3092 extended to a sequence which is safe, then NEW_TO will point to the
3093 end of the extended sequence.
3095 For now, this function only checks that the region contains whole
3096 exception regions, but it could be extended to check additional
3097 conditions as well. */
3100 insns_safe_to_move_p (rtx from
, rtx to
, rtx
*new_to
)
3102 int eh_region_count
= 0;
3106 /* By default, assume the end of the region will be what was
3113 if (GET_CODE (r
) == NOTE
)
3115 switch (NOTE_LINE_NUMBER (r
))
3117 case NOTE_INSN_EH_REGION_BEG
:
3121 case NOTE_INSN_EH_REGION_END
:
3122 if (eh_region_count
== 0)
3123 /* This sequence of instructions contains the end of
3124 an exception region, but not he beginning. Moving
3125 it will cause chaos. */
3136 /* If we've passed TO, and we see a non-note instruction, we
3137 can't extend the sequence to a movable sequence. */
3143 /* It's OK to move the sequence if there were matched sets of
3144 exception region notes. */
3145 return eh_region_count
== 0;
3150 /* It's OK to move the sequence if there were matched sets of
3151 exception region notes. */
3152 if (past_to_p
&& eh_region_count
== 0)
3158 /* Go to the next instruction. */
3165 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3167 loc_mentioned_in_p (rtx
*loc
, rtx in
)
3169 enum rtx_code code
= GET_CODE (in
);
3170 const char *fmt
= GET_RTX_FORMAT (code
);
3173 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3175 if (loc
== &in
->u
.fld
[i
].rtx
)
3179 if (loc_mentioned_in_p (loc
, XEXP (in
, i
)))
3182 else if (fmt
[i
] == 'E')
3183 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
3184 if (loc_mentioned_in_p (loc
, XVECEXP (in
, i
, j
)))
3190 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3191 and SUBREG_BYTE, return the bit offset where the subreg begins
3192 (counting from the least significant bit of the operand). */
3195 subreg_lsb_1 (enum machine_mode outer_mode
,
3196 enum machine_mode inner_mode
,
3197 unsigned int subreg_byte
)
3199 unsigned int bitpos
;
3203 /* A paradoxical subreg begins at bit position 0. */
3204 if (GET_MODE_BITSIZE (outer_mode
) > GET_MODE_BITSIZE (inner_mode
))
3207 if (WORDS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
3208 /* If the subreg crosses a word boundary ensure that
3209 it also begins and ends on a word boundary. */
3210 if ((subreg_byte
% UNITS_PER_WORD
3211 + GET_MODE_SIZE (outer_mode
)) > UNITS_PER_WORD
3212 && (subreg_byte
% UNITS_PER_WORD
3213 || GET_MODE_SIZE (outer_mode
) % UNITS_PER_WORD
))
3216 if (WORDS_BIG_ENDIAN
)
3217 word
= (GET_MODE_SIZE (inner_mode
)
3218 - (subreg_byte
+ GET_MODE_SIZE (outer_mode
))) / UNITS_PER_WORD
;
3220 word
= subreg_byte
/ UNITS_PER_WORD
;
3221 bitpos
= word
* BITS_PER_WORD
;
3223 if (BYTES_BIG_ENDIAN
)
3224 byte
= (GET_MODE_SIZE (inner_mode
)
3225 - (subreg_byte
+ GET_MODE_SIZE (outer_mode
))) % UNITS_PER_WORD
;
3227 byte
= subreg_byte
% UNITS_PER_WORD
;
3228 bitpos
+= byte
* BITS_PER_UNIT
;
3233 /* Given a subreg X, return the bit offset where the subreg begins
3234 (counting from the least significant bit of the reg). */
3239 return subreg_lsb_1 (GET_MODE (x
), GET_MODE (SUBREG_REG (x
)),
3243 /* This function returns the regno offset of a subreg expression.
3244 xregno - A regno of an inner hard subreg_reg (or what will become one).
3245 xmode - The mode of xregno.
3246 offset - The byte offset.
3247 ymode - The mode of a top level SUBREG (or what may become one).
3248 RETURN - The regno offset which would be used. */
3250 subreg_regno_offset (unsigned int xregno
, enum machine_mode xmode
,
3251 unsigned int offset
, enum machine_mode ymode
)
3253 int nregs_xmode
, nregs_ymode
;
3254 int mode_multiple
, nregs_multiple
;
3257 if (xregno
>= FIRST_PSEUDO_REGISTER
)
3260 nregs_xmode
= HARD_REGNO_NREGS (xregno
, xmode
);
3261 nregs_ymode
= HARD_REGNO_NREGS (xregno
, ymode
);
3263 /* If this is a big endian paradoxical subreg, which uses more actual
3264 hard registers than the original register, we must return a negative
3265 offset so that we find the proper highpart of the register. */
3267 && nregs_ymode
> nregs_xmode
3268 && (GET_MODE_SIZE (ymode
) > UNITS_PER_WORD
3269 ? WORDS_BIG_ENDIAN
: BYTES_BIG_ENDIAN
))
3270 return nregs_xmode
- nregs_ymode
;
3272 if (offset
== 0 || nregs_xmode
== nregs_ymode
)
3275 /* size of ymode must not be greater than the size of xmode. */
3276 mode_multiple
= GET_MODE_SIZE (xmode
) / GET_MODE_SIZE (ymode
);
3277 if (mode_multiple
== 0)
3280 y_offset
= offset
/ GET_MODE_SIZE (ymode
);
3281 nregs_multiple
= nregs_xmode
/ nregs_ymode
;
3282 return (y_offset
/ (mode_multiple
/ nregs_multiple
)) * nregs_ymode
;
3285 /* This function returns true when the offset is representable via
3286 subreg_offset in the given regno.
3287 xregno - A regno of an inner hard subreg_reg (or what will become one).
3288 xmode - The mode of xregno.
3289 offset - The byte offset.
3290 ymode - The mode of a top level SUBREG (or what may become one).
3291 RETURN - The regno offset which would be used. */
3293 subreg_offset_representable_p (unsigned int xregno
, enum machine_mode xmode
,
3294 unsigned int offset
, enum machine_mode ymode
)
3296 int nregs_xmode
, nregs_ymode
;
3297 int mode_multiple
, nregs_multiple
;
3300 if (xregno
>= FIRST_PSEUDO_REGISTER
)
3303 nregs_xmode
= HARD_REGNO_NREGS (xregno
, xmode
);
3304 nregs_ymode
= HARD_REGNO_NREGS (xregno
, ymode
);
3306 /* paradoxical subregs are always valid. */
3308 && nregs_ymode
> nregs_xmode
3309 && (GET_MODE_SIZE (ymode
) > UNITS_PER_WORD
3310 ? WORDS_BIG_ENDIAN
: BYTES_BIG_ENDIAN
))
3313 /* Lowpart subregs are always valid. */
3314 if (offset
== subreg_lowpart_offset (ymode
, xmode
))
3317 #ifdef ENABLE_CHECKING
3318 /* This should always pass, otherwise we don't know how to verify the
3319 constraint. These conditions may be relaxed but subreg_offset would
3320 need to be redesigned. */
3321 if (GET_MODE_SIZE (xmode
) % GET_MODE_SIZE (ymode
)
3322 || GET_MODE_SIZE (ymode
) % nregs_ymode
3323 || nregs_xmode
% nregs_ymode
)
3327 /* The XMODE value can be seen as a vector of NREGS_XMODE
3328 values. The subreg must represent a lowpart of given field.
3329 Compute what field it is. */
3330 offset
-= subreg_lowpart_offset (ymode
,
3331 mode_for_size (GET_MODE_BITSIZE (xmode
)
3335 /* size of ymode must not be greater than the size of xmode. */
3336 mode_multiple
= GET_MODE_SIZE (xmode
) / GET_MODE_SIZE (ymode
);
3337 if (mode_multiple
== 0)
3340 y_offset
= offset
/ GET_MODE_SIZE (ymode
);
3341 nregs_multiple
= nregs_xmode
/ nregs_ymode
;
3342 #ifdef ENABLE_CHECKING
3343 if (offset
% GET_MODE_SIZE (ymode
)
3344 || mode_multiple
% nregs_multiple
)
3347 return (!(y_offset
% (mode_multiple
/ nregs_multiple
)));
3350 /* Return the final regno that a subreg expression refers to. */
3352 subreg_regno (rtx x
)
3355 rtx subreg
= SUBREG_REG (x
);
3356 int regno
= REGNO (subreg
);
3358 ret
= regno
+ subreg_regno_offset (regno
,
3365 struct parms_set_data
3371 /* Helper function for noticing stores to parameter registers. */
3373 parms_set (rtx x
, rtx pat ATTRIBUTE_UNUSED
, void *data
)
3375 struct parms_set_data
*d
= data
;
3376 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
3377 && TEST_HARD_REG_BIT (d
->regs
, REGNO (x
)))
3379 CLEAR_HARD_REG_BIT (d
->regs
, REGNO (x
));
3384 /* Look backward for first parameter to be loaded.
3385 Do not skip BOUNDARY. */
3387 find_first_parameter_load (rtx call_insn
, rtx boundary
)
3389 struct parms_set_data parm
;
3392 /* Since different machines initialize their parameter registers
3393 in different orders, assume nothing. Collect the set of all
3394 parameter registers. */
3395 CLEAR_HARD_REG_SET (parm
.regs
);
3397 for (p
= CALL_INSN_FUNCTION_USAGE (call_insn
); p
; p
= XEXP (p
, 1))
3398 if (GET_CODE (XEXP (p
, 0)) == USE
3399 && GET_CODE (XEXP (XEXP (p
, 0), 0)) == REG
)
3401 if (REGNO (XEXP (XEXP (p
, 0), 0)) >= FIRST_PSEUDO_REGISTER
)
3404 /* We only care about registers which can hold function
3406 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p
, 0), 0))))
3409 SET_HARD_REG_BIT (parm
.regs
, REGNO (XEXP (XEXP (p
, 0), 0)));
3414 /* Search backward for the first set of a register in this set. */
3415 while (parm
.nregs
&& before
!= boundary
)
3417 before
= PREV_INSN (before
);
3419 /* It is possible that some loads got CSEed from one call to
3420 another. Stop in that case. */
3421 if (GET_CODE (before
) == CALL_INSN
)
3424 /* Our caller needs either ensure that we will find all sets
3425 (in case code has not been optimized yet), or take care
3426 for possible labels in a way by setting boundary to preceding
3428 if (GET_CODE (before
) == CODE_LABEL
)
3430 if (before
!= boundary
)
3435 if (INSN_P (before
))
3436 note_stores (PATTERN (before
), parms_set
, &parm
);
3441 /* Return true if we should avoid inserting code between INSN and preceding
3442 call instruction. */
3445 keep_with_call_p (rtx insn
)
3449 if (INSN_P (insn
) && (set
= single_set (insn
)) != NULL
)
3451 if (GET_CODE (SET_DEST (set
)) == REG
3452 && REGNO (SET_DEST (set
)) < FIRST_PSEUDO_REGISTER
3453 && fixed_regs
[REGNO (SET_DEST (set
))]
3454 && general_operand (SET_SRC (set
), VOIDmode
))
3456 if (GET_CODE (SET_SRC (set
)) == REG
3457 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set
)))
3458 && GET_CODE (SET_DEST (set
)) == REG
3459 && REGNO (SET_DEST (set
)) >= FIRST_PSEUDO_REGISTER
)
3461 /* There may be a stack pop just after the call and before the store
3462 of the return register. Search for the actual store when deciding
3463 if we can break or not. */
3464 if (SET_DEST (set
) == stack_pointer_rtx
)
3466 rtx i2
= next_nonnote_insn (insn
);
3467 if (i2
&& keep_with_call_p (i2
))
3474 /* Return true when store to register X can be hoisted to the place
3475 with LIVE registers (can be NULL). Value VAL contains destination
3476 whose value will be used. */
3479 hoist_test_store (rtx x
, rtx val
, regset live
)
3481 if (GET_CODE (x
) == SCRATCH
)
3484 if (rtx_equal_p (x
, val
))
3487 /* Allow subreg of X in case it is not writing just part of multireg pseudo.
3488 Then we would need to update all users to care hoisting the store too.
3489 Caller may represent that by specifying whole subreg as val. */
3491 if (GET_CODE (x
) == SUBREG
&& rtx_equal_p (SUBREG_REG (x
), val
))
3493 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) > UNITS_PER_WORD
3494 && GET_MODE_BITSIZE (GET_MODE (x
)) <
3495 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))))
3499 if (GET_CODE (x
) == SUBREG
)
3502 /* Anything except register store is not hoistable. This includes the
3503 partial stores to registers. */
3508 /* Pseudo registers can be always replaced by another pseudo to avoid
3509 the side effect, for hard register we must ensure that they are dead.
3510 Eventually we may want to add code to try turn pseudos to hards, but it
3511 is unlikely useful. */
3513 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
)
3515 int regno
= REGNO (x
);
3516 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
3520 if (REGNO_REG_SET_P (live
, regno
))
3523 if (REGNO_REG_SET_P (live
, regno
+ n
))
3530 /* Return true if INSN can be hoisted to place with LIVE hard registers
3531 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3532 and used by the hoisting pass. */
3535 can_hoist_insn_p (rtx insn
, rtx val
, regset live
)
3537 rtx pat
= PATTERN (insn
);
3540 /* It probably does not worth the complexity to handle multiple
3542 if (!single_set (insn
))
3544 /* We can move CALL_INSN, but we need to check that all caller clobbered
3546 if (GET_CODE (insn
) == CALL_INSN
)
3548 /* In future we will handle hoisting of libcall sequences, but
3550 if (find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
3552 switch (GET_CODE (pat
))
3555 if (!hoist_test_store (SET_DEST (pat
), val
, live
))
3559 /* USES do have sick semantics, so do not move them. */
3563 if (!hoist_test_store (XEXP (pat
, 0), val
, live
))
3567 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
3569 rtx x
= XVECEXP (pat
, 0, i
);
3570 switch (GET_CODE (x
))
3573 if (!hoist_test_store (SET_DEST (x
), val
, live
))
3577 /* We need to fix callers to really ensure availability
3578 of all values insn uses, but for now it is safe to prohibit
3579 hoisting of any insn having such a hidden uses. */
3583 if (!hoist_test_store (SET_DEST (x
), val
, live
))
3597 /* Update store after hoisting - replace all stores to pseudo registers
3598 by new ones to avoid clobbering of values except for store to VAL that will
3599 be updated to NEW. */
3602 hoist_update_store (rtx insn
, rtx
*xp
, rtx val
, rtx
new)
3606 if (GET_CODE (x
) == SCRATCH
)
3609 if (GET_CODE (x
) == SUBREG
&& SUBREG_REG (x
) == val
)
3610 validate_change (insn
, xp
,
3611 simplify_gen_subreg (GET_MODE (x
), new, GET_MODE (new),
3612 SUBREG_BYTE (x
)), 1);
3613 if (rtx_equal_p (x
, val
))
3615 validate_change (insn
, xp
, new, 1);
3618 if (GET_CODE (x
) == SUBREG
)
3620 xp
= &SUBREG_REG (x
);
3627 /* We've verified that hard registers are dead, so we may keep the side
3628 effect. Otherwise replace it by new pseudo. */
3629 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
)
3630 validate_change (insn
, xp
, gen_reg_rtx (GET_MODE (x
)), 1);
3632 = alloc_EXPR_LIST (REG_UNUSED
, *xp
, REG_NOTES (insn
));
3635 /* Create a copy of INSN after AFTER replacing store of VAL to NEW
3636 and each other side effect to pseudo register by new pseudo register. */
3639 hoist_insn_after (rtx insn
, rtx after
, rtx val
, rtx
new)
3645 insn
= emit_copy_of_insn_after (insn
, after
);
3646 pat
= PATTERN (insn
);
3648 /* Remove REG_UNUSED notes as we will re-emit them. */
3649 while ((note
= find_reg_note (insn
, REG_UNUSED
, NULL_RTX
)))
3650 remove_note (insn
, note
);
3652 /* To get this working callers must ensure to move everything referenced
3653 by REG_EQUAL/REG_EQUIV notes too. Lets remove them, it is probably
3655 while ((note
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)))
3656 remove_note (insn
, note
);
3657 while ((note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
)))
3658 remove_note (insn
, note
);
3660 /* Remove REG_DEAD notes as they might not be valid anymore in case
3661 we create redundancy. */
3662 while ((note
= find_reg_note (insn
, REG_DEAD
, NULL_RTX
)))
3663 remove_note (insn
, note
);
3664 switch (GET_CODE (pat
))
3667 hoist_update_store (insn
, &SET_DEST (pat
), val
, new);
3672 hoist_update_store (insn
, &XEXP (pat
, 0), val
, new);
3675 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
3677 rtx x
= XVECEXP (pat
, 0, i
);
3678 switch (GET_CODE (x
))
3681 hoist_update_store (insn
, &SET_DEST (x
), val
, new);
3686 hoist_update_store (insn
, &SET_DEST (x
), val
, new);
3696 if (!apply_change_group ())
3703 hoist_insn_to_edge (rtx insn
, edge e
, rtx val
, rtx
new)
3707 /* We cannot insert instructions on an abnormal critical edge.
3708 It will be easier to find the culprit if we die now. */
3709 if ((e
->flags
& EDGE_ABNORMAL
) && EDGE_CRITICAL_P (e
))
3712 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3713 stuff. We also emit CALL_INSNS and firends. */
3714 if (e
->insns
== NULL_RTX
)
3717 emit_note (NOTE_INSN_DELETED
);
3720 push_to_sequence (e
->insns
);
3722 new_insn
= hoist_insn_after (insn
, get_last_insn (), val
, new);
3724 e
->insns
= get_insns ();
3729 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3730 to non-complex jumps. That is, direct unconditional, conditional,
3731 and tablejumps, but not computed jumps or returns. It also does
3732 not apply to the fallthru case of a conditional jump. */
3735 label_is_jump_target_p (rtx label
, rtx jump_insn
)
3737 rtx tmp
= JUMP_LABEL (jump_insn
);
3742 if (tablejump_p (jump_insn
, NULL
, &tmp
))
3744 rtvec vec
= XVEC (PATTERN (tmp
),
3745 GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
);
3746 int i
, veclen
= GET_NUM_ELEM (vec
);
3748 for (i
= 0; i
< veclen
; ++i
)
3749 if (XEXP (RTVEC_ELT (vec
, i
), 0) == label
)