/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
/* Forward declarations */
static int global_reg_mentioned_p_1	PARAMS ((rtx *, void *));
static void set_of_1			PARAMS ((rtx, rtx, void *));
static void insn_dependent_p_1		PARAMS ((rtx, rtx, void *));
static int computed_jump_p_1		PARAMS ((rtx));
static void parms_set			PARAMS ((rtx, rtx, void *));
static bool hoist_test_store		PARAMS ((rtx, rtx, regset));
static void hoist_update_store		PARAMS ((rtx, rtx *, rtx, rtx));
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

  RTX_CODE code = GET_CODE (x);

      return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
	  || RTX_UNCHANGING_P (x))

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
	 that must happen after a call.  This currently screws up local-alloc
	 into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
#endif

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_unstable_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (rtx_unstable_p (XVECEXP (x, i, j)))
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

rtx_varies_p (x, for_alias)

  RTX_CODE code = GET_CODE (x);

      return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

      /* Note that we have to test for the actual rtx used for the frame
	 and arg pointers and not just the register number in case we have
	 eliminated the frame and/or arg pointer and are using it
	 for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
	  /* ??? When call-clobbered, the value is stable modulo the restore
	     that must happen after a call.  This currently screws up
	     local-alloc into believing that the restore is not needed, so we
	     must return 0 only if we are called from alias analysis.  */
#endif

      /* The operand 0 of a LO_SUM is considered constant
	 (in fact it is related specifically to operand 1)
	 during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
	     || rtx_varies_p (XEXP (x, 1), for_alias);

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_varies_p (XEXP (x, i), for_alias))
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
/* Return 0 if the use of X as an address in a MEM can cause a trap.  */

rtx_addr_can_trap_p (x)

  enum rtx_code code = GET_CODE (x);

      return SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
	  || x == stack_pointer_rtx
	  /* The arg pointer varies if it is not a fixed register.  */
	  || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
	  && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* An address is assumed not to trap if it is an address that can't
	 trap plus a constant integer or it is the pic register plus a
	 constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
		 && GET_CODE (XEXP (x, 1)) == CONST_INT)
		|| (XEXP (x, 0) == pic_offset_table_rtx
		    && CONSTANT_P (XEXP (x, 1))));

      return rtx_addr_can_trap_p (XEXP (x, 1));

      return rtx_addr_can_trap_p (XEXP (x, 0));

  /* If it isn't one of the cases above, it can cause a trap.  */
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

rtx_addr_varies_p (x, for_alias)

    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_addr_varies_p (XEXP (x, i), for_alias))
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

  if (GET_CODE (x) == CONST)

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

get_related_value (x)

  if (GET_CODE (x) != CONST)

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
  else if (GET_CODE (x) == MINUS
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
/* Given a tablejump insn INSN, return the RTL expression for the offset
   into the jump table.  If the offset cannot be determined, then return
   NULL_RTX.

   If EARLIEST is non-zero, it is a pointer to a place where the earliest
   insn used in locating the offset was found.  */

get_jump_table_offset (insn, earliest)

  if (GET_CODE (insn) != JUMP_INSN
      || ! (label = JUMP_LABEL (insn))
      || ! (table = NEXT_INSN (label))
      || GET_CODE (table) != JUMP_INSN
      || (GET_CODE (PATTERN (table)) != ADDR_VEC
	  && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
      || ! (set = single_set (insn)))

  /* Some targets (e.g., ARM) emit a tablejump that also
     contains the out-of-range target.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && GET_CODE (XEXP (x, 2)) == LABEL_REF)

  /* Search backwards and locate the expression stored in X.  */
  for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))

  /* If X is an expression using a relative address then strip
     off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
     or the jump table label.  */
  if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
      for (i = 0; i < 2; i++)
	  if (y == pc_rtx || y == pic_offset_table_rtx)

	  for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
	       old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))

	  if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))

      for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))

  /* Strip off any sign or zero extension.  */
  if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)

      for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
	   old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))

  /* If X isn't a MEM then this isn't a tablejump we understand.  */
  if (GET_CODE (x) != MEM)

  /* Strip off the MEM.  */

  for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))

  /* If X isn't a PLUS then this isn't a tablejump we understand.  */
  if (GET_CODE (x) != PLUS)

  /* At this point we should have an expression representing the jump table
     plus an offset.  Examine each operand in order to determine which one
     represents the jump table.  Knowing that tells us that the other operand
     must represent the offset.  */
  for (i = 0; i < 2; i++)

      for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
	   old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))

      if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
	  && reg_mentioned_p (label, y))

  /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM.  */
  if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
    for (i = 0; i < 2; i++)
      if (XEXP (x, i) == pic_offset_table_rtx)

  /* Return the RTL expression representing the offset.  */
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

global_reg_mentioned_p_1 (loc, data)
     void *data ATTRIBUTE_UNUSED;

  switch (GET_CODE (x))

      if (GET_CODE (SUBREG_REG (x)) == REG)
	  if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
	      && global_regs[subreg_regno (x)])

      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])

      /* A non-constant call might use a global register.  */

/* Returns non-zero if X mentions a global register.  */

global_reg_mentioned_p (x)

  if (GET_CODE (x) == CALL_INSN)
      if (! CONST_OR_PURE_CALL_P (x))
      x = CALL_INSN_FUNCTION_USAGE (x);

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

count_occurrences (x, find, count_dest)

  const char *format_ptr;

      if (GET_CODE (find) == MEM && rtx_equal_p (x, find))

      if (SET_DEST (x) == find && ! count_dest)
	return count_occurrences (SET_SRC (x), find, count_dest);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
      switch (*format_ptr++)
	  count += count_occurrences (XEXP (x, i), find, count_dest);

	  for (j = 0; j < XVECLEN (x, i); j++)
	    count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

reg_mentioned_p (reg, in)

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

      /* Compare registers by number.  */
      return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
	 and are unique.  */

      return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);

      /* These are kept unique for a given value.  */

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	  if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
      else if (fmt[i] == 'e'
	       && reg_mentioned_p (reg, XEXP (in, i)))
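
/* Illustrative sketch (not part of the original file): a typical way a pass
   uses reg_mentioned_p.  SOME_REG and INSN here are hypothetical locals that
   would already be in scope at a real call site.  */
#if 0
  if (reg_mentioned_p (SOME_REG, PATTERN (insn)))
    {
      /* INSN refers to SOME_REG somewhere in its pattern, so it is not
	 safe to move INSN across a redefinition of SOME_REG.  */
    }
#endif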
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

no_labels_between_p (beg, end)

  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == CODE_LABEL)

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no JUMP_INSN insn.  */

no_jumps_between_p (beg, end)

  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == JUMP_INSN)
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_used_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
	&& (reg_overlap_mentioned_p (reg, PATTERN (insn))
	    || (GET_CODE (insn) == CALL_INSN
		&& (find_reg_fusage (insn, USE, reg)
		    || find_reg_fusage (insn, CLOBBER, reg)))))
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

reg_referenced_p (x, body)

  switch (GET_CODE (body))

      if (reg_overlap_mentioned_p (x, SET_SRC (body)))

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
	 of a REG that occupies all of the REG, the insn references X if
	 it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
	  && GET_CODE (SET_DEST (body)) != PC
	  && GET_CODE (SET_DEST (body)) != REG
	  && ! (GET_CODE (SET_DEST (body)) == SUBREG
		&& GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
		&& (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
		      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
		    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
			 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
	  && reg_overlap_mentioned_p (x, SET_DEST (body)))

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))

      return reg_overlap_mentioned_p (x, body);

      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	if (reg_referenced_p (x, XVECEXP (body, 0, i)))

      if (GET_CODE (XEXP (body, 0)) == MEM)
	if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))

      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
      return reg_referenced_p (x, COND_EXEC_CODE (body));
/* Nonzero if register REG is referenced in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  Sets of REG do
   not count.  */

reg_referenced_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
	&& (reg_referenced_p (reg, PATTERN (insn))
	    || (GET_CODE (insn) == CALL_INSN
		&& find_reg_fusage (insn, USE, reg))))
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_set_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
/* Internals of reg_set_between_p.  */

reg_set_p (reg, insn)

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */

    if (FIND_REG_INC_NOTE (insn, reg)
	|| (GET_CODE (insn) == CALL_INSN
	    /* We'd like to test call_used_regs here, but rtlanal.c can't
	       reference that variable due to its use in genattrtab.  So
	       we'll just be more conservative.

	       ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
	       information holds all clobbered registers.  */
	    && ((GET_CODE (reg) == REG
		 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
		|| GET_CODE (reg) == MEM
		|| find_reg_fusage (insn, CLOBBER, reg))))

  body = PATTERN (insn);

  return set_of (reg, insn) != NULL_RTX;
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Do not
   consider non-registers one way or the other.  */

regs_set_between_p (x, start, end)

  enum rtx_code code = GET_CODE (x);

      return reg_set_between_p (x, start, end);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (regs_set_between_p (XVECEXP (x, i, j), start, end))
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does not perform any memory aliasing.  */

modified_between_p (x, start, end)

  enum rtx_code code = GET_CODE (x);

      /* If the memory is not constant, assume it is modified.  If it is
	 constant, we still have to check the address.  */
      if (! RTX_UNCHANGING_P (x))

      return reg_set_between_p (x, start, end);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (modified_between_p (XVECEXP (x, i, j), start, end))
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does not perform any memory aliasing.  */

modified_in_p (x, insn)

  enum rtx_code code = GET_CODE (x);

      /* If the memory is not constant, assume it is modified.  If it is
	 constant, we still have to check the address.  */
      if (! RTX_UNCHANGING_P (x))

      return reg_set_p (x, insn);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if (modified_in_p (XVECEXP (x, i, j), insn))
/* Return true if anything in insn X is (anti,output,true) dependent on
   anything in insn Y.  */

insn_dependent_p (x, y)

  if (! INSN_P (x) || ! INSN_P (y))

  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)

  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)

/* A helper routine for insn_dependent_p called through note_stores.  */

insn_dependent_p_1 (x, pat, data)
     rtx pat ATTRIBUTE_UNUSED;

  rtx *pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
/* Helper function for set_of.  */

set_of_1 (x, pat, data1)

  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */

  struct set_of_data data;
  data.found = NULL_RTX;

  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

single_set_2 (insn, pat)

  int set_verified = 1;

  if (GET_CODE (pat) == PARALLEL)
      for (i = 0; i < XVECLEN (pat, 0); i++)
	  rtx sub = XVECEXP (pat, 0, i);
	  switch (GET_CODE (sub))

	      /* We can consider insns having multiple sets, where all
		 but one are dead, as single set insns.  In the common case
		 only a single set is present in the pattern, so we want
		 to avoid checking for REG_UNUSED notes unless necessary.

		 When we reach a set the first time, we just expect this is
		 the single set we are looking for, and only when more
		 sets are found in the insn do we check them.  */

	      if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
		  && !side_effects_p (set))

		set = sub, set_verified = 0;
	      else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
		       || side_effects_p (sub))
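
/* Illustrative sketch (not original code): how callers commonly consume
   single_set.  INSN is a hypothetical insn variable in scope.  */
#if 0
  rtx set = single_set (insn);
  if (set != NULL_RTX && GET_CODE (SET_DEST (set)) == REG)
    {
      /* The insn is effectively one assignment to one register;
	 SET_SRC (set) is the value being assigned.  */
    }
#endif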
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

multiple_sets (insn)

  /* INSN must be an insn.  */
  if (! INSN_P (insn))

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
	if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
	    /* If we have already found a SET, then return now.  */

  /* Either zero or one SET.  */
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (side_effects_p (src) || side_effects_p (dst))

  if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
    return rtx_equal_p (dst, src);

  if (dst == pc_rtx && src == pc_rtx)

  if (GET_CODE (dst) == SIGN_EXTRACT
      || GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
	   && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);

  return (GET_CODE (src) == REG && GET_CODE (dst) == REG
	  && REGNO (src) == REGNO (dst));
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))

  if (GET_CODE (pat) == SET && set_noop_p (pat))

  if (GET_CODE (pat) == PARALLEL)
      /* If nothing but SETs of registers to themselves,
	 this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
	  rtx tem = XVECEXP (pat, 0, i);

	  if (GET_CODE (tem) == USE
	      || GET_CODE (tem) == CLOBBER)

	  if (GET_CODE (tem) != SET || ! set_noop_p (tem))
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

find_last_value (x, pinsn, valid_to, allow_hwreg)

  for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
       p = PREV_INSN (p))
	rtx set = single_set (p);
	rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

	if (set && rtx_equal_p (x, SET_DEST (set)))
	    rtx src = SET_SRC (set);

	    if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
	      src = XEXP (note, 0);

	    if ((valid_to == NULL_RTX
		 || ! modified_between_p (src, PREV_INSN (p), valid_to))
		/* Reject hard registers because we don't usually want
		   to use them; we'd rather use a pseudo.  */
		&& (! (GET_CODE (src) == REG
		       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))

	/* If set in non-simple way, we don't have a value.  */
	if (reg_set_p (x, p))
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

refers_to_regno_p (regno, endregno, x, loc)
     unsigned int regno, endregno;

  unsigned int x_regno;

  /* The contents of a REG_NONNEG note are always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */

  code = GET_CODE (x);

      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
	 clobber a virtual register.  In fact, we could be more precise,
	 but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	   || x_regno == ARG_POINTER_REGNUM
#endif
	   || x_regno == FRAME_POINTER_REGNUM)
	  && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)

      return (endregno > x_regno
	      && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
				    ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))

      /* If this is a SUBREG of a hard reg, we can see exactly which
	 registers are being modified.  Otherwise, handle normally.  */
      if (GET_CODE (SUBREG_REG (x)) == REG
	  && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
	  unsigned int inner_regno = subreg_regno (x);
	  unsigned int inner_endregno
	    = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
			     ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

	  return endregno > inner_regno && regno < inner_endregno;

      if (&SET_DEST (x) != loc
	  /* Note setting a SUBREG counts as referring to the REG it is in for
	     a pseudo but not for hard registers since we can
	     treat each word individually.  */
	  && ((GET_CODE (SET_DEST (x)) == SUBREG
	       && loc != &SUBREG_REG (SET_DEST (x))
	       && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
	       && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
	       && refers_to_regno_p (regno, endregno,
				     SUBREG_REG (SET_DEST (x)), loc))
	      || (GET_CODE (SET_DEST (x)) != REG
		  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))

      if (code == CLOBBER || loc == &SET_SRC (x))

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
	  if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
      else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (loc != &XVECEXP (x, i, j)
		&& refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

reg_overlap_mentioned_p (x, in)

  unsigned int regno, endregno;

  /* Overly conservative.  */
  if (GET_CODE (x) == STRICT_LOW_PART)

  /* If either argument is a constant, then modifying X can not affect IN.  */
  if (CONSTANT_P (x) || CONSTANT_P (in))

  switch (GET_CODE (x))

      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
	regno = subreg_regno (x);

      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
			  ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx *) 0);

	if (GET_CODE (in) == MEM)

	fmt = GET_RTX_FORMAT (GET_CODE (in));
	for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
	  if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))

      return reg_mentioned_p (x, in);

	/* If any register in here refers to it we return true.  */
	for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
	  if (XEXP (XVECEXP (x, 0, i), 0) != 0
	      && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
/* Return the last value to which REG was set prior to INSN.  If we can't
   find it easily, return 0.

   We only return a REG, SUBREG, or constant because it is too hard to
   check if a MEM remains unchanged.  */

reg_set_last (x, insn)

  rtx orig_insn = insn;

  /* Scan backwards until reg_set_last_1 changed one of the above flags.
     Stop when we reach a label or X is a hard reg and we reach a
     CALL_INSN (if reg_set_last_last_regno is a hard reg).

     If we find a set of X, ensure that its SET_SRC remains unchanged.  */

  /* We compare with <= here, because reg_set_last_last_regno
     is actually the number of the first reg *not* in X.  */
       insn && GET_CODE (insn) != CODE_LABEL
	 && ! (GET_CODE (insn) == CALL_INSN
	       && REGNO (x) <= FIRST_PSEUDO_REGISTER);
       insn = PREV_INSN (insn))

      rtx set = set_of (x, insn);
      /* OK, this insn modifies our register.  See if we understand it.  */

	  if (GET_CODE (set) != SET || SET_DEST (set) != x)
	  last_value = SET_SRC (x);
	  if (CONSTANT_P (last_value)
	      || ((GET_CODE (last_value) == REG
		   || GET_CODE (last_value) == SUBREG)
		  && ! reg_set_between_p (last_value,
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

note_stores (x, fun, data)
     void (*fun) PARAMS ((rtx, rtx, void *));

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
	      && (GET_CODE (SUBREG_REG (dest)) != REG
		  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
	     || GET_CODE (dest) == ZERO_EXTRACT
	     || GET_CODE (dest) == SIGN_EXTRACT
	     || GET_CODE (dest) == STRICT_LOW_PART)
	dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
	 each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
	  for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
	    if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
	      (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);

	(*fun) (dest, x, data);

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
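
/* Illustrative sketch (an assumption, not original code): a minimal
   note_stores callback that records whether any hard register is written.
   mark_hard_reg_store and the local FOUND flag are hypothetical names.  */
#if 0
static void
mark_hard_reg_store (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  int *found = (int *) data;

  if (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    *found = 1;
}

  /* Typical call site, given some insn INSN:  */
  int found = 0;
  note_stores (PATTERN (insn), mark_hard_reg_store, &found);
#endif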
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

note_uses (pbody, fun, data)
     void (*fun) PARAMS ((rtx *, void *));

  switch (GET_CODE (body))

      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	note_uses (&XVECEXP (body, 0, i), fun, data);

      (*fun) (&XEXP (body, 0), data);

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
	(*fun) (&ASM_OPERANDS_INPUT (body, i), data);

      (*fun) (&TRAP_CONDITION (body), data);

      (*fun) (&XEXP (body, 0), data);

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
	(*fun) (&XVECEXP (body, 0, i), data);

      if (GET_CODE (XEXP (body, 0)) == MEM)
	(*fun) (&XEXP (XEXP (body, 0), 0), data);

	rtx dest = SET_DEST (body);

	/* For sets we replace everything in the source, plus registers in
	   the memory expression of the store and the operands of a
	   ZERO_EXTRACT.  */
	(*fun) (&SET_SRC (body), data);

	if (GET_CODE (dest) == ZERO_EXTRACT)
	    (*fun) (&XEXP (dest, 1), data);
	    (*fun) (&XEXP (dest, 2), data);

	while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
	  dest = XEXP (dest, 0);

	if (GET_CODE (dest) == MEM)
	  (*fun) (&XEXP (dest, 0), data);

      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG,
   ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

dead_or_set_p (insn, x)

  unsigned int regno, last_regno;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)

  if (GET_CODE (x) != REG)

  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
		: regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

dead_or_set_regno_p (insn, test_regno)
     unsigned int test_regno;

  unsigned int regno, endregno;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))

  if (GET_CODE (insn) == CALL_INSN
      && find_regno_fusage (insn, CLOBBER, test_regno))

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
      rtx dest = SET_DEST (pattern);

      /* A value is totally replaced if it is the destination or the
	 destination is a SUBREG of REGNO that does not change the number of
	 words in it.  */
      if (GET_CODE (dest) == SUBREG
	  && (((GET_MODE_SIZE (GET_MODE (dest))
		+ UNITS_PER_WORD - 1) / UNITS_PER_WORD)
	      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
		   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
	dest = SUBREG_REG (dest);

      if (GET_CODE (dest) != REG)

      regno = REGNO (dest);
      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
		  : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));

      return (test_regno >= regno && test_regno < endregno);
  else if (GET_CODE (pattern) == PARALLEL)
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
	  rtx body = XVECEXP (pattern, 0, i);

	  if (GET_CODE (body) == COND_EXEC)
	    body = COND_EXEC_CODE (body);

	  if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
	      rtx dest = SET_DEST (body);

	      if (GET_CODE (dest) == SUBREG
		  && (((GET_MODE_SIZE (GET_MODE (dest))
			+ UNITS_PER_WORD - 1) / UNITS_PER_WORD)
		      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
			   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
		dest = SUBREG_REG (dest);

	      if (GET_CODE (dest) != REG)

	      regno = REGNO (dest);
	      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
			  : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));

	      if (test_regno >= regno && test_regno < endregno)
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

find_reg_note (insn, kind, datum)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
	&& (datum == 0 || datum == XEXP (link, 0)))
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

find_regno_note (insn, kind, regno)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
	/* Verify that it is a register, so that scratch and MEM won't cause a
	   problem here.  */
	&& GET_CODE (XEXP (link, 0)) == REG
	&& REGNO (XEXP (link, 0)) <= regno
	&& ((REGNO (XEXP (link, 0))
	     + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
		: HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
				    GET_MODE (XEXP (link, 0)))))
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

find_reg_equal_equiv_note (insn)

  if (single_set (insn) == 0)
  else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)

  return find_reg_note (insn, REG_EQUAL, NULL_RTX);
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_reg_fusage (insn, code, datum)

  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (GET_CODE (insn) != CALL_INSN)

  if (GET_CODE (datum) != REG)

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
	   link;
	   link = XEXP (link, 1))
	if (GET_CODE (XEXP (link, 0)) == code
	    && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))

      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
	 to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
	  unsigned int end_regno
	    = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));

	  for (i = regno; i < end_regno; i++)
	    if (find_regno_fusage (insn, code, i))
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_regno_fusage (insn, code, regno)

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || GET_CODE (insn) != CALL_INSN)

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      unsigned int regnote;

      if (GET_CODE (op = XEXP (link, 0)) == code
	  && GET_CODE (reg = XEXP (op, 0)) == REG
	  && (regnote = REGNO (reg)) <= regno
	  && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
/* Return true if INSN is a call to a pure function.  */

  if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      if (GET_CODE (u = XEXP (link, 0)) == USE
	  && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
	  && GET_CODE (XEXP (m, 0)) == SCRATCH)
/* Remove register note NOTE from the REG_NOTES of INSN.  */

remove_note (insn, note)

  if (note == NULL_RTX)

  if (REG_NOTES (insn) == note)
      REG_NOTES (insn) = XEXP (note, 1);

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
	XEXP (link, 1) = XEXP (note, 1);
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

in_expr_list_p (listp, node)

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

remove_node_from_expr_list (node, listp)

  rtx prev = NULL_RTX;

      if (node == XEXP (temp, 0))
	  /* Splice the node out of the list.  */
	    XEXP (prev, 1) = XEXP (temp, 1);
	    *listp = XEXP (temp, 1);

      temp = XEXP (temp, 1);
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

  code = GET_CODE (x);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (volatile_insn_p (XEXP (x, i)))
	else if (fmt[i] == 'E')
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (volatile_insn_p (XVECEXP (x, i, j)))
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */

  code = GET_CODE (x);

    case UNSPEC_VOLATILE:

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (volatile_refs_p (XEXP (x, i)))
	else if (fmt[i] == 'E')
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (volatile_refs_p (XVECEXP (x, i, j)))
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

  code = GET_CODE (x);

      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
	 when some combination can't be done.  If we see one, don't think
	 that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	if (side_effects_p (XEXP (x, i)))
	else if (fmt[i] == 'E')
	    for (j = 0; j < XVECLEN (x, i); j++)
	      if (side_effects_p (XVECEXP (x, i, j)))
/* Return nonzero if evaluating rtx X might cause a trap.  */

  code = GET_CODE (x);

      /* Handle these cases quickly.  */

    case UNSPEC_VOLATILE:

      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* Division by a non-constant might trap.  */
      if (HONOR_SNANS (GET_MODE (x)))
      if (! CONSTANT_P (XEXP (x, 1))
	  || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	      && flag_trapping_math))
      /* This was const0_rtx, but by not using that,
	 we can link this file into other programs.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)

      /* An EXPR_LIST is used to represent a function call.  This
	 certainly may trap.  */

      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
      /* ??? There is no machine independent way to check for tests that trap
	 when COMPARE is used, though many targets do make this distinction.
	 For instance, sparc uses CCFPE for compares which generate exceptions
	 and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
      /* But often the compare has some CC mode, so check operand
	 modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
	  || HONOR_NANS (GET_MODE (XEXP (x, 1))))

      if (HONOR_SNANS (GET_MODE (x)))
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
	  || HONOR_SNANS (GET_MODE (XEXP (x, 1))))

      /* These operations don't trap even with floating point.  */

      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
	  && flag_trapping_math)

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (may_trap_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
	  for (j = 0; j < XVECLEN (x, i); j++)
	    if (may_trap_p (XVECEXP (x, i, j)))
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

inequality_comparisons_p (x)

  enum rtx_code code = GET_CODE (x);

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
      if (inequality_comparisons_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
	  for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	    if (inequality_comparisons_p (XVECEXP (x, i, j)))
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

replace_rtx (x, from, to)

  /* The following prevents loops when we change a MEM in
     a CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)

  /* Allow this function to make replacements in EXPR_LISTs.  */

  if (GET_CODE (x) == SUBREG)
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
	  x = simplify_subreg (GET_MODE (x), new,
			       GET_MODE (SUBREG_REG (x)),
	SUBREG_REG (x) = new;

  else if (GET_CODE (x) == ZERO_EXTEND)
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
	  x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
					new, GET_MODE (XEXP (x, 0)));

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
    else if (fmt[i] == 'E')
      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
/* Throughout the rtx X, replace many registers according to REG_MAP.
   Return the replacement for X (which may be X with altered contents).
   REG_MAP[R] is the replacement for register R, or 0 for don't replace.
   NREGS is the length of REG_MAP; regs >= NREGS are not mapped.

   We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
   should not be mapped to pseudos or vice versa since validate_change
   is not called.

   If REPLACE_DEST is 1, replacements are also done in destinations;
   otherwise, only sources are replaced.  */

replace_regs (x, reg_map, nregs, replace_dest)

  code = GET_CODE (x);

      /* Verify that the register has an entry before trying to access it.  */
      if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
	  /* SUBREGs can't be shared.  Always return a copy to ensure that if
	     this replacement occurs more than once then each instance will
	     get distinct rtx.  */
	  if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
	    return copy_rtx (reg_map[REGNO (x)]);
	  return reg_map[REGNO (x)];

      /* Prevent making nested SUBREGs.  */
      if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
	  && reg_map[REGNO (SUBREG_REG (x))] != 0
	  && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
	  rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
	  return simplify_gen_subreg (GET_MODE (x), map_val,
				      GET_MODE (SUBREG_REG (x)),

	SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
      else if (GET_CODE (SET_DEST (x)) == MEM
	       || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
	/* Even if we are not to replace destinations, replace register if it
	   is CONTAINED in destination (destination is memory or
	   STRICT_LOW_PART).  */
	XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
	/* Similarly, for ZERO_EXTRACT we replace all operands.  */

      SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
    else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
					    nregs, replace_dest);
/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

computed_jump_p_1 (x)

  enum rtx_code code = GET_CODE (x);

      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
		&& CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

      return (computed_jump_p_1 (XEXP (x, 1))
	      || computed_jump_p_1 (XEXP (x, 2)));

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
	  && computed_jump_p_1 (XEXP (x, i)))
      else if (fmt[i] == 'E')
	for (j = 0; j < XVECLEN (x, i); j++)
	  if (computed_jump_p_1 (XVECEXP (x, i, j)))
/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

computed_jump_p (insn)

  if (GET_CODE (insn) == JUMP_INSN)
      rtx pat = PATTERN (insn);

      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
      else if (GET_CODE (pat) == PARALLEL)
	  int len = XVECLEN (pat, 0);
	  int has_use_labelref = 0;

	  for (i = len - 1; i >= 0; i--)
	    if (GET_CODE (XVECEXP (pat, 0, i)) == USE
		&& (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
		    == LABEL_REF))
	      has_use_labelref = 1;

	  if (! has_use_labelref)
	    for (i = len - 1; i >= 0; i--)
	      if (GET_CODE (XVECEXP (pat, 0, i)) == SET
		  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
		  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))

      else if (GET_CODE (pat) == SET
	       && SET_DEST (pat) == pc_rtx
	       && computed_jump_p_1 (SET_SRC (pat)))
/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   non-zero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

for_each_rtx (x, f, data)

  result = (*f) (x, data);
    /* Do not traverse sub-expressions.  */
  else if (result != 0)
    /* Stop the traversal.  */

  /* There are no sub-expressions.  */

  length = GET_RTX_LENGTH (GET_CODE (*x));
  format = GET_RTX_FORMAT (GET_CODE (*x));

  for (i = 0; i < length; ++i)
	  result = for_each_rtx (&XEXP (*x, i), f, data);

	  if (XVEC (*x, i) != 0)
	      for (j = 0; j < XVECLEN (*x, i); ++j)
		  result = for_each_rtx (&XVECEXP (*x, i, j), f, data);

	  /* Nothing to do.  */
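
/* Illustrative sketch (hypothetical, not original code): a for_each_rtx
   callback that counts MEM subexpressions.  count_mems_1 and the local
   n_mems are invented names; INSN is assumed to be in scope.  */
#if 0
static int
count_mems_1 (rtx *loc, void *data)
{
  int *n_mems = (int *) data;

  if (*loc != 0 && GET_CODE (*loc) == MEM)
    (*n_mems)++;
  return 0;	/* Keep walking the whole expression.  */
}

  int n_mems = 0;
  for_each_rtx (&PATTERN (insn), count_mems_1, &n_mems);
#endif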
/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

regno_use_in (regno, x)

  if (GET_CODE (x) == REG && REGNO (x) == regno)

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      if ((tem = regno_use_in (regno, XEXP (x, i))))
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the first operand
   and positive values for the second operand.  */

commutative_operand_precedence (op)

  /* Constants always come as the second operand.  Prefer "nice" constants.  */
  if (GET_CODE (op) == CONST_INT)
  if (GET_CODE (op) == CONST_DOUBLE)
  if (CONSTANT_P (op))

  /* SUBREGs of objects should come second.  */
  if (GET_CODE (op) == SUBREG
      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')

  /* If only one operand is a `neg', `not',
     `mult', `plus', or `minus' expression, it will be the first
     operand.  */
  if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
      || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
      || GET_CODE (op) == MINUS)

  /* Complex expressions should be the first, so decrease priority
     of objects.  */
  if (GET_RTX_CLASS (GET_CODE (op)) == 'o')

/* Return 1 iff it is necessary to swap operands of a commutative operation
   in order to canonicalize the expression.  */

swap_commutative_operands_p (x, y)

  return (commutative_operand_precedence (x)
	  < commutative_operand_precedence (y));
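
/* Illustrative sketch (not original code): how a caller typically uses
   swap_commutative_operands_p to canonicalize a commutative pair.  The
   locals op0 and op1 are hypothetical.  */
#if 0
  if (swap_commutative_operands_p (op0, op1))
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
    }
#endif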
/* Return 1 if X is an autoincrement side effect and the register is
   not the stack pointer.  */

  switch (GET_CODE (x))

      /* There are no REG_INC notes for SP.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
/* Return 1 if the sequence of instructions beginning with FROM and up
   to and including TO is safe to move.  If NEW_TO is non-NULL, and
   the sequence is not already safe to move, but can be easily
   extended to a sequence which is safe, then NEW_TO will point to the
   end of the extended sequence.

   For now, this function only checks that the region contains whole
   exception regions, but it could be extended to check additional
   conditions as well.  */

insns_safe_to_move_p (from, to, new_to)

  int eh_region_count = 0;

      /* By default, assume the end of the region will be what was
	 suggested.  */

      if (GET_CODE (r) == NOTE)
	  switch (NOTE_LINE_NUMBER (r))
	    case NOTE_INSN_EH_REGION_BEG:

	    case NOTE_INSN_EH_REGION_END:
	      if (eh_region_count == 0)
		/* This sequence of instructions contains the end of
		   an exception region, but not the beginning.  Moving
		   it will cause chaos.  */

	  /* If we've passed TO, and we see a non-note instruction, we
	     can't extend the sequence to a movable sequence.  */

	  /* It's OK to move the sequence if there were matched sets of
	     exception region notes.  */
	  return eh_region_count == 0;

      /* It's OK to move the sequence if there were matched sets of
	 exception region notes.  */
      if (past_to_p && eh_region_count == 0)

      /* Go to the next instruction.  */
/* Return non-zero if IN contains a piece of rtl that has the address LOC.  */

loc_mentioned_in_p (loc, in)

  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (loc == &in->fld[i].rtx)

	if (loc_mentioned_in_p (loc, XEXP (in, i)))
      else if (fmt[i] == 'E')
	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	  if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (x)
     rtx x;
{
  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
  enum machine_mode mode = GET_MODE (x);
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    if ((SUBREG_BYTE (x) % UNITS_PER_WORD
	 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
	&& (SUBREG_BYTE (x) % UNITS_PER_WORD
	    || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
      abort ();

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
	    - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
  else
    word = SUBREG_BYTE (x) / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
	    - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
  else
    byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}
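/* Worked example (illustrative): on a 32-bit target with UNITS_PER_WORD == 4
   and both BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN clear,
   (subreg:SI (reg:DI R) 4) gives word = 4 / 4 = 1 and byte = 4 % 4 = 0,
   so the subreg starts at bit 32 of R.  With both flags set, the same
   subreg gives word = (8 - 8) / 4 = 0 and byte = (8 - 8) % 4 = 0, i.e.
   bit 0, since byte offset 4 then names the low-order half of R.  */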
/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */

unsigned int
subreg_regno_offset (xregno, xmode, offset, ymode)
     unsigned int xregno;
     enum machine_mode xmode;
     unsigned int offset;
     enum machine_mode ymode;
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  if (xregno >= FIRST_PSEUDO_REGISTER)
    abort ();

  nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
  nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);

  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;

  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  if (mode_multiple == 0)
    abort ();

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}
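/* Worked example (illustrative, on a hypothetical 32-bit little-endian
   target where HARD_REGNO_NREGS yields 2 registers for DImode and 1 for
   SImode):  with xmode = DImode, ymode = SImode and offset = 4 we get
   nregs_xmode = 2, nregs_ymode = 1, mode_multiple = 8 / 4 = 2,
   y_offset = 4 / 4 = 1 and nregs_multiple = 2 / 1 = 2, so the result is
   (1 / (2 / 2)) * 1 = 1:  the subreg lives in hard register xregno + 1.  */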
/* Return the final regno that a subreg expression refers to.  */

unsigned int
subreg_regno (x)
     rtx x;
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
				     GET_MODE (subreg),
				     SUBREG_BYTE (x),
				     GET_MODE (x));
  return ret;
}
struct parms_set_data
{
  int nregs;
  HARD_REG_SET regs;
};

/* Helper function for noticing stores to parameter registers.  */

static void
parms_set (x, pat, data)
     rtx x, pat ATTRIBUTE_UNUSED;
     void *data;
{
  struct parms_set_data *d = data;

  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}
/* Look backward for first parameter to be loaded.
   Do not skip BOUNDARY.  */

rtx
find_first_parameter_load (call_insn, boundary)
     rtx call_insn, boundary;
{
  struct parms_set_data parm;
  rtx p, before;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
      {
	if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
	  abort ();

	/* We only care about registers which can hold function
	   arguments.  */
	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
	  continue;

	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
	parm.nregs++;
      }
  before = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
	 another.  Stop in that case.  */
      if (GET_CODE (before) == CALL_INSN)
	break;

      /* Our caller must either ensure that we will find all sets
	 (in case code has not been optimized yet), or take care
	 of possible labels by setting BOUNDARY to the preceding
	 CODE_LABEL.  */
      if (GET_CODE (before) == CODE_LABEL)
	{
	  if (before != boundary)
	    abort ();
	  break;
	}

      if (INSN_P (before))
	note_stores (PATTERN (before), parms_set, &parm);
    }
  return before;
}
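#if 0
/* Illustrative usage sketch (hypothetical helper, block not compiled):
   a pass that wants to place SEQ "before the call" including the insns
   that load the call's argument registers can use the insn returned by
   find_first_parameter_load as the emission point.  BOUNDARY marks how
   far back it is safe to search, e.g. the head of the basic block.  */
static void
example_emit_before_call_args (call_insn, boundary, seq)
     rtx call_insn, boundary, seq;
{
  emit_insn_before (seq, find_first_parameter_load (call_insn, boundary));
}
#endif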
/* Return true if we should avoid inserting code between INSN and the
   preceding call instruction.  */

bool
keep_with_call_p (insn)
     rtx insn;
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (GET_CODE (SET_DEST (set)) == REG
	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
	  && fixed_regs[REGNO (SET_DEST (set))]
	  && general_operand (SET_SRC (set), VOIDmode))
	return true;
      if (GET_CODE (SET_SRC (set)) == REG
	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
	  && GET_CODE (SET_DEST (set)) == REG
	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
	return true;
      /* There may be a stack pop just after the call and before the store
	 of the return register.  Search for the actual store when deciding
	 if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
	{
	  rtx i2 = next_nonnote_insn (insn);
	  if (i2 && keep_with_call_p (i2))
	    return true;
	}
    }
  return false;
}
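#if 0
/* Illustrative usage sketch (hypothetical helper, block not compiled):
   code that wants to insert something "after a call" can skip forward past
   the insns that must stay glued to the call (return-value copies, stack
   pops, sets of fixed registers) before choosing an insertion point.  */
static rtx
example_insertion_point_after_call (call_insn)
     rtx call_insn;
{
  rtx insn = NEXT_INSN (call_insn);

  while (insn && keep_with_call_p (insn))
    insn = NEXT_INSN (insn);
  return insn;
}
#endif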
/* Return true when store to register X can be hoisted to the place
   with LIVE registers (can be NULL).  Value VAL contains destination
   whose value will be used.  */

static bool
hoist_test_store (x, val, live)
     rtx x, val;
     regset live;
{
  if (GET_CODE (x) == SCRATCH)
    return true;

  if (rtx_equal_p (x, val))
    return true;

  /* Allow a subreg of X in case it is not writing just part of a multireg
     pseudo.  Otherwise we would need to teach all users to care about the
     hoisted store too.  The caller may represent that case by specifying
     the whole subreg as VAL.  */

  if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
    {
      if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
	  && GET_MODE_BITSIZE (GET_MODE (x)) <
	  GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
	return false;
      return true;
    }
  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Anything except a register store is not hoistable.  This includes the
     partial stores to registers.  */

  if (!REG_P (x))
    return false;

  /* Pseudo registers can always be replaced by another pseudo to avoid
     the side effect; for hard registers we must ensure that they are dead.
     Eventually we may want to add code to try turn pseudos to hards, but
     it is unlikely useful.  */

  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int n = HARD_REGNO_NREGS (regno, GET_MODE (x));

      if (!live)
	return false;
      if (REGNO_REG_SET_P (live, regno))
	return false;
      while (--n > 0)
	if (REGNO_REG_SET_P (live, regno + n))
	  return false;
    }

  return true;
}
/* Return true if INSN can be hoisted to place with LIVE hard registers
   (LIVE can be NULL when unknown).  VAL is expected to be stored by the insn
   and used by the hoisting pass.  */

bool
can_hoist_insn_p (insn, val, live)
     rtx insn, val;
     regset live;
{
  rtx pat = PATTERN (insn);
  int i;

  /* It is probably not worth the complexity to handle multiple
     set stores.  */
  if (!single_set (insn))
    return false;
  /* We can move CALL_INSN, but we need to check that all caller clobbered
     regs are dead.  */
  if (GET_CODE (insn) == CALL_INSN)
    return false;
  /* In future we will handle hoisting of libcall sequences, but
     give up for now.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return false;
  switch (GET_CODE (pat))
    {
    case SET:
      if (!hoist_test_store (SET_DEST (pat), val, live))
	return false;
      break;
    case USE:
      /* USEs have tricky semantics, so do not move them.  */
      return false;
    case CLOBBER:
      if (!hoist_test_store (XEXP (pat, 0), val, live))
	return false;
      break;
    case PARALLEL:
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx x = XVECEXP (pat, 0, i);

	  switch (GET_CODE (x))
	    {
	    case SET:
	      if (!hoist_test_store (SET_DEST (x), val, live))
		return false;
	      break;
	    case USE:
	      /* We need to fix callers to really ensure availability
		 of all values the insn uses, but for now it is safe to
		 prohibit hoisting of any insn having such hidden uses.  */
	      return false;
	    case CLOBBER:
	      if (!hoist_test_store (SET_DEST (x), val, live))
		return false;
	      break;
	    default:
	      break;
	    }
	}
      break;
    default:
      abort ();
    }

  return true;
}
/* Update store after hoisting - replace all stores to pseudo registers
   by new ones to avoid clobbering of values, except for the store to VAL,
   which will be updated to NEW.  */

static void
hoist_update_store (insn, xp, val, new)
     rtx insn, *xp, val, new;
{
  rtx x = *xp;

  if (GET_CODE (x) == SCRATCH)
    return;

  if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
    validate_change (insn, xp,
		     simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
					  SUBREG_BYTE (x)), 1);
  if (rtx_equal_p (x, val))
    {
      validate_change (insn, xp, new, 1);
      return;
    }
  if (GET_CODE (x) == SUBREG)
    {
      xp = &SUBREG_REG (x);
      x = *xp;
    }

  if (!REG_P (x))
    abort ();

  /* We've verified that hard registers are dead, so we may keep the side
     effect.  Otherwise replace it by a new pseudo.  */
  if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
    validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);

  REG_NOTES (insn)
    = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
}
/* Create a copy of INSN after AFTER, replacing the store of VAL by a store
   to NEW and every other side effect on a pseudo register by a side effect
   on a fresh pseudo register.  */

rtx
hoist_insn_after (insn, after, val, new)
     rtx insn, after, val, new;
{
  rtx pat;
  int i;
  rtx note;

  insn = emit_copy_of_insn_after (insn, after);
  pat = PATTERN (insn);

  /* Remove REG_UNUSED notes as we will re-emit them.  */
  while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
    remove_note (insn, note);

  /* To get this working, callers must make sure to move everything
     referenced by REG_EQUAL/REG_EQUIV notes too.  Let's remove them;
     it is probably easiest.  */
  while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
    remove_note (insn, note);
  while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
    remove_note (insn, note);

  /* Remove REG_DEAD notes as they might not be valid anymore in case
     we create redundancy.  */
  while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
    remove_note (insn, note);

  switch (GET_CODE (pat))
    {
    case SET:
      hoist_update_store (insn, &SET_DEST (pat), val, new);
      break;
    case USE:
      break;
    case CLOBBER:
      hoist_update_store (insn, &XEXP (pat, 0), val, new);
      break;
    case PARALLEL:
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx x = XVECEXP (pat, 0, i);

	  switch (GET_CODE (x))
	    {
	    case SET:
	      hoist_update_store (insn, &SET_DEST (x), val, new);
	      break;
	    case USE:
	      break;
	    case CLOBBER:
	      hoist_update_store (insn, &SET_DEST (x), val, new);
	      break;
	    default:
	      break;
	    }
	}
      break;
    default:
      abort ();
    }

  if (!apply_change_group ())
    abort ();

  return insn;
}
/* Hoist INSN to the edge E, replacing the store of VAL by a store to NEW
   as in hoist_insn_after.  Return the copy placed on the edge.  */

rtx
hoist_insn_to_edge (insn, e, val, new)
     rtx insn, val, new;
     edge e;
{
  rtx new_insn;

  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
    abort ();

  /* Do not use emit_insn_on_edge as we want to preserve notes and similar
     stuff.  We also emit CALL_INSNs and friends.  */
  if (e->insns == NULL_RTX)
    {
      start_sequence ();
      emit_note (NULL, NOTE_INSN_DELETED);
    }
  else
    push_to_sequence (e->insns);

  new_insn = hoist_insn_after (insn, get_last_insn (), val, new);

  e->insns = get_insns ();
  end_sequence ();
  return new_insn;
}
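#if 0
/* Illustrative sketch of the expected protocol for the hoisting helpers
   above (hypothetical helper, block not compiled): first ask whether INSN
   may be hoisted to a place where the hard registers in LIVE are live,
   then place a copy on edge E, redirecting the store of VAL into a fresh
   pseudo that the caller will use at the target location.  Returns the
   new pseudo, or NULL_RTX when hoisting is not possible.  */
static rtx
example_hoist_value_to_edge (insn, e, val, live)
     rtx insn, val;
     edge e;
     regset live;
{
  rtx new_reg;

  if (!can_hoist_insn_p (insn, val, live))
    return NULL_RTX;
  new_reg = gen_reg_rtx (GET_MODE (val));
  hoist_insn_to_edge (insn, e, val, new_reg);
  return new_reg;
}
#endif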