/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
static void set_of_1 PARAMS ((rtx, rtx, void *));
static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
static int computed_jump_p_1 PARAMS ((rtx));
static void parms_set PARAMS ((rtx, rtx, void *));
static bool hoist_test_store PARAMS ((rtx, rtx, regset));
static void hoist_update_store PARAMS ((rtx, rtx *, rtx, rtx));
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

  RTX_CODE code = GET_CODE (x);

      return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
          || RTX_UNCHANGING_P (x))

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_unstable_p (XEXP (x, i)))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

rtx_varies_p (x, for_alias)

  RTX_CODE code = GET_CODE (x);

      return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

      /* This will resolve to some offset from the frame pointer.  */

      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */

      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
/* Return 0 if the use of X as an address in a MEM can cause a trap.  */

rtx_addr_can_trap_p (x)

  enum rtx_code code = GET_CODE (x);

      return SYMBOL_REF_WEAK (x);

      /* This will resolve to some offset from the frame pointer.  */

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* An address is assumed not to trap if it is an address that can't
         trap plus a constant integer or it is the pic register plus a
         constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
                 && GET_CODE (XEXP (x, 1)) == CONST_INT)
                || (XEXP (x, 0) == pic_offset_table_rtx
                    && CONSTANT_P (XEXP (x, 1))));

      return rtx_addr_can_trap_p (XEXP (x, 1));

      return rtx_addr_can_trap_p (XEXP (x, 0));

  /* If it isn't one of the cases above, it can cause a trap.  */
/* Return true if X is an address that is known to not be zero.  */

nonzero_address_p (x)

  enum rtx_code code = GET_CODE (x);

      return !SYMBOL_REF_WEAK (x);

      /* This will resolve to some offset from the frame pointer.  */

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return nonzero_address_p (XEXP (x, 0));

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)

          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))

          return nonzero_address_p (XEXP (x, 0));

      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))

      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return nonzero_address_p (XEXP (x, 0));

      /* Similarly.  Further, the offset is always positive.  */
      return nonzero_address_p (XEXP (x, 0));

      return nonzero_address_p (XEXP (x, 1));

  /* If it isn't one of the cases above, might be zero.  */
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

rtx_addr_varies_p (x, for_alias)

    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_addr_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

  if (GET_CODE (x) == CONST)

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

get_related_value (x)

  if (GET_CODE (x) != CONST)

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)

  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
/* Given a tablejump insn INSN, return the RTL expression for the offset
   into the jump table.  If the offset cannot be determined, then return
   NULL_RTX.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the offset was found.  */

get_jump_table_offset (insn, earliest)

  if (GET_CODE (insn) != JUMP_INSN
      || ! (label = JUMP_LABEL (insn))
      || ! (table = NEXT_INSN (label))
      || GET_CODE (table) != JUMP_INSN
      || (GET_CODE (PATTERN (table)) != ADDR_VEC
          && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
      || ! (set = single_set (insn)))

  /* Some targets (eg, ARM) emit a tablejump that also
     contains the out-of-range target.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && GET_CODE (XEXP (x, 2)) == LABEL_REF)

  /* Search backwards and locate the expression stored in X.  */
  for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))

  /* If X is an expression using a relative address then strip
     off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
     or the jump table label.  */
  if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))

      for (i = 0; i < 2; i++)

          if (y == pc_rtx || y == pic_offset_table_rtx)

          for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
               old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))

          if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))

      for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
           old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))

  /* Strip off any sign or zero extension.  */
  if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)

      for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
           old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))

  /* If X isn't a MEM then this isn't a tablejump we understand.  */
  if (GET_CODE (x) != MEM)

  /* Strip off the MEM.  */

  for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))

  /* If X isn't a PLUS then this isn't a tablejump we understand.  */
  if (GET_CODE (x) != PLUS)

  /* At this point we should have an expression representing the jump table
     plus an offset.  Examine each operand in order to determine which one
     represents the jump table.  Knowing that tells us that the other operand
     must represent the offset.  */
  for (i = 0; i < 2; i++)

      for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
           old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))

      if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
          && reg_mentioned_p (label, y))

  /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM.  */
  if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
    for (i = 0; i < 2; i++)
      if (XEXP (x, i) == pic_offset_table_rtx)

  /* Return the RTL expression representing the offset.  */
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

global_reg_mentioned_p_1 (loc, data)
     void *data ATTRIBUTE_UNUSED;

  switch (GET_CODE (x))

      if (GET_CODE (SUBREG_REG (x)) == REG)

          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])

      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])

      /* A non-constant call might use a global register.  */

/* Returns nonzero if X mentions a global register.  */

global_reg_mentioned_p (x)

  if (GET_CODE (x) == CALL_INSN)

      if (! CONST_OR_PURE_CALL_P (x))

      x = CALL_INSN_FUNCTION_USAGE (x);

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

count_occurrences (x, find, count_dest)

  const char *format_ptr;

      if (GET_CODE (find) == MEM && rtx_equal_p (x, find))

      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
      switch (*format_ptr++)

          count += count_occurrences (XEXP (x, i), find, count_dest);

          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

reg_mentioned_p (reg, in)

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

      /* Compare registers by number.  */
      return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */

      return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);

      /* These are kept unique for a given value.  */

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)

          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))

      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

no_labels_between_p (beg, end)

  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == CODE_LABEL)

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no JUMP_INSN insn.  */

no_jumps_between_p (beg, end)

  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == JUMP_INSN)
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_used_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (GET_CODE (insn) == CALL_INSN
                && (find_reg_fusage (insn, USE, reg)
                    || find_reg_fusage (insn, CLOBBER, reg)))))
      return 1;
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

reg_referenced_p (x, body)

  switch (GET_CODE (body))

      if (reg_overlap_mentioned_p (x, SET_SRC (body)))

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && GET_CODE (SET_DEST (body)) != REG
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))

      return reg_overlap_mentioned_p (x, body);

      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))

      if (GET_CODE (XEXP (body, 0)) == MEM)
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))

      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
      return reg_referenced_p (x, COND_EXEC_CODE (body));
/* Nonzero if register REG is referenced in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  Sets of REG do
   not count.  */

reg_referenced_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_referenced_p (reg, PATTERN (insn))
            || (GET_CODE (insn) == CALL_INSN
                && find_reg_fusage (insn, USE, reg))))
      return 1;
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_set_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
/* Internals of reg_set_between_p.  */

reg_set_p (reg, insn)

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (GET_CODE (insn) == CALL_INSN
              /* We'd like to test call_used_regs here, but rtlanal.c can't
                 reference that variable due to its use in genattrtab.  So
                 we'll just be more conservative.

                 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
                 information holds all clobbered registers.  */
              && ((GET_CODE (reg) == REG
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER)
                  || GET_CODE (reg) == MEM
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
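
/* Illustrative usage (not part of the original file): a minimal sketch of the
   usual "does REG keep its value across this region?" query built from
   reg_set_p and reg_set_between_p.  REG, FROM and TO are hypothetical.  */
#if 0
static int
example_reg_unchanged_p (reg, from, to)
     rtx reg, from, to;
{
  /* REG keeps its value if neither endpoint insn nor any insn strictly
     between them sets or clobbers it.  */
  return ! reg_set_p (reg, from)
         && ! reg_set_between_p (reg, from, to)
         && ! reg_set_p (reg, to);
}
#endif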
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Do not
   consider non-registers one way or the other.  */

regs_set_between_p (x, start, end)

  enum rtx_code code = GET_CODE (x);

      return reg_set_between_p (x, start, end);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (regs_set_between_p (XVECEXP (x, i, j), start, end))
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

modified_between_p (x, start, end)

  enum rtx_code code = GET_CODE (x);

      if (RTX_UNCHANGING_P (x))
      if (modified_between_p (XEXP (x, 0), start, end))
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))

      return reg_set_between_p (x, start, end);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

modified_in_p (x, insn)

  enum rtx_code code = GET_CODE (x);

      if (RTX_UNCHANGING_P (x))
      if (modified_in_p (XEXP (x, 0), insn))
      if (memory_modified_in_insn_p (x, insn))

      return reg_set_p (x, insn);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
/* Return true if anything in insn X is (anti,output,true) dependent on
   anything in insn Y.  */

insn_dependent_p (x, y)

  if (! INSN_P (x) || ! INSN_P (y))

  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)

  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)

/* A helper routine for insn_dependent_p called through note_stores.  */

insn_dependent_p_1 (x, pat, data)
     rtx pat ATTRIBUTE_UNUSED;

  rtx *pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
/* Helper function for set_of.  */

set_of_1 (x, pat, data1)

  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */

  struct set_of_data data;
  data.found = NULL_RTX;

  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

single_set_2 (insn, pat)

  int set_verified = 1;

  if (GET_CODE (pat) == PARALLEL)

      for (i = 0; i < XVECLEN (pat, 0); i++)

          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))

              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect this is
                 the single set we are looking for and only when more
                 sets are found in the insn, we check them.  */

              if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                  && !side_effects_p (set))

                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

multiple_sets (insn)

  /* INSN must be an insn.  */
  if (! INSN_P (insn))

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)

      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)

            /* If we have already found a SET, then return now.  */

  /* Either zero or one SET.  */
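
/* Illustrative usage (not part of the original file): a sketch of the common
   pattern of extracting the one meaningful SET of an insn via the single_set
   macro (which dispatches to single_set_2 for PARALLELs).  INSN is
   hypothetical.  */
#if 0
static rtx
example_single_set_source (insn)
     rtx insn;
{
  /* single_set returns NULL_RTX when there is no lone live SET; note that
     multiple_sets counts SET rtxs literally, so it may still be nonzero for
     a PARALLEL whose extra SETs are dead.  */
  rtx set = single_set (insn);
  return set ? SET_SRC (set) : NULL_RTX;
}
#endif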
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (side_effects_p (src) || side_effects_p (dst))

  if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
    return rtx_equal_p (dst, src);

  if (dst == pc_rtx && src == pc_rtx)

  if (GET_CODE (dst) == SIGN_EXTRACT
      || GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)

      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))

      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);

  return (GET_CODE (src) == REG && GET_CODE (dst) == REG
          && REGNO (src) == REGNO (dst));
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))

  if (GET_CODE (pat) == SET && set_noop_p (pat))

  if (GET_CODE (pat) == PARALLEL)

      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)

          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
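
/* Illustrative usage (not part of the original file): a hedged sketch of the
   typical cleanup loop that removes no-op moves; delete_insn is the usual
   helper defined elsewhere in the compiler, assumed available here.  */
#if 0
static void
example_delete_noop_moves (first)
     rtx first;
{
  rtx insn;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && noop_move_p (insn))
      delete_insn (insn);
}
#endif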
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

find_last_value (x, pinsn, valid_to, allow_hwreg)

  for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
       p = PREV_INSN (p))

        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))

            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (GET_CODE (src) == REG
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

refers_to_regno_p (regno, endregno, x, loc)
     unsigned int regno, endregno;

  unsigned int x_regno;

  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */

  code = GET_CODE (x);

      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
                                    : 1));

      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (GET_CODE (SUBREG_REG (x)) == REG
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)

          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);

          return endregno > inner_regno && regno < inner_endregno;

      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (GET_CODE (SET_DEST (x)) != REG
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))

      if (code == CLOBBER || loc == &SET_SRC (x))

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && loc != &XEXP (x, i))

          if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))

      else if (fmt[i] == 'E')

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

reg_overlap_mentioned_p (x, in)

  unsigned int regno, endregno;

  /* Overly conservative.  */
  if (GET_CODE (x) == STRICT_LOW_PART)

  /* If either argument is a constant, then modifying X can not affect IN.  */
  if (CONSTANT_P (x) || CONSTANT_P (in))

  switch (GET_CODE (x))

      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);

      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx *) 0);

      if (GET_CODE (in) == MEM)

      fmt = GET_RTX_FORMAT (GET_CODE (in));
      for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
        if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))

      return reg_mentioned_p (x, in);

      /* If any register in here refers to it we return true.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        if (XEXP (XVECEXP (x, 0, i), 0) != 0
            && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
/* Return the last value to which REG was set prior to INSN.  If we can't
   find it easily, return 0.

   We only return a REG, SUBREG, or constant because it is too hard to
   check if a MEM remains unchanged.  */

reg_set_last (x, insn)

  rtx orig_insn = insn;

  /* Scan backwards until reg_set_last_1 changed one of the above flags.
     Stop when we reach a label or X is a hard reg and we reach a
     CALL_INSN (if reg_set_last_last_regno is a hard reg).

     If we find a set of X, ensure that its SET_SRC remains unchanged.  */

  /* We compare with <= here, because reg_set_last_last_regno
     is actually the number of the first reg *not* in X.  */
  for (;
       insn && GET_CODE (insn) != CODE_LABEL
       && ! (GET_CODE (insn) == CALL_INSN
             && REGNO (x) <= FIRST_PSEUDO_REGISTER);
       insn = PREV_INSN (insn))

        rtx set = set_of (x, insn);
        /* OK, this function modifies our register.  See if we understand it.  */

        if (GET_CODE (set) != SET || SET_DEST (set) != x)

        last_value = SET_SRC (set);
        if (CONSTANT_P (last_value)
            || ((GET_CODE (last_value) == REG
                 || GET_CODE (last_value) == SUBREG)
                && ! reg_set_between_p (last_value,
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

note_stores (x, fun, data)
     void (*fun) PARAMS ((rtx, rtx, void *));

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)

      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (GET_CODE (SUBREG_REG (dest)) != REG
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == SIGN_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)

          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);

        (*fun) (dest, x, data);

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
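
/* Illustrative usage (not part of the original file): a hedged sketch of a
   note_stores callback that records whether anything was stored into a MEM.
   The function and variable names are hypothetical.  */
#if 0
static void
example_record_mem_store (dest, setter, data)
     rtx dest;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  if (GET_CODE (dest) == MEM)
    *(int *) data = 1;
}

static int
example_insn_stores_to_mem_p (insn)
     rtx insn;
{
  int found = 0;

  note_stores (PATTERN (insn), example_record_mem_store, &found);
  return found;
}
#endif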
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

note_uses (pbody, fun, data)
     void (*fun) PARAMS ((rtx *, void *));

  switch (GET_CODE (body))

      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);

      (*fun) (&XEXP (body, 0), data);

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);

      (*fun) (&TRAP_CONDITION (body), data);

      (*fun) (&XEXP (body, 0), data);

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);

      if (GET_CODE (XEXP (body, 0)) == MEM)
        (*fun) (&XEXP (XEXP (body, 0), 0), data);

        rtx dest = SET_DEST (body);

        /* For sets we replace everything in the source plus registers in the
           memory expression of the store and the operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)

            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (GET_CODE (dest) == MEM)
          (*fun) (&XEXP (dest, 0), data);

      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG,
   ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

dead_or_set_p (insn, x)

  unsigned int regno, last_regno;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)

  if (GET_CODE (x) != REG)

  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

dead_or_set_regno_p (insn, test_regno)
     unsigned int test_regno;

  unsigned int regno, endregno;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))

  if (GET_CODE (insn) == CALL_INSN
      && find_regno_fusage (insn, CLOBBER, test_regno))

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)

      rtx dest = SET_DEST (pattern);

      /* A value is totally replaced if it is the destination or the
         destination is a SUBREG of REGNO that does not change the number of
         words.  */
      if (GET_CODE (dest) == SUBREG
          && (((GET_MODE_SIZE (GET_MODE (dest))
                + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
              == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
        dest = SUBREG_REG (dest);

      if (GET_CODE (dest) != REG)

      regno = REGNO (dest);
      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
                  : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));

      return (test_regno >= regno && test_regno < endregno);

  else if (GET_CODE (pattern) == PARALLEL)

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)

          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)

              rtx dest = SET_DEST (body);

              if (GET_CODE (dest) == SUBREG
                  && (((GET_MODE_SIZE (GET_MODE (dest))
                        + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                           + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
                dest = SUBREG_REG (dest);

              if (GET_CODE (dest) != REG)

              regno = REGNO (dest);
              endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
                          : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));

              if (test_regno >= regno && test_regno < endregno)
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

find_reg_note (insn, kind, datum)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        && (datum == 0 || datum == XEXP (link, 0)))
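
/* Illustrative usage (not part of the original file): a minimal sketch of
   fetching the value recorded by a REG_EQUAL note, if any.  INSN is
   hypothetical.  */
#if 0
static rtx
example_known_value (insn)
     rtx insn;
{
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  /* XEXP (note, 0) is the rtx the note claims the SET_DEST is equal to.  */
  return note ? XEXP (note, 0) : NULL_RTX;
}
#endif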
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

find_regno_note (insn, kind, regno)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && GET_CODE (XEXP (link, 0)) == REG
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
                                    GET_MODE (XEXP (link, 0)))))
            > regno))
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note; otherwise return 0.  */

find_reg_equal_equiv_note (insn)

  if (single_set (insn) == 0)

  else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)

  return find_reg_note (insn, REG_EQUAL, NULL_RTX);
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_reg_fusage (insn, code, datum)

  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (GET_CODE (insn) != CALL_INSN)

  if (GET_CODE (datum) != REG)

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))

      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)

          unsigned int end_regno
            = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_regno_fusage (insn, code, regno)

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || GET_CODE (insn) != CALL_INSN)

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))

      unsigned int regnote;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && GET_CODE (reg = XEXP (op, 0)) == REG
          && (regnote = REGNO (reg)) <= regno
          && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
/* Return true if INSN is a call to a pure function.  */

  if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
/* Remove register note NOTE from the REG_NOTES of INSN.  */

remove_note (insn, note)

  if (note == NULL_RTX)

  if (REG_NOTES (insn) == note)

      REG_NOTES (insn) = XEXP (note, 1);

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)

        XEXP (link, 1) = XEXP (note, 1);
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

in_expr_list_p (listp, node)

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

remove_node_from_expr_list (node, listp)

  rtx prev = NULL_RTX;

      if (node == XEXP (temp, 0))

          /* Splice the node out of the list.  */

            XEXP (prev, 1) = XEXP (temp, 1);

            *listp = XEXP (temp, 1);

      temp = XEXP (temp, 1);
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no instructions
   should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

  code = GET_CODE (x);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)

          if (volatile_insn_p (XEXP (x, i)))

        else if (fmt[i] == 'E')

            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

  code = GET_CODE (x);

    case UNSPEC_VOLATILE:

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)

          if (volatile_refs_p (XEXP (x, i)))

        else if (fmt[i] == 'E')

            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
/* Similar to above, except that it also rejects register pre- and
   post-incrementing.  */

  code = GET_CODE (x);

      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */

    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)

          if (side_effects_p (XEXP (x, i)))

        else if (fmt[i] == 'E')

            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
/* Return nonzero if evaluating rtx X might cause a trap.  */

  code = GET_CODE (x);

      /* Handle these cases quickly.  */

    case UNSPEC_VOLATILE:

      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* Division by a non-constant might trap.  */

      if (HONOR_SNANS (GET_MODE (x)))

      if (! CONSTANT_P (XEXP (x, 1))
          || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
              && flag_trapping_math))

      /* This was const0_rtx, but by not using that,
         we can link this file into other programs.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)

      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */

      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)

      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))

      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))

      if (HONOR_SNANS (GET_MODE (x)))

      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))

      /* These operations don't trap even with floating point.  */

      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
          && flag_trapping_math)

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)

      if (may_trap_p (XEXP (x, i)))

      else if (fmt[i] == 'E')

          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p (XVECEXP (x, i, j)))
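
/* Illustrative usage (not part of the original file): a hedged sketch of the
   kind of guard an optimizer applies before speculatively evaluating the
   source of a SET outside its original control flow.  SET is hypothetical.  */
#if 0
static int
example_can_speculate_set_p (set)
     rtx set;
{
  rtx src = SET_SRC (set);

  /* Refuse anything that may trap or has other side effects.  */
  return ! may_trap_p (src) && ! side_effects_p (src);
}
#endif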
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

inequality_comparisons_p (x)

  enum rtx_code code = GET_CODE (x);

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)

      if (inequality_comparisons_p (XEXP (x, i)))

      else if (fmt[i] == 'E')

          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

replace_rtx (x, from, to)

  /* The following prevents loops from occurring when we change MEM in a
     CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)

  /* Allow this function to make replacements in EXPR_LISTs.  */

  if (GET_CODE (x) == SUBREG)

      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)

          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));

        SUBREG_REG (x) = new;

  else if (GET_CODE (x) == ZERO_EXTEND)

      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)

          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)

        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
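
/* Illustrative usage (not part of the original file): a sketch of substituting
   one register for another throughout a copy of an insn pattern.  The names
   are hypothetical.  */
#if 0
static rtx
example_substitute_reg (pat, old_reg, new_reg)
     rtx pat, old_reg, new_reg;
{
  /* replace_rtx modifies its argument in place, so work on a copy when the
     original pattern may be shared.  */
  return replace_rtx (copy_rtx (pat), old_reg, new_reg);
}
#endif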
/* Throughout the rtx X, replace many registers according to REG_MAP.
   Return the replacement for X (which may be X with altered contents).
   REG_MAP[R] is the replacement for register R, or 0 for don't replace.
   NREGS is the length of REG_MAP; regs >= NREGS are not mapped.

   We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
   should not be mapped to pseudos or vice versa since validate_change
   is not called.

   If REPLACE_DEST is 1, replacements are also done in destinations;
   otherwise, only sources are replaced.  */

replace_regs (x, reg_map, nregs, replace_dest)

  code = GET_CODE (x);

      /* Verify that the register has an entry before trying to access it.  */
      if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)

          /* SUBREGs can't be shared.  Always return a copy to ensure that if
             this replacement occurs more than once then each instance will
             get distinct rtx.  */
          if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
            return copy_rtx (reg_map[REGNO (x)]);
          return reg_map[REGNO (x)];

      /* Prevent making nested SUBREGs.  */
      if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
          && reg_map[REGNO (SUBREG_REG (x))] != 0
          && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)

          rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
          return simplify_gen_subreg (GET_MODE (x), map_val,
                                      GET_MODE (SUBREG_REG (x)),
                                      SUBREG_BYTE (x));

        SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);

      else if (GET_CODE (SET_DEST (x)) == MEM
               || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
        /* Even if we are not to replace destinations, replace register if it
           is CONTAINED in destination (destination is memory or
           STRICT_LOW_PART).  */
        XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
                                               reg_map, nregs, 0);
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
        /* Similarly, for ZERO_EXTRACT we replace all operands.  */

      SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)

        XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
      else if (fmt[i] == 'E')

          for (j = 0; j < XVECLEN (x, i); j++)
            XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
                                              nregs, replace_dest);
/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

computed_jump_p_1 (x)

  enum rtx_code code = GET_CODE (x);

      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)

      if (fmt[i] == 'e'
          && computed_jump_p_1 (XEXP (x, i)))

      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (computed_jump_p_1 (XVECEXP (x, i, j)))
/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

computed_jump_p (insn)

  if (GET_CODE (insn) == JUMP_INSN)

      rtx pat = PATTERN (insn);

      if (find_reg_note (insn, REG_LABEL, NULL_RTX))

      else if (GET_CODE (pat) == PARALLEL)

          int len = XVECLEN (pat, 0);
          int has_use_labelref = 0;

          for (i = len - 1; i >= 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
                    == LABEL_REF))
              has_use_labelref = 1;

          if (! has_use_labelref)
            for (i = len - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))

      else if (GET_CODE (pat) == SET
               && SET_DEST (pat) == pc_rtx
               && computed_jump_p_1 (SET_SRC (pat)))
/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

for_each_rtx (x, f, data)

  result = (*f) (x, data);

    /* Do not traverse sub-expressions.  */

  else if (result != 0)
    /* Stop the traversal.  */

  /* There are no sub-expressions.  */

  length = GET_RTX_LENGTH (GET_CODE (*x));
  format = GET_RTX_FORMAT (GET_CODE (*x));

  for (i = 0; i < length; ++i)

          result = for_each_rtx (&XEXP (*x, i), f, data);

          if (XVEC (*x, i) != 0)

              for (j = 0; j < XVECLEN (*x, i); ++j)

                  result = for_each_rtx (&XVECEXP (*x, i, j), f, data);

          /* Nothing to do.  */
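
/* Illustrative usage (not part of the original file): a hedged sketch of a
   for_each_rtx callback that counts MEM subexpressions and skips the
   insides of the MEMs themselves.  Names are hypothetical.  */
#if 0
static int
example_count_mems_1 (loc, data)
     rtx *loc;
     void *data;
{
  if (*loc && GET_CODE (*loc) == MEM)
    {
      (*(int *) data)++;
      return -1;	/* Do not walk inside the MEM's address.  */
    }
  return 0;		/* Keep traversing.  */
}

static int
example_count_mems (x)
     rtx x;
{
  int count = 0;

  for_each_rtx (&x, example_count_mems_1, &count);
  return count;
}
#endif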
/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

regno_use_in (regno, x)

  if (GET_CODE (x) == REG && REGNO (x) == regno)

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)

      if ((tem = regno_use_in (regno, XEXP (x, i))))

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the first operand
   and positive values for the second operand.  */

commutative_operand_precedence (op)

  /* Constants always come second.  Prefer "nice" constants.  */
  if (GET_CODE (op) == CONST_INT)
  if (GET_CODE (op) == CONST_DOUBLE)
  if (CONSTANT_P (op))

  /* SUBREGs of objects should come second.  */
  if (GET_CODE (op) == SUBREG
      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')

  /* If only one operand is a `neg', `not',
     `mult', `plus', or `minus' expression, it will be the first
     operand.  */
  if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
      || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
      || GET_CODE (op) == MINUS)

  /* Complex expressions should be the first, so decrease priority
     of objects.  */
  if (GET_RTX_CLASS (GET_CODE (op)) == 'o')

/* Return 1 iff it is necessary to swap operands of commutative operation
   in order to canonicalize expression.  */

swap_commutative_operands_p (x, y)

  return (commutative_operand_precedence (x)
          < commutative_operand_precedence (y));
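
/* Illustrative usage (not part of the original file): a minimal sketch of the
   canonicalization idiom built on swap_commutative_operands_p.  OP0 and OP1
   are hypothetical operand variables.  */
#if 0
static rtx
example_canonical_plus (op0, op1)
     rtx op0, op1;
{
  if (swap_commutative_operands_p (op0, op1))
    {
      rtx tem = op0;
      op0 = op1, op1 = tem;
    }
  /* A constant, if present, now ends up as OP1, the canonical RTL order.  */
  return gen_rtx_PLUS (GET_MODE (op0), op0, op1);
}
#endif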
/* Return 1 if X is an autoincrement side effect and the register is
   not the stack pointer.  */

  switch (GET_CODE (x))

      /* There are no REG_INC notes for SP.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
/* Return 1 if the sequence of instructions beginning with FROM and up
   to and including TO is safe to move.  If NEW_TO is non-NULL, and
   the sequence is not already safe to move, but can be easily
   extended to a sequence which is safe, then NEW_TO will point to the
   end of the extended sequence.

   For now, this function only checks that the region contains whole
   exception regions, but it could be extended to check additional
   conditions as well.  */
int
insns_safe_to_move_p (from, to, new_to)
     rtx from, to;
     rtx *new_to;
{
  int eh_region_count = 0;
  int past_to_p = 0;
  rtx r = from;

  /* By default, assume the end of the region will be what was
     suggested.  */
  if (new_to)
    *new_to = to;

  while (r)
    {
      if (GET_CODE (r) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (r))
            {
            case NOTE_INSN_EH_REGION_BEG:
              ++eh_region_count;
              break;
            case NOTE_INSN_EH_REGION_END:
              if (eh_region_count == 0)
                /* This sequence of instructions contains the end of
                   an exception region, but not the beginning.  Moving
                   it will cause chaos.  */
                return 0;
              --eh_region_count;
              break;
            default:
              break;
            }
        }
      else if (past_to_p)
        /* If we've passed TO, and we see a non-note instruction, we
           can't extend the sequence to a movable sequence.  */
        return 0;

      if (r == to)
        {
          if (!new_to)
            /* It's OK to move the sequence if there were matched sets of
               exception region notes.  */
            return eh_region_count == 0;
          past_to_p = 1;
        }

      /* It's OK to move the sequence if there were matched sets of
         exception region notes.  */
      if (past_to_p && eh_region_count == 0)
        {
          *new_to = r;
          return 1;
        }

      /* Go to the next instruction.  */
      r = NEXT_INSN (r);
    }

  return 0;
}
/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */

int
loc_mentioned_in_p (loc, in)
     rtx *loc, in;
{
  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (loc == &in->fld[i].rtx)
        return 1;
      if (fmt[i] == 'e')
        {
          if (loc_mentioned_in_p (loc, XEXP (in, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
          if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
            return 1;
    }
  return 0;
}
/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (x)
     rtx x;
{
  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
  enum machine_mode mode = GET_MODE (x);
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    if ((SUBREG_BYTE (x) % UNITS_PER_WORD
         + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
        && (SUBREG_BYTE (x) % UNITS_PER_WORD
            || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
      abort ();

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
            - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
  else
    word = SUBREG_BYTE (x) / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
            - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
  else
    byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}
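
/* Illustrative worked example, not part of the original file, assuming a
   little-endian target with UNITS_PER_WORD == 4: for (subreg:QI (reg:SI R) 3)
   the subreg is not paradoxical, word = 3 / 4 = 0 and byte = 3 % 4 = 3, so
   the subreg begins at bit 0 * BITS_PER_WORD + 3 * BITS_PER_UNIT = 24 of R.  */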
/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */
unsigned int
subreg_regno_offset (xregno, xmode, offset, ymode)
     unsigned int xregno;
     enum machine_mode xmode;
     unsigned int offset;
     enum machine_mode ymode;
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  if (xregno >= FIRST_PSEUDO_REGISTER)
    abort ();

  nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
  nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);

  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;

  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* The size of YMODE must not be greater than the size of XMODE.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  if (mode_multiple == 0)
    abort ();

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}
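
/* Illustrative worked example, not part of the original file, assuming
   4-byte hard registers so that DImode occupies a two-register pair:
   subreg_regno_offset (R, DImode, 4, SImode) computes nregs_xmode = 2,
   nregs_ymode = 1, mode_multiple = 8 / 4 = 2, y_offset = 4 / 4 = 1 and
   nregs_multiple = 2, giving (1 / (2 / 2)) * 1 = 1, i.e. the SUBREG
   refers to the second register of the pair.  */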
/* Return the final regno that a subreg expression refers to.  */

unsigned int
subreg_regno (x)
     rtx x;
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
                                     GET_MODE (subreg),
                                     SUBREG_BYTE (x),
                                     GET_MODE (x));
  return ret;
}
struct parms_set_data
{
  int nregs;
  HARD_REG_SET regs;
};

/* Helper function for noticing stores to parameter registers.  */

static void
parms_set (x, pat, data)
     rtx x, pat ATTRIBUTE_UNUSED;
     void *data;
{
  struct parms_set_data *d = data;

  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}
/* Look backward for the first parameter to be loaded.
   Do not skip BOUNDARY.  */

rtx
find_first_parameter_load (call_insn, boundary)
     rtx call_insn, boundary;
{
  struct parms_set_data parm;
  rtx p, before;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
      {
        if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
          abort ();

        /* We only care about registers which can hold function
           arguments.  */
        if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
          continue;

        SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
        parm.nregs++;
      }
  before = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (GET_CODE (before) == CALL_INSN)
        break;

      /* Our caller must either ensure that we will find all sets
         (in case the code has not been optimized yet), or take care
         of possible labels by setting BOUNDARY to the preceding
         CODE_LABEL.  */
      if (GET_CODE (before) == CODE_LABEL)
        {
          if (before != boundary)
            abort ();
          break;
        }

      if (INSN_P (before))
        note_stores (PATTERN (before), parms_set, &parm);
    }
  return before;
}
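
/* Illustrative sketch, not part of the original file: a pass that wants
   to move CALL_INSN must also move the insns that set up its argument
   registers; the insn returned above marks the start of that window.
   `call_setup_length' is a hypothetical helper used only for this
   example, and BOUNDARY is assumed to be the nearest preceding
   CODE_LABEL.  */

static int
call_setup_length (call_insn, boundary)
     rtx call_insn, boundary;
{
  rtx insn = find_first_parameter_load (call_insn, boundary);
  int count = 0;

  /* Count the real insns between the first parameter load and the call;
     all of them belong to the unit that has to be moved together.  */
  for (; insn != call_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      count++;
  return count;
}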
/* Return true if we should avoid inserting code between INSN and the
   preceding call instruction.  */

bool
keep_with_call_p (insn)
     rtx insn;
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (GET_CODE (SET_DEST (set)) == REG
          && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
          && fixed_regs[REGNO (SET_DEST (set))]
          && general_operand (SET_SRC (set), VOIDmode))
        return true;
      if (GET_CODE (SET_SRC (set)) == REG
          && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
          && GET_CODE (SET_DEST (set)) == REG
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
        return true;
      /* There may be a stack pop just after the call and before the store
         of the return register.  Search for the actual store when deciding
         if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
        {
          rtx i2 = next_nonnote_insn (insn);
          if (i2 && keep_with_call_p (i2))
            return true;
        }
    }
  return false;
}
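
/* Illustrative sketch, not part of the original file: how a code-motion
   pass might use keep_with_call_p to find the last insn that must stay
   glued to CALL.  `end_of_call_group' is a hypothetical helper used only
   for this example.  */

static rtx
end_of_call_group (call)
     rtx call;
{
  rtx insn = call;
  rtx next;

  /* Walk forward while the next real insn still has to stay with the
     call (fixed-register restores, return-value copies, stack pops).  */
  while ((next = next_nonnote_insn (insn)) != NULL_RTX
         && keep_with_call_p (next))
    insn = next;

  return insn;
}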
/* Return true when a store to register X can be hoisted to the place
   with LIVE registers (can be NULL).  Value VAL contains the destination
   whose value will be used.  */

static bool
hoist_test_store (x, val, live)
     rtx x, val;
     regset live;
{
  if (GET_CODE (x) == SCRATCH)
    return true;

  if (rtx_equal_p (x, val))
    return true;

  /* Allow a SUBREG of X provided it is not writing just part of a
     multi-register pseudo; otherwise we would need to update all users
     to care about hoisting the store too.  The caller may represent
     that case by specifying the whole SUBREG as VAL.  */
  if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
    {
      if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
          && GET_MODE_BITSIZE (GET_MODE (x)) <
          GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
        return false;
      return true;
    }
  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Anything except a register store is not hoistable.  This includes
     partial stores to registers.  */
  if (!REG_P (x))
    return false;

  /* Pseudo registers can always be replaced by another pseudo to avoid
     the side effect; for hard registers we must ensure that they are dead.
     Eventually we may want to add code that tries to turn pseudos into
     hard registers, but it is unlikely to be useful.  */
  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int n = HARD_REGNO_NREGS (regno, GET_MODE (x));

      if (!live)
        return false;
      if (REGNO_REG_SET_P (live, regno))
        return false;
      while (--n > 0)
        if (REGNO_REG_SET_P (live, regno + n))
          return false;
    }
  return true;
}
/* Return true if INSN can be hoisted to a place with LIVE hard registers
   (LIVE can be NULL when unknown).  VAL is expected to be stored by the
   insn and used by the hoisting pass.  */

bool
can_hoist_insn_p (insn, val, live)
     rtx insn, val;
     regset live;
{
  rtx pat = PATTERN (insn);
  int i;

  /* It is probably not worth the complexity to handle multiple
     set stores.  */
  if (!single_set (insn))
    return false;
  /* We can move CALL_INSN, but we need to check that all caller clobbered
     regs are dead.  */
  if (GET_CODE (insn) == CALL_INSN)
    return false;
  /* In future we will handle hoisting of libcall sequences, but
     give up for now.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return false;
  switch (GET_CODE (pat))
    {
    case SET:
      if (!hoist_test_store (SET_DEST (pat), val, live))
        return false;
      break;
    case USE:
      /* USEs have awkward semantics, so do not move them.  */
      return false;
    case CLOBBER:
      if (!hoist_test_store (XEXP (pat, 0), val, live))
        return false;
      break;
    case PARALLEL:
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          switch (GET_CODE (x))
            {
            case SET:
              if (!hoist_test_store (SET_DEST (x), val, live))
                return false;
              break;
            case USE:
              /* We would need to fix callers to really ensure availability
                 of all values the insn uses, but for now it is safe to
                 prohibit hoisting of any insn having such hidden uses.  */
              return false;
            case CLOBBER:
              if (!hoist_test_store (SET_DEST (x), val, live))
                return false;
              break;
            default:
              break;
            }
        }
      break;
    default:
      abort ();
    }
  return true;
}
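
/* Illustrative sketch, not part of the original file: the intended pairing
   of can_hoist_insn_p with hoist_insn_to_edge below.  The wrapper
   `try_hoist_set_to_edge' is a hypothetical name used only for this
   example; LIVE may be NULL when liveness is unknown.  */

static rtx
try_hoist_set_to_edge (insn, e, live)
     rtx insn;
     edge e;
     regset live;
{
  rtx set = single_set (insn);

  if (!set || !can_hoist_insn_p (insn, SET_DEST (set), live))
    return NULL_RTX;

  /* Re-emit the insn on the edge, rewriting its main store to a fresh
     pseudo and any other side effects to new pseudos as well.  */
  return hoist_insn_to_edge (insn, e, SET_DEST (set),
                             gen_reg_rtx (GET_MODE (SET_DEST (set))));
}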
/* Update a store after hoisting: replace all stores to pseudo registers
   by new ones to avoid clobbering of values, except for the store to VAL,
   which will be updated to NEW.  */

static void
hoist_update_store (insn, xp, val, new)
     rtx insn, *xp, val, new;
{
  rtx x = *xp;

  if (GET_CODE (x) == SCRATCH)
    return;

  if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
    validate_change (insn, xp,
                     simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
                                          SUBREG_BYTE (x)), 1);
  if (rtx_equal_p (x, val))
    {
      validate_change (insn, xp, new, 1);
      return;
    }
  if (GET_CODE (x) == SUBREG)
    {
      xp = &SUBREG_REG (x);
      x = *xp;
    }

  if (!REG_P (x))
    abort ();

  /* We have verified that the hard registers are dead, so we may keep
     the side effect.  Otherwise replace it by a new pseudo.  */
  if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
    validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
  REG_NOTES (insn)
    = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
}
/* Create a copy of INSN after AFTER, replacing the store of VAL with a
   store to NEW and every other side effect on a pseudo register with a
   store to a fresh pseudo register.  */

rtx
hoist_insn_after (insn, after, val, new)
     rtx insn, after, val, new;
{
  rtx pat;
  rtx note;
  int i;

  insn = emit_copy_of_insn_after (insn, after);
  pat = PATTERN (insn);

  /* Remove REG_UNUSED notes as we will re-emit them.  */
  while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
    remove_note (insn, note);

  /* To get this working, callers would have to ensure that everything
     referenced by REG_EQUAL/REG_EQUIV notes is moved too.  Let's just
     remove them; it is probably easiest.  */
  while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
    remove_note (insn, note);
  while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
    remove_note (insn, note);

  /* Remove REG_DEAD notes as they might not be valid anymore in case
     we create redundancy.  */
  while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
    remove_note (insn, note);
  switch (GET_CODE (pat))
    {
    case SET:
      hoist_update_store (insn, &SET_DEST (pat), val, new);
      break;
    case USE:
      break;
    case CLOBBER:
      hoist_update_store (insn, &XEXP (pat, 0), val, new);
      break;
    case PARALLEL:
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          switch (GET_CODE (x))
            {
            case SET:
              hoist_update_store (insn, &SET_DEST (x), val, new);
              break;
            case USE:
              break;
            case CLOBBER:
              hoist_update_store (insn, &SET_DEST (x), val, new);
              break;
            default:
              break;
            }
        }
      break;
    default:
      abort ();
    }
  if (!apply_change_group ())
    abort ();

  return insn;
}
rtx
hoist_insn_to_edge (insn, e, val, new)
     rtx insn, val, new;
     edge e;
{
  rtx new_insn;

  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
    abort ();

  /* Do not use emit_insn_on_edge as we want to preserve notes and similar
     stuff.  We also emit CALL_INSNs and friends.  */
  if (e->insns == NULL_RTX)
    {
      start_sequence ();
      emit_note (NULL, NOTE_INSN_DELETED);
    }
  else
    push_to_sequence (e->insns);

  new_insn = hoist_insn_after (insn, get_last_insn (), val, new);

  e->insns = get_insns ();