/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "basic-block.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
static void set_of_1 PARAMS ((rtx, rtx, void *));
static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
static int computed_jump_p_1 PARAMS ((rtx));
static void parms_set PARAMS ((rtx, rtx, void *));
static bool hoist_test_store PARAMS ((rtx, rtx, regset));
static void hoist_update_store PARAMS ((rtx, rtx *, rtx, rtx));
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */
  RTX_CODE code = GET_CODE (x);

      return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
          || RTX_UNCHANGING_P (x))

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_unstable_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_unstable_p (XVECEXP (x, i, j)))
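/* Illustrative sketch (editorial addition, not part of the original file):
   given the cases above, a frame-pointer-relative address is considered
   stable while a volatile memory reference is not.  For instance, with the
   usual RTL constructors,

       rtx addr = plus_constant (frame_pointer_rtx, 8);
       rtx mem  = gen_rtx_MEM (SImode, addr);
       MEM_VOLATILE_P (mem) = 1;

   rtx_unstable_p (addr) should return 0 and rtx_unstable_p (mem) should
   return 1.  The exact results are an assumption based on the checks shown
   above.  */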
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

rtx_varies_p (x, for_alias)

  RTX_CODE code = GET_CODE (x);

      return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

      /* This will resolve to some offset from the frame pointer.  */

      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */

      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_varies_p (XEXP (x, i), for_alias))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

rtx_addr_can_trap_p (x)

  enum rtx_code code = GET_CODE (x);

      return SYMBOL_REF_WEAK (x);

      /* This will resolve to some offset from the frame pointer.  */

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* An address is assumed not to trap if it is an address that can't
         trap plus a constant integer or it is the pic register plus a
         constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
                 && GET_CODE (XEXP (x, 1)) == CONST_INT)
                || (XEXP (x, 0) == pic_offset_table_rtx
                    && CONSTANT_P (XEXP (x, 1))));

      return rtx_addr_can_trap_p (XEXP (x, 1));

      return rtx_addr_can_trap_p (XEXP (x, 0));

  /* If it isn't one of the cases above, it can cause a trap.  */
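/* Illustrative note (editorial addition): under the cases above, an address
   such as (plus (reg:SI sp) (const_int 16)) cannot trap, because the stack
   pointer is recognized explicitly and the offset is a CONST_INT, whereas a
   plain pseudo register used as an address is assumed to be able to trap.
   A caller wanting to hoist a load out of a loop might therefore guard the
   transformation roughly as

       if (! rtx_addr_can_trap_p (XEXP (mem, 0)))
         move_load_to_preheader (insn);

   where move_load_to_preheader is a hypothetical helper, not a routine in
   this file.  */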
/* Return true if X is an address that is known to not be zero.  */

nonzero_address_p (x)

  enum rtx_code code = GET_CODE (x);

      return !SYMBOL_REF_WEAK (x);

      /* This will resolve to some offset from the frame pointer.  */

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return nonzero_address_p (XEXP (x, 0));

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))

      return nonzero_address_p (XEXP (x, 0));

      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))

      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)

      return nonzero_address_p (XEXP (x, 0));

      /* Similarly.  Further, the offset is always positive.  */

      return nonzero_address_p (XEXP (x, 0));

      return nonzero_address_p (XEXP (x, 1));

  /* If it isn't one of the cases above, might be zero.  */
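/* Illustrative note (editorial addition): per the PLUS handling above, an
   address like (plus (reg/f:SI ptr) (const_int 8)), where the base register
   is marked REG_POINTER and the offset is positive, is known to be nonzero
   because pointers are assumed not to wrap; with a zero or non-constant
   offset the test falls back to nonzero_address_p of the base term.  This
   reading is inferred from the cases shown above.  */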
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

rtx_addr_varies_p (x, for_alias)

    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_addr_varies_p (XEXP (x, i), for_alias))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
/* Return the value of the integer term in X, if one is apparent;
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

  if (GET_CODE (x) == CONST)

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));

/* If X is a constant, return the value sans apparent integer term;
   Only obvious integer terms are detected.  */

get_related_value (x)

  if (GET_CODE (x) != CONST)

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
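/* Illustrative example (editorial addition): for a constant such as
   (const (plus (symbol_ref "x") (const_int 4))), get_integer_term is
   expected to return 4 and get_related_value is expected to return the
   (symbol_ref "x") part, so together the two functions split a
   symbol-plus-offset constant into its related value and its integer
   term.  This follows from the PLUS/MINUS cases and the comments above,
   not from text elided here.  */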
446 /* Given a tablejump insn INSN, return the RTL expression for the offset
447 into the jump table. If the offset cannot be determined, then return
450 If EARLIEST is nonzero, it is a pointer to a place where the earliest
451 insn used in locating the offset was found. */
454 get_jump_table_offset (insn
, earliest
)
468 if (GET_CODE (insn
) != JUMP_INSN
469 || ! (label
= JUMP_LABEL (insn
))
470 || ! (table
= NEXT_INSN (label
))
471 || GET_CODE (table
) != JUMP_INSN
472 || (GET_CODE (PATTERN (table
)) != ADDR_VEC
473 && GET_CODE (PATTERN (table
)) != ADDR_DIFF_VEC
)
474 || ! (set
= single_set (insn
)))
479 /* Some targets (eg, ARM) emit a tablejump that also
480 contains the out-of-range target. */
481 if (GET_CODE (x
) == IF_THEN_ELSE
482 && GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
485 /* Search backwards and locate the expression stored in X. */
486 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
487 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
490 /* If X is an expression using a relative address then strip
491 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
492 or the jump table label. */
493 if (GET_CODE (PATTERN (table
)) == ADDR_DIFF_VEC
494 && (GET_CODE (x
) == PLUS
|| GET_CODE (x
) == MINUS
))
496 for (i
= 0; i
< 2; i
++)
501 if (y
== pc_rtx
|| y
== pic_offset_table_rtx
)
504 for (old_y
= NULL_RTX
; GET_CODE (y
) == REG
&& y
!= old_y
;
505 old_y
= y
, y
= find_last_value (y
, &old_insn
, NULL_RTX
, 0))
508 if ((GET_CODE (y
) == LABEL_REF
&& XEXP (y
, 0) == label
))
517 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
518 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
522 /* Strip off any sign or zero extension. */
523 if (GET_CODE (x
) == SIGN_EXTEND
|| GET_CODE (x
) == ZERO_EXTEND
)
527 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
528 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
532 /* If X isn't a MEM then this isn't a tablejump we understand. */
533 if (GET_CODE (x
) != MEM
)
536 /* Strip off the MEM. */
539 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
540 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
  /* If X isn't a PLUS then this isn't a tablejump we understand.  */
  if (GET_CODE (x) != PLUS)
547 /* At this point we should have an expression representing the jump table
548 plus an offset. Examine each operand in order to determine which one
549 represents the jump table. Knowing that tells us that the other operand
550 must represent the offset. */
551 for (i
= 0; i
< 2; i
++)
556 for (old_y
= NULL_RTX
; GET_CODE (y
) == REG
&& y
!= old_y
;
557 old_y
= y
, y
= find_last_value (y
, &old_insn
, NULL_RTX
, 0))
560 if ((GET_CODE (y
) == CONST
|| GET_CODE (y
) == LABEL_REF
)
561 && reg_mentioned_p (label
, y
))
570 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
571 if (GET_CODE (x
) == PLUS
|| GET_CODE (x
) == MINUS
)
572 for (i
= 0; i
< 2; i
++)
573 if (XEXP (x
, i
) == pic_offset_table_rtx
)
582 /* Return the RTL expression representing the offset. */
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

global_reg_mentioned_p_1 (loc, data)
     void *data ATTRIBUTE_UNUSED;

  switch (GET_CODE (x))

      if (GET_CODE (SUBREG_REG (x)) == REG)
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])

      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])

      /* A non-constant call might use a global register.  */

/* Returns nonzero if X mentions a global register.  */

global_reg_mentioned_p (x)

  if (GET_CODE (x) == CALL_INSN)
      if (! CONST_OR_PURE_CALL_P (x))
      x = CALL_INSN_FUNCTION_USAGE (x);

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
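/* Usage sketch (editorial addition): global_reg_mentioned_p walks its
   argument with for_each_rtx, so a pass that must not disturb instructions
   involving user-declared global registers could test, for example,

       if (global_reg_mentioned_p (PATTERN (insn)))
         continue;

   The surrounding loop and the decision to skip are hypothetical; only the
   predicate itself is defined here.  */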
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

count_occurrences (x, find, count_dest)

  const char *format_ptr;

      if (GET_CODE (find) == MEM && rtx_equal_p (x, find))

      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
      switch (*format_ptr++)
          count += count_occurrences (XEXP (x, i), find, count_dest);

          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
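/* Illustrative example (editorial addition): to count how many times a
   register appears in an insn's pattern without counting the places where
   it is only being assigned, pass a zero COUNT_DEST:

       int n = count_occurrences (PATTERN (insn), reg, 0);

   With COUNT_DEST nonzero, occurrences inside a SET_DEST are counted as
   well, per the comment above.  */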
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

reg_mentioned_p (reg, in)

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

      /* Compare registers by number.  */
      return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions  */

      return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);

      /* These are kept unique for a given value.  */

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      for (j = XVECLEN (in, i) - 1; j >= 0; j--)
        if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
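/* Illustrative note (editorial addition): the REG case above compares only
   register numbers, so two REG rtxes with the same number but different
   modes still count as a mention, e.g.

       rtx r1 = gen_rtx_REG (SImode, 3);
       rtx r2 = gen_rtx_REG (HImode, 3);

   makes reg_mentioned_p (r1, r2) nonzero.  For a non-register REG argument
   the rtx_equal_p path performs a full structural comparison instead.  */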
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

no_labels_between_p (beg, end)

  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == CODE_LABEL)

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no JUMP_INSN insn.  */

no_jumps_between_p (beg, end)

  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == JUMP_INSN)
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_used_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (GET_CODE (insn) == CALL_INSN
                && (find_reg_fusage (insn, USE, reg)
                    || find_reg_fusage (insn, CLOBBER, reg)))))
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

reg_referenced_p (x, body)

  switch (GET_CODE (body))

      if (reg_overlap_mentioned_p (x, SET_SRC (body)))

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && GET_CODE (SET_DEST (body)) != REG
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))

      return reg_overlap_mentioned_p (x, body);

      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))

      if (GET_CODE (XEXP (body, 0)) == MEM)
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))

      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
      return reg_referenced_p (x, COND_EXEC_CODE (body));
/* Nonzero if register REG is referenced in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  Sets of REG do
   not count.  */

reg_referenced_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
        && (reg_referenced_p (reg, PATTERN (insn))
            || (GET_CODE (insn) == CALL_INSN
                && find_reg_fusage (insn, USE, reg))))
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_set_between_p (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;

  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))

/* Internals of reg_set_between_p.  */

reg_set_p (reg, insn)

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
      && (FIND_REG_INC_NOTE (insn, reg)
          || (GET_CODE (insn) == CALL_INSN
              /* We'd like to test call_used_regs here, but rtlanal.c can't
                 reference that variable due to its use in genattrtab.  So
                 we'll just be more conservative.

                 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
                 information holds all clobbered registers.  */
              && ((GET_CODE (reg) == REG
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER)
                  || GET_CODE (reg) == MEM
                  || find_reg_fusage (insn, CLOBBER, reg)))))

  return set_of (reg, insn) != NULL_RTX;
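/* Usage sketch (editorial addition): a pass deciding whether an insn that
   copies the register SRC into the register DEST can be moved down to TAIL
   might check, roughly,

       if (! reg_set_between_p (src, insn, tail)
           && ! reg_used_between_p (dest, insn, tail))
         ... the copy can be sunk to just before TAIL ...

   src, dest, insn and tail are hypothetical locals; only the two
   predicates themselves are defined in this file.  */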
991 /* Similar to reg_set_between_p, but check all registers in X. Return 0
992 only if none of them are modified between START and END. Do not
993 consider non-registers one way or the other. */
996 regs_set_between_p (x
, start
, end
)
1000 enum rtx_code code
= GET_CODE (x
);
1017 return reg_set_between_p (x
, start
, end
);
1023 fmt
= GET_RTX_FORMAT (code
);
1024 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1026 if (fmt
[i
] == 'e' && regs_set_between_p (XEXP (x
, i
), start
, end
))
1029 else if (fmt
[i
] == 'E')
1030 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1031 if (regs_set_between_p (XVECEXP (x
, i
, j
), start
, end
))
1038 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1039 only if none of them are modified between START and END. Return 1 if
1040 X contains a MEM; this routine does not perform any memory aliasing. */
1043 modified_between_p (x
, start
, end
)
1047 enum rtx_code code
= GET_CODE (x
);
1066 /* If the memory is not constant, assume it is modified. If it is
1067 constant, we still have to check the address. */
1068 if (! RTX_UNCHANGING_P (x
))
1073 return reg_set_between_p (x
, start
, end
);
1079 fmt
= GET_RTX_FORMAT (code
);
1080 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1082 if (fmt
[i
] == 'e' && modified_between_p (XEXP (x
, i
), start
, end
))
1085 else if (fmt
[i
] == 'E')
1086 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1087 if (modified_between_p (XVECEXP (x
, i
, j
), start
, end
))
1094 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1095 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1096 does not perform any memory aliasing. */
1099 modified_in_p (x
, insn
)
1103 enum rtx_code code
= GET_CODE (x
);
1122 /* If the memory is not constant, assume it is modified. If it is
1123 constant, we still have to check the address. */
1124 if (! RTX_UNCHANGING_P (x
))
1129 return reg_set_p (x
, insn
);
1135 fmt
= GET_RTX_FORMAT (code
);
1136 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1138 if (fmt
[i
] == 'e' && modified_in_p (XEXP (x
, i
), insn
))
1141 else if (fmt
[i
] == 'E')
1142 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1143 if (modified_in_p (XVECEXP (x
, i
, j
), insn
))
1150 /* Return true if anything in insn X is (anti,output,true) dependent on
1151 anything in insn Y. */
1154 insn_dependent_p (x
, y
)
1159 if (! INSN_P (x
) || ! INSN_P (y
))
1163 note_stores (PATTERN (x
), insn_dependent_p_1
, &tmp
);
1164 if (tmp
== NULL_RTX
)
1168 note_stores (PATTERN (y
), insn_dependent_p_1
, &tmp
);
1169 if (tmp
== NULL_RTX
)
1175 /* A helper routine for insn_dependent_p called through note_stores. */
1178 insn_dependent_p_1 (x
, pat
, data
)
1180 rtx pat ATTRIBUTE_UNUSED
;
1183 rtx
* pinsn
= (rtx
*) data
;
1185 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
1189 /* Helper function for set_of. */
1197 set_of_1 (x
, pat
, data1
)
1202 struct set_of_data
*data
= (struct set_of_data
*) (data1
);
1203 if (rtx_equal_p (x
, data
->pat
)
1204 || (GET_CODE (x
) != MEM
&& reg_overlap_mentioned_p (data
->pat
, x
)))
/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
1214 struct set_of_data data
;
1215 data
.found
= NULL_RTX
;
1217 note_stores (INSN_P (insn
) ? PATTERN (insn
) : insn
, set_of_1
, &data
);
1221 /* Given an INSN, return a SET expression if this insn has only a single SET.
1222 It may also have CLOBBERs, USEs, or SET whose output
1223 will not be used, which we ignore. */
1226 single_set_2 (insn
, pat
)
1230 int set_verified
= 1;
1233 if (GET_CODE (pat
) == PARALLEL
)
1235 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1237 rtx sub
= XVECEXP (pat
, 0, i
);
1238 switch (GET_CODE (sub
))
1245 /* We can consider insns having multiple sets, where all
1246 but one are dead as single set insns. In common case
1247 only single set is present in the pattern so we want
1248 to avoid checking for REG_UNUSED notes unless necessary.
1250 When we reach set first time, we just expect this is
1251 the single set we are looking for and only when more
1252 sets are found in the insn, we check them. */
1255 if (find_reg_note (insn
, REG_UNUSED
, SET_DEST (set
))
1256 && !side_effects_p (set
))
1262 set
= sub
, set_verified
= 0;
1263 else if (!find_reg_note (insn
, REG_UNUSED
, SET_DEST (sub
))
1264 || side_effects_p (sub
))
1276 /* Given an INSN, return nonzero if it has more than one SET, else return
1280 multiple_sets (insn
)
1286 /* INSN must be an insn. */
1287 if (! INSN_P (insn
))
1290 /* Only a PARALLEL can have multiple SETs. */
1291 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
1293 for (i
= 0, found
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1294 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == SET
)
1296 /* If we have already found a SET, then return now. */
1304 /* Either zero or one SET. */
1308 /* Return nonzero if the destination of SET equals the source
1309 and there are no side effects. */
1315 rtx src
= SET_SRC (set
);
1316 rtx dst
= SET_DEST (set
);
1318 if (side_effects_p (src
) || side_effects_p (dst
))
1321 if (GET_CODE (dst
) == MEM
&& GET_CODE (src
) == MEM
)
1322 return rtx_equal_p (dst
, src
);
1324 if (dst
== pc_rtx
&& src
== pc_rtx
)
1327 if (GET_CODE (dst
) == SIGN_EXTRACT
1328 || GET_CODE (dst
) == ZERO_EXTRACT
)
1329 return rtx_equal_p (XEXP (dst
, 0), src
)
1330 && ! BYTES_BIG_ENDIAN
&& XEXP (dst
, 2) == const0_rtx
;
1332 if (GET_CODE (dst
) == STRICT_LOW_PART
)
1333 dst
= XEXP (dst
, 0);
1335 if (GET_CODE (src
) == SUBREG
&& GET_CODE (dst
) == SUBREG
)
1337 if (SUBREG_BYTE (src
) != SUBREG_BYTE (dst
))
1339 src
= SUBREG_REG (src
);
1340 dst
= SUBREG_REG (dst
);
1343 return (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
1344 && REGNO (src
) == REGNO (dst
));
1347 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1354 rtx pat
= PATTERN (insn
);
1356 if (INSN_CODE (insn
) == NOOP_MOVE_INSN_CODE
)
1359 /* Insns carrying these notes are useful later on. */
1360 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
1365 if (find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
1368 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
1371 if (GET_CODE (pat
) == PARALLEL
)
1374 /* If nothing but SETs of registers to themselves,
1375 this insn can also be deleted. */
1376 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1378 rtx tem
= XVECEXP (pat
, 0, i
);
1380 if (GET_CODE (tem
) == USE
1381 || GET_CODE (tem
) == CLOBBER
)
1384 if (GET_CODE (tem
) != SET
|| ! set_noop_p (tem
))
1394 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1395 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1396 If the object was modified, if we hit a partial assignment to X, or hit a
1397 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1398 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1402 find_last_value (x
, pinsn
, valid_to
, allow_hwreg
)
1410 for (p
= PREV_INSN (*pinsn
); p
&& GET_CODE (p
) != CODE_LABEL
;
1414 rtx set
= single_set (p
);
1415 rtx note
= find_reg_note (p
, REG_EQUAL
, NULL_RTX
);
1417 if (set
&& rtx_equal_p (x
, SET_DEST (set
)))
1419 rtx src
= SET_SRC (set
);
1421 if (note
&& GET_CODE (XEXP (note
, 0)) != EXPR_LIST
)
1422 src
= XEXP (note
, 0);
1424 if ((valid_to
== NULL_RTX
1425 || ! modified_between_p (src
, PREV_INSN (p
), valid_to
))
1426 /* Reject hard registers because we don't usually want
1427 to use them; we'd rather use a pseudo. */
1428 && (! (GET_CODE (src
) == REG
1429 && REGNO (src
) < FIRST_PSEUDO_REGISTER
) || allow_hwreg
))
1436 /* If set in non-simple way, we don't have a value. */
1437 if (reg_set_p (x
, p
))
1444 /* Return nonzero if register in range [REGNO, ENDREGNO)
1445 appears either explicitly or implicitly in X
1446 other than being stored into.
1448 References contained within the substructure at LOC do not count.
1449 LOC may be zero, meaning don't ignore anything. */
1452 refers_to_regno_p (regno
, endregno
, x
, loc
)
1453 unsigned int regno
, endregno
;
1458 unsigned int x_regno
;
1463 /* The contents of a REG_NONNEG note is always zero, so we must come here
1464 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1468 code
= GET_CODE (x
);
1473 x_regno
= REGNO (x
);
1475 /* If we modifying the stack, frame, or argument pointer, it will
1476 clobber a virtual register. In fact, we could be more precise,
1477 but it isn't worth it. */
1478 if ((x_regno
== STACK_POINTER_REGNUM
1479 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1480 || x_regno
== ARG_POINTER_REGNUM
1482 || x_regno
== FRAME_POINTER_REGNUM
)
1483 && regno
>= FIRST_VIRTUAL_REGISTER
&& regno
<= LAST_VIRTUAL_REGISTER
)
1486 return (endregno
> x_regno
1487 && regno
< x_regno
+ (x_regno
< FIRST_PSEUDO_REGISTER
1488 ? HARD_REGNO_NREGS (x_regno
, GET_MODE (x
))
1492 /* If this is a SUBREG of a hard reg, we can see exactly which
1493 registers are being modified. Otherwise, handle normally. */
1494 if (GET_CODE (SUBREG_REG (x
)) == REG
1495 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
1497 unsigned int inner_regno
= subreg_regno (x
);
1498 unsigned int inner_endregno
1499 = inner_regno
+ (inner_regno
< FIRST_PSEUDO_REGISTER
1500 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
1502 return endregno
> inner_regno
&& regno
< inner_endregno
;
1508 if (&SET_DEST (x
) != loc
1509 /* Note setting a SUBREG counts as referring to the REG it is in for
1510 a pseudo but not for hard registers since we can
1511 treat each word individually. */
1512 && ((GET_CODE (SET_DEST (x
)) == SUBREG
1513 && loc
!= &SUBREG_REG (SET_DEST (x
))
1514 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
1515 && REGNO (SUBREG_REG (SET_DEST (x
))) >= FIRST_PSEUDO_REGISTER
1516 && refers_to_regno_p (regno
, endregno
,
1517 SUBREG_REG (SET_DEST (x
)), loc
))
1518 || (GET_CODE (SET_DEST (x
)) != REG
1519 && refers_to_regno_p (regno
, endregno
, SET_DEST (x
), loc
))))
1522 if (code
== CLOBBER
|| loc
== &SET_SRC (x
))
1531 /* X does not match, so try its subexpressions. */
1533 fmt
= GET_RTX_FORMAT (code
);
1534 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1536 if (fmt
[i
] == 'e' && loc
!= &XEXP (x
, i
))
1544 if (refers_to_regno_p (regno
, endregno
, XEXP (x
, i
), loc
))
1547 else if (fmt
[i
] == 'E')
1550 for (j
= XVECLEN (x
, i
) - 1; j
>=0; j
--)
1551 if (loc
!= &XVECEXP (x
, i
, j
)
1552 && refers_to_regno_p (regno
, endregno
, XVECEXP (x
, i
, j
), loc
))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */
1566 reg_overlap_mentioned_p (x
, in
)
1569 unsigned int regno
, endregno
;
1571 /* Overly conservative. */
1572 if (GET_CODE (x
) == STRICT_LOW_PART
)
1575 /* If either argument is a constant, then modifying X can not affect IN. */
1576 if (CONSTANT_P (x
) || CONSTANT_P (in
))
1579 switch (GET_CODE (x
))
1582 regno
= REGNO (SUBREG_REG (x
));
1583 if (regno
< FIRST_PSEUDO_REGISTER
)
1584 regno
= subreg_regno (x
);
1590 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
1591 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
1592 return refers_to_regno_p (regno
, endregno
, in
, (rtx
*) 0);
1599 if (GET_CODE (in
) == MEM
)
1602 fmt
= GET_RTX_FORMAT (GET_CODE (in
));
1603 for (i
= GET_RTX_LENGTH (GET_CODE (in
)) - 1; i
>= 0; i
--)
1604 if (fmt
[i
] == 'e' && reg_overlap_mentioned_p (x
, XEXP (in
, i
)))
1613 return reg_mentioned_p (x
, in
);
1619 /* If any register in here refers to it we return true. */
1620 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1621 if (XEXP (XVECEXP (x
, 0, i
), 0) != 0
1622 && reg_overlap_mentioned_p (XEXP (XVECEXP (x
, 0, i
), 0), in
))
1634 /* Return the last value to which REG was set prior to INSN. If we can't
1635 find it easily, return 0.
1637 We only return a REG, SUBREG, or constant because it is too hard to
1638 check if a MEM remains unchanged. */
1641 reg_set_last (x
, insn
)
1645 rtx orig_insn
= insn
;
1647 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1648 Stop when we reach a label or X is a hard reg and we reach a
1649 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1651 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1653 /* We compare with <= here, because reg_set_last_last_regno
1654 is actually the number of the first reg *not* in X. */
1656 insn
&& GET_CODE (insn
) != CODE_LABEL
1657 && ! (GET_CODE (insn
) == CALL_INSN
1658 && REGNO (x
) <= FIRST_PSEUDO_REGISTER
);
1659 insn
= PREV_INSN (insn
))
1662 rtx set
= set_of (x
, insn
);
      /* OK, this function modifies our register.  See if we understand it.  */
1667 if (GET_CODE (set
) != SET
|| SET_DEST (set
) != x
)
1669 last_value
= SET_SRC (x
);
1670 if (CONSTANT_P (last_value
)
1671 || ((GET_CODE (last_value
) == REG
1672 || GET_CODE (last_value
) == SUBREG
)
1673 && ! reg_set_between_p (last_value
,
1684 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1685 (X would be the pattern of an insn).
1686 FUN receives two arguments:
1687 the REG, MEM, CC0 or PC being stored in or clobbered,
1688 the SET or CLOBBER rtx that does the store.
1690 If the item being stored in or clobbered is a SUBREG of a hard register,
1691 the SUBREG will be passed. */
1694 note_stores (x
, fun
, data
)
1696 void (*fun
) PARAMS ((rtx
, rtx
, void *));
1701 if (GET_CODE (x
) == COND_EXEC
)
1702 x
= COND_EXEC_CODE (x
);
1704 if (GET_CODE (x
) == SET
|| GET_CODE (x
) == CLOBBER
)
1706 rtx dest
= SET_DEST (x
);
1708 while ((GET_CODE (dest
) == SUBREG
1709 && (GET_CODE (SUBREG_REG (dest
)) != REG
1710 || REGNO (SUBREG_REG (dest
)) >= FIRST_PSEUDO_REGISTER
))
1711 || GET_CODE (dest
) == ZERO_EXTRACT
1712 || GET_CODE (dest
) == SIGN_EXTRACT
1713 || GET_CODE (dest
) == STRICT_LOW_PART
)
1714 dest
= XEXP (dest
, 0);
1716 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1717 each of whose first operand is a register. */
1718 if (GET_CODE (dest
) == PARALLEL
)
1720 for (i
= XVECLEN (dest
, 0) - 1; i
>= 0; i
--)
1721 if (XEXP (XVECEXP (dest
, 0, i
), 0) != 0)
1722 (*fun
) (XEXP (XVECEXP (dest
, 0, i
), 0), x
, data
);
1725 (*fun
) (dest
, x
, data
);
1728 else if (GET_CODE (x
) == PARALLEL
)
1729 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1730 note_stores (XVECEXP (x
, 0, i
), fun
, data
);
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */
1743 note_uses (pbody
, fun
, data
)
1745 void (*fun
) PARAMS ((rtx
*, void *));
1751 switch (GET_CODE (body
))
1754 (*fun
) (&COND_EXEC_TEST (body
), data
);
1755 note_uses (&COND_EXEC_CODE (body
), fun
, data
);
1759 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1760 note_uses (&XVECEXP (body
, 0, i
), fun
, data
);
1764 (*fun
) (&XEXP (body
, 0), data
);
1768 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
1769 (*fun
) (&ASM_OPERANDS_INPUT (body
, i
), data
);
1773 (*fun
) (&TRAP_CONDITION (body
), data
);
1777 (*fun
) (&XEXP (body
, 0), data
);
1781 case UNSPEC_VOLATILE
:
1782 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1783 (*fun
) (&XVECEXP (body
, 0, i
), data
);
1787 if (GET_CODE (XEXP (body
, 0)) == MEM
)
1788 (*fun
) (&XEXP (XEXP (body
, 0), 0), data
);
1793 rtx dest
= SET_DEST (body
);
1795 /* For sets we replace everything in source plus registers in memory
1796 expression in store and operands of a ZERO_EXTRACT. */
1797 (*fun
) (&SET_SRC (body
), data
);
1799 if (GET_CODE (dest
) == ZERO_EXTRACT
)
1801 (*fun
) (&XEXP (dest
, 1), data
);
1802 (*fun
) (&XEXP (dest
, 2), data
);
1805 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
)
1806 dest
= XEXP (dest
, 0);
1808 if (GET_CODE (dest
) == MEM
)
1809 (*fun
) (&XEXP (dest
, 0), data
);
1814 /* All the other possibilities never store. */
1815 (*fun
) (pbody
, data
);
1820 /* Return nonzero if X's old contents don't survive after INSN.
1821 This will be true if X is (cc0) or if X is a register and
1822 X dies in INSN or because INSN entirely sets X.
1824 "Entirely set" means set directly and not through a SUBREG,
1825 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1826 Likewise, REG_INC does not count.
1828 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1829 but for this use that makes no difference, since regs don't overlap
1830 during their lifetimes. Therefore, this function may be used
1831 at any time after deaths have been computed (in flow.c).
1833 If REG is a hard reg that occupies multiple machine registers, this
1834 function will only return 1 if each of those registers will be replaced
1838 dead_or_set_p (insn
, x
)
1842 unsigned int regno
, last_regno
;
1845 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1846 if (GET_CODE (x
) == CC0
)
1849 if (GET_CODE (x
) != REG
)
1853 last_regno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
1854 : regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
)) - 1);
1856 for (i
= regno
; i
<= last_regno
; i
++)
1857 if (! dead_or_set_regno_p (insn
, i
))
1863 /* Utility function for dead_or_set_p to check an individual register. Also
1864 called from flow.c. */
1867 dead_or_set_regno_p (insn
, test_regno
)
1869 unsigned int test_regno
;
1871 unsigned int regno
, endregno
;
1874 /* See if there is a death note for something that includes TEST_REGNO. */
1875 if (find_regno_note (insn
, REG_DEAD
, test_regno
))
1878 if (GET_CODE (insn
) == CALL_INSN
1879 && find_regno_fusage (insn
, CLOBBER
, test_regno
))
1882 pattern
= PATTERN (insn
);
1884 if (GET_CODE (pattern
) == COND_EXEC
)
1885 pattern
= COND_EXEC_CODE (pattern
);
1887 if (GET_CODE (pattern
) == SET
)
1889 rtx dest
= SET_DEST (pattern
);
1891 /* A value is totally replaced if it is the destination or the
1892 destination is a SUBREG of REGNO that does not change the number of
1894 if (GET_CODE (dest
) == SUBREG
1895 && (((GET_MODE_SIZE (GET_MODE (dest
))
1896 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
1897 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
1898 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)))
1899 dest
= SUBREG_REG (dest
);
1901 if (GET_CODE (dest
) != REG
)
1904 regno
= REGNO (dest
);
1905 endregno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
+ 1
1906 : regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (dest
)));
1908 return (test_regno
>= regno
&& test_regno
< endregno
);
1910 else if (GET_CODE (pattern
) == PARALLEL
)
1914 for (i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
1916 rtx body
= XVECEXP (pattern
, 0, i
);
1918 if (GET_CODE (body
) == COND_EXEC
)
1919 body
= COND_EXEC_CODE (body
);
1921 if (GET_CODE (body
) == SET
|| GET_CODE (body
) == CLOBBER
)
1923 rtx dest
= SET_DEST (body
);
1925 if (GET_CODE (dest
) == SUBREG
1926 && (((GET_MODE_SIZE (GET_MODE (dest
))
1927 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
1928 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
1929 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)))
1930 dest
= SUBREG_REG (dest
);
1932 if (GET_CODE (dest
) != REG
)
1935 regno
= REGNO (dest
);
1936 endregno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
+ 1
1937 : regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (dest
)));
1939 if (test_regno
>= regno
&& test_regno
< endregno
)
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

find_reg_note (insn, kind, datum)

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        && (datum == 0 || datum == XEXP (link, 0)))
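/* Illustrative example (editorial addition): the usual way to look up a
   note of a given kind is

       rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
       if (note != 0)
         ... XEXP (note, 0) is the attached value ...

   passing NULL_RTX as DATUM so that any note of that kind matches, as
   described above.  */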
1970 /* Return the reg-note of kind KIND in insn INSN which applies to register
1971 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1972 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1973 it might be the case that the note overlaps REGNO. */
1976 find_regno_note (insn
, kind
, regno
)
1983 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1984 if (! INSN_P (insn
))
1987 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1988 if (REG_NOTE_KIND (link
) == kind
1989 /* Verify that it is a register, so that scratch and MEM won't cause a
1991 && GET_CODE (XEXP (link
, 0)) == REG
1992 && REGNO (XEXP (link
, 0)) <= regno
1993 && ((REGNO (XEXP (link
, 0))
1994 + (REGNO (XEXP (link
, 0)) >= FIRST_PSEUDO_REGISTER
? 1
1995 : HARD_REGNO_NREGS (REGNO (XEXP (link
, 0)),
1996 GET_MODE (XEXP (link
, 0)))))
2002 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
2006 find_reg_equal_equiv_note (insn
)
2011 if (single_set (insn
) == 0)
2013 else if ((note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
)) != 0)
2016 return find_reg_note (insn
, REG_EQUAL
, NULL_RTX
);
2019 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
2020 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2023 find_reg_fusage (insn
, code
, datum
)
2028 /* If it's not a CALL_INSN, it can't possibly have a
2029 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
2030 if (GET_CODE (insn
) != CALL_INSN
)
2036 if (GET_CODE (datum
) != REG
)
2040 for (link
= CALL_INSN_FUNCTION_USAGE (insn
);
2042 link
= XEXP (link
, 1))
2043 if (GET_CODE (XEXP (link
, 0)) == code
2044 && rtx_equal_p (datum
, XEXP (XEXP (link
, 0), 0)))
2049 unsigned int regno
= REGNO (datum
);
2051 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2052 to pseudo registers, so don't bother checking. */
2054 if (regno
< FIRST_PSEUDO_REGISTER
)
2056 unsigned int end_regno
2057 = regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (datum
));
2060 for (i
= regno
; i
< end_regno
; i
++)
2061 if (find_regno_fusage (insn
, code
, i
))
2069 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2070 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2073 find_regno_fusage (insn
, code
, regno
)
2080 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2081 to pseudo registers, so don't bother checking. */
2083 if (regno
>= FIRST_PSEUDO_REGISTER
2084 || GET_CODE (insn
) != CALL_INSN
)
2087 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
2089 unsigned int regnote
;
2092 if (GET_CODE (op
= XEXP (link
, 0)) == code
2093 && GET_CODE (reg
= XEXP (op
, 0)) == REG
2094 && (regnote
= REGNO (reg
)) <= regno
2095 && regnote
+ HARD_REGNO_NREGS (regnote
, GET_MODE (reg
)) > regno
)
2102 /* Return true if INSN is a call to a pure function. */
2110 if (GET_CODE (insn
) != CALL_INSN
|| ! CONST_OR_PURE_CALL_P (insn
))
2113 /* Look for the note that differentiates const and pure functions. */
2114 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
2118 if (GET_CODE (u
= XEXP (link
, 0)) == USE
2119 && GET_CODE (m
= XEXP (u
, 0)) == MEM
&& GET_MODE (m
) == BLKmode
2120 && GET_CODE (XEXP (m
, 0)) == SCRATCH
)
2127 /* Remove register note NOTE from the REG_NOTES of INSN. */
2130 remove_note (insn
, note
)
2136 if (note
== NULL_RTX
)
2139 if (REG_NOTES (insn
) == note
)
2141 REG_NOTES (insn
) = XEXP (note
, 1);
2145 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
2146 if (XEXP (link
, 1) == note
)
2148 XEXP (link
, 1) = XEXP (note
, 1);
2155 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2156 return 1 if it is found. A simple equality test is used to determine if
2160 in_expr_list_p (listp
, node
)
2166 for (x
= listp
; x
; x
= XEXP (x
, 1))
2167 if (node
== XEXP (x
, 0))
2173 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2174 remove that entry from the list if it is found.
2176 A simple equality test is used to determine if NODE matches. */
2179 remove_node_from_expr_list (node
, listp
)
2184 rtx prev
= NULL_RTX
;
2188 if (node
== XEXP (temp
, 0))
2190 /* Splice the node out of the list. */
2192 XEXP (prev
, 1) = XEXP (temp
, 1);
2194 *listp
= XEXP (temp
, 1);
2200 temp
= XEXP (temp
, 1);
2204 /* Nonzero if X contains any volatile instructions. These are instructions
2205 which may cause unpredictable machine state instructions, and thus no
2206 instructions should be moved or combined across them. This includes
2207 only volatile asms and UNSPEC_VOLATILE instructions. */
2215 code
= GET_CODE (x
);
2236 case UNSPEC_VOLATILE
:
2237 /* case TRAP_IF: This isn't clear yet. */
2241 if (MEM_VOLATILE_P (x
))
2248 /* Recursively scan the operands of this expression. */
2251 const char *fmt
= GET_RTX_FORMAT (code
);
2254 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2258 if (volatile_insn_p (XEXP (x
, i
)))
2261 else if (fmt
[i
] == 'E')
2264 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2265 if (volatile_insn_p (XVECEXP (x
, i
, j
)))
2273 /* Nonzero if X contains any volatile memory references
2274 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2282 code
= GET_CODE (x
);
2301 case UNSPEC_VOLATILE
:
2306 if (MEM_VOLATILE_P (x
))
2313 /* Recursively scan the operands of this expression. */
2316 const char *fmt
= GET_RTX_FORMAT (code
);
2319 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2323 if (volatile_refs_p (XEXP (x
, i
)))
2326 else if (fmt
[i
] == 'E')
2329 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2330 if (volatile_refs_p (XVECEXP (x
, i
, j
)))
2338 /* Similar to above, except that it also rejects register pre- and post-
2347 code
= GET_CODE (x
);
2366 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2367 when some combination can't be done. If we see one, don't think
2368 that we can simplify the expression. */
2369 return (GET_MODE (x
) != VOIDmode
);
2378 case UNSPEC_VOLATILE
:
2379 /* case TRAP_IF: This isn't clear yet. */
2384 if (MEM_VOLATILE_P (x
))
2391 /* Recursively scan the operands of this expression. */
2394 const char *fmt
= GET_RTX_FORMAT (code
);
2397 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2401 if (side_effects_p (XEXP (x
, i
)))
2404 else if (fmt
[i
] == 'E')
2407 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2408 if (side_effects_p (XVECEXP (x
, i
, j
)))
2416 /* Return nonzero if evaluating rtx X might cause a trap. */
2428 code
= GET_CODE (x
);
2431 /* Handle these cases quickly. */
2445 case UNSPEC_VOLATILE
:
2450 return MEM_VOLATILE_P (x
);
2452 /* Memory ref can trap unless it's a static var or a stack slot. */
2454 return rtx_addr_can_trap_p (XEXP (x
, 0));
2456 /* Division by a non-constant might trap. */
2461 if (HONOR_SNANS (GET_MODE (x
)))
2463 if (! CONSTANT_P (XEXP (x
, 1))
2464 || (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2465 && flag_trapping_math
))
2467 /* This was const0_rtx, but by not using that,
2468 we can link this file into other programs. */
2469 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& INTVAL (XEXP (x
, 1)) == 0)
2474 /* An EXPR_LIST is used to represent a function call. This
2475 certainly may trap. */
2483 /* Some floating point comparisons may trap. */
2484 if (!flag_trapping_math
)
2486 /* ??? There is no machine independent way to check for tests that trap
2487 when COMPARE is used, though many targets do make this distinction.
2488 For instance, sparc uses CCFPE for compares which generate exceptions
2489 and CCFP for compares which do not generate exceptions. */
2490 if (HONOR_NANS (GET_MODE (x
)))
2492 /* But often the compare has some CC mode, so check operand
2494 if (HONOR_NANS (GET_MODE (XEXP (x
, 0)))
2495 || HONOR_NANS (GET_MODE (XEXP (x
, 1))))
2501 if (HONOR_SNANS (GET_MODE (x
)))
2503 /* Often comparison is CC mode, so check operand modes. */
2504 if (HONOR_SNANS (GET_MODE (XEXP (x
, 0)))
2505 || HONOR_SNANS (GET_MODE (XEXP (x
, 1))))
2511 /* These operations don't trap even with floating point. */
2515 /* Any floating arithmetic may trap. */
2516 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2517 && flag_trapping_math
)
2521 fmt
= GET_RTX_FORMAT (code
);
2522 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2526 if (may_trap_p (XEXP (x
, i
)))
2529 else if (fmt
[i
] == 'E')
2532 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2533 if (may_trap_p (XVECEXP (x
, i
, j
)))
2540 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2541 i.e., an inequality. */
2544 inequality_comparisons_p (x
)
2549 enum rtx_code code
= GET_CODE (x
);
2579 len
= GET_RTX_LENGTH (code
);
2580 fmt
= GET_RTX_FORMAT (code
);
2582 for (i
= 0; i
< len
; i
++)
2586 if (inequality_comparisons_p (XEXP (x
, i
)))
2589 else if (fmt
[i
] == 'E')
2592 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2593 if (inequality_comparisons_p (XVECEXP (x
, i
, j
)))
2601 /* Replace any occurrence of FROM in X with TO. The function does
2602 not enter into CONST_DOUBLE for the replace.
2604 Note that copying is not done so X must not be shared unless all copies
2605 are to be modified. */
2608 replace_rtx (x
, from
, to
)
2614 /* The following prevents loops occurrence when we change MEM in
2615 CONST_DOUBLE onto the same CONST_DOUBLE. */
2616 if (x
!= 0 && GET_CODE (x
) == CONST_DOUBLE
)
2622 /* Allow this function to make replacements in EXPR_LISTs. */
2626 if (GET_CODE (x
) == SUBREG
)
2628 rtx
new = replace_rtx (SUBREG_REG (x
), from
, to
);
2630 if (GET_CODE (new) == CONST_INT
)
2632 x
= simplify_subreg (GET_MODE (x
), new,
2633 GET_MODE (SUBREG_REG (x
)),
2639 SUBREG_REG (x
) = new;
2643 else if (GET_CODE (x
) == ZERO_EXTEND
)
2645 rtx
new = replace_rtx (XEXP (x
, 0), from
, to
);
2647 if (GET_CODE (new) == CONST_INT
)
2649 x
= simplify_unary_operation (ZERO_EXTEND
, GET_MODE (x
),
2650 new, GET_MODE (XEXP (x
, 0)));
2660 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2661 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2664 XEXP (x
, i
) = replace_rtx (XEXP (x
, i
), from
, to
);
2665 else if (fmt
[i
] == 'E')
2666 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2667 XVECEXP (x
, i
, j
) = replace_rtx (XVECEXP (x
, i
, j
), from
, to
);
2673 /* Throughout the rtx X, replace many registers according to REG_MAP.
2674 Return the replacement for X (which may be X with altered contents).
2675 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2676 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2678 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2679 should not be mapped to pseudos or vice versa since validate_change
2682 If REPLACE_DEST is 1, replacements are also done in destinations;
2683 otherwise, only sources are replaced. */
2686 replace_regs (x
, reg_map
, nregs
, replace_dest
)
2699 code
= GET_CODE (x
);
2714 /* Verify that the register has an entry before trying to access it. */
2715 if (REGNO (x
) < nregs
&& reg_map
[REGNO (x
)] != 0)
2717 /* SUBREGs can't be shared. Always return a copy to ensure that if
2718 this replacement occurs more than once then each instance will
2719 get distinct rtx. */
2720 if (GET_CODE (reg_map
[REGNO (x
)]) == SUBREG
)
2721 return copy_rtx (reg_map
[REGNO (x
)]);
2722 return reg_map
[REGNO (x
)];
2727 /* Prevent making nested SUBREGs. */
2728 if (GET_CODE (SUBREG_REG (x
)) == REG
&& REGNO (SUBREG_REG (x
)) < nregs
2729 && reg_map
[REGNO (SUBREG_REG (x
))] != 0
2730 && GET_CODE (reg_map
[REGNO (SUBREG_REG (x
))]) == SUBREG
)
2732 rtx map_val
= reg_map
[REGNO (SUBREG_REG (x
))];
2733 return simplify_gen_subreg (GET_MODE (x
), map_val
,
2734 GET_MODE (SUBREG_REG (x
)),
2741 SET_DEST (x
) = replace_regs (SET_DEST (x
), reg_map
, nregs
, 0);
2743 else if (GET_CODE (SET_DEST (x
)) == MEM
2744 || GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2745 /* Even if we are not to replace destinations, replace register if it
2746 is CONTAINED in destination (destination is memory or
2747 STRICT_LOW_PART). */
2748 XEXP (SET_DEST (x
), 0) = replace_regs (XEXP (SET_DEST (x
), 0),
2750 else if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
2751 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2754 SET_SRC (x
) = replace_regs (SET_SRC (x
), reg_map
, nregs
, 0);
2761 fmt
= GET_RTX_FORMAT (code
);
2762 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2765 XEXP (x
, i
) = replace_regs (XEXP (x
, i
), reg_map
, nregs
, replace_dest
);
2766 else if (fmt
[i
] == 'E')
2769 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2770 XVECEXP (x
, i
, j
) = replace_regs (XVECEXP (x
, i
, j
), reg_map
,
2771 nregs
, replace_dest
);
2777 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2778 constant that is not in the constant pool and not in the condition
2779 of an IF_THEN_ELSE. */
2782 computed_jump_p_1 (x
)
2785 enum rtx_code code
= GET_CODE (x
);
2804 return ! (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
2805 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)));
2808 return (computed_jump_p_1 (XEXP (x
, 1))
2809 || computed_jump_p_1 (XEXP (x
, 2)));
2815 fmt
= GET_RTX_FORMAT (code
);
2816 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2819 && computed_jump_p_1 (XEXP (x
, i
)))
2822 else if (fmt
[i
] == 'E')
2823 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2824 if (computed_jump_p_1 (XVECEXP (x
, i
, j
)))
2831 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2833 Tablejumps and casesi insns are not considered indirect jumps;
2834 we can recognize them by a (use (label_ref)). */
2837 computed_jump_p (insn
)
2841 if (GET_CODE (insn
) == JUMP_INSN
)
2843 rtx pat
= PATTERN (insn
);
2845 if (find_reg_note (insn
, REG_LABEL
, NULL_RTX
))
2847 else if (GET_CODE (pat
) == PARALLEL
)
2849 int len
= XVECLEN (pat
, 0);
2850 int has_use_labelref
= 0;
2852 for (i
= len
- 1; i
>= 0; i
--)
2853 if (GET_CODE (XVECEXP (pat
, 0, i
)) == USE
2854 && (GET_CODE (XEXP (XVECEXP (pat
, 0, i
), 0))
2856 has_use_labelref
= 1;
2858 if (! has_use_labelref
)
2859 for (i
= len
- 1; i
>= 0; i
--)
2860 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
2861 && SET_DEST (XVECEXP (pat
, 0, i
)) == pc_rtx
2862 && computed_jump_p_1 (SET_SRC (XVECEXP (pat
, 0, i
))))
2865 else if (GET_CODE (pat
) == SET
2866 && SET_DEST (pat
) == pc_rtx
2867 && computed_jump_p_1 (SET_SRC (pat
)))
2873 /* Traverse X via depth-first search, calling F for each
2874 sub-expression (including X itself). F is also passed the DATA.
2875 If F returns -1, do not traverse sub-expressions, but continue
2876 traversing the rest of the tree. If F ever returns any other
2877 nonzero value, stop the traversal, and return the value returned
2878 by F. Otherwise, return 0. This function does not traverse inside
2879 tree structure that contains RTX_EXPRs, or into sub-expressions
2880 whose format code is `0' since it is not known whether or not those
2881 codes are actually RTL.
2883 This routine is very general, and could (should?) be used to
2884 implement many of the other routines in this file. */
2887 for_each_rtx (x
, f
, data
)
2898 result
= (*f
) (x
, data
);
2900 /* Do not traverse sub-expressions. */
2902 else if (result
!= 0)
2903 /* Stop the traversal. */
2907 /* There are no sub-expressions. */
2910 length
= GET_RTX_LENGTH (GET_CODE (*x
));
2911 format
= GET_RTX_FORMAT (GET_CODE (*x
));
2913 for (i
= 0; i
< length
; ++i
)
2918 result
= for_each_rtx (&XEXP (*x
, i
), f
, data
);
2925 if (XVEC (*x
, i
) != 0)
2928 for (j
= 0; j
< XVECLEN (*x
, i
); ++j
)
2930 result
= for_each_rtx (&XVECEXP (*x
, i
, j
), f
, data
);
2938 /* Nothing to do. */
2947 /* Searches X for any reference to REGNO, returning the rtx of the
2948 reference found if any. Otherwise, returns NULL_RTX. */
2951 regno_use_in (regno
, x
)
2959 if (GET_CODE (x
) == REG
&& REGNO (x
) == regno
)
2962 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2963 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2967 if ((tem
= regno_use_in (regno
, XEXP (x
, i
))))
2970 else if (fmt
[i
] == 'E')
2971 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2972 if ((tem
= regno_use_in (regno
, XVECEXP (x
, i
, j
))))
/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the second operand
   and positive values for the first operand.  */

commutative_operand_precedence (op)

  /* Constants always come second.  Prefer "nice" constants.  */
  if (GET_CODE (op) == CONST_INT)
  if (GET_CODE (op) == CONST_DOUBLE)
  if (CONSTANT_P (op))

  /* SUBREGs of objects should come second.  */
  if (GET_CODE (op) == SUBREG
      && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')

  /* If only one operand is a `neg', `not',
     `mult', `plus', or `minus' expression, it will be the first
     operand.  */
  if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
      || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
      || GET_CODE (op) == MINUS)

  /* Complex expressions should be the first, so decrease priority
     of objects.  */
  if (GET_RTX_CLASS (GET_CODE (op)) == 'o')

/* Return 1 iff it is necessary to swap operands of commutative operation
   in order to canonicalize expression.  */

swap_commutative_operands_p (x, y)

  return (commutative_operand_precedence (x)
          < commutative_operand_precedence (y));
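/* Illustrative example (editorial addition): since constants receive the
   lowest precedence, a caller canonicalizing a commutative operation does
   something like

       if (swap_commutative_operands_p (op0, op1))
         tem = op0, op0 = op1, op1 = tem;

   so that, for instance, (plus (const_int 4) (reg)) becomes
   (plus (reg) (const_int 4)).  op0, op1 and tem are hypothetical locals.  */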
3028 /* Return 1 if X is an autoincrement side effect and the register is
3029 not the stack pointer. */
3034 switch (GET_CODE (x
))
3042 /* There are no REG_INC notes for SP. */
3043 if (XEXP (x
, 0) != stack_pointer_rtx
)
/* Return 1 if the sequence of instructions beginning with FROM and up
   to and including TO is safe to move.  If NEW_TO is non-NULL, and
   the sequence is not already safe to move, but can be easily
   extended to a sequence which is safe, then NEW_TO will point to the
   end of the extended sequence.

   For now, this function only checks that the region contains whole
   exception regions, but it could be extended to check additional
   conditions as well.  */

int
insns_safe_to_move_p (from, to, new_to)
     rtx from;
     rtx to;
     rtx *new_to;
{
  int eh_region_count = 0;
  int past_to_p = 0;
  rtx r = from;

  /* By default, assume the end of the region will be what was
     suggested.  */
  if (new_to)
    *new_to = to;

  while (r)
    {
      if (GET_CODE (r) == NOTE)
        {
          switch (NOTE_LINE_NUMBER (r))
            {
            case NOTE_INSN_EH_REGION_BEG:
              ++eh_region_count;
              break;

            case NOTE_INSN_EH_REGION_END:
              if (eh_region_count == 0)
                /* This sequence of instructions contains the end of
                   an exception region, but not the beginning.  Moving
                   it will cause chaos.  */
                return 0;

              --eh_region_count;
              break;

            default:
              break;
            }
        }
      else if (past_to_p)
        /* If we've passed TO, and we see a non-note instruction, we
           can't extend the sequence to a movable sequence.  */
        return 0;

      if (r == to)
        {
          if (!new_to)
            /* It's OK to move the sequence if there were matched sets of
               exception region notes.  */
            return eh_region_count == 0;

          past_to_p = 1;
        }

      /* It's OK to move the sequence if there were matched sets of
         exception region notes.  */
      if (past_to_p && eh_region_count == 0)
        {
          *new_to = r;
          return 1;
        }

      /* Go to the next instruction.  */
      r = NEXT_INSN (r);
    }

  return 0;
}
/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */

int
loc_mentioned_in_p (loc, in)
     rtx *loc, in;
{
  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (loc == &in->fld[i].rtx)
        return 1;

      if (fmt[i] == 'e')
        {
          if (loc_mentioned_in_p (loc, XEXP (in, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
          if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
            return 1;
    }
  return 0;
}
/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (x)
     rtx x;
{
  enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
  enum machine_mode mode = GET_MODE (x);
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    if ((SUBREG_BYTE (x) % UNITS_PER_WORD
         + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
        && (SUBREG_BYTE (x) % UNITS_PER_WORD
            || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
      abort ();

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
            - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
  else
    word = SUBREG_BYTE (x) / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
            - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
  else
    byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}
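
/* Editor's worked example (not part of the original file), assuming a
   32-bit target with UNITS_PER_WORD == 4: for (subreg:QI (reg:SI R) 3)
   on a little-endian target, word = 3 / 4 = 0 and byte = 3 % 4 = 3, so
   subreg_lsb returns 3 * BITS_PER_UNIT = 24.  On a big-endian target
   the same SUBREG_BYTE names the least significant byte, giving
   word = 0, byte = (4 - (3 + 1)) % 4 = 0, and a result of 0.  */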
/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */

unsigned int
subreg_regno_offset (xregno, xmode, offset, ymode)
     unsigned int xregno;
     enum machine_mode xmode;
     unsigned int offset;
     enum machine_mode ymode;
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  if (xregno >= FIRST_PSEUDO_REGISTER)
    abort ();

  nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
  nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);

  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;

  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* The size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  if (mode_multiple == 0)
    abort ();

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}
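
/* Editor's worked example (not part of the original file), assuming a
   little-endian 32-bit target where DImode occupies two hard registers:
   subreg_regno_offset (R, DImode, 4, SImode) has nregs_xmode = 2,
   nregs_ymode = 1, mode_multiple = 8 / 4 = 2, y_offset = 4 / 4 = 1 and
   nregs_multiple = 2, so it returns (1 / (2 / 2)) * 1 = 1: the SImode
   subreg at byte 4 lives in hard register R + 1.  */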
/* Return the final regno that a subreg expression refers to.  */

unsigned int
subreg_regno (x)
     rtx x;
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
                                     GET_MODE (subreg),
                                     SUBREG_BYTE (x),
                                     GET_MODE (x));
  return ret;
}
struct parms_set_data
{
  int nregs;
  HARD_REG_SET regs;
};

/* Helper function for noticing stores to parameter registers.  */

static void
parms_set (x, pat, data)
     rtx x, pat ATTRIBUTE_UNUSED;
     void *data;
{
  struct parms_set_data *d = data;

  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}
/* Look backward for the first parameter to be loaded.
   Do not search past BOUNDARY.  */

rtx
find_first_parameter_load (call_insn, boundary)
     rtx call_insn, boundary;
{
  struct parms_set_data parm;
  rtx p, before;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
      {
        if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
          abort ();

        /* We only care about registers which can hold function
           arguments.  */
        if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
          continue;

        SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
        parm.nregs++;
      }
  before = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (GET_CODE (before) == CALL_INSN)
        break;

      /* Our caller must either ensure that we will find all sets
         (in case the code has not been optimized yet), or take care
         of possible labels by setting BOUNDARY to the preceding
         CODE_LABEL.  */
      if (GET_CODE (before) == CODE_LABEL)
        {
          if (before != boundary)
            abort ();
          break;
        }

      if (INSN_P (before))
        note_stores (PATTERN (before), parms_set, &parm);
    }
  return before;
}
/* Return true if we should avoid inserting code between INSN and the
   preceding call instruction.  */

bool
keep_with_call_p (insn)
     rtx insn;
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (GET_CODE (SET_DEST (set)) == REG
          && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
          && fixed_regs[REGNO (SET_DEST (set))]
          && general_operand (SET_SRC (set), VOIDmode))
        return true;
      if (GET_CODE (SET_SRC (set)) == REG
          && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
          && GET_CODE (SET_DEST (set)) == REG
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
        return true;
      /* There may be a stack pop just after the call and before the store
         of the return register.  Search for the actual store when deciding
         if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
        {
          rtx i2 = next_nonnote_insn (insn);
          if (i2 && keep_with_call_p (i2))
            return true;
        }
    }
  return false;
}
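
/* Editor's illustration (not part of the original file): a hypothetical
   helper that skips over the group of instructions which must stay
   glued to a preceding call, so that new code is inserted after the
   whole group rather than between the call and its fixup insns.  */

static rtx
skip_call_fixup_insns (call_insn)
     rtx call_insn;
{
  rtx insn = call_insn;
  rtx next;

  while ((next = next_nonnote_insn (insn)) != NULL_RTX
         && keep_with_call_p (next))
    insn = next;
  return insn;
}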
/* Return true when a store to register X can be hoisted to a place with
   LIVE registers (LIVE can be NULL when unknown).  VAL is the destination
   whose value will be used.  */

static bool
hoist_test_store (x, val, live)
     rtx x, val;
     regset live;
{
  if (GET_CODE (x) == SCRATCH)
    return true;

  if (rtx_equal_p (x, val))
    return true;

  /* Allow a SUBREG of X as long as it does not write just part of a
     multi-register pseudo; otherwise every user would have to be updated
     to take the hoisted store into account.  The caller may represent
     that case by specifying the whole SUBREG as VAL.  */
  if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
    {
      if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
          && GET_MODE_BITSIZE (GET_MODE (x)) <
          GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
        return false;
      return true;
    }
  if (GET_CODE (x) == SUBREG)
    x = SUBREG_REG (x);

  /* Anything other than a register store is not hoistable.  This includes
     partial stores to registers.  */
  if (!REG_P (x))
    return false;

  /* A pseudo register can always be replaced by another pseudo to avoid
     the side effect; for a hard register we must ensure that it is dead.
     Eventually we may want to add code that tries to turn pseudos into
     hard registers, but that is unlikely to be useful.  */
  if (REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int regno = REGNO (x);
      int n = HARD_REGNO_NREGS (regno, GET_MODE (x));

      if (!live)
        return false;
      if (REGNO_REG_SET_P (live, regno))
        return false;
      while (--n > 0)
        if (REGNO_REG_SET_P (live, regno + n))
          return false;
    }
  return true;
}
/* Return true if INSN can be hoisted to a place with LIVE hard registers
   (LIVE can be NULL when unknown).  VAL is expected to be stored by the
   insn and used by the hoisting pass.  */

bool
can_hoist_insn_p (insn, val, live)
     rtx insn, val;
     regset live;
{
  rtx pat = PATTERN (insn);
  int i;

  /* It is probably not worth the complexity to handle insns with
     multiple sets.  */
  if (!single_set (insn))
    return false;
  /* We can move a CALL_INSN, but we would need to check that all
     caller-clobbered registers are dead; give up for now.  */
  if (GET_CODE (insn) == CALL_INSN)
    return false;
  /* In the future we will handle hoisting of libcall sequences, but
     give up for now.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return false;
  switch (GET_CODE (pat))
    {
    case SET:
      if (!hoist_test_store (SET_DEST (pat), val, live))
        return false;
      break;
    case USE:
      /* USEs have tricky semantics, so do not move them.  */
      return false;
    case CLOBBER:
      if (!hoist_test_store (XEXP (pat, 0), val, live))
        return false;
      break;
    case PARALLEL:
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          switch (GET_CODE (x))
            {
            case SET:
              if (!hoist_test_store (SET_DEST (x), val, live))
                return false;
              break;
            case USE:
              /* We would need to fix callers to really ensure availability
                 of all values the insn uses, but for now it is safe to
                 prohibit hoisting of any insn having such hidden uses.  */
              return false;
            case CLOBBER:
              if (!hoist_test_store (SET_DEST (x), val, live))
                return false;
              break;
            default:
              break;
            }
        }
      break;
    default:
      abort ();
    }
  return true;
}
/* Update the store after hoisting: replace all stores to pseudo registers
   by new ones, to avoid clobbering values, except for the store to VAL,
   which will be updated to NEW.  */

static void
hoist_update_store (insn, xp, val, new)
     rtx insn, *xp, val, new;
{
  rtx x = *xp;

  if (GET_CODE (x) == SCRATCH)
    return;

  if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
    validate_change (insn, xp,
                     simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
                                          SUBREG_BYTE (x)), 1);
  if (rtx_equal_p (x, val))
    {
      validate_change (insn, xp, new, 1);
      return;
    }
  if (GET_CODE (x) == SUBREG)
    {
      xp = &SUBREG_REG (x);
      x = *xp;
    }

  if (!REG_P (x))
    abort ();

  /* We've verified that hard registers are dead, so we may keep the side
     effect.  Otherwise replace it by a new pseudo.  */
  if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
    validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
  REG_NOTES (insn)
    = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
}
/* Create a copy of INSN after AFTER, replacing the store to VAL by a store
   to NEW and every other side effect on a pseudo register by a store to a
   fresh pseudo register.  */

rtx
hoist_insn_after (insn, after, val, new)
     rtx insn, after, val, new;
{
  rtx pat;
  int i;
  rtx note;

  insn = emit_copy_of_insn_after (insn, after);
  pat = PATTERN (insn);

  /* Remove REG_UNUSED notes as we will re-emit them.  */
  while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
    remove_note (insn, note);

  /* To get this working, callers must ensure that everything referenced
     by REG_EQUAL/REG_EQUIV notes is moved too.  Let's simply remove the
     notes; it is easier.  */
  while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
    remove_note (insn, note);
  while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
    remove_note (insn, note);

  /* Remove REG_DEAD notes as they might not be valid anymore in case
     we create redundancy.  */
  while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
    remove_note (insn, note);
  switch (GET_CODE (pat))
    {
    case SET:
      hoist_update_store (insn, &SET_DEST (pat), val, new);
      break;
    case USE:
      break;
    case CLOBBER:
      hoist_update_store (insn, &XEXP (pat, 0), val, new);
      break;
    case PARALLEL:
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          switch (GET_CODE (x))
            {
            case SET:
              hoist_update_store (insn, &SET_DEST (x), val, new);
              break;
            case USE:
              break;
            case CLOBBER:
              hoist_update_store (insn, &SET_DEST (x), val, new);
              break;
            default:
              break;
            }
        }
      break;
    default:
      abort ();
    }
  if (!apply_change_group ())
    abort ();

  return insn;
}
/* Emit a hoisted copy of INSN on edge E, replacing the store to VAL by a
   store to NEW (see hoist_insn_after); the copy is queued on the edge's
   pending instruction list.  */

rtx
hoist_insn_to_edge (insn, e, val, new)
     rtx insn, val, new;
     edge e;
{
  rtx new_insn;

  /* We cannot insert instructions on an abnormal critical edge.
     It will be easier to find the culprit if we die now.  */
  if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
    abort ();

  /* Do not use emit_insn_on_edge as we want to preserve notes and similar
     stuff.  We also emit CALL_INSNs and friends.  */
  if (e->insns == NULL_RTX)
    {
      start_sequence ();
      emit_note (NULL, NOTE_INSN_DELETED);
    }
  else
    push_to_sequence (e->insns);

  new_insn = hoist_insn_after (insn, get_last_insn (), val, new);

  e->insns = get_insns ();
  end_sequence ();
  return new_insn;
}
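
/* Editor's illustration (not part of the original file): the intended
   pairing of can_hoist_insn_p and hoist_insn_to_edge.  A hoisting pass
   would first check the insn against the registers live at the target
   edge and only then emit the copy; the helper name try_hoist_to_edge
   and the way LIVE is obtained are hypothetical.  */

static rtx
try_hoist_to_edge (insn, e, val, live)
     rtx insn, val;
     edge e;
     regset live;
{
  if (!can_hoist_insn_p (insn, val, live))
    return NULL_RTX;
  return hoist_insn_to_edge (insn, e, val, gen_reg_rtx (GET_MODE (val)));
}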