1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
32 #include "basic-block.h"
34 /* Forward declarations */
35 static int global_reg_mentioned_p_1
PARAMS ((rtx
*, void *));
36 static void set_of_1
PARAMS ((rtx
, rtx
, void *));
37 static void insn_dependent_p_1
PARAMS ((rtx
, rtx
, void *));
38 static int computed_jump_p_1
PARAMS ((rtx
));
39 static void parms_set
PARAMS ((rtx
, rtx
, void *));
40 static bool hoist_test_store
PARAMS ((rtx
, rtx
, regset
));
41 static void hoist_update_store
PARAMS ((rtx
, rtx
*, rtx
, rtx
));
43 /* Bit flags that specify the machine subtype we are compiling for.
44 Bits are tested using macros TARGET_... defined in the tm.h file
45 and set by `-m...' switches. Must be defined in rtlanal.c. */
49 /* Return 1 if the value of X is unstable
50 (would be different at a different point in the program).
51 The frame pointer, arg pointer, etc. are considered stable
52 (within one function) and so is anything marked `unchanging'. */
58 RTX_CODE code
= GET_CODE (x
);
65 return ! RTX_UNCHANGING_P (x
) || rtx_unstable_p (XEXP (x
, 0));
80 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
81 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
82 /* The arg pointer varies if it is not a fixed register. */
83 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
])
84 || RTX_UNCHANGING_P (x
))
86 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
87 /* ??? When call-clobbered, the value is stable modulo the restore
88 that must happen after a call. This currently screws up local-alloc
89 into believing that the restore is not needed. */
90 if (x
== pic_offset_table_rtx
)
96 if (MEM_VOLATILE_P (x
))
105 fmt
= GET_RTX_FORMAT (code
);
106 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
109 if (rtx_unstable_p (XEXP (x
, i
)))
112 else if (fmt
[i
] == 'E')
115 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
116 if (rtx_unstable_p (XVECEXP (x
, i
, j
)))
123 /* Return 1 if X has a value that can vary even between two
124 executions of the program. 0 means X can be compared reliably
125 against certain constants or near-constants.
126 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
127 zero, we are slightly more conservative.
128 The frame pointer and the arg pointer are considered constant. */
131 rtx_varies_p (x
, for_alias
)
135 RTX_CODE code
= GET_CODE (x
);
142 return ! RTX_UNCHANGING_P (x
) || rtx_varies_p (XEXP (x
, 0), for_alias
);
156 /* Note that we have to test for the actual rtx used for the frame
157 and arg pointers and not just the register number in case we have
158 eliminated the frame and/or arg pointer and are using it
160 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
161 /* The arg pointer varies if it is not a fixed register. */
162 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
164 if (x
== pic_offset_table_rtx
165 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
166 /* ??? When call-clobbered, the value is stable modulo the restore
167 that must happen after a call. This currently screws up
168 local-alloc into believing that the restore is not needed, so we
169 must return 0 only if we are called from alias analysis. */
177 /* The operand 0 of a LO_SUM is considered constant
178 (in fact it is related specifically to operand 1)
179 during alias analysis. */
180 return (! for_alias
&& rtx_varies_p (XEXP (x
, 0), for_alias
))
181 || rtx_varies_p (XEXP (x
, 1), for_alias
);
184 if (MEM_VOLATILE_P (x
))
193 fmt
= GET_RTX_FORMAT (code
);
194 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
197 if (rtx_varies_p (XEXP (x
, i
), for_alias
))
200 else if (fmt
[i
] == 'E')
203 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
204 if (rtx_varies_p (XVECEXP (x
, i
, j
), for_alias
))
211 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
214 rtx_addr_can_trap_p (x
)
217 enum rtx_code code
= GET_CODE (x
);
222 return SYMBOL_REF_WEAK (x
);
228 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
229 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
230 || x
== stack_pointer_rtx
231 /* The arg pointer varies if it is not a fixed register. */
232 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
234 /* All of the virtual frame registers are stack references. */
235 if (REGNO (x
) >= FIRST_VIRTUAL_REGISTER
236 && REGNO (x
) <= LAST_VIRTUAL_REGISTER
)
241 return rtx_addr_can_trap_p (XEXP (x
, 0));
244 /* An address is assumed not to trap if it is an address that can't
245 trap plus a constant integer or it is the pic register plus a
247 return ! ((! rtx_addr_can_trap_p (XEXP (x
, 0))
248 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
249 || (XEXP (x
, 0) == pic_offset_table_rtx
250 && CONSTANT_P (XEXP (x
, 1))));
254 return rtx_addr_can_trap_p (XEXP (x
, 1));
261 return rtx_addr_can_trap_p (XEXP (x
, 0));
267 /* If it isn't one of the case above, it can cause a trap. */
271 /* Return 1 if X refers to a memory location whose address
272 cannot be compared reliably with constant addresses,
273 or if X refers to a BLKmode memory object.
274 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
275 zero, we are slightly more conservative. */
278 rtx_addr_varies_p (x
, for_alias
)
291 return GET_MODE (x
) == BLKmode
|| rtx_varies_p (XEXP (x
, 0), for_alias
);
293 fmt
= GET_RTX_FORMAT (code
);
294 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
297 if (rtx_addr_varies_p (XEXP (x
, i
), for_alias
))
300 else if (fmt
[i
] == 'E')
303 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
304 if (rtx_addr_varies_p (XVECEXP (x
, i
, j
), for_alias
))
310 /* Return the value of the integer term in X, if one is apparent;
312 Only obvious integer terms are detected.
313 This is used in cse.c with the `related_value' field. */
319 if (GET_CODE (x
) == CONST
)
322 if (GET_CODE (x
) == MINUS
323 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
324 return - INTVAL (XEXP (x
, 1));
325 if (GET_CODE (x
) == PLUS
326 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
327 return INTVAL (XEXP (x
, 1));
331 /* If X is a constant, return the value sans apparent integer term;
333 Only obvious integer terms are detected. */
336 get_related_value (x
)
339 if (GET_CODE (x
) != CONST
)
342 if (GET_CODE (x
) == PLUS
343 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
345 else if (GET_CODE (x
) == MINUS
346 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
351 /* Given a tablejump insn INSN, return the RTL expression for the offset
352 into the jump table. If the offset cannot be determined, then return
355 If EARLIEST is non-zero, it is a pointer to a place where the earliest
356 insn used in locating the offset was found. */
359 get_jump_table_offset (insn
, earliest
)
373 if (GET_CODE (insn
) != JUMP_INSN
374 || ! (label
= JUMP_LABEL (insn
))
375 || ! (table
= NEXT_INSN (label
))
376 || GET_CODE (table
) != JUMP_INSN
377 || (GET_CODE (PATTERN (table
)) != ADDR_VEC
378 && GET_CODE (PATTERN (table
)) != ADDR_DIFF_VEC
)
379 || ! (set
= single_set (insn
)))
384 /* Some targets (eg, ARM) emit a tablejump that also
385 contains the out-of-range target. */
386 if (GET_CODE (x
) == IF_THEN_ELSE
387 && GET_CODE (XEXP (x
, 2)) == LABEL_REF
)
390 /* Search backwards and locate the expression stored in X. */
391 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
392 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
395 /* If X is an expression using a relative address then strip
396 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
397 or the jump table label. */
398 if (GET_CODE (PATTERN (table
)) == ADDR_DIFF_VEC
399 && (GET_CODE (x
) == PLUS
|| GET_CODE (x
) == MINUS
))
401 for (i
= 0; i
< 2; i
++)
406 if (y
== pc_rtx
|| y
== pic_offset_table_rtx
)
409 for (old_y
= NULL_RTX
; GET_CODE (y
) == REG
&& y
!= old_y
;
410 old_y
= y
, y
= find_last_value (y
, &old_insn
, NULL_RTX
, 0))
413 if ((GET_CODE (y
) == LABEL_REF
&& XEXP (y
, 0) == label
))
422 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
423 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
427 /* Strip off any sign or zero extension. */
428 if (GET_CODE (x
) == SIGN_EXTEND
|| GET_CODE (x
) == ZERO_EXTEND
)
432 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
433 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
437 /* If X isn't a MEM then this isn't a tablejump we understand. */
438 if (GET_CODE (x
) != MEM
)
441 /* Strip off the MEM. */
444 for (old_x
= NULL_RTX
; GET_CODE (x
) == REG
&& x
!= old_x
;
445 old_x
= x
, x
= find_last_value (x
, &insn
, NULL_RTX
, 0))
449 /* If X isn't a PLUS then this isn't a tablejump we understand. */
449 if (GET_CODE (x
) != PLUS
)
452 /* At this point we should have an expression representing the jump table
453 plus an offset. Examine each operand in order to determine which one
454 represents the jump table. Knowing that tells us that the other operand
455 must represent the offset. */
456 for (i
= 0; i
< 2; i
++)
461 for (old_y
= NULL_RTX
; GET_CODE (y
) == REG
&& y
!= old_y
;
462 old_y
= y
, y
= find_last_value (y
, &old_insn
, NULL_RTX
, 0))
465 if ((GET_CODE (y
) == CONST
|| GET_CODE (y
) == LABEL_REF
)
466 && reg_mentioned_p (label
, y
))
475 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
476 if (GET_CODE (x
) == PLUS
|| GET_CODE (x
) == MINUS
)
477 for (i
= 0; i
< 2; i
++)
478 if (XEXP (x
, i
) == pic_offset_table_rtx
)
487 /* Return the RTL expression representing the offset. */
491 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
492 a global register. */
495 global_reg_mentioned_p_1 (loc
, data
)
497 void *data ATTRIBUTE_UNUSED
;
505 switch (GET_CODE (x
))
508 if (GET_CODE (SUBREG_REG (x
)) == REG
)
510 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
511 && global_regs
[subreg_regno (x
)])
519 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
533 /* A non-constant call might use a global register. */
543 /* Returns non-zero if X mentions a global register. */
546 global_reg_mentioned_p (x
)
552 if (GET_CODE (x
) == CALL_INSN
)
554 if (! CONST_OR_PURE_CALL_P (x
))
556 x
= CALL_INSN_FUNCTION_USAGE (x
);
564 return for_each_rtx (&x
, global_reg_mentioned_p_1
, NULL
);
567 /* Return the number of places FIND appears within X. If COUNT_DEST is
568 zero, we do not count occurrences inside the destination of a SET. */
571 count_occurrences (x
, find
, count_dest
)
577 const char *format_ptr
;
598 if (GET_CODE (find
) == MEM
&& rtx_equal_p (x
, find
))
603 if (SET_DEST (x
) == find
&& ! count_dest
)
604 return count_occurrences (SET_SRC (x
), find
, count_dest
);
611 format_ptr
= GET_RTX_FORMAT (code
);
614 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
616 switch (*format_ptr
++)
619 count
+= count_occurrences (XEXP (x
, i
), find
, count_dest
);
623 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
624 count
+= count_occurrences (XVECEXP (x
, i
, j
), find
, count_dest
);
631 /* Nonzero if register REG appears somewhere within IN.
632 Also works if REG is not a register; in this case it checks
633 for a subexpression of IN that is Lisp "equal" to REG. */
636 reg_mentioned_p (reg
, in
)
649 if (GET_CODE (in
) == LABEL_REF
)
650 return reg
== XEXP (in
, 0);
652 code
= GET_CODE (in
);
656 /* Compare registers by number. */
658 return GET_CODE (reg
) == REG
&& REGNO (in
) == REGNO (reg
);
660 /* These codes have no constituent expressions
668 return GET_CODE (reg
) == CONST_INT
&& INTVAL (in
) == INTVAL (reg
);
672 /* These are kept unique for a given value. */
679 if (GET_CODE (reg
) == code
&& rtx_equal_p (reg
, in
))
682 fmt
= GET_RTX_FORMAT (code
);
684 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
689 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
690 if (reg_mentioned_p (reg
, XVECEXP (in
, i
, j
)))
693 else if (fmt
[i
] == 'e'
694 && reg_mentioned_p (reg
, XEXP (in
, i
)))
700 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
701 no CODE_LABEL insn. */
704 no_labels_between_p (beg
, end
)
710 for (p
= NEXT_INSN (beg
); p
!= end
; p
= NEXT_INSN (p
))
711 if (GET_CODE (p
) == CODE_LABEL
)
716 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
717 no JUMP_INSN insn. */
720 no_jumps_between_p (beg
, end
)
724 for (p
= NEXT_INSN (beg
); p
!= end
; p
= NEXT_INSN (p
))
725 if (GET_CODE (p
) == JUMP_INSN
)
730 /* Nonzero if register REG is used in an insn between
731 FROM_INSN and TO_INSN (exclusive of those two). */
734 reg_used_between_p (reg
, from_insn
, to_insn
)
735 rtx reg
, from_insn
, to_insn
;
739 if (from_insn
== to_insn
)
742 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
744 && (reg_overlap_mentioned_p (reg
, PATTERN (insn
))
745 || (GET_CODE (insn
) == CALL_INSN
746 && (find_reg_fusage (insn
, USE
, reg
)
747 || find_reg_fusage (insn
, CLOBBER
, reg
)))))
752 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
753 is entirely replaced by a new value and the only use is as a SET_DEST,
754 we do not consider it a reference. */
757 reg_referenced_p (x
, body
)
763 switch (GET_CODE (body
))
766 if (reg_overlap_mentioned_p (x
, SET_SRC (body
)))
769 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
770 of a REG that occupies all of the REG, the insn references X if
771 it is mentioned in the destination. */
772 if (GET_CODE (SET_DEST (body
)) != CC0
773 && GET_CODE (SET_DEST (body
)) != PC
774 && GET_CODE (SET_DEST (body
)) != REG
775 && ! (GET_CODE (SET_DEST (body
)) == SUBREG
776 && GET_CODE (SUBREG_REG (SET_DEST (body
))) == REG
777 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body
))))
778 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
779 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body
)))
780 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)))
781 && reg_overlap_mentioned_p (x
, SET_DEST (body
)))
786 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
787 if (reg_overlap_mentioned_p (x
, ASM_OPERANDS_INPUT (body
, i
)))
794 return reg_overlap_mentioned_p (x
, body
);
797 return reg_overlap_mentioned_p (x
, TRAP_CONDITION (body
));
800 return reg_overlap_mentioned_p (x
, XEXP (body
, 0));
803 case UNSPEC_VOLATILE
:
804 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
805 if (reg_overlap_mentioned_p (x
, XVECEXP (body
, 0, i
)))
810 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
811 if (reg_referenced_p (x
, XVECEXP (body
, 0, i
)))
816 if (GET_CODE (XEXP (body
, 0)) == MEM
)
817 if (reg_overlap_mentioned_p (x
, XEXP (XEXP (body
, 0), 0)))
822 if (reg_overlap_mentioned_p (x
, COND_EXEC_TEST (body
)))
824 return reg_referenced_p (x
, COND_EXEC_CODE (body
));
831 /* Nonzero if register REG is referenced in an insn between
832 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
836 reg_referenced_between_p (reg
, from_insn
, to_insn
)
837 rtx reg
, from_insn
, to_insn
;
841 if (from_insn
== to_insn
)
844 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
846 && (reg_referenced_p (reg
, PATTERN (insn
))
847 || (GET_CODE (insn
) == CALL_INSN
848 && find_reg_fusage (insn
, USE
, reg
))))
853 /* Nonzero if register REG is set or clobbered in an insn between
854 FROM_INSN and TO_INSN (exclusive of those two). */
857 reg_set_between_p (reg
, from_insn
, to_insn
)
858 rtx reg
, from_insn
, to_insn
;
862 if (from_insn
== to_insn
)
865 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
866 if (INSN_P (insn
) && reg_set_p (reg
, insn
))
871 /* Internals of reg_set_between_p. */
873 reg_set_p (reg
, insn
)
878 /* We can be passed an insn or part of one. If we are passed an insn,
879 check if a side-effect of the insn clobbers REG. */
882 if (FIND_REG_INC_NOTE (insn
, reg
)
883 || (GET_CODE (insn
) == CALL_INSN
884 /* We'd like to test call_used_regs here, but rtlanal.c can't
885 reference that variable due to its use in genattrtab. So
886 we'll just be more conservative.
888 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
889 information holds all clobbered registers. */
890 && ((GET_CODE (reg
) == REG
891 && REGNO (reg
) < FIRST_PSEUDO_REGISTER
)
892 || GET_CODE (reg
) == MEM
893 || find_reg_fusage (insn
, CLOBBER
, reg
))))
896 body
= PATTERN (insn
);
899 return set_of (reg
, insn
) != NULL_RTX
;
902 /* Similar to reg_set_between_p, but check all registers in X. Return 0
903 only if none of them are modified between START and END. Do not
904 consider non-registers one way or the other. */
907 regs_set_between_p (x
, start
, end
)
911 enum rtx_code code
= GET_CODE (x
);
928 return reg_set_between_p (x
, start
, end
);
934 fmt
= GET_RTX_FORMAT (code
);
935 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
937 if (fmt
[i
] == 'e' && regs_set_between_p (XEXP (x
, i
), start
, end
))
940 else if (fmt
[i
] == 'E')
941 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
942 if (regs_set_between_p (XVECEXP (x
, i
, j
), start
, end
))
949 /* Similar to reg_set_between_p, but check all registers in X. Return 0
950 only if none of them are modified between START and END. Return 1 if
951 X contains a MEM; this routine does not perform any memory aliasing. */
954 modified_between_p (x
, start
, end
)
958 enum rtx_code code
= GET_CODE (x
);
977 /* If the memory is not constant, assume it is modified. If it is
978 constant, we still have to check the address. */
979 if (! RTX_UNCHANGING_P (x
))
984 return reg_set_between_p (x
, start
, end
);
990 fmt
= GET_RTX_FORMAT (code
);
991 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
993 if (fmt
[i
] == 'e' && modified_between_p (XEXP (x
, i
), start
, end
))
996 else if (fmt
[i
] == 'E')
997 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
998 if (modified_between_p (XVECEXP (x
, i
, j
), start
, end
))
1005 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1006 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1007 does not perform any memory aliasing. */
1010 modified_in_p (x
, insn
)
1014 enum rtx_code code
= GET_CODE (x
);
1033 /* If the memory is not constant, assume it is modified. If it is
1034 constant, we still have to check the address. */
1035 if (! RTX_UNCHANGING_P (x
))
1040 return reg_set_p (x
, insn
);
1046 fmt
= GET_RTX_FORMAT (code
);
1047 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1049 if (fmt
[i
] == 'e' && modified_in_p (XEXP (x
, i
), insn
))
1052 else if (fmt
[i
] == 'E')
1053 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1054 if (modified_in_p (XVECEXP (x
, i
, j
), insn
))
1061 /* Return true if anything in insn X is (anti,output,true) dependent on
1062 anything in insn Y. */
1065 insn_dependent_p (x
, y
)
1070 if (! INSN_P (x
) || ! INSN_P (y
))
1074 note_stores (PATTERN (x
), insn_dependent_p_1
, &tmp
);
1075 if (tmp
== NULL_RTX
)
1079 note_stores (PATTERN (y
), insn_dependent_p_1
, &tmp
);
1080 if (tmp
== NULL_RTX
)
1086 /* A helper routine for insn_dependent_p called through note_stores. */
1089 insn_dependent_p_1 (x
, pat
, data
)
1091 rtx pat ATTRIBUTE_UNUSED
;
1094 rtx
* pinsn
= (rtx
*) data
;
1096 if (*pinsn
&& reg_mentioned_p (x
, *pinsn
))
1100 /* Helper function for set_of. */
1108 set_of_1 (x
, pat
, data1
)
1113 struct set_of_data
*data
= (struct set_of_data
*) (data1
);
1114 if (rtx_equal_p (x
, data
->pat
)
1115 || (GET_CODE (x
) != MEM
&& reg_overlap_mentioned_p (data
->pat
, x
)))
1119 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1120 (either directly or via STRICT_LOW_PART and similar modifiers). */
1125 struct set_of_data data
;
1126 data
.found
= NULL_RTX
;
1128 note_stores (INSN_P (insn
) ? PATTERN (insn
) : insn
, set_of_1
, &data
);
1132 /* Given an INSN, return a SET expression if this insn has only a single SET.
1133 It may also have CLOBBERs, USEs, or SET whose output
1134 will not be used, which we ignore. */
1137 single_set_2 (insn
, pat
)
1141 int set_verified
= 1;
1144 if (GET_CODE (pat
) == PARALLEL
)
1146 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1148 rtx sub
= XVECEXP (pat
, 0, i
);
1149 switch (GET_CODE (sub
))
1156 /* We can consider insns having multiple sets, where all
1157 but one are dead as single set insns. In common case
1158 only single set is present in the pattern so we want
1159 to avoid checking for REG_UNUSED notes unless necessary.
1161 When we reach set first time, we just expect this is
1162 the single set we are looking for and only when more
1163 sets are found in the insn, we check them. */
1166 if (find_reg_note (insn
, REG_UNUSED
, SET_DEST (set
))
1167 && !side_effects_p (set
))
1173 set
= sub
, set_verified
= 0;
1174 else if (!find_reg_note (insn
, REG_UNUSED
, SET_DEST (sub
))
1175 || side_effects_p (sub
))
1187 /* Given an INSN, return nonzero if it has more than one SET, else return
1191 multiple_sets (insn
)
1197 /* INSN must be an insn. */
1198 if (! INSN_P (insn
))
1201 /* Only a PARALLEL can have multiple SETs. */
1202 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
1204 for (i
= 0, found
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1205 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == SET
)
1207 /* If we have already found a SET, then return now. */
1215 /* Either zero or one SET. */
1219 /* Return nonzero if the destination of SET equals the source
1220 and there are no side effects. */
1226 rtx src
= SET_SRC (set
);
1227 rtx dst
= SET_DEST (set
);
1229 if (side_effects_p (src
) || side_effects_p (dst
))
1232 if (GET_CODE (dst
) == MEM
&& GET_CODE (src
) == MEM
)
1233 return rtx_equal_p (dst
, src
);
1235 if (dst
== pc_rtx
&& src
== pc_rtx
)
1238 if (GET_CODE (dst
) == SIGN_EXTRACT
1239 || GET_CODE (dst
) == ZERO_EXTRACT
)
1240 return rtx_equal_p (XEXP (dst
, 0), src
)
1241 && ! BYTES_BIG_ENDIAN
&& XEXP (dst
, 2) == const0_rtx
;
1243 if (GET_CODE (dst
) == STRICT_LOW_PART
)
1244 dst
= XEXP (dst
, 0);
1246 if (GET_CODE (src
) == SUBREG
&& GET_CODE (dst
) == SUBREG
)
1248 if (SUBREG_BYTE (src
) != SUBREG_BYTE (dst
))
1250 src
= SUBREG_REG (src
);
1251 dst
= SUBREG_REG (dst
);
1254 return (GET_CODE (src
) == REG
&& GET_CODE (dst
) == REG
1255 && REGNO (src
) == REGNO (dst
));
1258 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1265 rtx pat
= PATTERN (insn
);
1267 if (INSN_CODE (insn
) == NOOP_MOVE_INSN_CODE
)
1270 /* Insns carrying these notes are useful later on. */
1271 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
1274 /* For now treat an insn with a REG_RETVAL note as a
1275 special insn which should not be considered a no-op. */
1276 if (find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
1279 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
1282 if (GET_CODE (pat
) == PARALLEL
)
1285 /* If nothing but SETs of registers to themselves,
1286 this insn can also be deleted. */
1287 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1289 rtx tem
= XVECEXP (pat
, 0, i
);
1291 if (GET_CODE (tem
) == USE
1292 || GET_CODE (tem
) == CLOBBER
)
1295 if (GET_CODE (tem
) != SET
|| ! set_noop_p (tem
))
1305 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1306 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1307 If the object was modified, if we hit a partial assignment to X, or hit a
1308 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1309 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1313 find_last_value (x
, pinsn
, valid_to
, allow_hwreg
)
1321 for (p
= PREV_INSN (*pinsn
); p
&& GET_CODE (p
) != CODE_LABEL
;
1325 rtx set
= single_set (p
);
1326 rtx note
= find_reg_note (p
, REG_EQUAL
, NULL_RTX
);
1328 if (set
&& rtx_equal_p (x
, SET_DEST (set
)))
1330 rtx src
= SET_SRC (set
);
1332 if (note
&& GET_CODE (XEXP (note
, 0)) != EXPR_LIST
)
1333 src
= XEXP (note
, 0);
1335 if ((valid_to
== NULL_RTX
1336 || ! modified_between_p (src
, PREV_INSN (p
), valid_to
))
1337 /* Reject hard registers because we don't usually want
1338 to use them; we'd rather use a pseudo. */
1339 && (! (GET_CODE (src
) == REG
1340 && REGNO (src
) < FIRST_PSEUDO_REGISTER
) || allow_hwreg
))
1347 /* If set in non-simple way, we don't have a value. */
1348 if (reg_set_p (x
, p
))
1355 /* Return nonzero if register in range [REGNO, ENDREGNO)
1356 appears either explicitly or implicitly in X
1357 other than being stored into.
1359 References contained within the substructure at LOC do not count.
1360 LOC may be zero, meaning don't ignore anything. */
1363 refers_to_regno_p (regno
, endregno
, x
, loc
)
1364 unsigned int regno
, endregno
;
1369 unsigned int x_regno
;
1374 /* The contents of a REG_NONNEG note is always zero, so we must come here
1375 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1379 code
= GET_CODE (x
);
1384 x_regno
= REGNO (x
);
1386 /* If we modifying the stack, frame, or argument pointer, it will
1387 clobber a virtual register. In fact, we could be more precise,
1388 but it isn't worth it. */
1389 if ((x_regno
== STACK_POINTER_REGNUM
1390 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1391 || x_regno
== ARG_POINTER_REGNUM
1393 || x_regno
== FRAME_POINTER_REGNUM
)
1394 && regno
>= FIRST_VIRTUAL_REGISTER
&& regno
<= LAST_VIRTUAL_REGISTER
)
1397 return (endregno
> x_regno
1398 && regno
< x_regno
+ (x_regno
< FIRST_PSEUDO_REGISTER
1399 ? HARD_REGNO_NREGS (x_regno
, GET_MODE (x
))
1403 /* If this is a SUBREG of a hard reg, we can see exactly which
1404 registers are being modified. Otherwise, handle normally. */
1405 if (GET_CODE (SUBREG_REG (x
)) == REG
1406 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
1408 unsigned int inner_regno
= subreg_regno (x
);
1409 unsigned int inner_endregno
1410 = inner_regno
+ (inner_regno
< FIRST_PSEUDO_REGISTER
1411 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
1413 return endregno
> inner_regno
&& regno
< inner_endregno
;
1419 if (&SET_DEST (x
) != loc
1420 /* Note setting a SUBREG counts as referring to the REG it is in for
1421 a pseudo but not for hard registers since we can
1422 treat each word individually. */
1423 && ((GET_CODE (SET_DEST (x
)) == SUBREG
1424 && loc
!= &SUBREG_REG (SET_DEST (x
))
1425 && GET_CODE (SUBREG_REG (SET_DEST (x
))) == REG
1426 && REGNO (SUBREG_REG (SET_DEST (x
))) >= FIRST_PSEUDO_REGISTER
1427 && refers_to_regno_p (regno
, endregno
,
1428 SUBREG_REG (SET_DEST (x
)), loc
))
1429 || (GET_CODE (SET_DEST (x
)) != REG
1430 && refers_to_regno_p (regno
, endregno
, SET_DEST (x
), loc
))))
1433 if (code
== CLOBBER
|| loc
== &SET_SRC (x
))
1442 /* X does not match, so try its subexpressions. */
1444 fmt
= GET_RTX_FORMAT (code
);
1445 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1447 if (fmt
[i
] == 'e' && loc
!= &XEXP (x
, i
))
1455 if (refers_to_regno_p (regno
, endregno
, XEXP (x
, i
), loc
))
1458 else if (fmt
[i
] == 'E')
1461 for (j
= XVECLEN (x
, i
) - 1; j
>=0; j
--)
1462 if (loc
!= &XVECEXP (x
, i
, j
)
1463 && refers_to_regno_p (regno
, endregno
, XVECEXP (x
, i
, j
), loc
))
1470 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1471 we check if any register number in X conflicts with the relevant register
1472 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1473 contains a MEM (we don't bother checking for memory addresses that can't
1474 conflict because we expect this to be a rare case. */
1477 reg_overlap_mentioned_p (x
, in
)
1480 unsigned int regno
, endregno
;
1482 /* Overly conservative. */
1483 if (GET_CODE (x
) == STRICT_LOW_PART
)
1486 /* If either argument is a constant, then modifying X can not affect IN. */
1487 if (CONSTANT_P (x
) || CONSTANT_P (in
))
1490 switch (GET_CODE (x
))
1493 regno
= REGNO (SUBREG_REG (x
));
1494 if (regno
< FIRST_PSEUDO_REGISTER
)
1495 regno
= subreg_regno (x
);
1501 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
1502 ? HARD_REGNO_NREGS (regno
, GET_MODE (x
)) : 1);
1503 return refers_to_regno_p (regno
, endregno
, in
, (rtx
*) 0);
1510 if (GET_CODE (in
) == MEM
)
1513 fmt
= GET_RTX_FORMAT (GET_CODE (in
));
1514 for (i
= GET_RTX_LENGTH (GET_CODE (in
)) - 1; i
>= 0; i
--)
1515 if (fmt
[i
] == 'e' && reg_overlap_mentioned_p (x
, XEXP (in
, i
)))
1524 return reg_mentioned_p (x
, in
);
1530 /* If any register in here refers to it we return true. */
1531 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1532 if (XEXP (XVECEXP (x
, 0, i
), 0) != 0
1533 && reg_overlap_mentioned_p (XEXP (XVECEXP (x
, 0, i
), 0), in
))
1545 /* Return the last value to which REG was set prior to INSN. If we can't
1546 find it easily, return 0.
1548 We only return a REG, SUBREG, or constant because it is too hard to
1549 check if a MEM remains unchanged. */
1552 reg_set_last (x
, insn
)
1556 rtx orig_insn
= insn
;
1558 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1559 Stop when we reach a label or X is a hard reg and we reach a
1560 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1562 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1564 /* We compare with <= here, because reg_set_last_last_regno
1565 is actually the number of the first reg *not* in X. */
1567 insn
&& GET_CODE (insn
) != CODE_LABEL
1568 && ! (GET_CODE (insn
) == CALL_INSN
1569 && REGNO (x
) <= FIRST_PSEUDO_REGISTER
);
1570 insn
= PREV_INSN (insn
))
1573 rtx set
= set_of (x
, insn
);
1574 /* OK, this function modify our register. See if we understand it. */
1578 if (GET_CODE (set
) != SET
|| SET_DEST (set
) != x
)
1580 last_value
= SET_SRC (x
);
1581 if (CONSTANT_P (last_value
)
1582 || ((GET_CODE (last_value
) == REG
1583 || GET_CODE (last_value
) == SUBREG
)
1584 && ! reg_set_between_p (last_value
,
1595 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1596 (X would be the pattern of an insn).
1597 FUN receives two arguments:
1598 the REG, MEM, CC0 or PC being stored in or clobbered,
1599 the SET or CLOBBER rtx that does the store.
1601 If the item being stored in or clobbered is a SUBREG of a hard register,
1602 the SUBREG will be passed. */
1605 note_stores (x
, fun
, data
)
1607 void (*fun
) PARAMS ((rtx
, rtx
, void *));
1612 if (GET_CODE (x
) == COND_EXEC
)
1613 x
= COND_EXEC_CODE (x
);
1615 if (GET_CODE (x
) == SET
|| GET_CODE (x
) == CLOBBER
)
1617 rtx dest
= SET_DEST (x
);
1619 while ((GET_CODE (dest
) == SUBREG
1620 && (GET_CODE (SUBREG_REG (dest
)) != REG
1621 || REGNO (SUBREG_REG (dest
)) >= FIRST_PSEUDO_REGISTER
))
1622 || GET_CODE (dest
) == ZERO_EXTRACT
1623 || GET_CODE (dest
) == SIGN_EXTRACT
1624 || GET_CODE (dest
) == STRICT_LOW_PART
)
1625 dest
= XEXP (dest
, 0);
1627 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1628 each of whose first operand is a register. */
1629 if (GET_CODE (dest
) == PARALLEL
)
1631 for (i
= XVECLEN (dest
, 0) - 1; i
>= 0; i
--)
1632 if (XEXP (XVECEXP (dest
, 0, i
), 0) != 0)
1633 (*fun
) (XEXP (XVECEXP (dest
, 0, i
), 0), x
, data
);
1636 (*fun
) (dest
, x
, data
);
1639 else if (GET_CODE (x
) == PARALLEL
)
1640 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1641 note_stores (XVECEXP (x
, 0, i
), fun
, data
);
1644 /* Like note_stores, but call FUN for each expression that is being
1645 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1646 FUN for each expression, not any interior subexpressions. FUN receives a
1647 pointer to the expression and the DATA passed to this function.
1649 Note that this is not quite the same test as that done in reg_referenced_p
1650 since that considers something as being referenced if it is being
1651 partially set, while we do not. */
1654 note_uses (pbody
, fun
, data
)
1656 void (*fun
) PARAMS ((rtx
*, void *));
1662 switch (GET_CODE (body
))
1665 (*fun
) (&COND_EXEC_TEST (body
), data
);
1666 note_uses (&COND_EXEC_CODE (body
), fun
, data
);
1670 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1671 note_uses (&XVECEXP (body
, 0, i
), fun
, data
);
1675 (*fun
) (&XEXP (body
, 0), data
);
1679 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
1680 (*fun
) (&ASM_OPERANDS_INPUT (body
, i
), data
);
1684 (*fun
) (&TRAP_CONDITION (body
), data
);
1688 (*fun
) (&XEXP (body
, 0), data
);
1692 case UNSPEC_VOLATILE
:
1693 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1694 (*fun
) (&XVECEXP (body
, 0, i
), data
);
1698 if (GET_CODE (XEXP (body
, 0)) == MEM
)
1699 (*fun
) (&XEXP (XEXP (body
, 0), 0), data
);
1704 rtx dest
= SET_DEST (body
);
1706 /* For sets we replace everything in source plus registers in memory
1707 expression in store and operands of a ZERO_EXTRACT. */
1708 (*fun
) (&SET_SRC (body
), data
);
1710 if (GET_CODE (dest
) == ZERO_EXTRACT
)
1712 (*fun
) (&XEXP (dest
, 1), data
);
1713 (*fun
) (&XEXP (dest
, 2), data
);
1716 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
)
1717 dest
= XEXP (dest
, 0);
1719 if (GET_CODE (dest
) == MEM
)
1720 (*fun
) (&XEXP (dest
, 0), data
);
1725 /* All the other possibilities never store. */
1726 (*fun
) (pbody
, data
);
1731 /* Return nonzero if X's old contents don't survive after INSN.
1732 This will be true if X is (cc0) or if X is a register and
1733 X dies in INSN or because INSN entirely sets X.
1735 "Entirely set" means set directly and not through a SUBREG,
1736 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1737 Likewise, REG_INC does not count.
1739 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1740 but for this use that makes no difference, since regs don't overlap
1741 during their lifetimes. Therefore, this function may be used
1742 at any time after deaths have been computed (in flow.c).
1744 If REG is a hard reg that occupies multiple machine registers, this
1745 function will only return 1 if each of those registers will be replaced
1749 dead_or_set_p (insn
, x
)
1753 unsigned int regno
, last_regno
;
1756 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1757 if (GET_CODE (x
) == CC0
)
1760 if (GET_CODE (x
) != REG
)
1764 last_regno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
1765 : regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (x
)) - 1);
1767 for (i
= regno
; i
<= last_regno
; i
++)
1768 if (! dead_or_set_regno_p (insn
, i
))
1774 /* Utility function for dead_or_set_p to check an individual register. Also
1775 called from flow.c. */
1778 dead_or_set_regno_p (insn
, test_regno
)
1780 unsigned int test_regno
;
1782 unsigned int regno
, endregno
;
1785 /* See if there is a death note for something that includes TEST_REGNO. */
1786 if (find_regno_note (insn
, REG_DEAD
, test_regno
))
1789 if (GET_CODE (insn
) == CALL_INSN
1790 && find_regno_fusage (insn
, CLOBBER
, test_regno
))
1793 pattern
= PATTERN (insn
);
1795 if (GET_CODE (pattern
) == COND_EXEC
)
1796 pattern
= COND_EXEC_CODE (pattern
);
1798 if (GET_CODE (pattern
) == SET
)
1800 rtx dest
= SET_DEST (PATTERN (insn
));
1802 /* A value is totally replaced if it is the destination or the
1803 destination is a SUBREG of REGNO that does not change the number of
1805 if (GET_CODE (dest
) == SUBREG
1806 && (((GET_MODE_SIZE (GET_MODE (dest
))
1807 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
1808 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
1809 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)))
1810 dest
= SUBREG_REG (dest
);
1812 if (GET_CODE (dest
) != REG
)
1815 regno
= REGNO (dest
);
1816 endregno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
+ 1
1817 : regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (dest
)));
1819 return (test_regno
>= regno
&& test_regno
< endregno
);
1821 else if (GET_CODE (pattern
) == PARALLEL
)
1825 for (i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
1827 rtx body
= XVECEXP (pattern
, 0, i
);
1829 if (GET_CODE (body
) == COND_EXEC
)
1830 body
= COND_EXEC_CODE (body
);
1832 if (GET_CODE (body
) == SET
|| GET_CODE (body
) == CLOBBER
)
1834 rtx dest
= SET_DEST (body
);
1836 if (GET_CODE (dest
) == SUBREG
1837 && (((GET_MODE_SIZE (GET_MODE (dest
))
1838 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
1839 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
1840 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)))
1841 dest
= SUBREG_REG (dest
);
1843 if (GET_CODE (dest
) != REG
)
1846 regno
= REGNO (dest
);
1847 endregno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
+ 1
1848 : regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (dest
)));
1850 if (test_regno
>= regno
&& test_regno
< endregno
)
1859 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1860 If DATUM is nonzero, look for one whose datum is DATUM. */
1863 find_reg_note (insn
, kind
, datum
)
1870 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1871 if (! INSN_P (insn
))
1874 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1875 if (REG_NOTE_KIND (link
) == kind
1876 && (datum
== 0 || datum
== XEXP (link
, 0)))
1881 /* Return the reg-note of kind KIND in insn INSN which applies to register
1882 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1883 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1884 it might be the case that the note overlaps REGNO. */
1887 find_regno_note (insn
, kind
, regno
)
1894 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1895 if (! INSN_P (insn
))
1898 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1899 if (REG_NOTE_KIND (link
) == kind
1900 /* Verify that it is a register, so that scratch and MEM won't cause a
1902 && GET_CODE (XEXP (link
, 0)) == REG
1903 && REGNO (XEXP (link
, 0)) <= regno
1904 && ((REGNO (XEXP (link
, 0))
1905 + (REGNO (XEXP (link
, 0)) >= FIRST_PSEUDO_REGISTER
? 1
1906 : HARD_REGNO_NREGS (REGNO (XEXP (link
, 0)),
1907 GET_MODE (XEXP (link
, 0)))))
1913 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1917 find_reg_equal_equiv_note (insn
)
1922 if (single_set (insn
) == 0)
1924 else if ((note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
)) != 0)
1927 return find_reg_note (insn
, REG_EQUAL
, NULL_RTX
);
1930 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1931 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1934 find_reg_fusage (insn
, code
, datum
)
1939 /* If it's not a CALL_INSN, it can't possibly have a
1940 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1941 if (GET_CODE (insn
) != CALL_INSN
)
1947 if (GET_CODE (datum
) != REG
)
1951 for (link
= CALL_INSN_FUNCTION_USAGE (insn
);
1953 link
= XEXP (link
, 1))
1954 if (GET_CODE (XEXP (link
, 0)) == code
1955 && rtx_equal_p (datum
, XEXP (XEXP (link
, 0), 0)))
1960 unsigned int regno
= REGNO (datum
);
1962 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1963 to pseudo registers, so don't bother checking. */
1965 if (regno
< FIRST_PSEUDO_REGISTER
)
1967 unsigned int end_regno
1968 = regno
+ HARD_REGNO_NREGS (regno
, GET_MODE (datum
));
1971 for (i
= regno
; i
< end_regno
; i
++)
1972 if (find_regno_fusage (insn
, code
, i
))
1980 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1981 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1984 find_regno_fusage (insn
, code
, regno
)
1991 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1992 to pseudo registers, so don't bother checking. */
1994 if (regno
>= FIRST_PSEUDO_REGISTER
1995 || GET_CODE (insn
) != CALL_INSN
)
1998 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
2000 unsigned int regnote
;
2003 if (GET_CODE (op
= XEXP (link
, 0)) == code
2004 && GET_CODE (reg
= XEXP (op
, 0)) == REG
2005 && (regnote
= REGNO (reg
)) <= regno
2006 && regnote
+ HARD_REGNO_NREGS (regnote
, GET_MODE (reg
)) > regno
)
2013 /* Return true if INSN is a call to a pure function. */
2021 if (GET_CODE (insn
) != CALL_INSN
|| ! CONST_OR_PURE_CALL_P (insn
))
2024 /* Look for the note that differentiates const and pure functions. */
2025 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
2029 if (GET_CODE (u
= XEXP (link
, 0)) == USE
2030 && GET_CODE (m
= XEXP (u
, 0)) == MEM
&& GET_MODE (m
) == BLKmode
2031 && GET_CODE (XEXP (m
, 0)) == SCRATCH
)
2038 /* Remove register note NOTE from the REG_NOTES of INSN. */
2041 remove_note (insn
, note
)
2047 if (note
== NULL_RTX
)
2050 if (REG_NOTES (insn
) == note
)
2052 REG_NOTES (insn
) = XEXP (note
, 1);
2056 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
2057 if (XEXP (link
, 1) == note
)
2059 XEXP (link
, 1) = XEXP (note
, 1);
2066 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2067 return 1 if it is found. A simple equality test is used to determine if
2071 in_expr_list_p (listp
, node
)
2077 for (x
= listp
; x
; x
= XEXP (x
, 1))
2078 if (node
== XEXP (x
, 0))
2084 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2085 remove that entry from the list if it is found.
2087 A simple equality test is used to determine if NODE matches. */
2090 remove_node_from_expr_list (node
, listp
)
2095 rtx prev
= NULL_RTX
;
2099 if (node
== XEXP (temp
, 0))
2101 /* Splice the node out of the list. */
2103 XEXP (prev
, 1) = XEXP (temp
, 1);
2105 *listp
= XEXP (temp
, 1);
2111 temp
= XEXP (temp
, 1);
2115 /* Nonzero if X contains any volatile instructions. These are instructions
2116 which may cause unpredictable machine state instructions, and thus no
2117 instructions should be moved or combined across them. This includes
2118 only volatile asms and UNSPEC_VOLATILE instructions. */
2126 code
= GET_CODE (x
);
2147 case UNSPEC_VOLATILE
:
2148 /* case TRAP_IF: This isn't clear yet. */
2152 if (MEM_VOLATILE_P (x
))
2159 /* Recursively scan the operands of this expression. */
2162 const char *fmt
= GET_RTX_FORMAT (code
);
2165 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2169 if (volatile_insn_p (XEXP (x
, i
)))
2172 else if (fmt
[i
] == 'E')
2175 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2176 if (volatile_insn_p (XVECEXP (x
, i
, j
)))
2184 /* Nonzero if X contains any volatile memory references
2185 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2193 code
= GET_CODE (x
);
2213 case UNSPEC_VOLATILE
:
2214 /* case TRAP_IF: This isn't clear yet. */
2219 if (MEM_VOLATILE_P (x
))
2226 /* Recursively scan the operands of this expression. */
2229 const char *fmt
= GET_RTX_FORMAT (code
);
2232 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2236 if (volatile_refs_p (XEXP (x
, i
)))
2239 else if (fmt
[i
] == 'E')
2242 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2243 if (volatile_refs_p (XVECEXP (x
, i
, j
)))
2251 /* Similar to above, except that it also rejects register pre- and post-
2260 code
= GET_CODE (x
);
2279 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2280 when some combination can't be done. If we see one, don't think
2281 that we can simplify the expression. */
2282 return (GET_MODE (x
) != VOIDmode
);
2291 case UNSPEC_VOLATILE
:
2292 /* case TRAP_IF: This isn't clear yet. */
2297 if (MEM_VOLATILE_P (x
))
2304 /* Recursively scan the operands of this expression. */
2307 const char *fmt
= GET_RTX_FORMAT (code
);
2310 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2314 if (side_effects_p (XEXP (x
, i
)))
2317 else if (fmt
[i
] == 'E')
2320 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2321 if (side_effects_p (XVECEXP (x
, i
, j
)))
2329 /* Return nonzero if evaluating rtx X might cause a trap. */
2341 code
= GET_CODE (x
);
2344 /* Handle these cases quickly. */
2358 case UNSPEC_VOLATILE
:
2363 return MEM_VOLATILE_P (x
);
2365 /* Memory ref can trap unless it's a static var or a stack slot. */
2367 return rtx_addr_can_trap_p (XEXP (x
, 0));
2369 /* Division by a non-constant might trap. */
2374 if (! CONSTANT_P (XEXP (x
, 1))
2375 || (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2376 && flag_trapping_math
))
2378 /* This was const0_rtx, but by not using that,
2379 we can link this file into other programs. */
2380 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
&& INTVAL (XEXP (x
, 1)) == 0)
2385 /* An EXPR_LIST is used to represent a function call. This
2386 certainly may trap. */
2394 /* Some floating point comparisons may trap. */
2395 if (!flag_trapping_math
)
2397 /* ??? There is no machine independent way to check for tests that trap
2398 when COMPARE is used, though many targets do make this distinction.
2399 For instance, sparc uses CCFPE for compares which generate exceptions
2400 and CCFP for compares which do not generate exceptions. */
2401 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
)
2403 /* But often the compare has some CC mode, so check operand
2405 if (GET_MODE_CLASS (GET_MODE (XEXP (x
, 0))) == MODE_FLOAT
2406 || GET_MODE_CLASS (GET_MODE (XEXP (x
, 1))) == MODE_FLOAT
)
2412 /* These operations don't trap even with floating point. */
2416 /* Any floating arithmetic may trap. */
2417 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2418 && flag_trapping_math
)
2422 fmt
= GET_RTX_FORMAT (code
);
2423 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2427 if (may_trap_p (XEXP (x
, i
)))
2430 else if (fmt
[i
] == 'E')
2433 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2434 if (may_trap_p (XVECEXP (x
, i
, j
)))
2441 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2442 i.e., an inequality. */
2445 inequality_comparisons_p (x
)
2450 enum rtx_code code
= GET_CODE (x
);
2480 len
= GET_RTX_LENGTH (code
);
2481 fmt
= GET_RTX_FORMAT (code
);
2483 for (i
= 0; i
< len
; i
++)
2487 if (inequality_comparisons_p (XEXP (x
, i
)))
2490 else if (fmt
[i
] == 'E')
2493 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2494 if (inequality_comparisons_p (XVECEXP (x
, i
, j
)))
2502 /* Replace any occurrence of FROM in X with TO. The function does
2503 not enter into CONST_DOUBLE for the replace.
2505 Note that copying is not done so X must not be shared unless all copies
2506 are to be modified. */
2509 replace_rtx (x
, from
, to
)
2515 /* The following prevents loops occurrence when we change MEM in
2516 CONST_DOUBLE onto the same CONST_DOUBLE. */
2517 if (x
!= 0 && GET_CODE (x
) == CONST_DOUBLE
)
2523 /* Allow this function to make replacements in EXPR_LISTs. */
2527 if (GET_CODE (x
) == SUBREG
)
2529 rtx
new = replace_rtx (SUBREG_REG (x
), from
, to
);
2531 if (GET_CODE (new) == CONST_INT
)
2533 x
= simplify_subreg (GET_MODE (x
), new,
2534 GET_MODE (SUBREG_REG (x
)),
2540 SUBREG_REG (x
) = new;
2544 else if (GET_CODE (x
) == ZERO_EXTEND
)
2546 rtx
new = replace_rtx (XEXP (x
, 0), from
, to
);
2548 if (GET_CODE (new) == CONST_INT
)
2550 x
= simplify_unary_operation (ZERO_EXTEND
, GET_MODE (x
),
2551 new, GET_MODE (XEXP (x
, 0)));
2561 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2562 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2565 XEXP (x
, i
) = replace_rtx (XEXP (x
, i
), from
, to
);
2566 else if (fmt
[i
] == 'E')
2567 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2568 XVECEXP (x
, i
, j
) = replace_rtx (XVECEXP (x
, i
, j
), from
, to
);
2574 /* Throughout the rtx X, replace many registers according to REG_MAP.
2575 Return the replacement for X (which may be X with altered contents).
2576 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2577 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2579 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2580 should not be mapped to pseudos or vice versa since validate_change
2583 If REPLACE_DEST is 1, replacements are also done in destinations;
2584 otherwise, only sources are replaced. */
2587 replace_regs (x
, reg_map
, nregs
, replace_dest
)
2600 code
= GET_CODE (x
);
2615 /* Verify that the register has an entry before trying to access it. */
2616 if (REGNO (x
) < nregs
&& reg_map
[REGNO (x
)] != 0)
2618 /* SUBREGs can't be shared. Always return a copy to ensure that if
2619 this replacement occurs more than once then each instance will
2620 get distinct rtx. */
2621 if (GET_CODE (reg_map
[REGNO (x
)]) == SUBREG
)
2622 return copy_rtx (reg_map
[REGNO (x
)]);
2623 return reg_map
[REGNO (x
)];
2628 /* Prevent making nested SUBREGs. */
2629 if (GET_CODE (SUBREG_REG (x
)) == REG
&& REGNO (SUBREG_REG (x
)) < nregs
2630 && reg_map
[REGNO (SUBREG_REG (x
))] != 0
2631 && GET_CODE (reg_map
[REGNO (SUBREG_REG (x
))]) == SUBREG
)
2633 rtx map_val
= reg_map
[REGNO (SUBREG_REG (x
))];
2634 return simplify_gen_subreg (GET_MODE (x
), map_val
,
2635 GET_MODE (SUBREG_REG (x
)),
2642 SET_DEST (x
) = replace_regs (SET_DEST (x
), reg_map
, nregs
, 0);
2644 else if (GET_CODE (SET_DEST (x
)) == MEM
2645 || GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2646 /* Even if we are not to replace destinations, replace register if it
2647 is CONTAINED in destination (destination is memory or
2648 STRICT_LOW_PART). */
2649 XEXP (SET_DEST (x
), 0) = replace_regs (XEXP (SET_DEST (x
), 0),
2651 else if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
2652 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2655 SET_SRC (x
) = replace_regs (SET_SRC (x
), reg_map
, nregs
, 0);
2662 fmt
= GET_RTX_FORMAT (code
);
2663 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2666 XEXP (x
, i
) = replace_regs (XEXP (x
, i
), reg_map
, nregs
, replace_dest
);
2667 else if (fmt
[i
] == 'E')
2670 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2671 XVECEXP (x
, i
, j
) = replace_regs (XVECEXP (x
, i
, j
), reg_map
,
2672 nregs
, replace_dest
);
2678 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2679 constant that is not in the constant pool and not in the condition
2680 of an IF_THEN_ELSE. */
2683 computed_jump_p_1 (x
)
2686 enum rtx_code code
= GET_CODE (x
);
2705 return ! (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
2706 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)));
2709 return (computed_jump_p_1 (XEXP (x
, 1))
2710 || computed_jump_p_1 (XEXP (x
, 2)));
2716 fmt
= GET_RTX_FORMAT (code
);
2717 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2720 && computed_jump_p_1 (XEXP (x
, i
)))
2723 else if (fmt
[i
] == 'E')
2724 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2725 if (computed_jump_p_1 (XVECEXP (x
, i
, j
)))
2732 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2734 Tablejumps and casesi insns are not considered indirect jumps;
2735 we can recognize them by a (use (label_ref)). */
2738 computed_jump_p (insn
)
2742 if (GET_CODE (insn
) == JUMP_INSN
)
2744 rtx pat
= PATTERN (insn
);
2746 if (find_reg_note (insn
, REG_LABEL
, NULL_RTX
))
2748 else if (GET_CODE (pat
) == PARALLEL
)
2750 int len
= XVECLEN (pat
, 0);
2751 int has_use_labelref
= 0;
2753 for (i
= len
- 1; i
>= 0; i
--)
2754 if (GET_CODE (XVECEXP (pat
, 0, i
)) == USE
2755 && (GET_CODE (XEXP (XVECEXP (pat
, 0, i
), 0))
2757 has_use_labelref
= 1;
2759 if (! has_use_labelref
)
2760 for (i
= len
- 1; i
>= 0; i
--)
2761 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
2762 && SET_DEST (XVECEXP (pat
, 0, i
)) == pc_rtx
2763 && computed_jump_p_1 (SET_SRC (XVECEXP (pat
, 0, i
))))
2766 else if (GET_CODE (pat
) == SET
2767 && SET_DEST (pat
) == pc_rtx
2768 && computed_jump_p_1 (SET_SRC (pat
)))
2774 /* Traverse X via depth-first search, calling F for each
2775 sub-expression (including X itself). F is also passed the DATA.
2776 If F returns -1, do not traverse sub-expressions, but continue
2777 traversing the rest of the tree. If F ever returns any other
2778 non-zero value, stop the traversal, and return the value returned
2779 by F. Otherwise, return 0. This function does not traverse inside
2780 tree structure that contains RTX_EXPRs, or into sub-expressions
2781 whose format code is `0' since it is not known whether or not those
2782 codes are actually RTL.
2784 This routine is very general, and could (should?) be used to
2785 implement many of the other routines in this file. */
2788 for_each_rtx (x
, f
, data
)
2799 result
= (*f
) (x
, data
);
2801 /* Do not traverse sub-expressions. */
2803 else if (result
!= 0)
2804 /* Stop the traversal. */
2808 /* There are no sub-expressions. */
2811 length
= GET_RTX_LENGTH (GET_CODE (*x
));
2812 format
= GET_RTX_FORMAT (GET_CODE (*x
));
2814 for (i
= 0; i
< length
; ++i
)
2819 result
= for_each_rtx (&XEXP (*x
, i
), f
, data
);
2826 if (XVEC (*x
, i
) != 0)
2829 for (j
= 0; j
< XVECLEN (*x
, i
); ++j
)
2831 result
= for_each_rtx (&XVECEXP (*x
, i
, j
), f
, data
);
2839 /* Nothing to do. */
2848 /* Searches X for any reference to REGNO, returning the rtx of the
2849 reference found if any. Otherwise, returns NULL_RTX. */
2852 regno_use_in (regno
, x
)
2860 if (GET_CODE (x
) == REG
&& REGNO (x
) == regno
)
2863 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2864 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2868 if ((tem
= regno_use_in (regno
, XEXP (x
, i
))))
2871 else if (fmt
[i
] == 'E')
2872 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2873 if ((tem
= regno_use_in (regno
, XVECEXP (x
, i
, j
))))
2880 /* Return a value indicating whether OP, an operand of a commutative
2881 operation, is preferred as the first or second operand. The higher
2882 the value, the stronger the preference for being the first operand.
2883 We use negative values to indicate a preference for the first operand
2884 and positive values for the second operand. */
2887 commutative_operand_precedence (op
)
2890 /* Constants always come the second operand. Prefer "nice" constants. */
2891 if (GET_CODE (op
) == CONST_INT
)
2893 if (GET_CODE (op
) == CONST_DOUBLE
)
2895 if (CONSTANT_P (op
))
2898 /* SUBREGs of objects should come second. */
2899 if (GET_CODE (op
) == SUBREG
2900 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op
))) == 'o')
2903 /* If only one operand is a `neg', `not',
2904 `mult', `plus', or `minus' expression, it will be the first
2906 if (GET_CODE (op
) == NEG
|| GET_CODE (op
) == NOT
2907 || GET_CODE (op
) == MULT
|| GET_CODE (op
) == PLUS
2908 || GET_CODE (op
) == MINUS
)
2911 /* Complex expressions should be the first, so decrease priority
2913 if (GET_RTX_CLASS (GET_CODE (op
)) == 'o')
2918 /* Return 1 iff it is necessary to swap operands of commutative operation
2919 in order to canonicalize expression. */
2922 swap_commutative_operands_p (x
, y
)
2925 return (commutative_operand_precedence (x
)
2926 < commutative_operand_precedence (y
));
2929 /* Return 1 if X is an autoincrement side effect and the register is
2930 not the stack pointer. */
2935 switch (GET_CODE (x
))
2943 /* There are no REG_INC notes for SP. */
2944 if (XEXP (x
, 0) != stack_pointer_rtx
)
2952 /* Return 1 if the sequence of instructions beginning with FROM and up
2953 to and including TO is safe to move. If NEW_TO is non-NULL, and
2954 the sequence is not already safe to move, but can be easily
2955 extended to a sequence which is safe, then NEW_TO will point to the
2956 end of the extended sequence.
2958 For now, this function only checks that the region contains whole
2959 exception regions, but it could be extended to check additional
2960 conditions as well. */
2963 insns_safe_to_move_p (from
, to
, new_to
)
2968 int eh_region_count
= 0;
2972 /* By default, assume the end of the region will be what was
2979 if (GET_CODE (r
) == NOTE
)
2981 switch (NOTE_LINE_NUMBER (r
))
2983 case NOTE_INSN_EH_REGION_BEG
:
2987 case NOTE_INSN_EH_REGION_END
:
2988 if (eh_region_count
== 0)
2989 /* This sequence of instructions contains the end of
2990 an exception region, but not he beginning. Moving
2991 it will cause chaos. */
3002 /* If we've passed TO, and we see a non-note instruction, we
3003 can't extend the sequence to a movable sequence. */
3009 /* It's OK to move the sequence if there were matched sets of
3010 exception region notes. */
3011 return eh_region_count
== 0;
3016 /* It's OK to move the sequence if there were matched sets of
3017 exception region notes. */
3018 if (past_to_p
&& eh_region_count
== 0)
3024 /* Go to the next instruction. */
3031 /* Return non-zero if IN contains a piece of rtl that has the address LOC */
3033 loc_mentioned_in_p (loc
, in
)
3036 enum rtx_code code
= GET_CODE (in
);
3037 const char *fmt
= GET_RTX_FORMAT (code
);
3040 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3042 if (loc
== &in
->fld
[i
].rtx
)
3046 if (loc_mentioned_in_p (loc
, XEXP (in
, i
)))
3049 else if (fmt
[i
] == 'E')
3050 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
3051 if (loc_mentioned_in_p (loc
, XVECEXP (in
, i
, j
)))
3057 /* Given a subreg X, return the bit offset where the subreg begins
3058 (counting from the least significant bit of the reg). */
3064 enum machine_mode inner_mode
= GET_MODE (SUBREG_REG (x
));
3065 enum machine_mode mode
= GET_MODE (x
);
3066 unsigned int bitpos
;
3070 /* A paradoxical subreg begins at bit position 0. */
3071 if (GET_MODE_BITSIZE (mode
) > GET_MODE_BITSIZE (inner_mode
))
3074 if (WORDS_BIG_ENDIAN
!= BYTES_BIG_ENDIAN
)
3075 /* If the subreg crosses a word boundary ensure that
3076 it also begins and ends on a word boundary. */
3077 if ((SUBREG_BYTE (x
) % UNITS_PER_WORD
3078 + GET_MODE_SIZE (mode
)) > UNITS_PER_WORD
3079 && (SUBREG_BYTE (x
) % UNITS_PER_WORD
3080 || GET_MODE_SIZE (mode
) % UNITS_PER_WORD
))
3083 if (WORDS_BIG_ENDIAN
)
3084 word
= (GET_MODE_SIZE (inner_mode
)
3085 - (SUBREG_BYTE (x
) + GET_MODE_SIZE (mode
))) / UNITS_PER_WORD
;
3087 word
= SUBREG_BYTE (x
) / UNITS_PER_WORD
;
3088 bitpos
= word
* BITS_PER_WORD
;
3090 if (BYTES_BIG_ENDIAN
)
3091 byte
= (GET_MODE_SIZE (inner_mode
)
3092 - (SUBREG_BYTE (x
) + GET_MODE_SIZE (mode
))) % UNITS_PER_WORD
;
3094 byte
= SUBREG_BYTE (x
) % UNITS_PER_WORD
;
3095 bitpos
+= byte
* BITS_PER_UNIT
;
3100 /* This function returns the regno offset of a subreg expression.
3101 xregno - A regno of an inner hard subreg_reg (or what will become one).
3102 xmode - The mode of xregno.
3103 offset - The byte offset.
3104 ymode - The mode of a top level SUBREG (or what may become one).
3105 RETURN - The regno offset which would be used. */
3107 subreg_regno_offset (xregno
, xmode
, offset
, ymode
)
3108 unsigned int xregno
;
3109 enum machine_mode xmode
;
3110 unsigned int offset
;
3111 enum machine_mode ymode
;
3113 int nregs_xmode
, nregs_ymode
;
3114 int mode_multiple
, nregs_multiple
;
3117 if (xregno
>= FIRST_PSEUDO_REGISTER
)
3120 nregs_xmode
= HARD_REGNO_NREGS (xregno
, xmode
);
3121 nregs_ymode
= HARD_REGNO_NREGS (xregno
, ymode
);
3122 if (offset
== 0 || nregs_xmode
== nregs_ymode
)
3125 /* size of ymode must not be greater than the size of xmode. */
3126 mode_multiple
= GET_MODE_SIZE (xmode
) / GET_MODE_SIZE (ymode
);
3127 if (mode_multiple
== 0)
3130 y_offset
= offset
/ GET_MODE_SIZE (ymode
);
3131 nregs_multiple
= nregs_xmode
/ nregs_ymode
;
3132 return (y_offset
/ (mode_multiple
/ nregs_multiple
)) * nregs_ymode
;
3135 /* Return the final regno that a subreg expression refers to. */
3141 rtx subreg
= SUBREG_REG (x
);
3142 int regno
= REGNO (subreg
);
3144 ret
= regno
+ subreg_regno_offset (regno
,
3151 struct parms_set_data
3157 /* Helper function for noticing stores to parameter registers. */
3159 parms_set (x
, pat
, data
)
3160 rtx x
, pat ATTRIBUTE_UNUSED
;
3163 struct parms_set_data
*d
= data
;
3164 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
3165 && TEST_HARD_REG_BIT (d
->regs
, REGNO (x
)))
3167 CLEAR_HARD_REG_BIT (d
->regs
, REGNO (x
));
3172 /* Look backward for first parameter to be loaded.
3173 Do not skip BOUNDARY. */
3175 find_first_parameter_load (call_insn
, boundary
)
3176 rtx call_insn
, boundary
;
3178 struct parms_set_data parm
;
3181 /* Since different machines initialize their parameter registers
3182 in different orders, assume nothing. Collect the set of all
3183 parameter registers. */
3184 CLEAR_HARD_REG_SET (parm
.regs
);
3186 for (p
= CALL_INSN_FUNCTION_USAGE (call_insn
); p
; p
= XEXP (p
, 1))
3187 if (GET_CODE (XEXP (p
, 0)) == USE
3188 && GET_CODE (XEXP (XEXP (p
, 0), 0)) == REG
)
3190 if (REGNO (XEXP (XEXP (p
, 0), 0)) >= FIRST_PSEUDO_REGISTER
)
3193 /* We only care about registers which can hold function
3195 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p
, 0), 0))))
3198 SET_HARD_REG_BIT (parm
.regs
, REGNO (XEXP (XEXP (p
, 0), 0)));
3203 /* Search backward for the first set of a register in this set. */
3204 while (parm
.nregs
&& before
!= boundary
)
3206 before
= PREV_INSN (before
);
3208 /* It is possible that some loads got CSEed from one call to
3209 another. Stop in that case. */
3210 if (GET_CODE (before
) == CALL_INSN
)
3213 /* Our caller needs either ensure that we will find all sets
3214 (in case code has not been optimized yet), or take care
3215 for possible labels in a way by setting boundary to preceding
3217 if (GET_CODE (before
) == CODE_LABEL
)
3219 if (before
!= boundary
)
3224 if (INSN_P (before
))
3225 note_stores (PATTERN (before
), parms_set
, &parm
);
3230 /* Return true if we should avoid inserting code between INSN and preceeding
3231 call instruction. */
3234 keep_with_call_p (insn
)
3239 if (INSN_P (insn
) && (set
= single_set (insn
)) != NULL
)
3241 if (GET_CODE (SET_DEST (set
)) == REG
3242 && fixed_regs
[REGNO (SET_DEST (set
))]
3243 && general_operand (SET_SRC (set
), VOIDmode
))
3245 if (GET_CODE (SET_SRC (set
)) == REG
3246 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set
)))
3247 && GET_CODE (SET_DEST (set
)) == REG
3248 && REGNO (SET_DEST (set
)) >= FIRST_PSEUDO_REGISTER
)
3250 /* There may be a stack pop just after the call and before the store
3251 of the return register. Search for the actual store when deciding
3252 if we can break or not. */
3253 if (SET_DEST (set
) == stack_pointer_rtx
)
3255 rtx i2
= next_nonnote_insn (insn
);
3256 if (i2
&& keep_with_call_p (i2
))
3263 /* Return true when store to register X can be hoisted to the place
3264 with LIVE registers (can be NULL). Value VAL contains destination
3265 whose value will be used. */
3268 hoist_test_store (x
, val
, live
)
3272 if (GET_CODE (x
) == SCRATCH
)
3275 if (rtx_equal_p (x
, val
))
3278 /* Allow subreg of X in case it is not writting just part of multireg pseudo.
3279 Then we would need to update all users to care hoisting the store too.
3280 Caller may represent that by specifying whole subreg as val. */
3282 if (GET_CODE (x
) == SUBREG
&& rtx_equal_p (SUBREG_REG (x
), val
))
3284 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))) > UNITS_PER_WORD
3285 && GET_MODE_BITSIZE (GET_MODE (x
)) <
3286 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))))
3290 if (GET_CODE (x
) == SUBREG
)
3293 /* Anything except register store is not hoistable. This includes the
3294 partial stores to registers. */
3299 /* Pseudo registers can be allways replaced by another pseudo to avoid
3300 the side effect, for hard register we must ensure that they are dead.
3301 Eventually we may want to add code to try turn pseudos to hards, but it
3302 is unlikely usefull. */
3304 if (REGNO (x
) < FIRST_PSEUDO_REGISTER
)
3306 int regno
= REGNO (x
);
3307 int n
= HARD_REGNO_NREGS (regno
, GET_MODE (x
));
3311 if (REGNO_REG_SET_P (live
, regno
))
3314 if (REGNO_REG_SET_P (live
, regno
+ n
))
3321 /* Return true if INSN can be hoisted to place with LIVE hard registers
3322 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3323 and used by the hoisting pass. */
/* NOTE(review): this extract appears to be missing several source lines
   (returns, braces, case labels of the switches); comments below describe
   only the visible checks.  Every store found in the pattern is vetted
   through hoist_test_store.  */
3326 can_hoist_insn_p (insn
, val
, live
)
3330 rtx pat
= PATTERN (insn
);
3333 /* It is probably not worth the complexity to handle multiple-set insns.  */
3335 if (!single_set (insn
))
3337 /* We can move a CALL_INSN, but we would need to check that all caller-clobbered
   registers are dead at the target -- presumably rejected here; TODO confirm.  */
3339 if (GET_CODE (insn
) == CALL_INSN
)
3341 /* In the future we may handle hoisting of libcall sequences, but
   for now insns carrying a REG_RETVAL note are not handled.  */
3343 if (find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
/* Dispatch on the top-level pattern: SET, CLOBBER, USE, PARALLEL...  */
3345 switch (GET_CODE (pat
))
/* Plain SET: the destination must be a hoistable store.  */
3348 if (!hoist_test_store (SET_DEST (pat
), val
, live
))
3352 /* USEs have sick semantics, so do not move them. */
/* CLOBBER (presumably): its operand is vetted like a store.  */
3356 if (!hoist_test_store (XEXP (pat
, 0), val
, live
))
/* PARALLEL: vet each element individually.  */
3360 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
3362 rtx x
= XVECEXP (pat
, 0, i
);
3363 switch (GET_CODE (x
))
3366 if (!hoist_test_store (SET_DEST (x
), val
, live
))
3370 /* We need to fix callers to really ensure availability
3371 of all values the insn uses, but for now it is safe to prohibit
3372 hoisting of any insn having such hidden uses. */
3376 if (!hoist_test_store (SET_DEST (x
), val
, live
))
3390 /* Update stores after hoisting: replace each store to a pseudo register
3391 with a store to a fresh pseudo so that no existing value is clobbered,
3392 except that the store to VAL is redirected to NEW. */
/* NOTE(review): this extract appears to be missing several source lines
   (returns, braces, the binding of X from *XP); comments below describe
   only the visible logic.  All replacements are queued with
   validate_change (in_group = 1); the caller presumably runs
   apply_change_group -- TODO confirm.  */
3395 hoist_update_store (insn
, xp
, val
, new)
3396 rtx insn
, *xp
, val
, new;
/* SCRATCH destinations need no replacement.  */
3400 if (GET_CODE (x
) == SCRATCH
)
/* A SUBREG of VAL is rebuilt as the corresponding SUBREG of NEW.  */
3403 if (GET_CODE (x
) == SUBREG
&& SUBREG_REG (x
) == val
)
3404 validate_change (insn
, xp
,
3405 simplify_gen_subreg (GET_MODE (x
), new, GET_MODE (new),
3406 SUBREG_BYTE (x
)), 1);
/* A store to VAL itself is redirected to NEW.  */
3407 if (rtx_equal_p (x
, val
))
3409 validate_change (insn
, xp
, new, 1);
/* For any other SUBREG, operate on the inner register.  */
3412 if (GET_CODE (x
) == SUBREG
)
3414 xp
= &SUBREG_REG (x
);
3421 /* We've verified that hard registers are dead, so we may keep the side
3422 effect. Otherwise replace it by a new pseudo. */
3423 if (REGNO (x
) >= FIRST_PSEUDO_REGISTER
)
3424 validate_change (insn
, xp
, gen_reg_rtx (GET_MODE (x
)), 1);
/* Mark the replacement destination unused via a REG_UNUSED note.  */
3426 = alloc_EXPR_LIST (REG_UNUSED
, *xp
, REG_NOTES (insn
));
3429 /* Create a copy of INSN after AFTER, replacing the store to VAL with a store
3430 to NEW and every other side effect on a pseudo register with a fresh pseudo. */
/* NOTE(review): this extract appears to be missing several source lines
   (braces, case labels, the default case, the return of the new insn);
   comments below describe only the visible logic.  */
3433 hoist_insn_after (insn
, after
, val
, new)
3434 rtx insn
, after
, val
, new;
/* Emit the copy and work on its pattern from here on.  */
3440 insn
= emit_copy_of_insn_after (insn
, after
);
3441 pat
= PATTERN (insn
);
3443 /* Remove REG_UNUSED notes as we will re-emit them. */
3444 while ((note
= find_reg_note (insn
, REG_UNUSED
, NULL_RTX
)))
3445 remove_note (insn
, note
);
3447 /* To get this working, callers must ensure that everything referenced
3448 by REG_EQUAL/REG_EQUIV notes is moved too. Let's remove them; it is probably
   safest to do so -- NOTE(review): rest of this comment is missing from the extract. */
3450 while ((note
= find_reg_note (insn
, REG_EQUAL
, NULL_RTX
)))
3451 remove_note (insn
, note
);
3452 while ((note
= find_reg_note (insn
, REG_EQUIV
, NULL_RTX
)))
3453 remove_note (insn
, note
);
3455 /* Remove REG_DEAD notes as they might not be valid anymore in case
3456 we create redundancy. */
3457 while ((note
= find_reg_note (insn
, REG_DEAD
, NULL_RTX
)))
3458 remove_note (insn
, note
);
/* Rewrite every store in the pattern via hoist_update_store, mirroring
   the pattern walk done by can_hoist_insn_p.  */
3459 switch (GET_CODE (pat
))
3462 hoist_update_store (insn
, &SET_DEST (pat
), val
, new);
3467 hoist_update_store (insn
, &XEXP (pat
, 0), val
, new);
3470 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
3472 rtx x
= XVECEXP (pat
, 0, i
);
3473 switch (GET_CODE (x
))
3476 hoist_update_store (insn
, &SET_DEST (x
), val
, new);
3481 hoist_update_store (insn
, &SET_DEST (x
), val
, new);
/* Commit all queued validate_change replacements at once; failure here
   means a replacement was invalid.  */
3491 if (!apply_change_group ())
3498 hoist_insn_to_edge (insn
, e
, val
, new)
3504 /* We cannot insert instructions on an abnormal critical edge.
3505 It will be easier to find the culprit if we die now. */
3506 if ((e
->flags
& EDGE_ABNORMAL
) && EDGE_CRITICAL_P (e
))
3509 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3510 stuff. We also emit CALL_INSNs and friends. */
3511 if (e
->insns
== NULL_RTX
)
3514 emit_note (NULL
, NOTE_INSN_DELETED
);
3517 push_to_sequence (e
->insns
);
3519 new_insn
= hoist_insn_after (insn
, get_last_insn (), val
, new);
3521 e
->insns
= get_insns ();