/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode, rtx,
                                                   enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode, unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                           enum machine_mode, unsigned int);
/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);

      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_unstable_p (XEXP (x, i)))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudo register allocation.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */

      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

      return SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* An address is assumed not to trap if it is an address that can't
         trap plus a constant integer or it is the pic register plus a
         constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
                 && GET_CODE (XEXP (x, 1)) == CONST_INT)
                || (XEXP (x, 0) == pic_offset_table_rtx
                    && CONSTANT_P (XEXP (x, 1))));

      return rtx_addr_can_trap_p (XEXP (x, 1));

      return rtx_addr_can_trap_p (XEXP (x, 0));

  /* If it isn't one of the cases above, it can cause a trap.  */
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

      return !SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return nonzero_address_p (XEXP (x, 0));

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))

          return nonzero_address_p (XEXP (x, 0));

      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))

      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
      return nonzero_address_p (XEXP (x, 0));

      /* Similarly.  Further, the offset is always positive.  */
      return nonzero_address_p (XEXP (x, 0));

      return nonzero_address_p (XEXP (x, 1));

  /* If it isn't one of the cases above, might be zero.  */
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
      return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_addr_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)

  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  switch (GET_CODE (x))
      if (REG_P (SUBREG_REG (x)))
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])

      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])

      /* A non-constant call might use a global register.  */
/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
      if (! CONST_OR_PURE_CALL_P (x))
        x = CALL_INSN_FUNCTION_USAGE (x);

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  const char *format_ptr;

      if (MEM_P (find) && rtx_equal_p (x, find))

      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
      switch (*format_ptr++)
          count += count_occurrences (XEXP (x, i), find, count_dest);

          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

      /* Compare registers by number.  */
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */

      /* These are kept unique for a given value.  */

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
          if (reg_mentioned_p (reg, XVECEXP (in, i, j)))

      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
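
/* Illustrative usage sketch (not part of the original file): a typical
   caller asks whether a register is mentioned anywhere in an insn's
   pattern before deciding to reuse or move it, e.g.

     if (!reg_mentioned_p (reg, PATTERN (insn)))
       ... the insn does not mention REG at all ...

   REG and INSN here are hypothetical locals standing for any register
   rtx and insn.  */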
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            && (find_reg_fusage (insn, USE, reg)
                || find_reg_fusage (insn, CLOBBER, reg)))))
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  switch (GET_CODE (body))
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))

      return reg_overlap_mentioned_p (x, body);

      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))

      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))

      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
      return reg_referenced_p (x, COND_EXEC_CODE (body));
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
/* Internals of reg_set_between_p.  */

int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
        && (FIND_REG_INC_NOTE (insn, reg)
                && REGNO (reg) < FIRST_PSEUDO_REGISTER
                && TEST_HARD_REG_BIT (regs_invalidated_by_call,
            || find_reg_fusage (insn, CLOBBER, reg)))))

  return set_of (reg, insn) != NULL_RTX;
}
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);

      if (modified_between_p (XEXP (x, 0), start, end))
      if (MEM_READONLY_P (x))
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))

      return reg_set_between_p (x, start, end);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);

      if (modified_in_p (XEXP (x, 0), insn))
      if (MEM_READONLY_P (x))
      if (memory_modified_in_insn_p (x, insn))

      return reg_set_p (x, insn);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
/* Helper function for set_of.  */

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */

rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  int set_verified = 1;

  if (GET_CODE (pat) == PARALLEL)
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))

              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect it is
                 the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */

                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))

                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  /* INSN must be an insn.  */
  if (! INSN_P (insn))

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
            /* If we have already found a SET, then return now.  */

  /* Either zero or one SET.  */
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))

  if (GET_CODE (pat) == SET && set_noop_p (pat))

  if (GET_CODE (pat) == PARALLEL)
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
      rtx set = single_set (p);
      rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

      if (set && rtx_equal_p (x, SET_DEST (set)))
          rtx src = SET_SRC (set);

          if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
            src = XEXP (note, 0);

          if ((valid_to == NULL_RTX
               || ! modified_between_p (src, PREV_INSN (p), valid_to))
              /* Reject hard registers because we don't usually want
                 to use them; we'd rather use a pseudo.  */
                    && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))

      /* If set in non-simple way, we don't have a value.  */
      if (reg_set_p (x, p))
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  unsigned int x_regno;

  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */

  code = GET_CODE (x);

      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;

      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))

      if (code == CLOBBER || loc == &SET_SRC (x))

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
          if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))

      else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case.)  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))

  switch (GET_CODE (x))
    case STRICT_LOW_PART:
      /* Overly conservative.  */

      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);

      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx *) 0);

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))

      return reg_mentioned_p (x, in);

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))

      gcc_assert (CONSTANT_P (x));
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);

        (*fun) (dest, x, data);

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
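
/* Illustrative sketch (not part of the original file): a minimal
   note_stores callback, in the same style as set_of_1 above.  The
   structure and helper names are hypothetical; only the callback
   signature and the REG_P/REGNO macros come from the real API.  */

struct watch_store_data
{
  rtx watched;		/* Register we are interested in.  */
  int stored;		/* Set to 1 if it is stored into or clobbered.  */
};

static void
watch_store_1 (rtx dest, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  struct watch_store_data *d = (struct watch_store_data *) data;
  if (REG_P (dest) && REGNO (dest) == REGNO (d->watched))
    d->stored = 1;
}

/* Typical call, given an insn:
     struct watch_store_data d = { reg, 0 };
     note_stores (PATTERN (insn), watch_store_1, &d);  */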
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  switch (GET_CODE (body))
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);

      (*fun) (&XEXP (body, 0), data);

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);

      (*fun) (&TRAP_CONDITION (body), data);

      (*fun) (&XEXP (body, 0), data);

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);

      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);

        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

          (*fun) (&XEXP (dest, 0), data);

      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)

  gcc_assert (REG_P (x));

  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  regno = REGNO (dest);
  endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
              : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
  return (test_regno >= regno && test_regno < endregno);
}
/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))

    return covers_regno_no_parallel_p (dest, test_regno);
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))

      && find_regno_fusage (insn, CLOBBER, test_regno))

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
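
/* Illustrative usage sketch (not part of the original file), mirroring
   how find_last_value above uses this function: fetch the REG_EQUAL
   note of an insn, if any, and look at the equivalent value.

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     if (note)
       equivalent = XEXP (note, 0);

   INSN and EQUIVALENT are hypothetical locals.  */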
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
        if (single_set (insn) == 0)
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))

      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      unsigned int regnote;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  if (note == NULL_RTX)

  if (REG_NOTES (insn) == note)
      REG_NOTES (insn) = XEXP (note, 1);

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
        XEXP (link, 1) = XEXP (note, 1);
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx prev = NULL_RTX;

      if (node == XEXP (temp, 0))
          /* Splice the node out of the list.  */
            XEXP (prev, 1) = XEXP (temp, 1);
            *listp = XEXP (temp, 1);

      temp = XEXP (temp, 1);
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  code = GET_CODE (x);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */
      const char *fmt = GET_RTX_FORMAT (code);

      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
            if (volatile_insn_p (XEXP (x, i)))
          else if (fmt[i] == 'E')
              for (j = 0; j < XVECLEN (x, i); j++)
                if (volatile_insn_p (XVECEXP (x, i, j)))
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  code = GET_CODE (x);

    case UNSPEC_VOLATILE:

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */
      const char *fmt = GET_RTX_FORMAT (code);

      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
            if (volatile_refs_p (XEXP (x, i)))
          else if (fmt[i] == 'E')
              for (j = 0; j < XVECLEN (x, i); j++)
                if (volatile_refs_p (XVECEXP (x, i, j)))
/* Similar to above, except that it also rejects register pre- and
   post-incrementing.  */

int
side_effects_p (rtx x)
{
  code = GET_CODE (x);

      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */
      const char *fmt = GET_RTX_FORMAT (code);

      for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
            if (side_effects_p (XEXP (x, i)))
          else if (fmt[i] == 'E')
              for (j = 0; j < XVECLEN (x, i); j++)
                if (side_effects_p (XVECEXP (x, i, j)))
/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  code = GET_CODE (x);

      /* Handle these cases quickly.  */

    case UNSPEC_VOLATILE:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
      if (MEM_NOTRAP_P (x))
      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* Division by a non-constant might trap.  */
      if (HONOR_SNANS (GET_MODE (x)))
      if (! CONSTANT_P (XEXP (x, 1))
          || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
              && flag_trapping_math))
      if (XEXP (x, 1) == const0_rtx)

      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */

      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))

      if (HONOR_SNANS (GET_MODE (x)))
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))

      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))

      /* These operations don't trap even with floating point.  */

      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
          && flag_trapping_math)

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (may_trap_p (XEXP (x, i)))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (may_trap_p (XVECEXP (x, i, j)))
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
      if (inequality_comparisons_p (XEXP (x, i)))
    else if (fmt[i] == 'E')
      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
        if (inequality_comparisons_p (XVECEXP (x, i, j)))
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

rtx
replace_rtx (rtx x, rtx from, rtx to)
{
  /* The following prevents loops from occurring when we change MEM in
     CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)

  /* Allow this function to make replacements in EXPR_LISTs.  */

  if (GET_CODE (x) == SUBREG)
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));

      SUBREG_REG (x) = new;

  else if (GET_CODE (x) == ZERO_EXTEND)
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
/* Throughout the rtx X, replace many registers according to REG_MAP.
   Return the replacement for X (which may be X with altered contents).
   REG_MAP[R] is the replacement for register R, or 0 for don't replace.
   NREGS is the length of REG_MAP; regs >= NREGS are not mapped.

   We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
   should not be mapped to pseudos or vice versa since validate_change
   is not called.

   If REPLACE_DEST is 1, replacements are also done in destinations;
   otherwise, only sources are replaced.  */

rtx
replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
{
  code = GET_CODE (x);

      /* Verify that the register has an entry before trying to access it.  */
      if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
          /* SUBREGs can't be shared.  Always return a copy to ensure that if
             this replacement occurs more than once then each instance will
             get distinct rtx.  */
          if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
            return copy_rtx (reg_map[REGNO (x)]);
          return reg_map[REGNO (x)];

      /* Prevent making nested SUBREGs.  */
      if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
          && reg_map[REGNO (SUBREG_REG (x))] != 0
          && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
          rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
          return simplify_gen_subreg (GET_MODE (x), map_val,
                                      GET_MODE (SUBREG_REG (x)),
                                      SUBREG_BYTE (x));

        SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
      else if (MEM_P (SET_DEST (x))
               || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
        /* Even if we are not to replace destinations, replace register if it
           is CONTAINED in destination (destination is memory or
           STRICT_LOW_PART).  */
        XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
                                               reg_map, nregs, 0);
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
        /* Similarly, for ZERO_EXTRACT we replace all operands.  */

      SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
                                            nregs, replace_dest);
/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

int
replace_label (rtx *x, void *data)
{
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (GET_CODE (l) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (l))
      rtx c = get_pool_constant (l);
      if (rtx_referenced_p (old_label, c))
          replace_label_data *d = (replace_label_data *) data;

          /* Create a copy of constant C; replace the label inside
             but do not update LABEL_NUSES because uses in constant pool
             are not counted.  */
          new_c = copy_rtx (c);
          d->update_label_nuses = false;
          for_each_rtx (&new_c, replace_label, data);
          d->update_label_nuses = update_label_nuses;

          /* Add the new constant NEW_C to constant pool and replace
             the old reference to constant by new reference.  */
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
          *x = replace_rtx (l, l, new_l);

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
          ++LABEL_NUSES (new_label);
          --LABEL_NUSES (old_label);
/* When *BODY is equal to X or X is directly referenced by *BODY
   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */

static int
rtx_referenced_p_1 (rtx *body, void *x)
{
  if (*body == NULL_RTX)
    return y == NULL_RTX;

  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
    return XEXP (*body, 0) == y;

  /* If *BODY is a reference to pool constant traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));

  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);
}
/* Return true if X is referenced in BODY.  */

bool
rtx_referenced_p (rtx x, rtx body)
{
  return for_each_rtx (&body, rtx_referenced_p_1, x);
}
/* If INSN is a tablejump return true and store the label (before jump table) to
   *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */

bool
tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
{
      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

static int
computed_jump_p_1 (rtx x)
{
  enum rtx_code code = GET_CODE (x);

      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          && computed_jump_p_1 (XEXP (x, i)))
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        if (computed_jump_p_1 (XVECEXP (x, i, j)))
/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

int
computed_jump_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (find_reg_note (insn, REG_LABEL, NULL_RTX))

  else if (GET_CODE (pat) == PARALLEL)
      int len = XVECLEN (pat, 0);
      int has_use_labelref = 0;

      for (i = len - 1; i >= 0; i--)
        if (GET_CODE (XVECEXP (pat, 0, i)) == USE
            && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
          has_use_labelref = 1;

      if (! has_use_labelref)
        for (i = len - 1; i >= 0; i--)
          if (GET_CODE (XVECEXP (pat, 0, i)) == SET
              && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
              && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))

  else if (GET_CODE (pat) == SET
           && SET_DEST (pat) == pc_rtx
           && computed_jump_p_1 (SET_SRC (pat)))
/* Optimized loop of for_each_rtx, trying to avoid useless recursive
   calls.  Processes the subexpressions of EXP and passes them to F.  */

static int
for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
{
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));

  for (; format[n] != '\0'; n++)
          result = (*f) (x, data);
          /* Do not traverse sub-expressions.  */
          else if (result != 0)
            /* Stop the traversal.  */

          /* There are no sub-expressions.  */

          i = non_rtx_starting_operands[GET_CODE (*x)];
          result = for_each_rtx_1 (*x, i, f, data);

          if (XVEC (exp, n) == 0)
          for (j = 0; j < XVECLEN (exp, n); ++j)
              x = &XVECEXP (exp, n, j);
              result = (*f) (x, data);
              /* Do not traverse sub-expressions.  */
              else if (result != 0)
                /* Stop the traversal.  */

              /* There are no sub-expressions.  */

              i = non_rtx_starting_operands[GET_CODE (*x)];
              result = for_each_rtx_1 (*x, i, f, data);

        /* Nothing to do.  */
/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

int
for_each_rtx (rtx *x, rtx_function f, void *data)
{
  result = (*f) (x, data);
  /* Do not traverse sub-expressions.  */
  else if (result != 0)
    /* Stop the traversal.  */

  /* There are no sub-expressions.  */

  i = non_rtx_starting_operands[GET_CODE (*x)];

  return for_each_rtx_1 (*x, i, f, data);
}
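
/* Illustrative sketch (not part of the original file): a minimal
   for_each_rtx callback that counts MEM subexpressions.  The helper
   name is hypothetical; the return-value protocol (0 = keep going,
   -1 = skip subexpressions, other nonzero = stop) is the one documented
   above.  */

static int
count_mems_1 (rtx *x, void *data)
{
  int *count = (int *) data;
  if (*x != NULL_RTX && MEM_P (*x))
    (*count)++;
  return 0;	/* Continue traversing.  */
}

/* Typical call:
     int n = 0;
     for_each_rtx (&x, count_mems_1, &n);  */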
/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

rtx
regno_use_in (unsigned int regno, rtx x)
{
  if (REG_P (x) && REGNO (x) == regno)

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      if ((tem = regno_use_in (regno, XEXP (x, i))))
    else if (fmt[i] == 'E')
      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
        if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the second operand
   and positive values for the first.  */

int
commutative_operand_precedence (rtx op)
{
  enum rtx_code code = GET_CODE (op);

  /* Constants always come second.  Prefer "nice" constants.  */
  if (code == CONST_INT)
  if (code == CONST_DOUBLE)

  op = avoid_constant_pool_reference (op);
  code = GET_CODE (op);

  switch (GET_RTX_CLASS (code))
      if (code == CONST_INT)
      if (code == CONST_DOUBLE)

      /* SUBREGs of objects should come second.  */
      if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))

      if (!CONSTANT_P (op))

        /* As for RTX_CONST_OBJ.  */

      /* Complex expressions should be the first, so decrease priority
         of objects.  */

    case RTX_COMM_ARITH:
      /* Prefer operands that are themselves commutative to be first.
         This helps to make things linear.  In particular,
         (and (and (reg) (reg)) (not (reg))) is canonical.  */

      /* If only one operand is a binary expression, it will be the first
         operand.  In particular, (plus (minus (reg) (reg)) (neg (reg)))
         is canonical, although it will usually be further simplified.  */

      /* Then prefer NEG and NOT.  */
      if (code == NEG || code == NOT)
/* Return 1 iff it is necessary to swap operands of commutative operation
   in order to canonicalize expression.  */

int
swap_commutative_operands_p (rtx x, rtx y)
{
  return (commutative_operand_precedence (x)
          < commutative_operand_precedence (y));
}
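
/* Illustrative example (not part of the original file): for the operands
   of (plus (const_int 4) (reg)), the CONST_INT gets a lower precedence
   than the REG, so swap_commutative_operands_p returns nonzero and
   canonicalization orders the expression as (plus (reg) (const_int 4)),
   i.e. the constant ends up as the second operand.  */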
/* Return 1 if X is an autoincrement side effect and the register is
   not the stack pointer.  */

int
auto_inc_p (rtx x)
{
  switch (GET_CODE (x))
      /* There are no REG_INC notes for SP.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
/* Return 1 if the sequence of instructions beginning with FROM and up
   to and including TO is safe to move.  If NEW_TO is non-NULL, and
   the sequence is not already safe to move, but can be easily
   extended to a sequence which is safe, then NEW_TO will point to the
   end of the extended sequence.

   For now, this function only checks that the region contains whole
   exception regions, but it could be extended to check additional
   conditions as well.  */

int
insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
{
  int eh_region_count = 0;

  /* By default, assume the end of the region will be what was
     suggested.  */

          switch (NOTE_LINE_NUMBER (r))
            case NOTE_INSN_EH_REGION_BEG:

            case NOTE_INSN_EH_REGION_END:
              if (eh_region_count == 0)
                /* This sequence of instructions contains the end of
                   an exception region, but not the beginning.  Moving
                   it will cause chaos.  */

      /* If we've passed TO, and we see a non-note instruction, we
         can't extend the sequence to a movable sequence.  */

      /* It's OK to move the sequence if there were matched sets of
         exception region notes.  */
      return eh_region_count == 0;

      /* It's OK to move the sequence if there were matched sets of
         exception region notes.  */
      if (past_to_p && eh_region_count == 0)

      /* Go to the next instruction.  */
/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */

int
loc_mentioned_in_p (rtx *loc, rtx in)
{
  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (loc == &in->u.fld[i].rt_rtx)

      if (loc_mentioned_in_p (loc, XEXP (in, i)))
    else if (fmt[i] == 'E')
      for (j = XVECLEN (in, i) - 1; j >= 0; j--)
        if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
   and SUBREG_BYTE, return the bit offset where the subreg begins
   (counting from the least significant bit of the operand).  */

unsigned int
subreg_lsb_1 (enum machine_mode outer_mode,
              enum machine_mode inner_mode,
              unsigned int subreg_byte)
{
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
                   + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
                  && (subreg_byte % UNITS_PER_WORD
                      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}

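/* Worked example (illustrative only, assuming 4-byte words and 8-bit
   units): for (subreg:QI (reg:SI) 0) on a little-endian target,
   word = 0 and byte = 0, so the subreg begins at bit 0.  On a
   big-endian target the same subreg gives byte = (4 - (0 + 1)) % 4 = 3,
   hence bit offset 3 * 8 = 24, i.e. the QImode piece lives in the most
   significant byte of the SImode register.  */
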
/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (rtx x)
{
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
                       SUBREG_BYTE (x));
}

/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */
unsigned int
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
                     unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  nregs_xmode = hard_regno_nregs[xregno][xmode];
  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;

  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}

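/* Worked example (illustrative only): on a target where DImode needs two
   hard registers and SImode one (nregs_xmode = 2, nregs_ymode = 1), a
   (subreg:SI (reg:DI) 4) with a 4-byte SImode gives
   mode_multiple = 8 / 4 = 2, y_offset = 4 / 4 = 1, nregs_multiple = 2,
   so the returned offset is (1 / (2 / 2)) * 1 = 1, i.e. the second of
   the two hard registers backing the DImode value.  */
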
/* This function returns true when the offset is representable via
   subreg_offset in the given regno.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - Whether the offset is representable.  */
bool
subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
                               unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  nregs_xmode = hard_regno_nregs[xregno][xmode];
  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* Paradoxical subregs are always valid.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return true;

  /* Lowpart subregs are always valid.  */
  if (offset == subreg_lowpart_offset (ymode, xmode))
    return true;

  /* This should always pass, otherwise we don't know how to verify the
     constraint.  These conditions may be relaxed but subreg_offset would
     need to be redesigned.  */
  gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((GET_MODE_SIZE (ymode) % nregs_ymode) == 0);
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);

  /* The XMODE value can be seen as a vector of NREGS_XMODE
     values.  The subreg must represent a lowpart of given field.
     Compute what field it is.  */
  offset -= subreg_lowpart_offset (ymode,
                                   mode_for_size (GET_MODE_BITSIZE (xmode)
                                                  / nregs_xmode,
                                                  MODE_INT, 0));

  /* size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;

  gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((mode_multiple % nregs_multiple) == 0);

  return (!(y_offset % (mode_multiple / nregs_multiple)));
}

/* Return the final regno that a subreg expression refers to.  */
unsigned int
subreg_regno (rtx x)
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
                                     GET_MODE (subreg),
                                     SUBREG_BYTE (x),
                                     GET_MODE (x));
  return ret;
}

struct parms_set_data
{
  int nregs;
  HARD_REG_SET regs;
};

/* Helper function for noticing stores to parameter registers.  */
static void
parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  struct parms_set_data *d = data;
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}

/* Look backward for first parameter to be loaded.
   Do not skip BOUNDARY.  */
rtx
find_first_parameter_load (rtx call_insn, rtx boundary)
{
  struct parms_set_data parm;
  rtx p, before;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && REG_P (XEXP (XEXP (p, 0), 0)))
      {
        gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);

        /* We only care about registers which can hold function
           arguments.  */
        if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
          continue;

        SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
        parm.nregs++;
      }
  before = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (CALL_P (before))
        break;

      /* Our caller needs to either ensure that we will find all sets
         (in case code has not been optimized yet), or take care of
         possible labels by setting BOUNDARY to a preceding code label.  */
      if (LABEL_P (before))
        {
          gcc_assert (before == boundary);
          break;
        }

      if (INSN_P (before))
        note_stores (PATTERN (before), parms_set, &parm);
    }
  return before;
}

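/* Illustrative sketch (not part of the original file): a typical caller
   walks back from a call insn to the first insn that loads one of its
   argument registers, using a label as the backward boundary.  The
   helper name below is hypothetical.  */
#if 0
static rtx
example_start_of_argument_setup (rtx call_insn, rtx block_head)
{
  /* Everything between the returned insn and CALL_INSN is argument
     set-up that belongs with the call.  */
  return find_first_parameter_load (call_insn, block_head);
}
#endif
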
/* Return true if we should avoid inserting code between INSN and preceding
   call instruction.  */

bool
keep_with_call_p (rtx insn)
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
          && fixed_regs[REGNO (SET_DEST (set))]
          && general_operand (SET_SRC (set), VOIDmode))
        return true;
      if (REG_P (SET_SRC (set))
          && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
        return true;
      /* There may be a stack pop just after the call and before the store
         of the return register.  Search for the actual store when deciding
         if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
        {
          rtx i2 = next_nonnote_insn (insn);
          if (i2 && keep_with_call_p (i2))
            return true;
        }
    }
  return false;
}

/* Return true if LABEL is a target of JUMP_INSN.  This applies only
   to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
   not apply to the fallthru case of a conditional jump.  */

bool
label_is_jump_target_p (rtx label, rtx jump_insn)
{
  rtx tmp = JUMP_LABEL (jump_insn);

  if (label == tmp)
    return true;

  if (tablejump_p (jump_insn, NULL, &tmp))
    {
      rtvec vec = XVEC (PATTERN (tmp),
                        GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
      int i, veclen = GET_NUM_ELEM (vec);

      for (i = 0; i < veclen; ++i)
        if (XEXP (RTVEC_ELT (vec, i), 0) == label)
          return true;
    }

  return false;
}

/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      /* If we can't tie these modes, make this expensive.  The larger
         the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
        return COSTS_N_INSNS (2
                              + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

    default:
      if (targetm.rtx_costs (x, code, outer_code, &total))
        return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}

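/* Illustrative sketch (not part of the original file): passes compare the
   estimates returned by rtx_cost to pick the cheaper of two equivalent
   expressions; SET is the usual outer code when the expression is the
   source of an assignment.  The helper name is hypothetical.  */
#if 0
static rtx
example_pick_cheaper (rtx a, rtx b)
{
  return rtx_cost (a, SET) <= rtx_cost (b, SET) ? a : b;
}
#endif
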
/* Return cost of address expression X.
   Expect that X is properly formed address reference.  */

int
address_cost (rtx x, enum machine_mode mode)
{
  /* We may be asked for cost of various unusual addresses, such as operands
     of push instruction.  It is not worthwhile to complicate writing
     of the target hook by such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;

  return targetm.address_cost (x);
}

/* If the target doesn't override, compute the cost as with arithmetic.  */

int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}

unsigned HOST_WIDE_INT
nonzero_bits (rtx x, enum machine_mode mode)
{
  return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
}

unsigned int
num_sign_bit_copies (rtx x, enum machine_mode mode)
{
  return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
}

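/* Illustrative sketch (not part of the original file): a typical query, as
   done in combine-style transformations, checks whether an AND with MASK is
   redundant because the masked-off bits are already known to be zero.  The
   helper name is hypothetical.  */
#if 0
static bool
example_and_is_redundant (rtx op, unsigned HOST_WIDE_INT mask,
                          enum machine_mode mode)
{
  return (nonzero_bits (op, mode) & ~mask) == 0;
}
#endif
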
/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
   It avoids exponential behavior in nonzero_bits1 when X has
   identical subexpressions on the first or the second level.  */

static unsigned HOST_WIDE_INT
cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
                     enum machine_mode known_mode,
                     unsigned HOST_WIDE_INT known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
     precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return nonzero_bits1 (x, mode, x1, mode,
                              cached_nonzero_bits (x1, mode, known_x,
                                                   known_mode, known_ret));

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));
    }

  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
}

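/* Example of the caching (illustrative only): for (plus (reg A) (reg A))
   both operands are the same rtx, so the first-level check computes the
   nonzero bits of (reg A) once and hands the result to nonzero_bits1 as
   KNOWN_X/KNOWN_RET; without this, a tree such as
   (plus (plus X X) (plus X X)) would evaluate X a number of times that
   grows exponentially with the nesting depth.  */
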
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior

/* Given an expression, X, compute which bits in X can be nonzero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
   an arithmetic operation, we can do better.  */

static unsigned HOST_WIDE_INT
nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
               enum machine_mode known_mode,
               unsigned HOST_WIDE_INT known_ret)
{
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  unsigned HOST_WIDE_INT inner_nz;
  enum rtx_code code;
  unsigned int mode_width = GET_MODE_BITSIZE (mode);

  /* For floating-point values, assume all bits are needed.  */
  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
    return nonzero;

  /* If X is wider than MODE, use its mode instead.  */
  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
    {
      mode = GET_MODE (x);
      nonzero = GET_MODE_MASK (mode);
      mode_width = GET_MODE_BITSIZE (mode);
    }

  if (mode_width > HOST_BITS_PER_WIDE_INT)
    /* Our only callers in this case look for single bit values.  So
       just return the mode mask.  Those tests will then be false.  */
    return nonzero;

#ifndef WORD_REGISTER_OPERATIONS
  /* If MODE is wider than X, but both are a single word for both the host
     and target machines, we can compute this from which bits of the
     object might be nonzero in its own mode, taking into account the fact
     that on many CISC machines, accessing an object in a wider mode
     causes the high-order bits to become undefined.  So they are
     not known to be zero.  */

  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
    {
      nonzero &= cached_nonzero_bits (x, GET_MODE (x),
                                      known_x, known_mode, known_ret);
      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
      return nonzero;
    }
#endif

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
         all the bits above ptr_mode are known to be zero.  */
      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && REG_POINTER (x))
        nonzero &= GET_MODE_MASK (ptr_mode);
#endif

      /* Include declared information about alignment of pointers.  */
      /* ??? We don't properly preserve REG_POINTER changes across
         pointer-to-integer casts, so we can't trust it except for
         things that we know must be pointers.  See execute/960116-1.c.  */
      if ((x == stack_pointer_rtx
           || x == frame_pointer_rtx
           || x == arg_pointer_rtx)
          && REGNO_POINTER_ALIGN (REGNO (x)))
        {
          unsigned HOST_WIDE_INT alignment
            = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;

#ifdef PUSH_ROUNDING
          /* If PUSH_ROUNDING is defined, it is possible for the
             stack to be momentarily aligned only to that amount,
             so we pick the least alignment.  */
          if (x == stack_pointer_rtx && PUSH_ARGS)
            alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
                             alignment);
#endif

          nonzero &= ~(alignment - 1);
        }

      {
        unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
        rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
                                              known_mode, known_ret,
                                              &nonzero_for_hook);

        if (new)
          nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
                                                   known_mode, known_ret);

        return nonzero_for_hook;
      }

    case CONST_INT:
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is negative in MODE, sign-extend the value.  */
      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
          && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
        return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
#endif

      return INTVAL (x);

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* In many, if not most, RISC machines, reading a byte from memory
         zeros the rest of the register.  Noticing that fact saves a lot
         of extra zero-extends.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
        nonzero &= GET_MODE_MASK (GET_MODE (x));
#endif
      break;

    case EQ:  case NE:
    case UNEQ:  case LTGT:
    case GT:  case GTU:  case UNGT:
    case LT:  case LTU:  case UNLT:
    case GE:  case GEU:  case UNGE:
    case LE:  case LEU:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If this produces an integer result, we know which bits are set.
         Code here used to clear bits outside the mode of X, but that is
         now done above.  */
      if (GET_MODE_CLASS (mode) == MODE_INT
          && mode_width <= HOST_BITS_PER_WIDE_INT)
        nonzero = STORE_FLAG_VALUE;
      break;

    case NEG:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
         and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
          == GET_MODE_BITSIZE (GET_MODE (x)))
        nonzero = 1;
#endif

      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
        nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
      break;

    case ABS:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
         and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
          == GET_MODE_BITSIZE (GET_MODE (x)))
        nonzero = 1;
#endif
      break;

    case TRUNCATE:
      nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
                                       known_x, known_mode, known_ret)
                  & GET_MODE_MASK (mode));
      break;

    case ZERO_EXTEND:
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
                                      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
        nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
      break;

    case SIGN_EXTEND:
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
         Otherwise, show all the bits in the outer mode but not the inner
         may be nonzero.  */
      inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
                                      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
        {
          inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
          if (inner_nz
              & (((HOST_WIDE_INT) 1
                  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
            inner_nz |= (GET_MODE_MASK (mode)
                         & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
        }

      nonzero &= inner_nz;
      break;

    case AND:
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
                                      known_x, known_mode, known_ret)
                 & cached_nonzero_bits (XEXP (x, 1), mode,
                                        known_x, known_mode, known_ret);
      break;

    case XOR:   case IOR:
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
      {
        unsigned HOST_WIDE_INT nonzero0 =
          cached_nonzero_bits (XEXP (x, 0), mode,
                               known_x, known_mode, known_ret);

        /* Don't call nonzero_bits for the second time if it cannot change
           anything.  */
        if ((nonzero & nonzero0) != nonzero)
          nonzero &= nonzero0
                     | cached_nonzero_bits (XEXP (x, 1), mode,
                                            known_x, known_mode, known_ret);
      }
      break;

    case PLUS:  case MINUS:
    case MULT:
    case DIV:   case UDIV:
    case MOD:   case UMOD:
      /* We can apply the rules of arithmetic to compute the number of
         high- and low-order zero bits of these operations.  We start by
         computing the width (position of the highest-order nonzero bit)
         and the number of low-order zero bits for each value.  */
      {
        unsigned HOST_WIDE_INT nz0 =
          cached_nonzero_bits (XEXP (x, 0), mode,
                               known_x, known_mode, known_ret);
        unsigned HOST_WIDE_INT nz1 =
          cached_nonzero_bits (XEXP (x, 1), mode,
                               known_x, known_mode, known_ret);
        int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
        int width0 = floor_log2 (nz0) + 1;
        int width1 = floor_log2 (nz1) + 1;
        int low0 = floor_log2 (nz0 & -nz0);
        int low1 = floor_log2 (nz1 & -nz1);
        HOST_WIDE_INT op0_maybe_minusp
          = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
        HOST_WIDE_INT op1_maybe_minusp
          = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
        unsigned int result_width = mode_width;
        int result_low = 0;

        switch (code)
          {
          case PLUS:
            result_width = MAX (width0, width1) + 1;
            result_low = MIN (low0, low1);
            break;
          case MINUS:
            result_low = MIN (low0, low1);
            break;
          case MULT:
            result_width = width0 + width1;
            result_low = low0 + low1;
            break;
          case DIV:
            if (! op0_maybe_minusp && ! op1_maybe_minusp)
              result_width = width0;
            break;
          case UDIV:
            result_width = width0;
            break;
          case MOD:
            if (! op0_maybe_minusp && ! op1_maybe_minusp)
              result_width = MIN (width0, width1);
            result_low = MIN (low0, low1);
            break;
          case UMOD:
            result_width = MIN (width0, width1);
            result_low = MIN (low0, low1);
            break;
          default:
            break;
          }

        if (result_width < mode_width)
          nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;

        if (result_low > 0)
          nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
        /* If pointers extend unsigned and this is an addition or subtraction
           to a pointer in Pmode, all the bits above ptr_mode are known to be
           zero.  */
        if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
            && (code == PLUS || code == MINUS)
            && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
          nonzero &= GET_MODE_MASK (ptr_mode);
#endif
      }
      break;

    case ZERO_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
      break;

    case SUBREG:
      /* If this is a SUBREG formed for a promoted variable that has
         been zero-extended, we know that at least the high-order bits
         are zero, though others might be too.  */

      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
        nonzero = GET_MODE_MASK (GET_MODE (x))
                  & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
                                         known_x, known_mode, known_ret);

      /* If the inner mode is a single word for both the host and target
         machines, we can compute this from which bits of the inner
         object might be nonzero.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
          && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
              <= HOST_BITS_PER_WIDE_INT))
        {
          nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
                                          known_x, known_mode, known_ret);

#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
          /* If this is a typical RISC machine, we only have to worry
             about the way loads are extended.  */
          if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
               ? (((nonzero
                    & (((unsigned HOST_WIDE_INT) 1
                        << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
                  != 0))
               : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
              || !MEM_P (SUBREG_REG (x)))
#endif
            {
              /* On many CISC machines, accessing an object in a wider mode
                 causes the high-order bits to become undefined.  So they are
                 not known to be zero.  */
              if (GET_MODE_SIZE (GET_MODE (x))
                  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
                nonzero |= (GET_MODE_MASK (GET_MODE (x))
                            & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
            }
        }
      break;

    case ASHIFTRT:
    case LSHIFTRT:
    case ASHIFT:
    case ROTATE:
      /* The nonzero bits are in two classes: any bits within MODE
         that aren't in GET_MODE (x) are always significant.  The rest of the
         nonzero bits are those that are significant in the operand of
         the shift when shifted the appropriate number of bits.  This
         shows that high-order bits are cleared by the right shift and
         low-order bits by left shifts.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
        {
          enum machine_mode inner_mode = GET_MODE (x);
          unsigned int width = GET_MODE_BITSIZE (inner_mode);
          int count = INTVAL (XEXP (x, 1));
          unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
          unsigned HOST_WIDE_INT op_nonzero =
            cached_nonzero_bits (XEXP (x, 0), mode,
                                 known_x, known_mode, known_ret);
          unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
          unsigned HOST_WIDE_INT outer = 0;

          if (mode_width > width)
            outer = (op_nonzero & nonzero & ~mode_mask);

          if (code == LSHIFTRT)
            inner >>= count;
          else if (code == ASHIFTRT)
            {
              inner >>= count;

              /* If the sign bit may have been nonzero before the shift, we
                 need to mark all the places it could have been copied to
                 by the shift as possibly nonzero.  */
              if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
                inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
            }
          else if (code == ASHIFT)
            inner <<= count;
          else
            inner = ((inner << (count % width)
                      | (inner >> (width - (count % width)))) & mode_mask);

          nonzero &= (outer | inner);
        }
      break;

    case FFS:
    case POPCOUNT:
      /* This is at most the number of bits in the mode.  */
      nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
      break;

    case CLZ:
      /* If CLZ has a known value at zero, then the nonzero bits are
         that value, plus the number of bits in the mode minus one.  */
      if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
        nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
      else
        nonzero = -1;
      break;

    case CTZ:
      /* If CTZ has a known value at zero, then the nonzero bits are
         that value, plus the number of bits in the mode minus one.  */
      if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
        nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
      else
        nonzero = -1;
      break;

    case PARITY:
      nonzero = 1;
      break;

    case IF_THEN_ELSE:
      {
        unsigned HOST_WIDE_INT nonzero_true =
          cached_nonzero_bits (XEXP (x, 1), mode,
                               known_x, known_mode, known_ret);

        /* Don't call nonzero_bits for the second time if it cannot change
           anything.  */
        if ((nonzero & nonzero_true) != nonzero)
          nonzero &= nonzero_true
                     | cached_nonzero_bits (XEXP (x, 2), mode,
                                            known_x, known_mode, known_ret);
      }
      break;

    default:
      break;
    }

  return nonzero;
}

/* See the macro definition above.  */
#undef cached_num_sign_bit_copies

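/* Worked example (illustrative only, SImode, 32-bit HOST_WIDE_INT):
   nonzero_bits ((and (reg) (const_int 0xff)), SImode) is at most 0xff,
   because the AND case intersects the operands' masks; and
   nonzero_bits ((lshiftrt (reg:SI) (const_int 28)), SImode) is 0xf,
   because the shift case shifts the operand's full mode mask right
   by the shift count.  */
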
/* The function cached_num_sign_bit_copies is a wrapper around
   num_sign_bit_copies1.  It avoids exponential behavior in
   num_sign_bit_copies1 when X has identical subexpressions on the
   first or the second level.  */

static unsigned int
cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
                            enum machine_mode known_mode,
                            unsigned int known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
     the precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
        return
          num_sign_bit_copies1 (x, mode, x0, mode,
                                cached_num_sign_bit_copies (x0, mode, known_x,
                                                            known_mode,
                                                            known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return
          num_sign_bit_copies1 (x, mode, x1, mode,
                                cached_num_sign_bit_copies (x1, mode, known_x,
                                                            known_mode,
                                                            known_ret));

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return
          num_sign_bit_copies1 (x, mode, x0, mode,
                                cached_num_sign_bit_copies (x0, mode, known_x,
                                                            known_mode,
                                                            known_ret));
    }

  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
}

/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static unsigned int
num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
                      enum machine_mode known_mode,
                      unsigned int known_ret)
{
  enum rtx_code code = GET_CODE (x);
  unsigned int bitwidth = GET_MODE_BITSIZE (mode);
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
    return 1;

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
    {
      num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
                                         known_x, known_mode, known_ret);
      return MAX (1,
                  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
    }

  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* If this machine does not do all register operations on the entire
         register and MODE is wider than the mode of X, we can say nothing
         at all about the high-order bits.  */
      return 1;
#else
      /* Likewise on machines that do, if the mode of the object is smaller
         than a word and loads of that size don't sign extend, we can say
         nothing about the high order bits.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP
          && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif
          )
        return 1;
#endif
    }

  switch (code)
    {
    case REG:

#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend signed and this is a pointer in Pmode, say that
         all the bits above ptr_mode are known to be sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
          && REG_POINTER (x))
        return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
#endif

      {
        unsigned int copies_for_hook = 1, copies = 1;
        rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
                                                     known_mode, known_ret,
                                                     &copies_for_hook);

        if (new)
          copies = cached_num_sign_bit_copies (new, mode, known_x,
                                               known_mode, known_ret);

        if (copies > 1 || copies_for_hook > 1)
          return MAX (copies, copies_for_hook);

        /* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
      }
      break;

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
        return MAX (1, ((int) bitwidth
                        - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
#endif
      break;

    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
         and we are looking at it in a wider mode, we know that at least the
         high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                             known_x, known_mode, known_ret);
          return MAX ((int) bitwidth
                      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
                      num0);
        }

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
                                             known_x, known_mode, known_ret);
          return MAX (1, (num0
                          - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
                                   - bitwidth)));
        }

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
         affect the entire register, just look inside.  Note that we are
         passing MODE to the recursive call, so the number of sign bit copies
         will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
         reload for the inner part, it may be loaded from the stack, and
         then we lose all sign bit copies that existed before the store
         to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
           > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
          && MEM_P (SUBREG_REG (x)))
        return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                           known_x, known_mode, known_ret);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                            known_x, known_mode, known_ret));

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                         known_x, known_mode, known_ret);
      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
                                    - bitwidth)));

    case NOT:
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);

    case ROTATE:       case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
         of sign bit copies, we can just subtract that amount from the
         number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < (int) bitwidth)
        {
          num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                             known_x, known_mode, known_ret);
          return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
                                 : (int) bitwidth - INTVAL (XEXP (x, 1))));
        }
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
         is known to be positive, the number of sign bit copies is the
         same as that of the input.  Finally, if the input has just one bit
         that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
        return bitwidth;

      if (num0 > 1
          && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
        num0--;

      return num0;

    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
         MIN and MAX operations always return one of the operands.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
         if we are subtracting 1 from a positive number, there will not
         be such a carry.  Furthermore, if the positive number is known to
         be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
          && bitwidth <= HOST_BITS_PER_WIDE_INT)
        {
          nonzero = nonzero_bits (XEXP (x, 0), mode);
          if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
            return (nonzero == 1 || nonzero == 0 ? bitwidth
                    : bitwidth - floor_log2 (nonzero) - 1);
        }

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      result = MAX (1, MIN (num0, num1) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is an addition or subtraction
         to a pointer in Pmode, all the bits above ptr_mode are known to be
         sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && (code == PLUS || code == MINUS)
          && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
        result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
                             - GET_MODE_BITSIZE (ptr_mode) + 1),
                      result);
#endif
      return result;

    case MULT:
      /* The number of bits of the product is the sum of the number of
         bits of both terms.  However, unless one of the terms is known
         to be positive, we must allow for an additional bit since negating
         a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (((nonzero_bits (XEXP (x, 0), mode)
                    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
                  && ((nonzero_bits (XEXP (x, 1), mode)
                       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
        result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
         has the high bit set, we know nothing about the number of sign
         bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
                & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        return 1;
      else
        return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  */
      return cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
         the case where the divisor is negative, in which case we have
         to add 1.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case MOD:
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
         sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) < 0
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
        return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* If the constant is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}

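/* Worked example (illustrative only, SImode): for
   (sign_extend:SI (reg:QI)) the SIGN_EXTEND case gives
   32 - 8 + num_sign_bit_copies (reg:QI) >= 25 copies, and for
   (ashiftrt:SI (reg:SI) (const_int 24)) the ASHIFTRT case adds the
   shift count, giving at least 1 + 24 = 25 known sign-bit copies.  */
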
/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}

/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands; (cc0) will have been replaced.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to canonizing it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
   and at INSN.  */

rtx
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
                        rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx prev = insn;
  rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  enum machine_mode mode;

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.c.  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
         && op1 == CONST0_RTX (GET_MODE (op0))
         && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

#ifdef HAVE_cc0
      /* If comparison with cc0, import actual comparison from compare
         insn.  */
      if (op0 == cc0_rtx)
        {
          if ((prev = prev_nonnote_insn (prev)) == 0
              || !NONJUMP_INSN_P (prev)
              || (set = single_set (prev)) == 0
              || SET_DEST (set) != cc0_rtx)
            return 0;

          op0 = SET_SRC (set);
          op1 = CONST0_RTX (GET_MODE (op0));
          if (earliest)
            *earliest = prev;
        }
#endif

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
        {
          op1 = XEXP (op0, 1);
          op0 = XEXP (op0, 0);
          continue;
        }
      else if (!REG_P (op0))
        break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
         stop if it isn't a single set or if it has a REG_INC note because
         we don't want to bother dealing with it.  */

      if ((prev = prev_nonnote_insn (prev)) == 0
          || !NONJUMP_INSN_P (prev)
          || FIND_REG_INC_NOTE (prev, NULL_RTX))
        break;

      set = set_of (op0, prev);

      if (set
          && (GET_CODE (set) != SET
              || !rtx_equal_p (SET_DEST (set), op0)))
        break;

      /* If this is setting OP0, get what it sets it to if it looks
         relevant.  */
      if (set)
        {
          enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* ??? We may not combine comparisons done in a CCmode with
             comparisons not done in a CCmode.  This is to aid targets
             like Alpha that have an IEEE compliant EQ instruction, and
             a non-IEEE compliant BEQ instruction.  The use of CCmode is
             actually artificial, simply to prevent the combination, but
             should not affect other platforms.

             However, we must allow VOIDmode comparisons to match either
             CCmode or non-CCmode comparison, because some ports have
             modeless comparisons inside branch patterns.

             ??? This mode check should perhaps look more like the mode check
             in simplify_comparison in combine.  */

          if ((GET_CODE (SET_SRC (set)) == COMPARE
               || (((code == NE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))))
              && (((GET_MODE_CLASS (mode) == MODE_CC)
                   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                  || mode == VOIDmode || inner_mode == VOIDmode))
            x = SET_SRC (set);
          else if (((code == EQ
                     || (code == GE
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == GE
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))
                   && (((GET_MODE_CLASS (mode) == MODE_CC)
                        == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                       || mode == VOIDmode || inner_mode == VOIDmode))
            {
              reverse_code = 1;
              x = SET_SRC (set);
            }
          else
            break;
        }

      else if (reg_set_p (op0, prev))
        /* If this sets OP0, but not directly, we have to give up.  */
        break;

      if (x)
        {
          /* If the caller is expecting the condition to be valid at INSN,
             make sure X doesn't change before INSN.  */
          if (valid_at_insn_p)
            if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
              break;
          if (COMPARISON_P (x))
            code = GET_CODE (x);
          if (reverse_code)
            {
              code = reversed_comparison_code (x, prev);
              if (code == UNKNOWN)
                return 0;
              reverse_code = 0;
            }

          op0 = XEXP (x, 0), op1 = XEXP (x, 1);
          if (earliest)
            *earliest = prev;
        }
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && GET_CODE (op1) == CONST_INT
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
          break;

        default:
          break;
        }
    }

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}

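/* Worked example of rule (4) above (illustrative only, SImode):
   (le (reg) (const_int 41)) is rewritten as (lt (reg) (const_int 42)),
   and (geu (reg) (const_int 1)) as (gtu (reg) (const_int 0)); the
   rewrite is skipped when adding or subtracting 1 would overflow the
   mode, e.g. (le (reg) (const_int 0x7fffffff)) is left alone.  */
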
/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}

/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }
}