1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
25 #include "coretypes.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
40 /* Forward declarations */
41 static int global_reg_mentioned_p_1 (rtx *, void *);
42 static void set_of_1 (rtx, rtx, void *);
43 static bool covers_regno_p (rtx, unsigned int);
44 static bool covers_regno_no_parallel_p (rtx, unsigned int);
45 static int rtx_referenced_p_1 (rtx *, void *);
46 static int computed_jump_p_1 (rtx);
47 static void parms_set (rtx, rtx, void *);
49 static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
50                                                    rtx, enum machine_mode,
51                                                    unsigned HOST_WIDE_INT);
52 static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
53                                              enum machine_mode,
54                                              unsigned HOST_WIDE_INT);
55 static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
56                                                 enum machine_mode,
57                                                 unsigned int);
58 static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
59                                           enum machine_mode, unsigned int);

61 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
62    -1 if a code has no such operand.  */
63 static int non_rtx_starting_operands[NUM_RTX_CODE];
65 /* Bit flags that specify the machine subtype we are compiling for.
66 Bits are tested using macros TARGET_... defined in the tm.h file
67 and set by `-m...' switches. Must be defined in rtlanal.c. */
71 /* Return 1 if the value of X is unstable
72 (would be different at a different point in the program).
73 The frame pointer, arg pointer, etc. are considered stable
74 (within one function) and so is anything marked `unchanging'. */
77 rtx_unstable_p (rtx x
)
79 RTX_CODE code
= GET_CODE (x
);
86 return !MEM_READONLY_P (x
) || rtx_unstable_p (XEXP (x
, 0));
97 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
98 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
99 /* The arg pointer varies if it is not a fixed register. */
100 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
102 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
103 /* ??? When call-clobbered, the value is stable modulo the restore
104 that must happen after a call. This currently screws up local-alloc
105 into believing that the restore is not needed. */
106 if (x
== pic_offset_table_rtx
)
112 if (MEM_VOLATILE_P (x
))
121 fmt
= GET_RTX_FORMAT (code
);
122 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
125 if (rtx_unstable_p (XEXP (x
, i
)))
128 else if (fmt
[i
] == 'E')
131 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
132 if (rtx_unstable_p (XVECEXP (x
, i
, j
)))
139 /* Return 1 if X has a value that can vary even between two
140 executions of the program. 0 means X can be compared reliably
141 against certain constants or near-constants.
142 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
143 zero, we are slightly more conservative.
144 The frame pointer and the arg pointer are considered constant. */
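/* A hypothetical illustration (not drawn from any caller): a pseudo such as
   (reg:SI 100) varies, since it may hold different values at different
   points in the program, whereas frame_pointer_rtx, or a PLUS of it and a
   CONST_INT, does not, because the frame pointer is treated as constant
   within a function.  */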
147 rtx_varies_p (rtx x
, int for_alias
)
160 return !MEM_READONLY_P (x
) || rtx_varies_p (XEXP (x
, 0), for_alias
);
171 /* Note that we have to test for the actual rtx used for the frame
172 and arg pointers and not just the register number in case we have
173 eliminated the frame and/or arg pointer and are using it
175 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
176 /* The arg pointer varies if it is not a fixed register. */
177 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
179 if (x
== pic_offset_table_rtx
180 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
181 /* ??? When call-clobbered, the value is stable modulo the restore
182 that must happen after a call. This currently screws up
183 local-alloc into believing that the restore is not needed, so we
184 must return 0 only if we are called from alias analysis. */
192 /* The operand 0 of a LO_SUM is considered constant
193 (in fact it is related specifically to operand 1)
194 during alias analysis. */
195 return (! for_alias
&& rtx_varies_p (XEXP (x
, 0), for_alias
))
196 || rtx_varies_p (XEXP (x
, 1), for_alias
);
199 if (MEM_VOLATILE_P (x
))
208 fmt
= GET_RTX_FORMAT (code
);
209 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
212 if (rtx_varies_p (XEXP (x
, i
), for_alias
))
215 else if (fmt
[i
] == 'E')
218 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
219 if (rtx_varies_p (XVECEXP (x
, i
, j
), for_alias
))
226 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
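/* For instance (a hypothetical illustration, not from this file): the frame
   pointer plus a CONST_INT is a stack reference and cannot trap, whereas an
   address held in an arbitrary pseudo register is assumed to be able to
   trap.  */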
229 rtx_addr_can_trap_p (rtx x
)
231 enum rtx_code code
= GET_CODE (x
);
236 return SYMBOL_REF_WEAK (x
);
242 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
243 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
244 || x
== stack_pointer_rtx
245 /* The arg pointer varies if it is not a fixed register. */
246 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
248 /* All of the virtual frame registers are stack references. */
249 if (REGNO (x
) >= FIRST_VIRTUAL_REGISTER
250 && REGNO (x
) <= LAST_VIRTUAL_REGISTER
)
255 return rtx_addr_can_trap_p (XEXP (x
, 0));
258 /* An address is assumed not to trap if it is an address that can't
259 trap plus a constant integer or it is the pic register plus a
261 return ! ((! rtx_addr_can_trap_p (XEXP (x
, 0))
262 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
263 || (XEXP (x
, 0) == pic_offset_table_rtx
264 && CONSTANT_P (XEXP (x
, 1))));
268 return rtx_addr_can_trap_p (XEXP (x
, 1));
275 return rtx_addr_can_trap_p (XEXP (x
, 0));
281 /* If it isn't one of the cases above, it can cause a trap. */
285 /* Return true if X is an address that is known to not be zero. */
288 nonzero_address_p (rtx x
)
290 enum rtx_code code
= GET_CODE (x
);
295 return !SYMBOL_REF_WEAK (x
);
301 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
302 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
303 || x
== stack_pointer_rtx
304 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
306 /* All of the virtual frame registers are stack references. */
307 if (REGNO (x
) >= FIRST_VIRTUAL_REGISTER
308 && REGNO (x
) <= LAST_VIRTUAL_REGISTER
)
313 return nonzero_address_p (XEXP (x
, 0));
316 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
318 /* Pointers aren't allowed to wrap. If we've got a register
319 that is known to be a pointer, and a positive offset, then
320 the composite can't be zero. */
321 if (INTVAL (XEXP (x
, 1)) > 0
322 && REG_P (XEXP (x
, 0))
323 && REG_POINTER (XEXP (x
, 0)))
326 return nonzero_address_p (XEXP (x
, 0));
328 /* Handle PIC references. */
329 else if (XEXP (x
, 0) == pic_offset_table_rtx
330 && CONSTANT_P (XEXP (x
, 1)))
335 /* Similar to the above; allow positive offsets. Further, since
336 auto-inc is only allowed in memories, the register must be a
338 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
339 && INTVAL (XEXP (x
, 1)) > 0)
341 return nonzero_address_p (XEXP (x
, 0));
344 /* Similarly. Further, the offset is always positive. */
351 return nonzero_address_p (XEXP (x
, 0));
354 return nonzero_address_p (XEXP (x
, 1));
360 /* If it isn't one of the cases above, it might be zero. */
364 /* Return 1 if X refers to a memory location whose address
365 cannot be compared reliably with constant addresses,
366 or if X refers to a BLKmode memory object.
367 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
368 zero, we are slightly more conservative. */
371 rtx_addr_varies_p (rtx x
, int for_alias
)
382 return GET_MODE (x
) == BLKmode
|| rtx_varies_p (XEXP (x
, 0), for_alias
);
384 fmt
= GET_RTX_FORMAT (code
);
385 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
388 if (rtx_addr_varies_p (XEXP (x
, i
), for_alias
))
391 else if (fmt
[i
] == 'E')
394 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
395 if (rtx_addr_varies_p (XVECEXP (x
, i
, j
), for_alias
))
401 /* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
403 Only obvious integer terms are detected.
404 This is used in cse.c with the `related_value' field. */
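/* A hypothetical illustration: for (const (plus (symbol_ref "x") (const_int 4)))
   the value 4 is returned, for (const (minus (symbol_ref "x") (const_int 4)))
   the value -4, and 0 for anything with no obvious integer term.  */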
407 get_integer_term (rtx x
)
409 if (GET_CODE (x
) == CONST
)
412 if (GET_CODE (x
) == MINUS
413 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
414 return - INTVAL (XEXP (x
, 1));
415 if (GET_CODE (x
) == PLUS
416 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
417 return INTVAL (XEXP (x
, 1));
421 /* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
423 Only obvious integer terms are detected. */
426 get_related_value (rtx x
)
428 if (GET_CODE (x
) != CONST
)
431 if (GET_CODE (x
) == PLUS
432 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
434 else if (GET_CODE (x
) == MINUS
435 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
440 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
441 a global register. */
444 global_reg_mentioned_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
452 switch (GET_CODE (x
))
455 if (REG_P (SUBREG_REG (x
)))
457 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
458 && global_regs
[subreg_regno (x
)])
466 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
480 /* A non-constant call might use a global register. */
490 /* Returns nonzero if X mentions a global register. */
493 global_reg_mentioned_p (rtx x
)
499 if (! CONST_OR_PURE_CALL_P (x
))
501 x
= CALL_INSN_FUNCTION_USAGE (x
);
509 return for_each_rtx (&x
, global_reg_mentioned_p_1
, NULL
);
512 /* Return the number of places FIND appears within X. If COUNT_DEST is
513 zero, we do not count occurrences inside the destination of a SET. */
516 count_occurrences (rtx x
, rtx find
, int count_dest
)
520 const char *format_ptr
;
541 if (MEM_P (find
) && rtx_equal_p (x
, find
))
546 if (SET_DEST (x
) == find
&& ! count_dest
)
547 return count_occurrences (SET_SRC (x
), find
, count_dest
);
554 format_ptr
= GET_RTX_FORMAT (code
);
557 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
559 switch (*format_ptr
++)
562 count
+= count_occurrences (XEXP (x
, i
), find
, count_dest
);
566 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
567 count
+= count_occurrences (XVECEXP (x
, i
, j
), find
, count_dest
);
574 /* Nonzero if register REG appears somewhere within IN.
575 Also works if REG is not a register; in this case it checks
576 for a subexpression of IN that is Lisp "equal" to REG. */
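/* A hypothetical usage sketch (not from this file):

     if (reg_mentioned_p (dest_reg, SET_SRC (set)))
       ... the source reads DEST_REG, or contains something rtx_equal_p
       to it, so the assignment cannot be removed blindly ...  */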
579 reg_mentioned_p (rtx reg
, rtx in
)
591 if (GET_CODE (in
) == LABEL_REF
)
592 return reg
== XEXP (in
, 0);
594 code
= GET_CODE (in
);
598 /* Compare registers by number. */
600 return REG_P (reg
) && REGNO (in
) == REGNO (reg
);
602 /* These codes have no constituent expressions
612 /* These are kept unique for a given value. */
619 if (GET_CODE (reg
) == code
&& rtx_equal_p (reg
, in
))
622 fmt
= GET_RTX_FORMAT (code
);
624 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
629 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
630 if (reg_mentioned_p (reg
, XVECEXP (in
, i
, j
)))
633 else if (fmt
[i
] == 'e'
634 && reg_mentioned_p (reg
, XEXP (in
, i
)))
640 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
641 no CODE_LABEL insn. */
644 no_labels_between_p (rtx beg
, rtx end
)
649 for (p
= NEXT_INSN (beg
); p
!= end
; p
= NEXT_INSN (p
))
655 /* Nonzero if register REG is used in an insn between
656 FROM_INSN and TO_INSN (exclusive of those two). */
659 reg_used_between_p (rtx reg
, rtx from_insn
, rtx to_insn
)
663 if (from_insn
== to_insn
)
666 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
668 && (reg_overlap_mentioned_p (reg
, PATTERN (insn
))
670 && (find_reg_fusage (insn
, USE
, reg
)
671 || find_reg_fusage (insn
, CLOBBER
, reg
)))))
676 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
677 is entirely replaced by a new value and the only use is as a SET_DEST,
678 we do not consider it a reference. */
681 reg_referenced_p (rtx x
, rtx body
)
685 switch (GET_CODE (body
))
688 if (reg_overlap_mentioned_p (x
, SET_SRC (body
)))
691 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
692 of a REG that occupies all of the REG, the insn references X if
693 it is mentioned in the destination. */
694 if (GET_CODE (SET_DEST (body
)) != CC0
695 && GET_CODE (SET_DEST (body
)) != PC
696 && !REG_P (SET_DEST (body
))
697 && ! (GET_CODE (SET_DEST (body
)) == SUBREG
698 && REG_P (SUBREG_REG (SET_DEST (body
)))
699 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body
))))
700 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
701 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body
)))
702 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)))
703 && reg_overlap_mentioned_p (x
, SET_DEST (body
)))
708 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
709 if (reg_overlap_mentioned_p (x
, ASM_OPERANDS_INPUT (body
, i
)))
716 return reg_overlap_mentioned_p (x
, body
);
719 return reg_overlap_mentioned_p (x
, TRAP_CONDITION (body
));
722 return reg_overlap_mentioned_p (x
, XEXP (body
, 0));
725 case UNSPEC_VOLATILE
:
726 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
727 if (reg_overlap_mentioned_p (x
, XVECEXP (body
, 0, i
)))
732 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
733 if (reg_referenced_p (x
, XVECEXP (body
, 0, i
)))
738 if (MEM_P (XEXP (body
, 0)))
739 if (reg_overlap_mentioned_p (x
, XEXP (XEXP (body
, 0), 0)))
744 if (reg_overlap_mentioned_p (x
, COND_EXEC_TEST (body
)))
746 return reg_referenced_p (x
, COND_EXEC_CODE (body
));
753 /* Nonzero if register REG is set or clobbered in an insn between
754 FROM_INSN and TO_INSN (exclusive of those two). */
757 reg_set_between_p (rtx reg
, rtx from_insn
, rtx to_insn
)
761 if (from_insn
== to_insn
)
764 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
765 if (INSN_P (insn
) && reg_set_p (reg
, insn
))
770 /* Internals of reg_set_between_p. */
772 reg_set_p (rtx reg
, rtx insn
)
774 /* We can be passed an insn or part of one. If we are passed an insn,
775 check if a side-effect of the insn clobbers REG. */
777 && (FIND_REG_INC_NOTE (insn
, reg
)
780 && REGNO (reg
) < FIRST_PSEUDO_REGISTER
781 && TEST_HARD_REG_BIT (regs_invalidated_by_call
,
784 || find_reg_fusage (insn
, CLOBBER
, reg
)))))
787 return set_of (reg
, insn
) != NULL_RTX
;
790 /* Similar to reg_set_between_p, but check all registers in X. Return 0
791 only if none of them are modified between START and END. Return 1 if
792 X contains a MEM; this routine does use memory aliasing. */
795 modified_between_p (rtx x
, rtx start
, rtx end
)
797 enum rtx_code code
= GET_CODE (x
);
820 if (modified_between_p (XEXP (x
, 0), start
, end
))
822 if (MEM_READONLY_P (x
))
824 for (insn
= NEXT_INSN (start
); insn
!= end
; insn
= NEXT_INSN (insn
))
825 if (memory_modified_in_insn_p (x
, insn
))
831 return reg_set_between_p (x
, start
, end
);
837 fmt
= GET_RTX_FORMAT (code
);
838 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
840 if (fmt
[i
] == 'e' && modified_between_p (XEXP (x
, i
), start
, end
))
843 else if (fmt
[i
] == 'E')
844 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
845 if (modified_between_p (XVECEXP (x
, i
, j
), start
, end
))
852 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
853 of them are modified in INSN. Return 1 if X contains a MEM; this routine
854 does use memory aliasing. */
857 modified_in_p (rtx x
, rtx insn
)
859 enum rtx_code code
= GET_CODE (x
);
878 if (modified_in_p (XEXP (x
, 0), insn
))
880 if (MEM_READONLY_P (x
))
882 if (memory_modified_in_insn_p (x
, insn
))
888 return reg_set_p (x
, insn
);
894 fmt
= GET_RTX_FORMAT (code
);
895 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
897 if (fmt
[i
] == 'e' && modified_in_p (XEXP (x
, i
), insn
))
900 else if (fmt
[i
] == 'E')
901 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
902 if (modified_in_p (XVECEXP (x
, i
, j
), insn
))
909 /* Helper function for set_of. */
917 set_of_1 (rtx x
, rtx pat
, void *data1
)
919 struct set_of_data
*data
= (struct set_of_data
*) (data1
);
920 if (rtx_equal_p (x
, data
->pat
)
921 || (!MEM_P (x
) && reg_overlap_mentioned_p (data
->pat
, x
)))
925 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
926 (either directly or via STRICT_LOW_PART and similar modifiers). */
928 set_of (rtx pat
, rtx insn
)
930 struct set_of_data data
;
931 data
.found
= NULL_RTX
;
933 note_stores (INSN_P (insn
) ? PATTERN (insn
) : insn
, set_of_1
, &data
);
937 /* Given an INSN, return a SET expression if this insn has only a single SET.
938 It may also have CLOBBERs, USEs, or SET whose output
939 will not be used, which we ignore. */
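/* A hypothetical illustration: for the pattern
     (parallel [(set (reg:SI 60) (reg:SI 61))
                (clobber (scratch:SI))])
   the SET of register 60 is returned, since the CLOBBER is ignored, while a
   pattern containing two live SETs yields NULL_RTX.  */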
942 single_set_2 (rtx insn
, rtx pat
)
945 int set_verified
= 1;
948 if (GET_CODE (pat
) == PARALLEL
)
950 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
952 rtx sub
= XVECEXP (pat
, 0, i
);
953 switch (GET_CODE (sub
))
960 /* We can consider insns having multiple sets, where all
961 but one are dead as single set insns. In common case
962 only single set is present in the pattern so we want
963 to avoid checking for REG_UNUSED notes unless necessary.
965 When we reach set first time, we just expect this is
966 the single set we are looking for and only when more
967 sets are found in the insn, we check them. */
970 if (find_reg_note (insn
, REG_UNUSED
, SET_DEST (set
))
971 && !side_effects_p (set
))
977 set
= sub
, set_verified
= 0;
978 else if (!find_reg_note (insn
, REG_UNUSED
, SET_DEST (sub
))
979 || side_effects_p (sub
))
991 /* Given an INSN, return nonzero if it has more than one SET, else return
995 multiple_sets (rtx insn
)
1000 /* INSN must be an insn. */
1001 if (! INSN_P (insn
))
1004 /* Only a PARALLEL can have multiple SETs. */
1005 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
1007 for (i
= 0, found
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1008 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == SET
)
1010 /* If we have already found a SET, then return now. */
1018 /* Either zero or one SET. */
1022 /* Return nonzero if the destination of SET equals the source
1023 and there are no side effects. */
1026 set_noop_p (rtx set
)
1028 rtx src
= SET_SRC (set
);
1029 rtx dst
= SET_DEST (set
);
1031 if (dst
== pc_rtx
&& src
== pc_rtx
)
1034 if (MEM_P (dst
) && MEM_P (src
))
1035 return rtx_equal_p (dst
, src
) && !side_effects_p (dst
);
1037 if (GET_CODE (dst
) == ZERO_EXTRACT
)
1038 return rtx_equal_p (XEXP (dst
, 0), src
)
1039 && ! BYTES_BIG_ENDIAN
&& XEXP (dst
, 2) == const0_rtx
1040 && !side_effects_p (src
);
1042 if (GET_CODE (dst
) == STRICT_LOW_PART
)
1043 dst
= XEXP (dst
, 0);
1045 if (GET_CODE (src
) == SUBREG
&& GET_CODE (dst
) == SUBREG
)
1047 if (SUBREG_BYTE (src
) != SUBREG_BYTE (dst
))
1049 src
= SUBREG_REG (src
);
1050 dst
= SUBREG_REG (dst
);
1053 return (REG_P (src
) && REG_P (dst
)
1054 && REGNO (src
) == REGNO (dst
));
1057 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1061 noop_move_p (rtx insn
)
1063 rtx pat
= PATTERN (insn
);
1065 if (INSN_CODE (insn
) == NOOP_MOVE_INSN_CODE
)
1068 /* Insns carrying these notes are useful later on. */
1069 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
1072 /* For now treat an insn with a REG_RETVAL note as a
1073 special insn which should not be considered a no-op. */
1074 if (find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
1077 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
1080 if (GET_CODE (pat
) == PARALLEL
)
1083 /* If nothing but SETs of registers to themselves,
1084 this insn can also be deleted. */
1085 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1087 rtx tem
= XVECEXP (pat
, 0, i
);
1089 if (GET_CODE (tem
) == USE
1090 || GET_CODE (tem
) == CLOBBER
)
1093 if (GET_CODE (tem
) != SET
|| ! set_noop_p (tem
))
1103 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1104 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1105 If the object was modified, if we hit a partial assignment to X, or hit a
1106 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1107 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1111 find_last_value (rtx x
, rtx
*pinsn
, rtx valid_to
, int allow_hwreg
)
1115 for (p
= PREV_INSN (*pinsn
); p
&& !LABEL_P (p
);
1119 rtx set
= single_set (p
);
1120 rtx note
= find_reg_note (p
, REG_EQUAL
, NULL_RTX
);
1122 if (set
&& rtx_equal_p (x
, SET_DEST (set
)))
1124 rtx src
= SET_SRC (set
);
1126 if (note
&& GET_CODE (XEXP (note
, 0)) != EXPR_LIST
)
1127 src
= XEXP (note
, 0);
1129 if ((valid_to
== NULL_RTX
1130 || ! modified_between_p (src
, PREV_INSN (p
), valid_to
))
1131 /* Reject hard registers because we don't usually want
1132 to use them; we'd rather use a pseudo. */
1134 && REGNO (src
) < FIRST_PSEUDO_REGISTER
) || allow_hwreg
))
1141 /* If set in non-simple way, we don't have a value. */
1142 if (reg_set_p (x
, p
))
1149 /* Return nonzero if register in range [REGNO, ENDREGNO)
1150 appears either explicitly or implicitly in X
1151 other than being stored into.
1153 References contained within the substructure at LOC do not count.
1154 LOC may be zero, meaning don't ignore anything. */
1157 refers_to_regno_p (unsigned int regno
, unsigned int endregno
, rtx x
,
1161 unsigned int x_regno
;
1166 /* The contents of a REG_NONNEG note is always zero, so we must come here
1167 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1171 code
= GET_CODE (x
);
1176 x_regno
= REGNO (x
);
1178 /* If we are modifying the stack, frame, or argument pointer, it will
1179 clobber a virtual register. In fact, we could be more precise,
1180 but it isn't worth it. */
1181 if ((x_regno
== STACK_POINTER_REGNUM
1182 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1183 || x_regno
== ARG_POINTER_REGNUM
1185 || x_regno
== FRAME_POINTER_REGNUM
)
1186 && regno
>= FIRST_VIRTUAL_REGISTER
&& regno
<= LAST_VIRTUAL_REGISTER
)
1189 return (endregno
> x_regno
1190 && regno
< x_regno
+ (x_regno
< FIRST_PSEUDO_REGISTER
1191 ? hard_regno_nregs
[x_regno
][GET_MODE (x
)]
1195 /* If this is a SUBREG of a hard reg, we can see exactly which
1196 registers are being modified. Otherwise, handle normally. */
1197 if (REG_P (SUBREG_REG (x
))
1198 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
1200 unsigned int inner_regno
= subreg_regno (x
);
1201 unsigned int inner_endregno
1202 = inner_regno
+ (inner_regno
< FIRST_PSEUDO_REGISTER
1203 ? hard_regno_nregs
[inner_regno
][GET_MODE (x
)] : 1);
1205 return endregno
> inner_regno
&& regno
< inner_endregno
;
1211 if (&SET_DEST (x
) != loc
1212 /* Note setting a SUBREG counts as referring to the REG it is in for
1213 a pseudo but not for hard registers since we can
1214 treat each word individually. */
1215 && ((GET_CODE (SET_DEST (x
)) == SUBREG
1216 && loc
!= &SUBREG_REG (SET_DEST (x
))
1217 && REG_P (SUBREG_REG (SET_DEST (x
)))
1218 && REGNO (SUBREG_REG (SET_DEST (x
))) >= FIRST_PSEUDO_REGISTER
1219 && refers_to_regno_p (regno
, endregno
,
1220 SUBREG_REG (SET_DEST (x
)), loc
))
1221 || (!REG_P (SET_DEST (x
))
1222 && refers_to_regno_p (regno
, endregno
, SET_DEST (x
), loc
))))
1225 if (code
== CLOBBER
|| loc
== &SET_SRC (x
))
1234 /* X does not match, so try its subexpressions. */
1236 fmt
= GET_RTX_FORMAT (code
);
1237 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1239 if (fmt
[i
] == 'e' && loc
!= &XEXP (x
, i
))
1247 if (refers_to_regno_p (regno
, endregno
, XEXP (x
, i
), loc
))
1250 else if (fmt
[i
] == 'E')
1253 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1254 if (loc
!= &XVECEXP (x
, i
, j
)
1255 && refers_to_regno_p (regno
, endregno
, XVECEXP (x
, i
, j
), loc
))
1262 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1263 we check if any register number in X conflicts with the relevant register
1264 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1265 contains a MEM (we don't bother checking for memory addresses that can't
1266 conflict because we expect this to be a rare case). */
1269 reg_overlap_mentioned_p (rtx x
, rtx in
)
1271 unsigned int regno
, endregno
;
1273 /* If either argument is a constant, then modifying X can not
1274 affect IN. We check IN here; the CONSTANT_P (X) test is profitably
1275 combined with the switch statement below. */
1276 if (CONSTANT_P (in
))
1280 switch (GET_CODE (x
))
1282 case STRICT_LOW_PART
:
1285 /* Overly conservative. */
1290 regno
= REGNO (SUBREG_REG (x
));
1291 if (regno
< FIRST_PSEUDO_REGISTER
)
1292 regno
= subreg_regno (x
);
1298 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
1299 ? hard_regno_nregs
[regno
][GET_MODE (x
)] : 1);
1300 return refers_to_regno_p (regno
, endregno
, in
, (rtx
*) 0);
1310 fmt
= GET_RTX_FORMAT (GET_CODE (in
));
1311 for (i
= GET_RTX_LENGTH (GET_CODE (in
)) - 1; i
>= 0; i
--)
1312 if (fmt
[i
] == 'e' && reg_overlap_mentioned_p (x
, XEXP (in
, i
)))
1321 return reg_mentioned_p (x
, in
);
1327 /* If any register in here refers to it we return true. */
1328 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1329 if (XEXP (XVECEXP (x
, 0, i
), 0) != 0
1330 && reg_overlap_mentioned_p (XEXP (XVECEXP (x
, 0, i
), 0), in
))
1336 gcc_assert (CONSTANT_P (x
));
1341 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1342 (X would be the pattern of an insn).
1343 FUN receives two arguments:
1344 the REG, MEM, CC0 or PC being stored in or clobbered,
1345 the SET or CLOBBER rtx that does the store.
1347 If the item being stored in or clobbered is a SUBREG of a hard register,
1348 the SUBREG will be passed. */
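/* A hypothetical usage sketch (not part of this file): record every hard
   register stored into or clobbered by an insn pattern.

     static void
     record_hard_reg_store (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
     {
       HARD_REG_SET *pset = (HARD_REG_SET *) data;
       if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
         SET_HARD_REG_BIT (*pset, REGNO (dest));
     }

     ...
     note_stores (PATTERN (insn), record_hard_reg_store, &stored_regs);  */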
1351 note_stores (rtx x
, void (*fun
) (rtx
, rtx
, void *), void *data
)
1355 if (GET_CODE (x
) == COND_EXEC
)
1356 x
= COND_EXEC_CODE (x
);
1358 if (GET_CODE (x
) == SET
|| GET_CODE (x
) == CLOBBER
)
1360 rtx dest
= SET_DEST (x
);
1362 while ((GET_CODE (dest
) == SUBREG
1363 && (!REG_P (SUBREG_REG (dest
))
1364 || REGNO (SUBREG_REG (dest
)) >= FIRST_PSEUDO_REGISTER
))
1365 || GET_CODE (dest
) == ZERO_EXTRACT
1366 || GET_CODE (dest
) == STRICT_LOW_PART
)
1367 dest
= XEXP (dest
, 0);
1369 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1370 each of whose first operand is a register. */
1371 if (GET_CODE (dest
) == PARALLEL
)
1373 for (i
= XVECLEN (dest
, 0) - 1; i
>= 0; i
--)
1374 if (XEXP (XVECEXP (dest
, 0, i
), 0) != 0)
1375 (*fun
) (XEXP (XVECEXP (dest
, 0, i
), 0), x
, data
);
1378 (*fun
) (dest
, x
, data
);
1381 else if (GET_CODE (x
) == PARALLEL
)
1382 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1383 note_stores (XVECEXP (x
, 0, i
), fun
, data
);
1386 /* Like note_stores, but call FUN for each expression that is being
1387 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1388 FUN for each expression, not any interior subexpressions. FUN receives a
1389 pointer to the expression and the DATA passed to this function.
1391 Note that this is not quite the same test as that done in reg_referenced_p
1392 since that considers something as being referenced if it is being
1393 partially set, while we do not. */
1396 note_uses (rtx
*pbody
, void (*fun
) (rtx
*, void *), void *data
)
1401 switch (GET_CODE (body
))
1404 (*fun
) (&COND_EXEC_TEST (body
), data
);
1405 note_uses (&COND_EXEC_CODE (body
), fun
, data
);
1409 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1410 note_uses (&XVECEXP (body
, 0, i
), fun
, data
);
1414 (*fun
) (&XEXP (body
, 0), data
);
1418 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
1419 (*fun
) (&ASM_OPERANDS_INPUT (body
, i
), data
);
1423 (*fun
) (&TRAP_CONDITION (body
), data
);
1427 (*fun
) (&XEXP (body
, 0), data
);
1431 case UNSPEC_VOLATILE
:
1432 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1433 (*fun
) (&XVECEXP (body
, 0, i
), data
);
1437 if (MEM_P (XEXP (body
, 0)))
1438 (*fun
) (&XEXP (XEXP (body
, 0), 0), data
);
1443 rtx dest
= SET_DEST (body
);
1445 /* For sets we replace everything in source plus registers in memory
1446 expression in store and operands of a ZERO_EXTRACT. */
1447 (*fun
) (&SET_SRC (body
), data
);
1449 if (GET_CODE (dest
) == ZERO_EXTRACT
)
1451 (*fun
) (&XEXP (dest
, 1), data
);
1452 (*fun
) (&XEXP (dest
, 2), data
);
1455 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
)
1456 dest
= XEXP (dest
, 0);
1459 (*fun
) (&XEXP (dest
, 0), data
);
1464 /* All the other possibilities never store. */
1465 (*fun
) (pbody
, data
);
1470 /* Return nonzero if X's old contents don't survive after INSN.
1471 This will be true if X is (cc0) or if X is a register and
1472 X dies in INSN or because INSN entirely sets X.
1474 "Entirely set" means set directly and not through a SUBREG, or
1475 ZERO_EXTRACT, so no trace of the old contents remains.
1476 Likewise, REG_INC does not count.
1478 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1479 but for this use that makes no difference, since regs don't overlap
1480 during their lifetimes. Therefore, this function may be used
1481 at any time after deaths have been computed (in flow.c).
1483 If REG is a hard reg that occupies multiple machine registers, this
1484 function will only return 1 if each of those registers will be replaced
1488 dead_or_set_p (rtx insn
, rtx x
)
1490 unsigned int regno
, last_regno
;
1493 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1494 if (GET_CODE (x
) == CC0
)
1497 gcc_assert (REG_P (x
));
1500 last_regno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
1501 : regno
+ hard_regno_nregs
[regno
][GET_MODE (x
)] - 1);
1503 for (i
= regno
; i
<= last_regno
; i
++)
1504 if (! dead_or_set_regno_p (insn
, i
))
1510 /* Return TRUE iff DEST is a register or subreg of a register and
1511 doesn't change the number of words of the inner register, and any
1512 part of the register is TEST_REGNO. */
1515 covers_regno_no_parallel_p (rtx dest
, unsigned int test_regno
)
1517 unsigned int regno
, endregno
;
1519 if (GET_CODE (dest
) == SUBREG
1520 && (((GET_MODE_SIZE (GET_MODE (dest
))
1521 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
1522 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
1523 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)))
1524 dest
= SUBREG_REG (dest
);
1529 regno
= REGNO (dest
);
1530 endregno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
+ 1
1531 : regno
+ hard_regno_nregs
[regno
][GET_MODE (dest
)]);
1532 return (test_regno
>= regno
&& test_regno
< endregno
);
1535 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1536 any member matches the covers_regno_no_parallel_p criteria. */
1539 covers_regno_p (rtx dest
, unsigned int test_regno
)
1541 if (GET_CODE (dest
) == PARALLEL
)
1543 /* Some targets place small structures in registers for return
1544 values of functions, and those registers are wrapped in
1545 PARALLELs that we may see as the destination of a SET. */
1548 for (i
= XVECLEN (dest
, 0) - 1; i
>= 0; i
--)
1550 rtx inner
= XEXP (XVECEXP (dest
, 0, i
), 0);
1551 if (inner
!= NULL_RTX
1552 && covers_regno_no_parallel_p (inner
, test_regno
))
1559 return covers_regno_no_parallel_p (dest
, test_regno
);
1562 /* Utility function for dead_or_set_p to check an individual register. Also
1563 called from flow.c. */
1566 dead_or_set_regno_p (rtx insn
, unsigned int test_regno
)
1570 /* See if there is a death note for something that includes TEST_REGNO. */
1571 if (find_regno_note (insn
, REG_DEAD
, test_regno
))
1575 && find_regno_fusage (insn
, CLOBBER
, test_regno
))
1578 pattern
= PATTERN (insn
);
1580 if (GET_CODE (pattern
) == COND_EXEC
)
1581 pattern
= COND_EXEC_CODE (pattern
);
1583 if (GET_CODE (pattern
) == SET
)
1584 return covers_regno_p (SET_DEST (pattern
), test_regno
);
1585 else if (GET_CODE (pattern
) == PARALLEL
)
1589 for (i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
1591 rtx body
= XVECEXP (pattern
, 0, i
);
1593 if (GET_CODE (body
) == COND_EXEC
)
1594 body
= COND_EXEC_CODE (body
);
1596 if ((GET_CODE (body
) == SET
|| GET_CODE (body
) == CLOBBER
)
1597 && covers_regno_p (SET_DEST (body
), test_regno
))
1605 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1606 If DATUM is nonzero, look for one whose datum is DATUM. */
1609 find_reg_note (rtx insn
, enum reg_note kind
, rtx datum
)
1613 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1614 if (! INSN_P (insn
))
1618 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1619 if (REG_NOTE_KIND (link
) == kind
)
1624 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1625 if (REG_NOTE_KIND (link
) == kind
&& datum
== XEXP (link
, 0))
1630 /* Return the reg-note of kind KIND in insn INSN which applies to register
1631 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1632 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1633 it might be the case that the note overlaps REGNO. */
1636 find_regno_note (rtx insn
, enum reg_note kind
, unsigned int regno
)
1640 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1641 if (! INSN_P (insn
))
1644 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1645 if (REG_NOTE_KIND (link
) == kind
1646 /* Verify that it is a register, so that scratch and MEM won't cause a
1648 && REG_P (XEXP (link
, 0))
1649 && REGNO (XEXP (link
, 0)) <= regno
1650 && ((REGNO (XEXP (link
, 0))
1651 + (REGNO (XEXP (link
, 0)) >= FIRST_PSEUDO_REGISTER
? 1
1652 : hard_regno_nregs
[REGNO (XEXP (link
, 0))]
1653 [GET_MODE (XEXP (link
, 0))]))
1659 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1663 find_reg_equal_equiv_note (rtx insn
)
1669 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1670 if (REG_NOTE_KIND (link
) == REG_EQUAL
1671 || REG_NOTE_KIND (link
) == REG_EQUIV
)
1673 if (single_set (insn
) == 0)
1680 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1681 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1684 find_reg_fusage (rtx insn
, enum rtx_code code
, rtx datum
)
1686 /* If it's not a CALL_INSN, it can't possibly have a
1687 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1697 for (link
= CALL_INSN_FUNCTION_USAGE (insn
);
1699 link
= XEXP (link
, 1))
1700 if (GET_CODE (XEXP (link
, 0)) == code
1701 && rtx_equal_p (datum
, XEXP (XEXP (link
, 0), 0)))
1706 unsigned int regno
= REGNO (datum
);
1708 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1709 to pseudo registers, so don't bother checking. */
1711 if (regno
< FIRST_PSEUDO_REGISTER
)
1713 unsigned int end_regno
1714 = regno
+ hard_regno_nregs
[regno
][GET_MODE (datum
)];
1717 for (i
= regno
; i
< end_regno
; i
++)
1718 if (find_regno_fusage (insn
, code
, i
))
1726 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1727 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1730 find_regno_fusage (rtx insn
, enum rtx_code code
, unsigned int regno
)
1734 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1735 to pseudo registers, so don't bother checking. */
1737 if (regno
>= FIRST_PSEUDO_REGISTER
1741 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
1743 unsigned int regnote
;
1746 if (GET_CODE (op
= XEXP (link
, 0)) == code
1747 && REG_P (reg
= XEXP (op
, 0))
1748 && (regnote
= REGNO (reg
)) <= regno
1749 && regnote
+ hard_regno_nregs
[regnote
][GET_MODE (reg
)] > regno
)
1756 /* Return true if INSN is a call to a pure function. */
1759 pure_call_p (rtx insn
)
1763 if (!CALL_P (insn
) || ! CONST_OR_PURE_CALL_P (insn
))
1766 /* Look for the note that differentiates const and pure functions. */
1767 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
1771 if (GET_CODE (u
= XEXP (link
, 0)) == USE
1772 && MEM_P (m
= XEXP (u
, 0)) && GET_MODE (m
) == BLKmode
1773 && GET_CODE (XEXP (m
, 0)) == SCRATCH
)
1780 /* Remove register note NOTE from the REG_NOTES of INSN. */
1783 remove_note (rtx insn
, rtx note
)
1787 if (note
== NULL_RTX
)
1790 if (REG_NOTES (insn
) == note
)
1792 REG_NOTES (insn
) = XEXP (note
, 1);
1796 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1797 if (XEXP (link
, 1) == note
)
1799 XEXP (link
, 1) = XEXP (note
, 1);
1806 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1807 return 1 if it is found. A simple equality test is used to determine if
1811 in_expr_list_p (rtx listp
, rtx node
)
1815 for (x
= listp
; x
; x
= XEXP (x
, 1))
1816 if (node
== XEXP (x
, 0))
1822 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1823 remove that entry from the list if it is found.
1825 A simple equality test is used to determine if NODE matches. */
1828 remove_node_from_expr_list (rtx node
, rtx
*listp
)
1831 rtx prev
= NULL_RTX
;
1835 if (node
== XEXP (temp
, 0))
1837 /* Splice the node out of the list. */
1839 XEXP (prev
, 1) = XEXP (temp
, 1);
1841 *listp
= XEXP (temp
, 1);
1847 temp
= XEXP (temp
, 1);
1851 /* Nonzero if X contains any volatile instructions. These are instructions
1852 which may cause unpredictable machine state, and thus no
1853 instructions should be moved or combined across them. This includes
1854 only volatile asms and UNSPEC_VOLATILE instructions. */
1857 volatile_insn_p (rtx x
)
1861 code
= GET_CODE (x
);
1881 case UNSPEC_VOLATILE
:
1882 /* case TRAP_IF: This isn't clear yet. */
1887 if (MEM_VOLATILE_P (x
))
1894 /* Recursively scan the operands of this expression. */
1897 const char *fmt
= GET_RTX_FORMAT (code
);
1900 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1904 if (volatile_insn_p (XEXP (x
, i
)))
1907 else if (fmt
[i
] == 'E')
1910 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1911 if (volatile_insn_p (XVECEXP (x
, i
, j
)))
1919 /* Nonzero if X contains any volatile memory references
1920 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
1923 volatile_refs_p (rtx x
)
1927 code
= GET_CODE (x
);
1945 case UNSPEC_VOLATILE
:
1951 if (MEM_VOLATILE_P (x
))
1958 /* Recursively scan the operands of this expression. */
1961 const char *fmt
= GET_RTX_FORMAT (code
);
1964 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1968 if (volatile_refs_p (XEXP (x
, i
)))
1971 else if (fmt
[i
] == 'E')
1974 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1975 if (volatile_refs_p (XVECEXP (x
, i
, j
)))
1983 /* Similar to above, except that it also rejects register pre- and post-
1987 side_effects_p (rtx x
)
1991 code
= GET_CODE (x
);
2009 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2010 when some combination can't be done. If we see one, don't think
2011 that we can simplify the expression. */
2012 return (GET_MODE (x
) != VOIDmode
);
2021 case UNSPEC_VOLATILE
:
2022 /* case TRAP_IF: This isn't clear yet. */
2028 if (MEM_VOLATILE_P (x
))
2035 /* Recursively scan the operands of this expression. */
2038 const char *fmt
= GET_RTX_FORMAT (code
);
2041 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2045 if (side_effects_p (XEXP (x
, i
)))
2048 else if (fmt
[i
] == 'E')
2051 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2052 if (side_effects_p (XVECEXP (x
, i
, j
)))
2060 /* Return nonzero if evaluating rtx X might cause a trap. */
2071 code
= GET_CODE (x
);
2074 /* Handle these cases quickly. */
2088 case UNSPEC_VOLATILE
:
2093 return MEM_VOLATILE_P (x
);
2095 /* Memory ref can trap unless it's a static var or a stack slot. */
2097 if (MEM_NOTRAP_P (x
))
2099 return rtx_addr_can_trap_p (XEXP (x
, 0));
2101 /* Division by a non-constant might trap. */
2106 if (HONOR_SNANS (GET_MODE (x
)))
2108 if (! CONSTANT_P (XEXP (x
, 1))
2109 || (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2110 && flag_trapping_math
))
2112 if (XEXP (x
, 1) == const0_rtx
)
2117 /* An EXPR_LIST is used to represent a function call. This
2118 certainly may trap. */
2127 /* Some floating point comparisons may trap. */
2128 if (!flag_trapping_math
)
2130 /* ??? There is no machine independent way to check for tests that trap
2131 when COMPARE is used, though many targets do make this distinction.
2132 For instance, sparc uses CCFPE for compares which generate exceptions
2133 and CCFP for compares which do not generate exceptions. */
2134 if (HONOR_NANS (GET_MODE (x
)))
2136 /* But often the compare has some CC mode, so check operand
2138 if (HONOR_NANS (GET_MODE (XEXP (x
, 0)))
2139 || HONOR_NANS (GET_MODE (XEXP (x
, 1))))
2145 if (HONOR_SNANS (GET_MODE (x
)))
2147 /* Often comparison is CC mode, so check operand modes. */
2148 if (HONOR_SNANS (GET_MODE (XEXP (x
, 0)))
2149 || HONOR_SNANS (GET_MODE (XEXP (x
, 1))))
2154 /* Conversion of floating point might trap. */
2155 if (flag_trapping_math
&& HONOR_NANS (GET_MODE (XEXP (x
, 0))))
2161 /* These operations don't trap even with floating point. */
2165 /* Any floating arithmetic may trap. */
2166 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2167 && flag_trapping_math
)
2171 fmt
= GET_RTX_FORMAT (code
);
2172 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2176 if (may_trap_p (XEXP (x
, i
)))
2179 else if (fmt
[i
] == 'E')
2182 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2183 if (may_trap_p (XVECEXP (x
, i
, j
)))
2190 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2191 i.e., an inequality. */
2194 inequality_comparisons_p (rtx x
)
2198 enum rtx_code code
= GET_CODE (x
);
2228 len
= GET_RTX_LENGTH (code
);
2229 fmt
= GET_RTX_FORMAT (code
);
2231 for (i
= 0; i
< len
; i
++)
2235 if (inequality_comparisons_p (XEXP (x
, i
)))
2238 else if (fmt
[i
] == 'E')
2241 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2242 if (inequality_comparisons_p (XVECEXP (x
, i
, j
)))
2250 /* Replace any occurrence of FROM in X with TO. The function does
2251 not enter into CONST_DOUBLE for the replace.
2253 Note that copying is not done so X must not be shared unless all copies
2254 are to be modified. */
2257 replace_rtx (rtx x
, rtx from
, rtx to
)
2262 /* The following prevents infinite loops when we change a MEM in a
2263 CONST_DOUBLE into the same CONST_DOUBLE. */
2264 if (x
!= 0 && GET_CODE (x
) == CONST_DOUBLE
)
2270 /* Allow this function to make replacements in EXPR_LISTs. */
2274 if (GET_CODE (x
) == SUBREG
)
2276 rtx
new = replace_rtx (SUBREG_REG (x
), from
, to
);
2278 if (GET_CODE (new) == CONST_INT
)
2280 x
= simplify_subreg (GET_MODE (x
), new,
2281 GET_MODE (SUBREG_REG (x
)),
2286 SUBREG_REG (x
) = new;
2290 else if (GET_CODE (x
) == ZERO_EXTEND
)
2292 rtx
new = replace_rtx (XEXP (x
, 0), from
, to
);
2294 if (GET_CODE (new) == CONST_INT
)
2296 x
= simplify_unary_operation (ZERO_EXTEND
, GET_MODE (x
),
2297 new, GET_MODE (XEXP (x
, 0)));
2306 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2307 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2310 XEXP (x
, i
) = replace_rtx (XEXP (x
, i
), from
, to
);
2311 else if (fmt
[i
] == 'E')
2312 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2313 XVECEXP (x
, i
, j
) = replace_rtx (XVECEXP (x
, i
, j
), from
, to
);
2319 /* Throughout the rtx X, replace many registers according to REG_MAP.
2320 Return the replacement for X (which may be X with altered contents).
2321 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2322 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2324 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2325 should not be mapped to pseudos or vice versa since validate_change
2328 If REPLACE_DEST is 1, replacements are also done in destinations;
2329 otherwise, only sources are replaced. */
2332 replace_regs (rtx x
, rtx
*reg_map
, unsigned int nregs
, int replace_dest
)
2341 code
= GET_CODE (x
);
2356 /* Verify that the register has an entry before trying to access it. */
2357 if (REGNO (x
) < nregs
&& reg_map
[REGNO (x
)] != 0)
2359 /* SUBREGs can't be shared. Always return a copy to ensure that if
2360 this replacement occurs more than once then each instance will
2361 get distinct rtx. */
2362 if (GET_CODE (reg_map
[REGNO (x
)]) == SUBREG
)
2363 return copy_rtx (reg_map
[REGNO (x
)]);
2364 return reg_map
[REGNO (x
)];
2369 /* Prevent making nested SUBREGs. */
2370 if (REG_P (SUBREG_REG (x
)) && REGNO (SUBREG_REG (x
)) < nregs
2371 && reg_map
[REGNO (SUBREG_REG (x
))] != 0
2372 && GET_CODE (reg_map
[REGNO (SUBREG_REG (x
))]) == SUBREG
)
2374 rtx map_val
= reg_map
[REGNO (SUBREG_REG (x
))];
2375 return simplify_gen_subreg (GET_MODE (x
), map_val
,
2376 GET_MODE (SUBREG_REG (x
)),
2383 SET_DEST (x
) = replace_regs (SET_DEST (x
), reg_map
, nregs
, 0);
2385 else if (MEM_P (SET_DEST (x
))
2386 || GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2387 /* Even if we are not to replace destinations, replace register if it
2388 is CONTAINED in destination (destination is memory or
2389 STRICT_LOW_PART). */
2390 XEXP (SET_DEST (x
), 0) = replace_regs (XEXP (SET_DEST (x
), 0),
2392 else if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
2393 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2396 SET_SRC (x
) = replace_regs (SET_SRC (x
), reg_map
, nregs
, 0);
2403 fmt
= GET_RTX_FORMAT (code
);
2404 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2407 XEXP (x
, i
) = replace_regs (XEXP (x
, i
), reg_map
, nregs
, replace_dest
);
2408 else if (fmt
[i
] == 'E')
2411 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2412 XVECEXP (x
, i
, j
) = replace_regs (XVECEXP (x
, i
, j
), reg_map
,
2413 nregs
, replace_dest
);
2419 /* Replace occurrences of the old label in *X with the new one.
2420 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2423 replace_label (rtx
*x
, void *data
)
2426 rtx old_label
= ((replace_label_data
*) data
)->r1
;
2427 rtx new_label
= ((replace_label_data
*) data
)->r2
;
2428 bool update_label_nuses
= ((replace_label_data
*) data
)->update_label_nuses
;
2433 if (GET_CODE (l
) == SYMBOL_REF
2434 && CONSTANT_POOL_ADDRESS_P (l
))
2436 rtx c
= get_pool_constant (l
);
2437 if (rtx_referenced_p (old_label
, c
))
2440 replace_label_data
*d
= (replace_label_data
*) data
;
2442 /* Create a copy of constant C; replace the label inside
2443 but do not update LABEL_NUSES because uses in constant pool
2445 new_c
= copy_rtx (c
);
2446 d
->update_label_nuses
= false;
2447 for_each_rtx (&new_c
, replace_label
, data
);
2448 d
->update_label_nuses
= update_label_nuses
;
2450 /* Add the new constant NEW_C to constant pool and replace
2451 the old reference to constant by new reference. */
2452 new_l
= XEXP (force_const_mem (get_pool_mode (l
), new_c
), 0);
2453 *x
= replace_rtx (l
, l
, new_l
);
2458 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2459 field. This is not handled by for_each_rtx because it doesn't
2460 handle unprinted ('0') fields. */
2461 if (JUMP_P (l
) && JUMP_LABEL (l
) == old_label
)
2462 JUMP_LABEL (l
) = new_label
;
2464 if ((GET_CODE (l
) == LABEL_REF
2465 || GET_CODE (l
) == INSN_LIST
)
2466 && XEXP (l
, 0) == old_label
)
2468 XEXP (l
, 0) = new_label
;
2469 if (update_label_nuses
)
2471 ++LABEL_NUSES (new_label
);
2472 --LABEL_NUSES (old_label
);
2480 /* When *BODY is equal to X or X is directly referenced by *BODY
2481 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2482 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2485 rtx_referenced_p_1 (rtx
*body
, void *x
)
2489 if (*body
== NULL_RTX
)
2490 return y
== NULL_RTX
;
2492 /* Return true if a label_ref *BODY refers to label Y. */
2493 if (GET_CODE (*body
) == LABEL_REF
&& LABEL_P (y
))
2494 return XEXP (*body
, 0) == y
;
2496 /* If *BODY is a reference to pool constant traverse the constant. */
2497 if (GET_CODE (*body
) == SYMBOL_REF
2498 && CONSTANT_POOL_ADDRESS_P (*body
))
2499 return rtx_referenced_p (y
, get_pool_constant (*body
));
2501 /* By default, compare the RTL expressions. */
2502 return rtx_equal_p (*body
, y
);
2505 /* Return true if X is referenced in BODY. */
2508 rtx_referenced_p (rtx x
, rtx body
)
2510 return for_each_rtx (&body
, rtx_referenced_p_1
, x
);
2513 /* If INSN is a tablejump return true and store the label (before jump table) to
2514 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2517 tablejump_p (rtx insn
, rtx
*labelp
, rtx
*tablep
)
2522 && (label
= JUMP_LABEL (insn
)) != NULL_RTX
2523 && (table
= next_active_insn (label
)) != NULL_RTX
2525 && (GET_CODE (PATTERN (table
)) == ADDR_VEC
2526 || GET_CODE (PATTERN (table
)) == ADDR_DIFF_VEC
))
2537 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2538 constant that is not in the constant pool and not in the condition
2539 of an IF_THEN_ELSE. */
2542 computed_jump_p_1 (rtx x
)
2544 enum rtx_code code
= GET_CODE (x
);
2563 return ! (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
2564 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)));
2567 return (computed_jump_p_1 (XEXP (x
, 1))
2568 || computed_jump_p_1 (XEXP (x
, 2)));
2574 fmt
= GET_RTX_FORMAT (code
);
2575 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2578 && computed_jump_p_1 (XEXP (x
, i
)))
2581 else if (fmt
[i
] == 'E')
2582 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2583 if (computed_jump_p_1 (XVECEXP (x
, i
, j
)))
2590 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2592 Tablejumps and casesi insns are not considered indirect jumps;
2593 we can recognize them by a (use (label_ref)). */
2596 computed_jump_p (rtx insn
)
2601 rtx pat
= PATTERN (insn
);
2603 if (find_reg_note (insn
, REG_LABEL
, NULL_RTX
))
2605 else if (GET_CODE (pat
) == PARALLEL
)
2607 int len
= XVECLEN (pat
, 0);
2608 int has_use_labelref
= 0;
2610 for (i
= len
- 1; i
>= 0; i
--)
2611 if (GET_CODE (XVECEXP (pat
, 0, i
)) == USE
2612 && (GET_CODE (XEXP (XVECEXP (pat
, 0, i
), 0))
2614 has_use_labelref
= 1;
2616 if (! has_use_labelref
)
2617 for (i
= len
- 1; i
>= 0; i
--)
2618 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
2619 && SET_DEST (XVECEXP (pat
, 0, i
)) == pc_rtx
2620 && computed_jump_p_1 (SET_SRC (XVECEXP (pat
, 0, i
))))
2623 else if (GET_CODE (pat
) == SET
2624 && SET_DEST (pat
) == pc_rtx
2625 && computed_jump_p_1 (SET_SRC (pat
)))
2631 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2632 calls. Processes the subexpressions of EXP and passes them to F. */
2634 for_each_rtx_1 (rtx exp
, int n
, rtx_function f
, void *data
)
2637 const char *format
= GET_RTX_FORMAT (GET_CODE (exp
));
2640 for (; format
[n
] != '\0'; n
++)
2647 result
= (*f
) (x
, data
);
2649 /* Do not traverse sub-expressions. */
2651 else if (result
!= 0)
2652 /* Stop the traversal. */
2656 /* There are no sub-expressions. */
2659 i
= non_rtx_starting_operands
[GET_CODE (*x
)];
2662 result
= for_each_rtx_1 (*x
, i
, f
, data
);
2670 if (XVEC (exp
, n
) == 0)
2672 for (j
= 0; j
< XVECLEN (exp
, n
); ++j
)
2675 x
= &XVECEXP (exp
, n
, j
);
2676 result
= (*f
) (x
, data
);
2678 /* Do not traverse sub-expressions. */
2680 else if (result
!= 0)
2681 /* Stop the traversal. */
2685 /* There are no sub-expressions. */
2688 i
= non_rtx_starting_operands
[GET_CODE (*x
)];
2691 result
= for_each_rtx_1 (*x
, i
, f
, data
);
2699 /* Nothing to do. */
2707 /* Traverse X via depth-first search, calling F for each
2708 sub-expression (including X itself). F is also passed the DATA.
2709 If F returns -1, do not traverse sub-expressions, but continue
2710 traversing the rest of the tree. If F ever returns any other
2711 nonzero value, stop the traversal, and return the value returned
2712 by F. Otherwise, return 0. This function does not traverse inside
2713 tree structure that contains RTX_EXPRs, or into sub-expressions
2714 whose format code is `0' since it is not known whether or not those
2715 codes are actually RTL.
2717 This routine is very general, and could (should?) be used to
2718 implement many of the other routines in this file. */
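/* A hypothetical usage sketch (not part of this file): count the MEMs in a
   pattern without walking into their addresses, by returning -1 from the
   callback to suppress traversal of sub-expressions.

     static int
     count_mems_1 (rtx *px, void *data)
     {
       if (MEM_P (*px))
         {
           ++*(int *) data;
           return -1;
         }
       return 0;
     }

     ...
     int n_mems = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &n_mems);  */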
2721 for_each_rtx (rtx
*x
, rtx_function f
, void *data
)
2727 result
= (*f
) (x
, data
);
2729 /* Do not traverse sub-expressions. */
2731 else if (result
!= 0)
2732 /* Stop the traversal. */
2736 /* There are no sub-expressions. */
2739 i
= non_rtx_starting_operands
[GET_CODE (*x
)];
2743 return for_each_rtx_1 (*x
, i
, f
, data
);
2747 /* Searches X for any reference to REGNO, returning the rtx of the
2748 reference found if any. Otherwise, returns NULL_RTX. */
2751 regno_use_in (unsigned int regno
, rtx x
)
2757 if (REG_P (x
) && REGNO (x
) == regno
)
2760 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2761 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2765 if ((tem
= regno_use_in (regno
, XEXP (x
, i
))))
2768 else if (fmt
[i
] == 'E')
2769 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2770 if ((tem
= regno_use_in (regno
, XVECEXP (x
, i
, j
))))
2777 /* Return a value indicating whether OP, an operand of a commutative
2778 operation, is preferred as the first or second operand. The higher
2779 the value, the stronger the preference for being the first operand.
2780 We use negative values to indicate a preference for the first operand
2781 and positive values for the second operand. */
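/* A hypothetical illustration: given (plus (const_int 4) (reg:SI 100)),
   swap_commutative_operands_p returns nonzero, since constants have the
   lowest precedence and belong second; canonicalization therefore produces
   (plus (reg:SI 100) (const_int 4)).  */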
2784 commutative_operand_precedence (rtx op
)
2786 enum rtx_code code
= GET_CODE (op
);
2788 /* Constants always come second. Prefer "nice" constants. */
2789 if (code
== CONST_INT
)
2791 if (code
== CONST_DOUBLE
)
2793 op
= avoid_constant_pool_reference (op
);
2794 code
= GET_CODE (op
);
2796 switch (GET_RTX_CLASS (code
))
2799 if (code
== CONST_INT
)
2801 if (code
== CONST_DOUBLE
)
2806 /* SUBREGs of objects should come second. */
2807 if (code
== SUBREG
&& OBJECT_P (SUBREG_REG (op
)))
2810 if (!CONSTANT_P (op
))
2813 /* As for RTX_CONST_OBJ. */
2817 /* Complex expressions should be the first, so decrease priority
2821 case RTX_COMM_ARITH
:
2822 /* Prefer operands that are themselves commutative to be first.
2823 This helps to make things linear. In particular,
2824 (and (and (reg) (reg)) (not (reg))) is canonical. */
2828 /* If only one operand is a binary expression, it will be the first
2829 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2830 is canonical, although it will usually be further simplified. */
2834 /* Then prefer NEG and NOT. */
2835 if (code
== NEG
|| code
== NOT
)
2843 /* Return 1 iff it is necessary to swap operands of commutative operation
2844 in order to canonicalize expression. */
2847 swap_commutative_operands_p (rtx x
, rtx y
)
2849 return (commutative_operand_precedence (x
)
2850 < commutative_operand_precedence (y
));
2853 /* Return 1 if X is an autoincrement side effect and the register is
2854 not the stack pointer. */
2858 switch (GET_CODE (x
))
2866 /* There are no REG_INC notes for SP. */
2867 if (XEXP (x
, 0) != stack_pointer_rtx
)
2875 /* Return 1 if the sequence of instructions beginning with FROM and up
2876 to and including TO is safe to move. If NEW_TO is non-NULL, and
2877 the sequence is not already safe to move, but can be easily
2878 extended to a sequence which is safe, then NEW_TO will point to the
2879 end of the extended sequence.
2881 For now, this function only checks that the region contains whole
2882 exception regions, but it could be extended to check additional
2883 conditions as well. */
2886 insns_safe_to_move_p (rtx from
, rtx to
, rtx
*new_to
)
2888 int eh_region_count
= 0;
2892 /* By default, assume the end of the region will be what was
2901 switch (NOTE_LINE_NUMBER (r
))
2903 case NOTE_INSN_EH_REGION_BEG
:
2907 case NOTE_INSN_EH_REGION_END
:
2908 if (eh_region_count
== 0)
2909 /* This sequence of instructions contains the end of
2910 an exception region, but not the beginning. Moving
2911 it will cause chaos. */
2922 /* If we've passed TO, and we see a non-note instruction, we
2923 can't extend the sequence to a movable sequence. */
2929 /* It's OK to move the sequence if there were matched sets of
2930 exception region notes. */
2931 return eh_region_count
== 0;
2936 /* It's OK to move the sequence if there were matched sets of
2937 exception region notes. */
2938 if (past_to_p
&& eh_region_count
== 0)
2944 /* Go to the next instruction. */
2951 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2953 loc_mentioned_in_p (rtx
*loc
, rtx in
)
2955 enum rtx_code code
= GET_CODE (in
);
2956 const char *fmt
= GET_RTX_FORMAT (code
);
2959 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2961 if (loc
== &in
->u
.fld
[i
].rt_rtx
)
2965 if (loc_mentioned_in_p (loc
, XEXP (in
, i
)))
2968 else if (fmt
[i
] == 'E')
2969 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
2970 if (loc_mentioned_in_p (loc
, XVECEXP (in
, i
, j
)))
/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
   and SUBREG_BYTE, return the bit offset where the subreg begins
   (counting from the least significant bit of the operand).  */

unsigned int
subreg_lsb_1 (enum machine_mode outer_mode,
              enum machine_mode inner_mode,
              unsigned int subreg_byte)
{
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
                   + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
                  && (subreg_byte % UNITS_PER_WORD
                      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
            - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}

/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (rtx x)
{
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
                       SUBREG_BYTE (x));
}
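/* Illustrative worked example (added commentary, not in the original
   source): on a little-endian target with 32-bit words and 8-bit units,
   (subreg:SI (reg:DI r) 4) has subreg_byte == 4, so word = 1, byte = 0,
   and subreg_lsb_1 (SImode, DImode, 4) == 32: the subreg names the high
   32 bits of the 64-bit value.  On a big-endian target the same byte
   offset yields bit offset 0, i.e. the low-order half.  */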
/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */

unsigned int
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
                     unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  nregs_xmode = hard_regno_nregs[xregno][xmode];
  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;

  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}
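/* Illustrative worked example (added commentary, not in the original
   source): on a 32-bit little-endian target where DImode occupies two
   hard registers, subreg_regno_offset (0, DImode, 4, SImode) computes
   nregs_xmode = 2, nregs_ymode = 1, mode_multiple = 2, y_offset = 1 and
   nregs_multiple = 2, so it returns (1 / (2 / 2)) * 1 = 1: the SImode
   subreg at byte offset 4 lives one hard register above the base.  The
   register number 0 here is purely hypothetical.  */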
/* This function returns true when the offset is representable via
   subreg_offset in the given regno.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - Whether the offset is representable.  */

bool
subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
                               unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode;
  int mode_multiple, nregs_multiple;
  int y_offset;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  nregs_xmode = hard_regno_nregs[xregno][xmode];
  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* Paradoxical subregs are always valid.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
          ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return true;

  /* Lowpart subregs are always valid.  */
  if (offset == subreg_lowpart_offset (ymode, xmode))
    return true;

  /* This should always pass, otherwise we don't know how to verify the
     constraint.  These conditions may be relaxed but subreg_offset would
     need to be redesigned.  */
  gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((GET_MODE_SIZE (ymode) % nregs_ymode) == 0);
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);

  /* The XMODE value can be seen as a vector of NREGS_XMODE
     values.  The subreg must represent a lowpart of given field.
     Compute what field it is.  */
  offset -= subreg_lowpart_offset (ymode,
                                   mode_for_size (GET_MODE_BITSIZE (xmode)
                                                  / nregs_xmode,
                                                  MODE_INT, 0));

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;

  gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((mode_multiple % nregs_multiple) == 0);

  return (!(y_offset % (mode_multiple / nregs_multiple)));
}
/* Return the final regno that a subreg expression refers to.  */

unsigned int
subreg_regno (rtx x)
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
                                     GET_MODE (subreg),
                                     SUBREG_BYTE (x),
                                     GET_MODE (x));
  return ret;
}
struct parms_set_data
{
  int nregs;
  HARD_REG_SET regs;
};

/* Helper function for noticing stores to parameter registers.  */

static void
parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  struct parms_set_data *d = data;
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}
/* Look backward for first parameter to be loaded.
   Note that loads of all parameters will not necessarily be
   found if CSE has eliminated some of them (e.g., an argument
   to the outer function is passed down as a parameter).
   Do not skip BOUNDARY.  */

rtx
find_first_parameter_load (rtx call_insn, rtx boundary)
{
  struct parms_set_data parm;
  rtx p, before, first_set;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
        && REG_P (XEXP (XEXP (p, 0), 0)))
      {
        gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);

        /* We only care about registers which can hold function
           arguments.  */
        if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
          continue;

        SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
        parm.nregs++;
      }
  before = call_insn;
  first_set = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
         another.  Stop in that case.  */
      if (CALL_P (before))
        break;

      /* Our caller needs to either ensure that we will find all sets
         (in case code has not been optimized yet), or take care of
         possible labels by setting BOUNDARY to the preceding
         CODE_LABEL.  */
      if (LABEL_P (before))
        {
          gcc_assert (before == boundary);
          break;
        }

      if (INSN_P (before))
        {
          int nregs_old = parm.nregs;
          note_stores (PATTERN (before), parms_set, &parm);
          /* If we found something that did not set a parameter reg,
             we're done.  Do not keep going, as that might result
             in hoisting an insn before the setting of a pseudo
             that is used by the hoisted insn.  */
          if (nregs_old != parm.nregs)
            first_set = before;
          else
            break;
        }
    }
  return first_set;
}
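/* Illustrative usage sketch (added commentary, not part of the original
   source): a caller that must not separate a call from its argument
   setup might place new code before the insn returned here, roughly:

     rtx first = find_first_parameter_load (call_insn, BB_HEAD (bb));
     emit_insn_before (new_insn, first);

   where call_insn, bb and new_insn are assumed to be supplied by the
   caller.  */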
/* Return true if we should avoid inserting code between INSN and preceding
   call instruction.  */

bool
keep_with_call_p (rtx insn)
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
          && fixed_regs[REGNO (SET_DEST (set))]
          && general_operand (SET_SRC (set), VOIDmode))
        return true;
      if (REG_P (SET_SRC (set))
          && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
          && REG_P (SET_DEST (set))
          && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
        return true;
      /* There may be a stack pop just after the call and before the store
         of the return register.  Search for the actual store when deciding
         if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
        {
          rtx i2 = next_nonnote_insn (insn);
          if (i2 && keep_with_call_p (i2))
            return true;
        }
    }
  return false;
}
/* Return true if LABEL is a target of JUMP_INSN.  This applies only
   to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
   not apply to the fallthru case of a conditional jump.  */

bool
label_is_jump_target_p (rtx label, rtx jump_insn)
{
  rtx tmp = JUMP_LABEL (jump_insn);

  if (label == tmp)
    return true;

  if (tablejump_p (jump_insn, NULL, &tmp))
    {
      rtvec vec = XVEC (PATTERN (tmp),
                        GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
      int i, veclen = GET_NUM_ELEM (vec);

      for (i = 0; i < veclen; ++i)
        if (XEXP (RTVEC_ELT (vec, i), 0) == label)
          return true;
    }

  return false;
}
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      total = 0;
      /* If we can't tie these modes, make this expensive.  The larger
         the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
        return COSTS_N_INSNS (2
                              + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

    default:
      if (targetm.rtx_costs (x, code, outer_code, &total))
        return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
/* Return cost of address expression X.
   Expect that X is properly formed address reference.  */

int
address_cost (rtx x, enum machine_mode mode)
{
  /* We may be asked for cost of various unusual addresses, such as operands
     of push instruction.  It is not worthwhile to complicate writing
     of the target hook by such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;

  return targetm.address_cost (x);
}

/* If the target doesn't override, compute the cost as with arithmetic.  */

int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}
unsigned HOST_WIDE_INT
nonzero_bits (rtx x, enum machine_mode mode)
{
  return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
}

unsigned int
num_sign_bit_copies (rtx x, enum machine_mode mode)
{
  return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
}
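/* Illustrative note (added commentary, not in the original source): for
   example, nonzero_bits on (and:SI (reg:SI n) (const_int 255)) yields
   0xff, and num_sign_bit_copies on (sign_extend:SI (reg:QI n)) yields at
   least 25 on a 32-bit target, since the upper 24 bits must all be copies
   of the QImode sign bit.  */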
/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
   It avoids exponential behavior in nonzero_bits1 when X has
   identical subexpressions on the first or the second level.  */

static unsigned HOST_WIDE_INT
cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
                     enum machine_mode known_mode,
                     unsigned HOST_WIDE_INT known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
     precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return nonzero_bits1 (x, mode, x1, mode,
                              cached_nonzero_bits (x1, mode, known_x,
                                                   known_mode, known_ret));

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return nonzero_bits1 (x, mode, x0, mode,
                              cached_nonzero_bits (x0, mode, known_x,
                                                   known_mode, known_ret));
    }

  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
}
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior

/* Given an expression, X, compute which bits in X can be nonzero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
   an arithmetic operation, we can do better.  */
3459 static unsigned HOST_WIDE_INT
3460 nonzero_bits1 (rtx x
, enum machine_mode mode
, rtx known_x
,
3461 enum machine_mode known_mode
,
3462 unsigned HOST_WIDE_INT known_ret
)
3464 unsigned HOST_WIDE_INT nonzero
= GET_MODE_MASK (mode
);
3465 unsigned HOST_WIDE_INT inner_nz
;
3467 unsigned int mode_width
= GET_MODE_BITSIZE (mode
);
3469 /* For floating-point values, assume all bits are needed. */
3470 if (FLOAT_MODE_P (GET_MODE (x
)) || FLOAT_MODE_P (mode
))
3473 /* If X is wider than MODE, use its mode instead. */
3474 if (GET_MODE_BITSIZE (GET_MODE (x
)) > mode_width
)
3476 mode
= GET_MODE (x
);
3477 nonzero
= GET_MODE_MASK (mode
);
3478 mode_width
= GET_MODE_BITSIZE (mode
);
3481 if (mode_width
> HOST_BITS_PER_WIDE_INT
)
3482 /* Our only callers in this case look for single bit values. So
3483 just return the mode mask. Those tests will then be false. */
3486 #ifndef WORD_REGISTER_OPERATIONS
3487 /* If MODE is wider than X, but both are a single word for both the host
3488 and target machines, we can compute this from which bits of the
3489 object might be nonzero in its own mode, taking into account the fact
3490 that on many CISC machines, accessing an object in a wider mode
3491 causes the high-order bits to become undefined. So they are
3492 not known to be zero. */
3494 if (GET_MODE (x
) != VOIDmode
&& GET_MODE (x
) != mode
3495 && GET_MODE_BITSIZE (GET_MODE (x
)) <= BITS_PER_WORD
3496 && GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
3497 && GET_MODE_BITSIZE (mode
) > GET_MODE_BITSIZE (GET_MODE (x
)))
3499 nonzero
&= cached_nonzero_bits (x
, GET_MODE (x
),
3500 known_x
, known_mode
, known_ret
);
3501 nonzero
|= GET_MODE_MASK (mode
) & ~GET_MODE_MASK (GET_MODE (x
));
3506 code
= GET_CODE (x
);
3510 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3511 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3512 all the bits above ptr_mode are known to be zero. */
3513 if (POINTERS_EXTEND_UNSIGNED
&& GET_MODE (x
) == Pmode
3515 nonzero
&= GET_MODE_MASK (ptr_mode
);
3518 /* Include declared information about alignment of pointers. */
3519 /* ??? We don't properly preserve REG_POINTER changes across
3520 pointer-to-integer casts, so we can't trust it except for
3521 things that we know must be pointers. See execute/960116-1.c. */
3522 if ((x
== stack_pointer_rtx
3523 || x
== frame_pointer_rtx
3524 || x
== arg_pointer_rtx
)
3525 && REGNO_POINTER_ALIGN (REGNO (x
)))
3527 unsigned HOST_WIDE_INT alignment
3528 = REGNO_POINTER_ALIGN (REGNO (x
)) / BITS_PER_UNIT
;
3530 #ifdef PUSH_ROUNDING
3531 /* If PUSH_ROUNDING is defined, it is possible for the
3532 stack to be momentarily aligned only to that amount,
3533 so we pick the least alignment. */
3534 if (x
== stack_pointer_rtx
&& PUSH_ARGS
)
3535 alignment
= MIN ((unsigned HOST_WIDE_INT
) PUSH_ROUNDING (1),
3539 nonzero
&= ~(alignment
- 1);
3543 unsigned HOST_WIDE_INT nonzero_for_hook
= nonzero
;
3544 rtx
new = rtl_hooks
.reg_nonzero_bits (x
, mode
, known_x
,
3545 known_mode
, known_ret
,
3549 nonzero_for_hook
&= cached_nonzero_bits (new, mode
, known_x
,
3550 known_mode
, known_ret
);
3552 return nonzero_for_hook
;
3556 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3557 /* If X is negative in MODE, sign-extend the value. */
3558 if (INTVAL (x
) > 0 && mode_width
< BITS_PER_WORD
3559 && 0 != (INTVAL (x
) & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))))
3560 return (INTVAL (x
) | ((HOST_WIDE_INT
) (-1) << mode_width
));
3566 #ifdef LOAD_EXTEND_OP
3567 /* In many, if not most, RISC machines, reading a byte from memory
3568 zeros the rest of the register. Noticing that fact saves a lot
3569 of extra zero-extends. */
3570 if (LOAD_EXTEND_OP (GET_MODE (x
)) == ZERO_EXTEND
)
3571 nonzero
&= GET_MODE_MASK (GET_MODE (x
));
3576 case UNEQ
: case LTGT
:
3577 case GT
: case GTU
: case UNGT
:
3578 case LT
: case LTU
: case UNLT
:
3579 case GE
: case GEU
: case UNGE
:
3580 case LE
: case LEU
: case UNLE
:
3581 case UNORDERED
: case ORDERED
:
3583 /* If this produces an integer result, we know which bits are set.
3584 Code here used to clear bits outside the mode of X, but that is
3587 if (GET_MODE_CLASS (mode
) == MODE_INT
3588 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
3589 nonzero
= STORE_FLAG_VALUE
;
3594 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3595 and num_sign_bit_copies. */
3596 if (num_sign_bit_copies (XEXP (x
, 0), GET_MODE (x
))
3597 == GET_MODE_BITSIZE (GET_MODE (x
)))
3601 if (GET_MODE_SIZE (GET_MODE (x
)) < mode_width
)
3602 nonzero
|= (GET_MODE_MASK (mode
) & ~GET_MODE_MASK (GET_MODE (x
)));
3607 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3608 and num_sign_bit_copies. */
3609 if (num_sign_bit_copies (XEXP (x
, 0), GET_MODE (x
))
3610 == GET_MODE_BITSIZE (GET_MODE (x
)))
3616 nonzero
&= (cached_nonzero_bits (XEXP (x
, 0), mode
,
3617 known_x
, known_mode
, known_ret
)
3618 & GET_MODE_MASK (mode
));
3622 nonzero
&= cached_nonzero_bits (XEXP (x
, 0), mode
,
3623 known_x
, known_mode
, known_ret
);
3624 if (GET_MODE (XEXP (x
, 0)) != VOIDmode
)
3625 nonzero
&= GET_MODE_MASK (GET_MODE (XEXP (x
, 0)));
3629 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3630 Otherwise, show all the bits in the outer mode but not the inner
3632 inner_nz
= cached_nonzero_bits (XEXP (x
, 0), mode
,
3633 known_x
, known_mode
, known_ret
);
3634 if (GET_MODE (XEXP (x
, 0)) != VOIDmode
)
3636 inner_nz
&= GET_MODE_MASK (GET_MODE (XEXP (x
, 0)));
3638 & (((HOST_WIDE_INT
) 1
3639 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - 1))))
3640 inner_nz
|= (GET_MODE_MASK (mode
)
3641 & ~GET_MODE_MASK (GET_MODE (XEXP (x
, 0))));
3644 nonzero
&= inner_nz
;
3648 nonzero
&= cached_nonzero_bits (XEXP (x
, 0), mode
,
3649 known_x
, known_mode
, known_ret
)
3650 & cached_nonzero_bits (XEXP (x
, 1), mode
,
3651 known_x
, known_mode
, known_ret
);
3655 case UMIN
: case UMAX
: case SMIN
: case SMAX
:
3657 unsigned HOST_WIDE_INT nonzero0
=
3658 cached_nonzero_bits (XEXP (x
, 0), mode
,
3659 known_x
, known_mode
, known_ret
);
3661 /* Don't call nonzero_bits for the second time if it cannot change
3663 if ((nonzero
& nonzero0
) != nonzero
)
3665 | cached_nonzero_bits (XEXP (x
, 1), mode
,
3666 known_x
, known_mode
, known_ret
);
3670 case PLUS
: case MINUS
:
3672 case DIV
: case UDIV
:
3673 case MOD
: case UMOD
:
3674 /* We can apply the rules of arithmetic to compute the number of
3675 high- and low-order zero bits of these operations. We start by
3676 computing the width (position of the highest-order nonzero bit)
3677 and the number of low-order zero bits for each value. */
3679 unsigned HOST_WIDE_INT nz0
=
3680 cached_nonzero_bits (XEXP (x
, 0), mode
,
3681 known_x
, known_mode
, known_ret
);
3682 unsigned HOST_WIDE_INT nz1
=
3683 cached_nonzero_bits (XEXP (x
, 1), mode
,
3684 known_x
, known_mode
, known_ret
);
3685 int sign_index
= GET_MODE_BITSIZE (GET_MODE (x
)) - 1;
3686 int width0
= floor_log2 (nz0
) + 1;
3687 int width1
= floor_log2 (nz1
) + 1;
3688 int low0
= floor_log2 (nz0
& -nz0
);
3689 int low1
= floor_log2 (nz1
& -nz1
);
3690 HOST_WIDE_INT op0_maybe_minusp
3691 = (nz0
& ((HOST_WIDE_INT
) 1 << sign_index
));
3692 HOST_WIDE_INT op1_maybe_minusp
3693 = (nz1
& ((HOST_WIDE_INT
) 1 << sign_index
));
3694 unsigned int result_width
= mode_width
;
3700 result_width
= MAX (width0
, width1
) + 1;
3701 result_low
= MIN (low0
, low1
);
3704 result_low
= MIN (low0
, low1
);
3707 result_width
= width0
+ width1
;
3708 result_low
= low0
+ low1
;
3713 if (! op0_maybe_minusp
&& ! op1_maybe_minusp
)
3714 result_width
= width0
;
3719 result_width
= width0
;
3724 if (! op0_maybe_minusp
&& ! op1_maybe_minusp
)
3725 result_width
= MIN (width0
, width1
);
3726 result_low
= MIN (low0
, low1
);
3731 result_width
= MIN (width0
, width1
);
3732 result_low
= MIN (low0
, low1
);
3738 if (result_width
< mode_width
)
3739 nonzero
&= ((HOST_WIDE_INT
) 1 << result_width
) - 1;
3742 nonzero
&= ~(((HOST_WIDE_INT
) 1 << result_low
) - 1);
3744 #ifdef POINTERS_EXTEND_UNSIGNED
3745 /* If pointers extend unsigned and this is an addition or subtraction
3746 to a pointer in Pmode, all the bits above ptr_mode are known to be
3748 if (POINTERS_EXTEND_UNSIGNED
> 0 && GET_MODE (x
) == Pmode
3749 && (code
== PLUS
|| code
== MINUS
)
3750 && REG_P (XEXP (x
, 0)) && REG_POINTER (XEXP (x
, 0)))
3751 nonzero
&= GET_MODE_MASK (ptr_mode
);
3757 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
3758 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
)
3759 nonzero
&= ((HOST_WIDE_INT
) 1 << INTVAL (XEXP (x
, 1))) - 1;
3763 /* If this is a SUBREG formed for a promoted variable that has
3764 been zero-extended, we know that at least the high-order bits
3765 are zero, though others might be too. */
3767 if (SUBREG_PROMOTED_VAR_P (x
) && SUBREG_PROMOTED_UNSIGNED_P (x
) > 0)
3768 nonzero
= GET_MODE_MASK (GET_MODE (x
))
3769 & cached_nonzero_bits (SUBREG_REG (x
), GET_MODE (x
),
3770 known_x
, known_mode
, known_ret
);
3772 /* If the inner mode is a single word for both the host and target
3773 machines, we can compute this from which bits of the inner
3774 object might be nonzero. */
3775 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))) <= BITS_PER_WORD
3776 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
)))
3777 <= HOST_BITS_PER_WIDE_INT
))
3779 nonzero
&= cached_nonzero_bits (SUBREG_REG (x
), mode
,
3780 known_x
, known_mode
, known_ret
);
3782 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3783 /* If this is a typical RISC machine, we only have to worry
3784 about the way loads are extended. */
3785 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x
))) == SIGN_EXTEND
3787 & (((unsigned HOST_WIDE_INT
) 1
3788 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))) - 1))))
3790 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x
))) != ZERO_EXTEND
)
3791 || !MEM_P (SUBREG_REG (x
)))
3794 /* On many CISC machines, accessing an object in a wider mode
3795 causes the high-order bits to become undefined. So they are
3796 not known to be zero. */
3797 if (GET_MODE_SIZE (GET_MODE (x
))
3798 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
3799 nonzero
|= (GET_MODE_MASK (GET_MODE (x
))
3800 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x
))));
3809 /* The nonzero bits are in two classes: any bits within MODE
3810 that aren't in GET_MODE (x) are always significant. The rest of the
3811 nonzero bits are those that are significant in the operand of
3812 the shift when shifted the appropriate number of bits. This
3813 shows that high-order bits are cleared by the right shift and
3814 low-order bits by left shifts. */
3815 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
3816 && INTVAL (XEXP (x
, 1)) >= 0
3817 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
)
3819 enum machine_mode inner_mode
= GET_MODE (x
);
3820 unsigned int width
= GET_MODE_BITSIZE (inner_mode
);
3821 int count
= INTVAL (XEXP (x
, 1));
3822 unsigned HOST_WIDE_INT mode_mask
= GET_MODE_MASK (inner_mode
);
3823 unsigned HOST_WIDE_INT op_nonzero
=
3824 cached_nonzero_bits (XEXP (x
, 0), mode
,
3825 known_x
, known_mode
, known_ret
);
3826 unsigned HOST_WIDE_INT inner
= op_nonzero
& mode_mask
;
3827 unsigned HOST_WIDE_INT outer
= 0;
3829 if (mode_width
> width
)
3830 outer
= (op_nonzero
& nonzero
& ~mode_mask
);
3832 if (code
== LSHIFTRT
)
3834 else if (code
== ASHIFTRT
)
3838 /* If the sign bit may have been nonzero before the shift, we
3839 need to mark all the places it could have been copied to
3840 by the shift as possibly nonzero. */
3841 if (inner
& ((HOST_WIDE_INT
) 1 << (width
- 1 - count
)))
3842 inner
|= (((HOST_WIDE_INT
) 1 << count
) - 1) << (width
- count
);
3844 else if (code
== ASHIFT
)
3847 inner
= ((inner
<< (count
% width
)
3848 | (inner
>> (width
- (count
% width
)))) & mode_mask
);
3850 nonzero
&= (outer
| inner
);
3856 /* This is at most the number of bits in the mode. */
3857 nonzero
= ((HOST_WIDE_INT
) 2 << (floor_log2 (mode_width
))) - 1;
3861 /* If CLZ has a known value at zero, then the nonzero bits are
3862 that value, plus the number of bits in the mode minus one. */
3863 if (CLZ_DEFINED_VALUE_AT_ZERO (mode
, nonzero
))
3864 nonzero
|= ((HOST_WIDE_INT
) 1 << (floor_log2 (mode_width
))) - 1;
3870 /* If CTZ has a known value at zero, then the nonzero bits are
3871 that value, plus the number of bits in the mode minus one. */
3872 if (CTZ_DEFINED_VALUE_AT_ZERO (mode
, nonzero
))
3873 nonzero
|= ((HOST_WIDE_INT
) 1 << (floor_log2 (mode_width
))) - 1;
3884 unsigned HOST_WIDE_INT nonzero_true
=
3885 cached_nonzero_bits (XEXP (x
, 1), mode
,
3886 known_x
, known_mode
, known_ret
);
3888 /* Don't call nonzero_bits for the second time if it cannot change
3890 if ((nonzero
& nonzero_true
) != nonzero
)
3891 nonzero
&= nonzero_true
3892 | cached_nonzero_bits (XEXP (x
, 2), mode
,
3893 known_x
, known_mode
, known_ret
);
/* See the macro definition above.  */
#undef cached_num_sign_bit_copies

/* The function cached_num_sign_bit_copies is a wrapper around
   num_sign_bit_copies1.  It avoids exponential behavior in
   num_sign_bit_copies1 when X has identical subexpressions on the
   first or the second level.  */

static unsigned int
cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
                            enum machine_mode known_mode,
                            unsigned int known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
     the precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
        return
          num_sign_bit_copies1 (x, mode, x0, mode,
                                cached_num_sign_bit_copies (x0, mode, known_x,
                                                            known_mode,
                                                            known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
          && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
        return
          num_sign_bit_copies1 (x, mode, x1, mode,
                                cached_num_sign_bit_copies (x1, mode, known_x,
                                                            known_mode,
                                                            known_ret));

      if (ARITHMETIC_P (x1)
          && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
        return
          num_sign_bit_copies1 (x, mode, x0, mode,
                                cached_num_sign_bit_copies (x0, mode, known_x,
                                                            known_mode,
                                                            known_ret));
    }

  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
}
3959 /* Return the number of bits at the high-order end of X that are known to
3960 be equal to the sign bit. X will be used in mode MODE; if MODE is
3961 VOIDmode, X will be used in its own mode. The returned value will always
3962 be between 1 and the number of bits in MODE. */
3965 num_sign_bit_copies1 (rtx x
, enum machine_mode mode
, rtx known_x
,
3966 enum machine_mode known_mode
,
3967 unsigned int known_ret
)
3969 enum rtx_code code
= GET_CODE (x
);
3970 unsigned int bitwidth
= GET_MODE_BITSIZE (mode
);
3971 int num0
, num1
, result
;
3972 unsigned HOST_WIDE_INT nonzero
;
3974 /* If we weren't given a mode, use the mode of X. If the mode is still
3975 VOIDmode, we don't know anything. Likewise if one of the modes is
3978 if (mode
== VOIDmode
)
3979 mode
= GET_MODE (x
);
3981 if (mode
== VOIDmode
|| FLOAT_MODE_P (mode
) || FLOAT_MODE_P (GET_MODE (x
)))
3984 /* For a smaller object, just ignore the high bits. */
3985 if (bitwidth
< GET_MODE_BITSIZE (GET_MODE (x
)))
3987 num0
= cached_num_sign_bit_copies (x
, GET_MODE (x
),
3988 known_x
, known_mode
, known_ret
);
3990 num0
- (int) (GET_MODE_BITSIZE (GET_MODE (x
)) - bitwidth
));
3993 if (GET_MODE (x
) != VOIDmode
&& bitwidth
> GET_MODE_BITSIZE (GET_MODE (x
)))
3995 #ifndef WORD_REGISTER_OPERATIONS
3996 /* If this machine does not do all register operations on the entire
3997 register and MODE is wider than the mode of X, we can say nothing
3998 at all about the high-order bits. */
4001 /* Likewise on machines that do, if the mode of the object is smaller
4002 than a word and loads of that size don't sign extend, we can say
4003 nothing about the high order bits. */
4004 if (GET_MODE_BITSIZE (GET_MODE (x
)) < BITS_PER_WORD
4005 #ifdef LOAD_EXTEND_OP
4006 && LOAD_EXTEND_OP (GET_MODE (x
)) != SIGN_EXTEND
4017 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4018 /* If pointers extend signed and this is a pointer in Pmode, say that
4019 all the bits above ptr_mode are known to be sign bit copies. */
4020 if (! POINTERS_EXTEND_UNSIGNED
&& GET_MODE (x
) == Pmode
&& mode
== Pmode
4022 return GET_MODE_BITSIZE (Pmode
) - GET_MODE_BITSIZE (ptr_mode
) + 1;
4026 unsigned int copies_for_hook
= 1, copies
= 1;
4027 rtx
new = rtl_hooks
.reg_num_sign_bit_copies (x
, mode
, known_x
,
4028 known_mode
, known_ret
,
4032 copies
= cached_num_sign_bit_copies (new, mode
, known_x
,
4033 known_mode
, known_ret
);
4035 if (copies
> 1 || copies_for_hook
> 1)
4036 return MAX (copies
, copies_for_hook
);
4038 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4043 #ifdef LOAD_EXTEND_OP
4044 /* Some RISC machines sign-extend all loads of smaller than a word. */
4045 if (LOAD_EXTEND_OP (GET_MODE (x
)) == SIGN_EXTEND
)
4046 return MAX (1, ((int) bitwidth
4047 - (int) GET_MODE_BITSIZE (GET_MODE (x
)) + 1));
4052 /* If the constant is negative, take its 1's complement and remask.
4053 Then see how many zero bits we have. */
4054 nonzero
= INTVAL (x
) & GET_MODE_MASK (mode
);
4055 if (bitwidth
<= HOST_BITS_PER_WIDE_INT
4056 && (nonzero
& ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
4057 nonzero
= (~nonzero
) & GET_MODE_MASK (mode
);
4059 return (nonzero
== 0 ? bitwidth
: bitwidth
- floor_log2 (nonzero
) - 1);
4062 /* If this is a SUBREG for a promoted object that is sign-extended
4063 and we are looking at it in a wider mode, we know that at least the
4064 high-order bits are known to be sign bit copies. */
4066 if (SUBREG_PROMOTED_VAR_P (x
) && ! SUBREG_PROMOTED_UNSIGNED_P (x
))
4068 num0
= cached_num_sign_bit_copies (SUBREG_REG (x
), mode
,
4069 known_x
, known_mode
, known_ret
);
4070 return MAX ((int) bitwidth
4071 - (int) GET_MODE_BITSIZE (GET_MODE (x
)) + 1,
4075 /* For a smaller object, just ignore the high bits. */
4076 if (bitwidth
<= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))))
4078 num0
= cached_num_sign_bit_copies (SUBREG_REG (x
), VOIDmode
,
4079 known_x
, known_mode
, known_ret
);
4080 return MAX (1, (num0
4081 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
)))
4085 #ifdef WORD_REGISTER_OPERATIONS
4086 #ifdef LOAD_EXTEND_OP
4087 /* For paradoxical SUBREGs on machines where all register operations
4088 affect the entire register, just look inside. Note that we are
4089 passing MODE to the recursive call, so the number of sign bit copies
4090 will remain relative to that mode, not the inner mode. */
4092 /* This works only if loads sign extend. Otherwise, if we get a
4093 reload for the inner part, it may be loaded from the stack, and
4094 then we lose all sign bit copies that existed before the store
4097 if ((GET_MODE_SIZE (GET_MODE (x
))
4098 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
4099 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x
))) == SIGN_EXTEND
4100 && MEM_P (SUBREG_REG (x
)))
4101 return cached_num_sign_bit_copies (SUBREG_REG (x
), mode
,
4102 known_x
, known_mode
, known_ret
);
4108 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
4109 return MAX (1, (int) bitwidth
- INTVAL (XEXP (x
, 1)));
4113 return (bitwidth
- GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
4114 + cached_num_sign_bit_copies (XEXP (x
, 0), VOIDmode
,
4115 known_x
, known_mode
, known_ret
));
4118 /* For a smaller object, just ignore the high bits. */
4119 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), VOIDmode
,
4120 known_x
, known_mode
, known_ret
);
4121 return MAX (1, (num0
- (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
4125 return cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4126 known_x
, known_mode
, known_ret
);
4128 case ROTATE
: case ROTATERT
:
4129 /* If we are rotating left by a number of bits less than the number
4130 of sign bit copies, we can just subtract that amount from the
4132 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4133 && INTVAL (XEXP (x
, 1)) >= 0
4134 && INTVAL (XEXP (x
, 1)) < (int) bitwidth
)
4136 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4137 known_x
, known_mode
, known_ret
);
4138 return MAX (1, num0
- (code
== ROTATE
? INTVAL (XEXP (x
, 1))
4139 : (int) bitwidth
- INTVAL (XEXP (x
, 1))));
4144 /* In general, this subtracts one sign bit copy. But if the value
4145 is known to be positive, the number of sign bit copies is the
4146 same as that of the input. Finally, if the input has just one bit
4147 that might be nonzero, all the bits are copies of the sign bit. */
4148 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4149 known_x
, known_mode
, known_ret
);
4150 if (bitwidth
> HOST_BITS_PER_WIDE_INT
)
4151 return num0
> 1 ? num0
- 1 : 1;
4153 nonzero
= nonzero_bits (XEXP (x
, 0), mode
);
4158 && (((HOST_WIDE_INT
) 1 << (bitwidth
- 1)) & nonzero
))
4163 case IOR
: case AND
: case XOR
:
4164 case SMIN
: case SMAX
: case UMIN
: case UMAX
:
4165 /* Logical operations will preserve the number of sign-bit copies.
4166 MIN and MAX operations always return one of the operands. */
4167 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4168 known_x
, known_mode
, known_ret
);
4169 num1
= cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4170 known_x
, known_mode
, known_ret
);
4171 return MIN (num0
, num1
);
4173 case PLUS
: case MINUS
:
4174 /* For addition and subtraction, we can have a 1-bit carry. However,
4175 if we are subtracting 1 from a positive number, there will not
4176 be such a carry. Furthermore, if the positive number is known to
4177 be 0 or 1, we know the result is either -1 or 0. */
4179 if (code
== PLUS
&& XEXP (x
, 1) == constm1_rtx
4180 && bitwidth
<= HOST_BITS_PER_WIDE_INT
)
4182 nonzero
= nonzero_bits (XEXP (x
, 0), mode
);
4183 if ((((HOST_WIDE_INT
) 1 << (bitwidth
- 1)) & nonzero
) == 0)
4184 return (nonzero
== 1 || nonzero
== 0 ? bitwidth
4185 : bitwidth
- floor_log2 (nonzero
) - 1);
4188 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4189 known_x
, known_mode
, known_ret
);
4190 num1
= cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4191 known_x
, known_mode
, known_ret
);
4192 result
= MAX (1, MIN (num0
, num1
) - 1);
4194 #ifdef POINTERS_EXTEND_UNSIGNED
4195 /* If pointers extend signed and this is an addition or subtraction
4196 to a pointer in Pmode, all the bits above ptr_mode are known to be
4198 if (! POINTERS_EXTEND_UNSIGNED
&& GET_MODE (x
) == Pmode
4199 && (code
== PLUS
|| code
== MINUS
)
4200 && REG_P (XEXP (x
, 0)) && REG_POINTER (XEXP (x
, 0)))
4201 result
= MAX ((int) (GET_MODE_BITSIZE (Pmode
)
4202 - GET_MODE_BITSIZE (ptr_mode
) + 1),
4208 /* The number of bits of the product is the sum of the number of
4209 bits of both terms. However, unless one of the terms if known
4210 to be positive, we must allow for an additional bit since negating
4211 a negative number can remove one sign bit copy. */
4213 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4214 known_x
, known_mode
, known_ret
);
4215 num1
= cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4216 known_x
, known_mode
, known_ret
);
4218 result
= bitwidth
- (bitwidth
- num0
) - (bitwidth
- num1
);
4220 && (bitwidth
> HOST_BITS_PER_WIDE_INT
4221 || (((nonzero_bits (XEXP (x
, 0), mode
)
4222 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
4223 && ((nonzero_bits (XEXP (x
, 1), mode
)
4224 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0))))
4227 return MAX (1, result
);
4230 /* The result must be <= the first operand. If the first operand
4231 has the high bit set, we know nothing about the number of sign
4233 if (bitwidth
> HOST_BITS_PER_WIDE_INT
)
4235 else if ((nonzero_bits (XEXP (x
, 0), mode
)
4236 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
4239 return cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4240 known_x
, known_mode
, known_ret
);
4243 /* The result must be <= the second operand. */
4244 return cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4245 known_x
, known_mode
, known_ret
);
4248 /* Similar to unsigned division, except that we have to worry about
4249 the case where the divisor is negative, in which case we have
4251 result
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4252 known_x
, known_mode
, known_ret
);
4254 && (bitwidth
> HOST_BITS_PER_WIDE_INT
4255 || (nonzero_bits (XEXP (x
, 1), mode
)
4256 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0))
4262 result
= cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4263 known_x
, known_mode
, known_ret
);
4265 && (bitwidth
> HOST_BITS_PER_WIDE_INT
4266 || (nonzero_bits (XEXP (x
, 1), mode
)
4267 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0))
4273 /* Shifts by a constant add to the number of bits equal to the
4275 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4276 known_x
, known_mode
, known_ret
);
4277 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4278 && INTVAL (XEXP (x
, 1)) > 0)
4279 num0
= MIN ((int) bitwidth
, num0
+ INTVAL (XEXP (x
, 1)));
4284 /* Left shifts destroy copies. */
4285 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
4286 || INTVAL (XEXP (x
, 1)) < 0
4287 || INTVAL (XEXP (x
, 1)) >= (int) bitwidth
)
4290 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4291 known_x
, known_mode
, known_ret
);
4292 return MAX (1, num0
- INTVAL (XEXP (x
, 1)));
4295 num0
= cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4296 known_x
, known_mode
, known_ret
);
4297 num1
= cached_num_sign_bit_copies (XEXP (x
, 2), mode
,
4298 known_x
, known_mode
, known_ret
);
4299 return MIN (num0
, num1
);
4301 case EQ
: case NE
: case GE
: case GT
: case LE
: case LT
:
4302 case UNEQ
: case LTGT
: case UNGE
: case UNGT
: case UNLE
: case UNLT
:
4303 case GEU
: case GTU
: case LEU
: case LTU
:
4304 case UNORDERED
: case ORDERED
:
4305 /* If the constant is negative, take its 1's complement and remask.
4306 Then see how many zero bits we have. */
4307 nonzero
= STORE_FLAG_VALUE
;
4308 if (bitwidth
<= HOST_BITS_PER_WIDE_INT
4309 && (nonzero
& ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
4310 nonzero
= (~nonzero
) & GET_MODE_MASK (mode
);
4312 return (nonzero
== 0 ? bitwidth
: bitwidth
- floor_log2 (nonzero
) - 1);
4318 /* If we haven't been able to figure it out by one of the above rules,
4319 see if some of the high-order bits are known to be zero. If so,
4320 count those bits and return one less than that amount. If we can't
4321 safely compute the mask for this mode, always return BITWIDTH. */
4323 bitwidth
= GET_MODE_BITSIZE (mode
);
4324 if (bitwidth
> HOST_BITS_PER_WIDE_INT
)
4327 nonzero
= nonzero_bits (x
, mode
);
4328 return nonzero
& ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))
4329 ? 1 : bitwidth
- floor_log2 (nonzero
) - 1;
/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}
4368 /* Given an insn INSN and condition COND, return the condition in a
4369 canonical form to simplify testing by callers. Specifically:
4371 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4372 (2) Both operands will be machine operands; (cc0) will have been replaced.
4373 (3) If an operand is a constant, it will be the second operand.
4374 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4375 for GE, GEU, and LEU.
4377 If the condition cannot be understood, or is an inequality floating-point
4378 comparison which needs to be reversed, 0 will be returned.
4380 If REVERSE is nonzero, then reverse the condition prior to canonizing it.
4382 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4383 insn used in locating the condition was found. If a replacement test
4384 of the condition is desired, it should be placed in front of that
4385 insn and we will be sure that the inputs are still valid.
4387 If WANT_REG is nonzero, we wish the condition to be relative to that
4388 register, if possible. Therefore, do not canonicalize the condition
4389 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4390 to be a compare to a CC mode register.
4392 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4396 canonicalize_condition (rtx insn
, rtx cond
, int reverse
, rtx
*earliest
,
4397 rtx want_reg
, int allow_cc_mode
, int valid_at_insn_p
)
4404 int reverse_code
= 0;
4405 enum machine_mode mode
;
4407 code
= GET_CODE (cond
);
4408 mode
= GET_MODE (cond
);
4409 op0
= XEXP (cond
, 0);
4410 op1
= XEXP (cond
, 1);
4413 code
= reversed_comparison_code (cond
, insn
);
4414 if (code
== UNKNOWN
)
4420 /* If we are comparing a register with zero, see if the register is set
4421 in the previous insn to a COMPARE or a comparison operation. Perform
4422 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4425 while ((GET_RTX_CLASS (code
) == RTX_COMPARE
4426 || GET_RTX_CLASS (code
) == RTX_COMM_COMPARE
)
4427 && op1
== CONST0_RTX (GET_MODE (op0
))
4430 /* Set nonzero when we find something of interest. */
4434 /* If comparison with cc0, import actual comparison from compare
4438 if ((prev
= prev_nonnote_insn (prev
)) == 0
4439 || !NONJUMP_INSN_P (prev
)
4440 || (set
= single_set (prev
)) == 0
4441 || SET_DEST (set
) != cc0_rtx
)
4444 op0
= SET_SRC (set
);
4445 op1
= CONST0_RTX (GET_MODE (op0
));
4451 /* If this is a COMPARE, pick up the two things being compared. */
4452 if (GET_CODE (op0
) == COMPARE
)
4454 op1
= XEXP (op0
, 1);
4455 op0
= XEXP (op0
, 0);
4458 else if (!REG_P (op0
))
4461 /* Go back to the previous insn. Stop if it is not an INSN. We also
4462 stop if it isn't a single set or if it has a REG_INC note because
4463 we don't want to bother dealing with it. */
4465 if ((prev
= prev_nonnote_insn (prev
)) == 0
4466 || !NONJUMP_INSN_P (prev
)
4467 || FIND_REG_INC_NOTE (prev
, NULL_RTX
))
4470 set
= set_of (op0
, prev
);
4473 && (GET_CODE (set
) != SET
4474 || !rtx_equal_p (SET_DEST (set
), op0
)))
4477 /* If this is setting OP0, get what it sets it to if it looks
4481 enum machine_mode inner_mode
= GET_MODE (SET_DEST (set
));
4482 #ifdef FLOAT_STORE_FLAG_VALUE
4483 REAL_VALUE_TYPE fsfv
;
4486 /* ??? We may not combine comparisons done in a CCmode with
4487 comparisons not done in a CCmode. This is to aid targets
4488 like Alpha that have an IEEE compliant EQ instruction, and
4489 a non-IEEE compliant BEQ instruction. The use of CCmode is
4490 actually artificial, simply to prevent the combination, but
4491 should not affect other platforms.
4493 However, we must allow VOIDmode comparisons to match either
4494 CCmode or non-CCmode comparison, because some ports have
4495 modeless comparisons inside branch patterns.
4497 ??? This mode check should perhaps look more like the mode check
4498 in simplify_comparison in combine. */
4500 if ((GET_CODE (SET_SRC (set
)) == COMPARE
4503 && GET_MODE_CLASS (inner_mode
) == MODE_INT
4504 && (GET_MODE_BITSIZE (inner_mode
)
4505 <= HOST_BITS_PER_WIDE_INT
)
4506 && (STORE_FLAG_VALUE
4507 & ((HOST_WIDE_INT
) 1
4508 << (GET_MODE_BITSIZE (inner_mode
) - 1))))
4509 #ifdef FLOAT_STORE_FLAG_VALUE
4511 && GET_MODE_CLASS (inner_mode
) == MODE_FLOAT
4512 && (fsfv
= FLOAT_STORE_FLAG_VALUE (inner_mode
),
4513 REAL_VALUE_NEGATIVE (fsfv
)))
4516 && COMPARISON_P (SET_SRC (set
))))
4517 && (((GET_MODE_CLASS (mode
) == MODE_CC
)
4518 == (GET_MODE_CLASS (inner_mode
) == MODE_CC
))
4519 || mode
== VOIDmode
|| inner_mode
== VOIDmode
))
4521 else if (((code
== EQ
4523 && (GET_MODE_BITSIZE (inner_mode
)
4524 <= HOST_BITS_PER_WIDE_INT
)
4525 && GET_MODE_CLASS (inner_mode
) == MODE_INT
4526 && (STORE_FLAG_VALUE
4527 & ((HOST_WIDE_INT
) 1
4528 << (GET_MODE_BITSIZE (inner_mode
) - 1))))
4529 #ifdef FLOAT_STORE_FLAG_VALUE
4531 && GET_MODE_CLASS (inner_mode
) == MODE_FLOAT
4532 && (fsfv
= FLOAT_STORE_FLAG_VALUE (inner_mode
),
4533 REAL_VALUE_NEGATIVE (fsfv
)))
4536 && COMPARISON_P (SET_SRC (set
))
4537 && (((GET_MODE_CLASS (mode
) == MODE_CC
)
4538 == (GET_MODE_CLASS (inner_mode
) == MODE_CC
))
4539 || mode
== VOIDmode
|| inner_mode
== VOIDmode
))
4549 else if (reg_set_p (op0
, prev
))
4550 /* If this sets OP0, but not directly, we have to give up. */
4555 /* If the caller is expecting the condition to be valid at INSN,
4556 make sure X doesn't change before INSN. */
4557 if (valid_at_insn_p
)
4558 if (modified_in_p (x
, prev
) || modified_between_p (x
, prev
, insn
))
4560 if (COMPARISON_P (x
))
4561 code
= GET_CODE (x
);
4564 code
= reversed_comparison_code (x
, prev
);
4565 if (code
== UNKNOWN
)
4570 op0
= XEXP (x
, 0), op1
= XEXP (x
, 1);
4576 /* If constant is first, put it last. */
4577 if (CONSTANT_P (op0
))
4578 code
= swap_condition (code
), tem
= op0
, op0
= op1
, op1
= tem
;
4580 /* If OP0 is the result of a comparison, we weren't able to find what
4581 was really being compared, so fail. */
4583 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_CC
)
4586 /* Canonicalize any ordered comparison with integers involving equality
4587 if we can do computations in the relevant mode and we do not
4590 if (GET_MODE_CLASS (GET_MODE (op0
)) != MODE_CC
4591 && GET_CODE (op1
) == CONST_INT
4592 && GET_MODE (op0
) != VOIDmode
4593 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
4595 HOST_WIDE_INT const_val
= INTVAL (op1
);
4596 unsigned HOST_WIDE_INT uconst_val
= const_val
;
4597 unsigned HOST_WIDE_INT max_val
4598 = (unsigned HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (op0
));
4603 if ((unsigned HOST_WIDE_INT
) const_val
!= max_val
>> 1)
4604 code
= LT
, op1
= gen_int_mode (const_val
+ 1, GET_MODE (op0
));
4607 /* When cross-compiling, const_val might be sign-extended from
4608 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
4610 if ((HOST_WIDE_INT
) (const_val
& max_val
)
4611 != (((HOST_WIDE_INT
) 1
4612 << (GET_MODE_BITSIZE (GET_MODE (op0
)) - 1))))
4613 code
= GT
, op1
= gen_int_mode (const_val
- 1, GET_MODE (op0
));
4617 if (uconst_val
< max_val
)
4618 code
= LTU
, op1
= gen_int_mode (uconst_val
+ 1, GET_MODE (op0
));
4622 if (uconst_val
!= 0)
4623 code
= GTU
, op1
= gen_int_mode (uconst_val
- 1, GET_MODE (op0
));
4631 /* Never return CC0; return zero instead. */
4635 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}
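/* Illustrative usage sketch (added commentary, not part of the original
   source): a pass inspecting a conditional jump might write, roughly,

     rtx earliest;
     rtx cond = get_condition (jump_insn, &earliest, 0, 0);

   and, when COND is nonzero, rely on GET_CODE (cond) being the canonical
   comparison with XEXP (cond, 0) and XEXP (cond, 1) as its operands,
   valid if placed in front of EARLIEST.  Here jump_insn is assumed to be
   supplied by the caller.  */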
/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;

  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }
}