/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);
static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);
/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

rtx_unstable_p (rtx x)
  RTX_CODE code = GET_CODE (x);

      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_unstable_p (XEXP (x, i)))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
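/* For example, in the generic notation used elsewhere in this file, the
   address (plus (reg) (const_int 4)) is stable when the register is the
   frame pointer, while a writable or volatile (mem (reg)) is unstable,
   since a store elsewhere in the function could change its value.  */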
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

rtx_varies_p (rtx x, int for_alias)

      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))

      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */

      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
  enum rtx_code code = GET_CODE (x);

      return SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || GET_MODE_SIZE (mode) == 0)

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;

          return offset % GET_MODE_SIZE (mode) != 0;

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))

      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

  /* If it isn't one of the cases above, it can cause a trap.  */

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

rtx_addr_can_trap_p (rtx x)
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
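/* For example, (plus (reg) (const_int 16)) cannot trap when the register is
   the stack or frame pointer, because stack slots are always accessible,
   whereas an address held in an arbitrary pseudo register is assumed to be
   able to trap.  */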
/* Return true if X is an address that is known to not be zero.  */

nonzero_address_p (rtx x)
  enum rtx_code code = GET_CODE (x);

      return !SYMBOL_REF_WEAK (x);

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)

      return nonzero_address_p (XEXP (x, 0));

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))

          return nonzero_address_p (XEXP (x, 0));
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))

      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
      return nonzero_address_p (XEXP (x, 0));

      /* Similarly.  Further, the offset is always positive.  */

      return nonzero_address_p (XEXP (x, 0));

      return nonzero_address_p (XEXP (x, 1));

  /* If it isn't one of the cases above, might be zero.  */
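/* For example, a SYMBOL_REF is nonzero as long as the symbol is not weak,
   and the stack, frame and virtual frame registers are always nonzero,
   while an arbitrary pseudo register might be zero.  */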
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

rtx_addr_varies_p (rtx x, int for_alias)

      return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_addr_varies_p (XEXP (x, i), for_alias))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

get_integer_term (rtx x)
  if (GET_CODE (x) == CONST)

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

get_related_value (rtx x)
  if (GET_CODE (x) != CONST)

  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
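/* As an illustration, for (const (plus (symbol_ref "table") (const_int 8)))
   get_integer_term returns 8 and get_related_value returns the SYMBOL_REF,
   so cse.c can recognize the two addresses as related.  */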
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
  switch (GET_CODE (x))
      if (REG_P (SUBREG_REG (x)))
        if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
            && global_regs[subreg_regno (x)])

      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])

      /* A non-constant call might use a global register.  */

/* Returns nonzero if X mentions a global register.  */

global_reg_mentioned_p (rtx x)
      if (! CONST_OR_PURE_CALL_P (x))
      x = CALL_INSN_FUNCTION_USAGE (x);

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

count_occurrences (rtx x, rtx find, int count_dest)
  const char *format_ptr;

      if (MEM_P (find) && rtx_equal_p (x, find))

      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
      switch (*format_ptr++)
          count += count_occurrences (XEXP (x, i), find, count_dest);
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

reg_mentioned_p (rtx reg, rtx in)
  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

      /* Compare registers by number.  */
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */

      /* These are kept unique for a given value.  */

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        for (j = XVECLEN (in, i) - 1; j >= 0; j--)
          if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
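/* For example, with REG = (reg:SI 65) (the register number is arbitrary),
   reg_mentioned_p returns 1 for
   IN = (set (reg:SI 66) (plus:SI (reg:SI 65) (const_int 1)))
   and 0 if register 65 does not appear anywhere in IN.  */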
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

no_labels_between_p (rtx beg, rtx end)
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

reg_referenced_p (rtx x, rtx body)
  switch (GET_CODE (body))
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))

      return reg_overlap_mentioned_p (x, body);

      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))

      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))

      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
      return reg_referenced_p (x, COND_EXEC_CODE (body));
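/* For example, (reg:SI 65) is referenced by
   (set (reg:SI 66) (plus:SI (reg:SI 65) (const_int 1))) because it appears
   in the source, but not by (set (reg:SI 65) (const_int 0)), which only
   overwrites it.  (Register numbers are illustrative.)  */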
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
  if (from_insn == to_insn)

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))

/* Internals of reg_set_between_p.  */

reg_set_p (rtx reg, rtx insn)
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
      && (FIND_REG_INC_NOTE (insn, reg)
          && REGNO (reg) < FIRST_PSEUDO_REGISTER
          && TEST_HARD_REG_BIT (regs_invalidated_by_call,
          || find_reg_fusage (insn, CLOBBER, reg)))))

  return set_of (reg, insn) != NULL_RTX;
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

modified_between_p (rtx x, rtx start, rtx end)
  enum rtx_code code = GET_CODE (x);

      if (modified_between_p (XEXP (x, 0), start, end))
      if (MEM_READONLY_P (x))
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))

      return reg_set_between_p (x, start, end);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

modified_in_p (rtx x, rtx insn)
  enum rtx_code code = GET_CODE (x);

      if (modified_in_p (XEXP (x, 0), insn))
      if (MEM_READONLY_P (x))
      if (memory_modified_in_insn_p (x, insn))

      return reg_set_p (x, insn);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
/* Helper function for set_of.  */

set_of_1 (rtx x, rtx pat, void *data1)
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */

set_of (rtx pat, rtx insn)
  struct set_of_data data;
  data.found = NULL_RTX;

  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

single_set_2 (rtx insn, rtx pat)
  int set_verified = 1;

  if (GET_CODE (pat) == PARALLEL)
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
              /* We can consider insns having multiple sets, where all
                 but one are dead as single set insns.  In common case
                 only single set is present in the pattern so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach set first time, we just expect this is
                 the single set we are looking for and only when more
                 sets are found in the insn, we check them.  */
              if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                  && !side_effects_p (set))
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

multiple_sets (rtx insn)
  /* INSN must be an insn.  */
  if (! INSN_P (insn))

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
            /* If we have already found a SET, then return now.  */

  /* Either zero or one SET.  */
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

set_noop_p (rtx set)
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
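/* Thus (set (reg:SI 65) (reg:SI 65)) is a no-op by this test, while
   (set (reg:SI 65) (reg:SI 66)) and copies through volatile memory are
   not.  */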
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

noop_move_p (rtx insn)
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))

  if (GET_CODE (pat) == SET && set_noop_p (pat))

  if (GET_CODE (pat) == PARALLEL)
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
      rtx set = single_set (p);
      rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

      if (set && rtx_equal_p (x, SET_DEST (set)))
          rtx src = SET_SRC (set);

          if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
            src = XEXP (note, 0);

          if ((valid_to == NULL_RTX
               || ! modified_between_p (src, PREV_INSN (p), valid_to))
              /* Reject hard registers because we don't usually want
                 to use them; we'd rather use a pseudo.  */
                     && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))

      /* If set in non-simple way, we don't have a value.  */
      if (reg_set_p (x, p))
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
  unsigned int x_regno;

  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */

  code = GET_CODE (x);

      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]

      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;

      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))

      if (code == CLOBBER || loc == &SET_SRC (x))

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
          if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
      else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

reg_overlap_mentioned_p (rtx x, rtx in)
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))

  switch (GET_CODE (x))
    case STRICT_LOW_PART:
      /* Overly conservative.  */

      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);

      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx *) 0);

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
            if (reg_overlap_mentioned_p (x, XEXP (in, i)))
            else if (fmt[i] == 'E')
                for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                  if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))

      return reg_mentioned_p (x, in);

      /* If any register in here refers to it we return true.  */
      for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
        if (XEXP (XVECEXP (x, 0, i), 0) != 0
            && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))

      gcc_assert (CONSTANT_P (x));
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        (*fun) (dest, x, data);

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
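/* A typical caller might look like the following sketch (the callback name
   and the DATA type are purely illustrative):

     static void
     record_store (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
     {
       if (REG_P (dest))
         ((char *) data)[REGNO (dest)] = 1;
     }

     note_stores (PATTERN (insn), record_store, reg_is_stored);

   DEST is the register or MEM being stored into and SETTER is the SET or
   CLOBBER performing the store, as described above.  */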
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
  switch (GET_CODE (body))
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);

      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);

      (*fun) (&XEXP (body, 0), data);

      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);

      (*fun) (&TRAP_CONDITION (body), data);

      (*fun) (&XEXP (body, 0), data);

    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);

      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);

        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

          (*fun) (&XEXP (dest, 0), data);

      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

dead_or_set_p (rtx insn, rtx x)
  unsigned int regno, last_regno;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)

  gcc_assert (REG_P (x));

  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  regno = REGNO (dest);
  endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
              : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
  return (test_regno >= regno && test_regno < endregno);
/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

covers_regno_p (rtx dest, unsigned int test_regno)
  if (GET_CODE (dest) == PARALLEL)
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))

    return covers_regno_no_parallel_p (dest, test_regno);
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

dead_or_set_regno_p (rtx insn, unsigned int test_regno)
  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))

      && find_regno_fusage (insn, CLOBBER, test_regno))

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

find_reg_note (rtx insn, enum reg_note kind, rtx datum)
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
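/* Typical usage is

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

   where passing NULL_RTX as DATUM accepts a note with any datum.  */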
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note in it.  */

find_reg_equal_equiv_note (rtx insn)
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
        if (single_set (insn) == 0)
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))

      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      unsigned int regnote;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
/* Return true if INSN is a call to a pure function.  */

pure_call_p (rtx insn)
  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
/* Remove register note NOTE from the REG_NOTES of INSN.  */

remove_note (rtx insn, rtx note)
  if (note == NULL_RTX)

  if (REG_NOTES (insn) == note)
      REG_NOTES (insn) = XEXP (note, 1);

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
        XEXP (link, 1) = XEXP (note, 1);
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

in_expr_list_p (rtx listp, rtx node)
  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

remove_node_from_expr_list (rtx node, rtx *listp)
  rtx prev = NULL_RTX;

      if (node == XEXP (temp, 0))
          /* Splice the node out of the list.  */
            XEXP (prev, 1) = XEXP (temp, 1);
            *listp = XEXP (temp, 1);

      temp = XEXP (temp, 1);
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

volatile_insn_p (rtx x)
  code = GET_CODE (x);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (volatile_insn_p (XEXP (x, i)))
        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

volatile_refs_p (rtx x)
  code = GET_CODE (x);

    case UNSPEC_VOLATILE:

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (volatile_refs_p (XEXP (x, i)))
        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

side_effects_p (rtx x)
  code = GET_CODE (x);

      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */

      if (MEM_VOLATILE_P (x))

  /* Recursively scan the operands of this expression.  */
    const char *fmt = GET_RTX_FORMAT (code);

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
        if (side_effects_p (XEXP (x, i)))
        else if (fmt[i] == 'E')
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
/* Return nonzero if evaluating rtx X might cause a trap.  UNALIGNED_MEMS
   controls whether nonzero is returned for unaligned memory accesses on
   strict alignment machines.  */

may_trap_p_1 (rtx x, bool unaligned_mems)
  code = GET_CODE (x);

      /* Handle these cases quickly.  */

    case UNSPEC_VOLATILE:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
      if (MEM_NOTRAP_P (x)
          && (!STRICT_ALIGNMENT || !unaligned_mems))

      rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

      /* Division by a non-constant might trap.  */
      if (HONOR_SNANS (GET_MODE (x)))
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))

      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */

      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)

      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))

      if (HONOR_SNANS (GET_MODE (x)))
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))

      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))

      /* These operations don't trap even with floating point.  */

      /* Any floating arithmetic may trap.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && flag_trapping_math)

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (may_trap_p_1 (XEXP (x, i), unaligned_mems))
      else if (fmt[i] == 'E')
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), unaligned_mems))

/* Return nonzero if evaluating rtx X might cause a trap.  */

may_trap_p (rtx x)
  return may_trap_p_1 (x, false);
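/* For example, (div:SI (reg:SI 65) (reg:SI 66)) may trap because the divisor
   is not a constant, while (plus:SI (reg:SI 65) (const_int 1)) can never
   trap.  (Register numbers are illustrative.)  */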
/* Same as above, but additionally return non-zero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   verify may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      (set (reg:SI) (mem:SI (%fp - 7)))

      (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

may_trap_or_fault_p (rtx x)
  return may_trap_p_1 (x, true);
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

inequality_comparisons_p (rtx x)
  enum rtx_code code = GET_CODE (x);

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
      if (inequality_comparisons_p (XEXP (x, i)))
      else if (fmt[i] == 'E')
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

replace_rtx (rtx x, rtx from, rtx to)
  /* The following prevents loops occurrence when we change MEM in
     CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)

  /* Allow this function to make replacements in EXPR_LISTs.  */

  if (GET_CODE (x) == SUBREG)
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),

        SUBREG_REG (x) = new;
  else if (GET_CODE (x) == ZERO_EXTEND)
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
    else if (fmt[i] == 'E')
      for (j = XVECLEN (x, i) - 1; j >= 0; j--)
        XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
/* Throughout the rtx X, replace many registers according to REG_MAP.
   Return the replacement for X (which may be X with altered contents).
   REG_MAP[R] is the replacement for register R, or 0 for don't replace.
   NREGS is the length of REG_MAP; regs >= NREGS are not mapped.

   We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
   should not be mapped to pseudos or vice versa since validate_change
   is not called.

   If REPLACE_DEST is 1, replacements are also done in destinations;
   otherwise, only sources are replaced.  */

replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
  code = GET_CODE (x);

      /* Verify that the register has an entry before trying to access it.  */
      if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
          /* SUBREGs can't be shared.  Always return a copy to ensure that if
             this replacement occurs more than once then each instance will
             get distinct rtx.  */
          if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
            return copy_rtx (reg_map[REGNO (x)]);
          return reg_map[REGNO (x)];

      /* Prevent making nested SUBREGs.  */
      if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
          && reg_map[REGNO (SUBREG_REG (x))] != 0
          && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
          rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
          return simplify_gen_subreg (GET_MODE (x), map_val,
                                      GET_MODE (SUBREG_REG (x)),

        SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
      else if (MEM_P (SET_DEST (x))
               || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
        /* Even if we are not to replace destinations, replace register if it
           is CONTAINED in destination (destination is memory or
           STRICT_LOW_PART).  */
        XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
      else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
        /* Similarly, for ZERO_EXTRACT we replace all operands.  */

      SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
        XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
                                          nregs, replace_dest);
/* Replace occurrences of the old label in *X with the new one.
   DATA is a REPLACE_LABEL_DATA containing the old and new labels.  */

replace_label (rtx *x, void *data)
  rtx old_label = ((replace_label_data *) data)->r1;
  rtx new_label = ((replace_label_data *) data)->r2;
  bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;

  if (GET_CODE (l) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (l))
      rtx c = get_pool_constant (l);
      if (rtx_referenced_p (old_label, c))
          replace_label_data *d = (replace_label_data *) data;

          /* Create a copy of constant C; replace the label inside
             but do not update LABEL_NUSES because uses in constant pool
             are not changed.  */
          new_c = copy_rtx (c);
          d->update_label_nuses = false;
          for_each_rtx (&new_c, replace_label, data);
          d->update_label_nuses = update_label_nuses;

          /* Add the new constant NEW_C to constant pool and replace
             the old reference to constant by new reference.  */
          new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
          *x = replace_rtx (l, l, new_l);

  /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
     field.  This is not handled by for_each_rtx because it doesn't
     handle unprinted ('0') fields.  */
  if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
    JUMP_LABEL (l) = new_label;

  if ((GET_CODE (l) == LABEL_REF
       || GET_CODE (l) == INSN_LIST)
      && XEXP (l, 0) == old_label)
      XEXP (l, 0) = new_label;
      if (update_label_nuses)
          ++LABEL_NUSES (new_label);
          --LABEL_NUSES (old_label);
/* When *BODY is equal to X or X is directly referenced by *BODY
   return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
   too, otherwise FOR_EACH_RTX continues traversing *BODY.  */

rtx_referenced_p_1 (rtx *body, void *x)
  if (*body == NULL_RTX)
    return y == NULL_RTX;

  /* Return true if a label_ref *BODY refers to label Y.  */
  if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
    return XEXP (*body, 0) == y;

  /* If *BODY is a reference to pool constant traverse the constant.  */
  if (GET_CODE (*body) == SYMBOL_REF
      && CONSTANT_POOL_ADDRESS_P (*body))
    return rtx_referenced_p (y, get_pool_constant (*body));

  /* By default, compare the RTL expressions.  */
  return rtx_equal_p (*body, y);

/* Return true if X is referenced in BODY.  */

rtx_referenced_p (rtx x, rtx body)
  return for_each_rtx (&body, rtx_referenced_p_1, x);
/* If INSN is a tablejump return true and store the label (before jump table) to
   *LABELP and the jump table to *TABLEP.  LABELP and TABLEP may be NULL.  */

tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
      && (label = JUMP_LABEL (insn)) != NULL_RTX
      && (table = next_active_insn (label)) != NULL_RTX
      && (GET_CODE (PATTERN (table)) == ADDR_VEC
          || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
/* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
   constant that is not in the constant pool and not in the condition
   of an IF_THEN_ELSE.  */

computed_jump_p_1 (rtx x)
  enum rtx_code code = GET_CODE (x);

      return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
                && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));

      return (computed_jump_p_1 (XEXP (x, 1))
              || computed_jump_p_1 (XEXP (x, 2)));

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
          && computed_jump_p_1 (XEXP (x, i)))
      else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (computed_jump_p_1 (XVECEXP (x, i, j)))
/* Return nonzero if INSN is an indirect jump (aka computed jump).

   Tablejumps and casesi insns are not considered indirect jumps;
   we can recognize them by a (use (label_ref)).  */

computed_jump_p (rtx insn)
      rtx pat = PATTERN (insn);

      if (find_reg_note (insn, REG_LABEL, NULL_RTX))
      else if (GET_CODE (pat) == PARALLEL)
          int len = XVECLEN (pat, 0);
          int has_use_labelref = 0;

          for (i = len - 1; i >= 0; i--)
            if (GET_CODE (XVECEXP (pat, 0, i)) == USE
                && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
              has_use_labelref = 1;

          if (! has_use_labelref)
            for (i = len - 1; i >= 0; i--)
              if (GET_CODE (XVECEXP (pat, 0, i)) == SET
                  && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
                  && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
      else if (GET_CODE (pat) == SET
               && SET_DEST (pat) == pc_rtx
               && computed_jump_p_1 (SET_SRC (pat)))
/* Optimized loop of for_each_rtx, trying to avoid useless recursive
   calls.  Processes the subexpressions of EXP and passes them to F.  */
for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
  const char *format = GET_RTX_FORMAT (GET_CODE (exp));

  for (; format[n] != '\0'; n++)
          result = (*f) (x, data);
            /* Do not traverse sub-expressions.  */
          else if (result != 0)
            /* Stop the traversal.  */

            /* There are no sub-expressions.  */

          i = non_rtx_starting_operands[GET_CODE (*x)];
              result = for_each_rtx_1 (*x, i, f, data);

          if (XVEC (exp, n) == 0)
          for (j = 0; j < XVECLEN (exp, n); ++j)
              x = &XVECEXP (exp, n, j);
              result = (*f) (x, data);
                /* Do not traverse sub-expressions.  */
              else if (result != 0)
                /* Stop the traversal.  */

                /* There are no sub-expressions.  */

              i = non_rtx_starting_operands[GET_CODE (*x)];
                  result = for_each_rtx_1 (*x, i, f, data);

        /* Nothing to do.  */
/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */

for_each_rtx (rtx *x, rtx_function f, void *data)
  result = (*f) (x, data);
    /* Do not traverse sub-expressions.  */
  else if (result != 0)
    /* Stop the traversal.  */

    /* There are no sub-expressions.  */

  i = non_rtx_starting_operands[GET_CODE (*x)];

  return for_each_rtx_1 (*x, i, f, data);
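/* A caller supplies a function of type rtx_function, for instance (the
   names below are only an illustration):

     static int
     count_mems_1 (rtx *px, void *data)
     {
       if (MEM_P (*px))
         {
           ++*(int *) data;
           return -1;
         }
       return 0;
     }

     int nmems = 0;
     for_each_rtx (&pat, count_mems_1, &nmems);

   Returning -1 makes the traversal skip the MEM's sub-expressions (its
   address); any other nonzero value would stop the traversal entirely.  */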
/* Searches X for any reference to REGNO, returning the rtx of the
   reference found if any.  Otherwise, returns NULL_RTX.  */

regno_use_in (unsigned int regno, rtx x)
  if (REG_P (x) && REGNO (x) == regno)

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
      if ((tem = regno_use_in (regno, XEXP (x, i))))
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the first operand
   and positive values for the second operand.  */

int
commutative_operand_precedence (rtx op)
{
  enum rtx_code code = GET_CODE (op);

  /* Constants always come second.  Prefer "nice" constants.  */
  if (code == CONST_INT)
    return -7;
  if (code == CONST_DOUBLE)
    return -6;
  op = avoid_constant_pool_reference (op);
  code = GET_CODE (op);

  switch (GET_RTX_CLASS (code))
    {
    case RTX_CONST_OBJ:
      if (code == CONST_INT)
	return -5;
      if (code == CONST_DOUBLE)
	return -4;
      return -3;

    case RTX_EXTRA:
      /* SUBREGs of objects should come second.  */
      if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
	return -2;

      if (!CONSTANT_P (op))
	return 0;
      else
	/* As for RTX_CONST_OBJ.  */
	return -3;

    case RTX_OBJ:
      /* Complex expressions should be first, so decrease the priority
	 of objects.  */
      return -1;

    case RTX_COMM_ARITH:
      /* Prefer operands that are themselves commutative to be first.
	 This helps to make things linear.  In particular,
	 (and (and (reg) (reg)) (not (reg))) is canonical.  */
      return 4;

    case RTX_BIN_ARITH:
      /* If only one operand is a binary expression, it will be the first
	 operand.  In particular, (plus (minus (reg) (reg)) (neg (reg)))
	 is canonical, although it will usually be further simplified.  */
      return 2;

    case RTX_UNARY:
      /* Then prefer NEG and NOT.  */
      if (code == NEG || code == NOT)
	return 1;

    default:
      return 0;
    }
}

/* Return 1 iff it is necessary to swap operands of commutative operation
   in order to canonicalize expression.  */

int
swap_commutative_operands_p (rtx x, rtx y)
{
  return (commutative_operand_precedence (x)
	  < commutative_operand_precedence (y));
}
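/* For example, a CONST_INT has the lowest precedence of all, so
   swap_commutative_operands_p ((const_int 4), (reg:SI R)) is nonzero and
   callers canonicalize (plus (const_int 4) (reg)) into
   (plus (reg) (const_int 4)), keeping constants in the second position.  */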
/* Return 1 if X is an autoincrement side effect and the register is
   not the stack pointer.  */

int
auto_inc_p (rtx x)
{
  switch (GET_CODE (x))
    {
    case PRE_INC:
    case POST_INC:
    case PRE_DEC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
      /* There are no REG_INC notes for SP.  */
      if (XEXP (x, 0) != stack_pointer_rtx)
	return 1;
      break;
    default:
      break;
    }
  return 0;
}

/* Return 1 if the sequence of instructions beginning with FROM and up
   to and including TO is safe to move.  If NEW_TO is non-NULL, and
   the sequence is not already safe to move, but can be easily
   extended to a sequence which is safe, then NEW_TO will point to the
   end of the extended sequence.

   For now, this function only checks that the region contains whole
   exception regions, but it could be extended to check additional
   conditions as well.  */

int
insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
{
  int eh_region_count = 0;
  int past_to_p = 0;
  rtx r = from;

  /* By default, assume the end of the region will be what was
     suggested.  */
  if (new_to)
    *new_to = to;

  while (r)
    {
      if (NOTE_P (r))
	{
	  switch (NOTE_LINE_NUMBER (r))
	    {
	    case NOTE_INSN_EH_REGION_BEG:
	      ++eh_region_count;
	      break;

	    case NOTE_INSN_EH_REGION_END:
	      if (eh_region_count == 0)
		/* This sequence of instructions contains the end of
		   an exception region, but not the beginning.  Moving
		   it will cause chaos.  */
		return 0;

	      --eh_region_count;
	      break;

	    default:
	      break;
	    }
	}
      else if (past_to_p)
	/* If we've passed TO, and we see a non-note instruction, we
	   can't extend the sequence to a movable sequence.  */
	return 0;

      if (r == to)
	{
	  if (!new_to)
	    /* It's OK to move the sequence if there were matched sets of
	       exception region notes.  */
	    return eh_region_count == 0;

	  past_to_p = 1;
	}

      /* It's OK to move the sequence if there were matched sets of
	 exception region notes.  */
      if (past_to_p && eh_region_count == 0)
	{
	  *new_to = r;
	  return 1;
	}

      /* Go to the next instruction.  */
      r = NEXT_INSN (r);
    }

  return 0;
}
/* Return nonzero if IN contains a piece of rtl that has the address LOC.  */

int
loc_mentioned_in_p (rtx *loc, rtx in)
{
  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (loc == &in->u.fld[i].rt_rtx)
	return 1;
      if (fmt[i] == 'e')
	{
	  if (loc_mentioned_in_p (loc, XEXP (in, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	  if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
	    return 1;
    }

  return 0;
}
/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
   and SUBREG_BYTE, return the bit offset where the subreg begins
   (counting from the least significant bit of the operand).  */

unsigned int
subreg_lsb_1 (enum machine_mode outer_mode,
	      enum machine_mode inner_mode,
	      unsigned int subreg_byte)
{
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
		   + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
		  && (subreg_byte % UNITS_PER_WORD
		      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}
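/* For illustration (hypothetical targets): on a little-endian 32-bit
   target with UNITS_PER_WORD == 4, (subreg:SI (reg:DI R) 4) starts at bit
   32 of R, so subreg_lsb_1 (SImode, DImode, 4) == 32.  On a fully
   big-endian 32-bit target the same byte offset selects the low-order
   word, so subreg_lsb_1 (SImode, DImode, 4) == 0 there.  */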
/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (rtx x)
{
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
		       SUBREG_BYTE (x));
}
/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */
unsigned int
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
		     unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode, nregs_xmode_unit_int;
  int mode_multiple, nregs_multiple;
  int y_offset;
  enum machine_mode xmode_unit, xmode_unit_int;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  if (GET_MODE_INNER (xmode) == VOIDmode)
    xmode_unit = xmode;
  else
    xmode_unit = GET_MODE_INNER (xmode);

  if (FLOAT_MODE_P (xmode_unit))
    {
      xmode_unit_int = int_mode_for_mode (xmode_unit);
      if (xmode_unit_int == BLKmode)
	/* It's probably bad to be here; a port should have an integer mode
	   that's the same size as anything of which it takes a SUBREG.  */
	xmode_unit_int = xmode_unit;
    }
  else
    xmode_unit_int = xmode_unit;

  nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];

  /* Adjust nregs_xmode to allow for 'holes'.  */
  if (nregs_xmode_unit_int != hard_regno_nregs[xregno][xmode_unit])
    nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
  else
    nregs_xmode = hard_regno_nregs[xregno][xmode];

  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;

  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}
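/* Worked example (hypothetical target values): on a 32-bit target where
   hard_regno_nregs[R][DImode] == 2 and hard_regno_nregs[R][SImode] == 1,
   subreg_regno_offset (R, DImode, 4, SImode) computes
   mode_multiple = 8/4 = 2, y_offset = 4/4 = 1, nregs_multiple = 2/1 = 2,
   and returns (1 / (2/2)) * 1 == 1, i.e. the second of the two hard
   registers backing the DImode value.  */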
/* This function returns true when the offset is representable via
   subreg_offset in the given regno.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - Whether the offset is representable.  */
bool
subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
			       unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode, nregs_xmode_unit, nregs_xmode_unit_int;
  int mode_multiple, nregs_multiple;
  int y_offset;
  enum machine_mode xmode_unit, xmode_unit_int;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  if (GET_MODE_INNER (xmode) == VOIDmode)
    xmode_unit = xmode;
  else
    xmode_unit = GET_MODE_INNER (xmode);

  if (FLOAT_MODE_P (xmode_unit))
    {
      xmode_unit_int = int_mode_for_mode (xmode_unit);
      if (xmode_unit_int == BLKmode)
	/* It's probably bad to be here; a port should have an integer mode
	   that's the same size as anything of which it takes a SUBREG.  */
	xmode_unit_int = xmode_unit;
    }
  else
    xmode_unit_int = xmode_unit;

  nregs_xmode_unit = hard_regno_nregs[xregno][xmode_unit];
  nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];

  /* If there are holes in a non-scalar mode in registers, we expect
     that it is made up of its units concatenated together.  */
  if (nregs_xmode_unit != nregs_xmode_unit_int)
    gcc_assert (nregs_xmode_unit * GET_MODE_NUNITS (xmode)
		== hard_regno_nregs[xregno][xmode]);

  /* You can only ask for a SUBREG of a value with holes in the middle
     if you don't cross the holes.  (Such a SUBREG should be done by
     picking a different register class, or doing it in memory if
     necessary.)  An example of a value with holes is XCmode on 32-bit
     x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
     3 for each part, but in memory it's two 128-bit parts.
     Padding is assumed to be at the end (not necessarily the 'high part')
     of each unit.  */
  if (nregs_xmode_unit != nregs_xmode_unit_int
      && (offset / GET_MODE_SIZE (xmode_unit_int) + 1
	  < GET_MODE_NUNITS (xmode))
      && (offset / GET_MODE_SIZE (xmode_unit_int)
	  != ((offset + GET_MODE_SIZE (ymode) - 1)
	      / GET_MODE_SIZE (xmode_unit_int))))
    return false;

  /* Adjust nregs_xmode to allow for 'holes'.  */
  if (nregs_xmode_unit != nregs_xmode_unit_int)
    nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
  else
    nregs_xmode = hard_regno_nregs[xregno][xmode];

  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* Paradoxical subregs are otherwise valid.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return true;

  /* Lowpart subregs are otherwise valid.  */
  if (offset == subreg_lowpart_offset (ymode, xmode))
    return true;

  /* This should always pass, otherwise we don't know how to verify
     the constraint.  These conditions may be relaxed but
     subreg_regno_offset would need to be redesigned.  */
  gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((nregs_xmode % nregs_ymode) == 0);

  /* The XMODE value can be seen as a vector of NREGS_XMODE
     values.  The subreg must represent a lowpart of a given field.
     Compute what field it is.  */
  offset -= subreg_lowpart_offset (ymode,
				   mode_for_size (GET_MODE_BITSIZE (xmode)
						  / nregs_xmode,
						  MODE_INT, 0));

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;

  gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
  gcc_assert ((mode_multiple % nregs_multiple) == 0);

  return (!(y_offset % (mode_multiple / nregs_multiple)));
}
/* Return the final regno that a subreg expression refers to.  */
unsigned int
subreg_regno (rtx x)
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
				     GET_MODE (subreg),
				     SUBREG_BYTE (x),
				     GET_MODE (x));
  return ret;
}

struct parms_set_data
{
  int nregs;
  HARD_REG_SET regs;
};

/* Helper function for noticing stores to parameter registers.  */
static void
parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  struct parms_set_data *d = data;
  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
      && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
    {
      CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
      d->nregs--;
    }
}
/* Look backward for first parameter to be loaded.
   Note that loads of all parameters will not necessarily be
   found if CSE has eliminated some of them (e.g., an argument
   to the outer function is passed down as a parameter).
   Do not skip BOUNDARY.  */
rtx
find_first_parameter_load (rtx call_insn, rtx boundary)
{
  struct parms_set_data parm;
  rtx p, before, first_set;

  /* Since different machines initialize their parameter registers
     in different orders, assume nothing.  Collect the set of all
     parameter registers.  */
  CLEAR_HARD_REG_SET (parm.regs);
  parm.nregs = 0;
  for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
    if (GET_CODE (XEXP (p, 0)) == USE
	&& REG_P (XEXP (XEXP (p, 0), 0)))
      {
	gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);

	/* We only care about registers which can hold function
	   arguments.  */
	if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
	  continue;

	SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
	parm.nregs++;
      }
  before = call_insn;
  first_set = call_insn;

  /* Search backward for the first set of a register in this set.  */
  while (parm.nregs && before != boundary)
    {
      before = PREV_INSN (before);

      /* It is possible that some loads got CSEed from one call to
	 another.  Stop in that case.  */
      if (CALL_P (before))
	break;

      /* Our caller must either ensure that we will find all sets
	 (in case code has not been optimized yet), or take care of
	 possible labels by setting BOUNDARY to the preceding
	 CODE_LABEL.  */
      if (LABEL_P (before))
	{
	  gcc_assert (before == boundary);
	  break;
	}

      if (INSN_P (before))
	{
	  int nregs_old = parm.nregs;
	  note_stores (PATTERN (before), parms_set, &parm);
	  /* If we found something that did not set a parameter reg,
	     we're done.  Do not keep going, as that might result
	     in hoisting an insn before the setting of a pseudo
	     that is used by the hoisted insn.  */
	  if (nregs_old != parm.nregs)
	    first_set = before;
	  else
	    break;
	}
    }
  return first_set;
}
/* Return true if we should avoid inserting code between INSN and preceding
   call instruction.  */

bool
keep_with_call_p (rtx insn)
{
  rtx set;

  if (INSN_P (insn) && (set = single_set (insn)) != NULL)
    {
      if (REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
	  && fixed_regs[REGNO (SET_DEST (set))]
	  && general_operand (SET_SRC (set), VOIDmode))
	return true;
      if (REG_P (SET_SRC (set))
	  && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
	  && REG_P (SET_DEST (set))
	  && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
	return true;
      /* There may be a stack pop just after the call and before the store
	 of the return register.  Search for the actual store when deciding
	 if we can break or not.  */
      if (SET_DEST (set) == stack_pointer_rtx)
	{
	  rtx i2 = next_nonnote_insn (insn);
	  if (i2 && keep_with_call_p (i2))
	    return true;
	}
    }
  return false;
}

/* Return true if LABEL is a target of JUMP_INSN.  This applies only
   to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
   not apply to the fallthru case of a conditional jump.  */

bool
label_is_jump_target_p (rtx label, rtx jump_insn)
{
  rtx tmp = JUMP_LABEL (jump_insn);

  if (label == tmp)
    return true;

  if (tablejump_p (jump_insn, NULL, &tmp))
    {
      rtvec vec = XVEC (PATTERN (tmp),
			GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
      int i, veclen = GET_NUM_ELEM (vec);

      for (i = 0; i < veclen; ++i)
	if (XEXP (RTVEC_ELT (vec, i), 0) == label)
	  return true;
    }

  return false;
}
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */

int
rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
{
  int i, j;
  enum rtx_code code;
  const char *fmt;
  int total;

  if (x == 0)
    return 0;

  /* Compute the default costs of certain things.
     Note that targetm.rtx_costs can override the defaults.  */

  code = GET_CODE (x);
  switch (code)
    {
    case MULT:
      total = COSTS_N_INSNS (5);
      break;
    case DIV:
    case UDIV:
    case MOD:
    case UMOD:
      total = COSTS_N_INSNS (7);
      break;
    case USE:
      /* Used in loop.c and combine.c as a marker.  */
      total = 0;
      break;
    default:
      total = COSTS_N_INSNS (1);
    }

  switch (code)
    {
    case REG:
      return 0;

    case SUBREG:
      total = 0;
      /* If we can't tie these modes, make this expensive.  The larger
	 the mode, the more expensive it is.  */
      if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
	return COSTS_N_INSNS (2
			      + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
      break;

    default:
      if (targetm.rtx_costs (x, code, outer_code, &total))
	return total;
      break;
    }

  /* Sum the costs of the sub-rtx's, plus cost of this operation,
     which is already in total.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      total += rtx_cost (XEXP (x, i), code);
    else if (fmt[i] == 'E')
      for (j = 0; j < XVECLEN (x, i); j++)
	total += rtx_cost (XVECEXP (x, i, j), code);

  return total;
}
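/* For example (hypothetical operands, and assuming the target's rtx_costs
   hook declines to override the defaults): rtx_cost of
   (mult:SI (reg:SI A) (reg:SI B)) is COSTS_N_INSNS (5), since the MULT
   default is five insns and each REG operand contributes zero.  */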
/* Return cost of address expression X.
   Expect that X is a properly formed address reference.  */

int
address_cost (rtx x, enum machine_mode mode)
{
  /* We may be asked for cost of various unusual addresses, such as operands
     of push instruction.  It is not worthwhile to complicate writing
     of the target hook by such cases.  */

  if (!memory_address_p (mode, x))
    return 1000;

  return targetm.address_cost (x);
}

/* If the target doesn't override, compute the cost as with arithmetic.  */

int
default_address_cost (rtx x)
{
  return rtx_cost (x, MEM);
}

unsigned HOST_WIDE_INT
nonzero_bits (rtx x, enum machine_mode mode)
{
  return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
}

unsigned int
num_sign_bit_copies (rtx x, enum machine_mode mode)
{
  return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
}
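/* For illustration (hypothetical operands): with nothing known about
   REG R, nonzero_bits ((and:SI R (const_int 255)), SImode) is 0xff,
   because the CONST_INT operand contributes only its own bits and the
   AND case below intersects the nonzero bits of both operands.  */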
/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
   It avoids exponential behavior in nonzero_bits1 when X has
   identical subexpressions on the first or the second level.  */

static unsigned HOST_WIDE_INT
cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
		     enum machine_mode known_mode,
		     unsigned HOST_WIDE_INT known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     nonzero_bits1 on X with the subexpressions as KNOWN_X and the
     precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
	return nonzero_bits1 (x, mode, x0, mode,
			      cached_nonzero_bits (x0, mode, known_x,
						   known_mode, known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
	return nonzero_bits1 (x, mode, x1, mode,
			      cached_nonzero_bits (x1, mode, known_x,
						   known_mode, known_ret));

      if (ARITHMETIC_P (x1)
	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
	return nonzero_bits1 (x, mode, x0, mode,
			      cached_nonzero_bits (x0, mode, known_x,
						   known_mode, known_ret));
    }

  return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
}
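/* For example, in (plus:SI (reg:SI R) (reg:SI R)) both operands are the
   same rtx, so the x0 == x1 check above computes the operand's nonzero
   bits once and hands the result down as KNOWN_RET instead of recursing
   into the identical subexpression twice.  */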
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
/* Given an expression, X, compute which bits in X can be nonzero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
   an arithmetic operation, we can do better.  */

static unsigned HOST_WIDE_INT
nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
	       enum machine_mode known_mode,
	       unsigned HOST_WIDE_INT known_ret)
{
  unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
  unsigned HOST_WIDE_INT inner_nz;
  enum rtx_code code;
  unsigned int mode_width = GET_MODE_BITSIZE (mode);

  /* For floating-point values, assume all bits are needed.  */
  if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
    return nonzero;

  /* If X is wider than MODE, use its mode instead.  */
  if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
    {
      mode = GET_MODE (x);
      nonzero = GET_MODE_MASK (mode);
      mode_width = GET_MODE_BITSIZE (mode);
    }

  if (mode_width > HOST_BITS_PER_WIDE_INT)
    /* Our only callers in this case look for single bit values.  So
       just return the mode mask.  Those tests will then be false.  */
    return nonzero;

#ifndef WORD_REGISTER_OPERATIONS
  /* If MODE is wider than X, but both are a single word for both the host
     and target machines, we can compute this from which bits of the
     object might be nonzero in its own mode, taking into account the fact
     that on many CISC machines, accessing an object in a wider mode
     causes the high-order bits to become undefined.  So they are
     not known to be zero.  */

  if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
      && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
      && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
      && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
    {
      nonzero &= cached_nonzero_bits (x, GET_MODE (x),
				      known_x, known_mode, known_ret);
      nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
      return nonzero;
    }
#endif

  code = GET_CODE (x);
  switch (code)
    {
    case REG:
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend unsigned and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be zero.  */
      if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
	  && REG_POINTER (x))
	nonzero &= GET_MODE_MASK (ptr_mode);
#endif

      /* Include declared information about alignment of pointers.  */
      /* ??? We don't properly preserve REG_POINTER changes across
	 pointer-to-integer casts, so we can't trust it except for
	 things that we know must be pointers.  See execute/960116-1.c.  */
      if ((x == stack_pointer_rtx
	   || x == frame_pointer_rtx
	   || x == arg_pointer_rtx)
	  && REGNO_POINTER_ALIGN (REGNO (x)))
	{
	  unsigned HOST_WIDE_INT alignment
	    = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;

#ifdef PUSH_ROUNDING
	  /* If PUSH_ROUNDING is defined, it is possible for the
	     stack to be momentarily aligned only to that amount,
	     so we pick the least alignment.  */
	  if (x == stack_pointer_rtx && PUSH_ARGS)
	    alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
			     alignment);
#endif

	  nonzero &= ~(alignment - 1);
	}

      {
	unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
	rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
					      known_mode, known_ret,
					      &nonzero_for_hook);

	if (new)
	  nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
						   known_mode, known_ret);

	return nonzero_for_hook;
      }

    case CONST_INT:
#ifdef SHORT_IMMEDIATES_SIGN_EXTEND
      /* If X is negative in MODE, sign-extend the value.  */
      if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
	  && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
	return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
#endif

      return INTVAL (x);

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* In many, if not most, RISC machines, reading a byte from memory
	 zeros the rest of the register.  Noticing that fact saves a lot
	 of extra zero-extends.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
	nonzero &= GET_MODE_MASK (GET_MODE (x));
#endif
      break;

    case EQ:  case NE:
    case UNEQ:  case LTGT:
    case GT:  case GTU:  case UNGT:
    case LT:  case LTU:  case UNLT:
    case GE:  case GEU:  case UNGE:
    case LE:  case LEU:  case UNLE:
    case UNORDERED: case ORDERED:
      /* If this produces an integer result, we know which bits are set.
	 Code here used to clear bits outside the mode of X, but that is
	 now done above.  */
      /* Mind that MODE is the mode the caller wants to look at this
	 operation in, and not the actual operation mode.  We can wind
	 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
	 that describes the results of a vector compare.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
	  && mode_width <= HOST_BITS_PER_WIDE_INT)
	nonzero = STORE_FLAG_VALUE;
      break;

    case NEG:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
	  == GET_MODE_BITSIZE (GET_MODE (x)))
	nonzero = 1;
#endif

      if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
	nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
      break;

    case ABS:
#if 0
      /* Disabled to avoid exponential mutual recursion between nonzero_bits
	 and num_sign_bit_copies.  */
      if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
	  == GET_MODE_BITSIZE (GET_MODE (x)))
	nonzero = 1;
#endif
      break;

    case TRUNCATE:
      nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
				       known_x, known_mode, known_ret)
		  & GET_MODE_MASK (mode));
      break;

    case ZERO_EXTEND:
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
				      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
      break;

    case SIGN_EXTEND:
      /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
	 Otherwise, show all the bits in the outer mode but not the inner
	 may be nonzero.  */
      inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
				      known_x, known_mode, known_ret);
      if (GET_MODE (XEXP (x, 0)) != VOIDmode)
	{
	  inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
	  if (inner_nz
	      & (((HOST_WIDE_INT) 1
		  << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
	    inner_nz |= (GET_MODE_MASK (mode)
			 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
	}

      nonzero &= inner_nz;
      break;

    case AND:
      nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
				      known_x, known_mode, known_ret)
		 & cached_nonzero_bits (XEXP (x, 1), mode,
					known_x, known_mode, known_ret);
      break;

    case XOR:   case IOR:
    case UMIN:  case UMAX:  case SMIN:  case SMAX:
      {
	unsigned HOST_WIDE_INT nonzero0 =
	  cached_nonzero_bits (XEXP (x, 0), mode,
			       known_x, known_mode, known_ret);

	/* Don't call nonzero_bits for the second time if it cannot change
	   anything.  */
	if ((nonzero & nonzero0) != nonzero)
	  nonzero &= nonzero0
		     | cached_nonzero_bits (XEXP (x, 1), mode,
					    known_x, known_mode, known_ret);
      }
      break;

    case PLUS:  case MINUS:
    case MULT:
    case DIV:   case UDIV:
    case MOD:   case UMOD:
      /* We can apply the rules of arithmetic to compute the number of
	 high- and low-order zero bits of these operations.  We start by
	 computing the width (position of the highest-order nonzero bit)
	 and the number of low-order zero bits for each value.  */
      {
	unsigned HOST_WIDE_INT nz0 =
	  cached_nonzero_bits (XEXP (x, 0), mode,
			       known_x, known_mode, known_ret);
	unsigned HOST_WIDE_INT nz1 =
	  cached_nonzero_bits (XEXP (x, 1), mode,
			       known_x, known_mode, known_ret);
	int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
	int width0 = floor_log2 (nz0) + 1;
	int width1 = floor_log2 (nz1) + 1;
	int low0 = floor_log2 (nz0 & -nz0);
	int low1 = floor_log2 (nz1 & -nz1);
	HOST_WIDE_INT op0_maybe_minusp
	  = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
	HOST_WIDE_INT op1_maybe_minusp
	  = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
	unsigned int result_width = mode_width;
	int result_low = 0;

	switch (code)
	  {
	  case PLUS:
	    result_width = MAX (width0, width1) + 1;
	    result_low = MIN (low0, low1);
	    break;
	  case MINUS:
	    result_low = MIN (low0, low1);
	    break;
	  case MULT:
	    result_width = width0 + width1;
	    result_low = low0 + low1;
	    break;
	  case DIV:
	    if (width1 == 0)
	      break;
	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
	      result_width = width0;
	    break;
	  case UDIV:
	    if (width1 == 0)
	      break;
	    result_width = width0;
	    break;
	  case MOD:
	    if (width1 == 0)
	      break;
	    if (! op0_maybe_minusp && ! op1_maybe_minusp)
	      result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  case UMOD:
	    if (width1 == 0)
	      break;
	    result_width = MIN (width0, width1);
	    result_low = MIN (low0, low1);
	    break;
	  default:
	    gcc_unreachable ();
	  }

	if (result_width < mode_width)
	  nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;

	if (result_low > 0)
	  nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
	/* If pointers extend unsigned and this is an addition or subtraction
	   to a pointer in Pmode, all the bits above ptr_mode are known to be
	   zero.  */
	if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
	    && (code == PLUS || code == MINUS)
	    && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
	  nonzero &= GET_MODE_MASK (ptr_mode);
#endif
      }
      break;

    case ZERO_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
      break;

    case SUBREG:
      /* If this is a SUBREG formed for a promoted variable that has
	 been zero-extended, we know that at least the high-order bits
	 are zero, though others might be too.  */

      if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
	nonzero = GET_MODE_MASK (GET_MODE (x))
		  & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
					 known_x, known_mode, known_ret);

      /* If the inner mode is a single word for both the host and target
	 machines, we can compute this from which bits of the inner
	 object might be nonzero.  */
      if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
	  && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
	      <= HOST_BITS_PER_WIDE_INT))
	{
	  nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
					  known_x, known_mode, known_ret);

#if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
	  /* If this is a typical RISC machine, we only have to worry
	     about the way loads are extended.  */
	  if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
	       ? ((nonzero
		   & ((unsigned HOST_WIDE_INT) 1
		      << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1)))
		  != 0)
	       : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
	      || !MEM_P (SUBREG_REG (x)))
#endif
	    {
	      /* On many CISC machines, accessing an object in a wider mode
		 causes the high-order bits to become undefined.  So they are
		 not known to be zero.  */
	      if (GET_MODE_SIZE (GET_MODE (x))
		  > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
		nonzero |= (GET_MODE_MASK (GET_MODE (x))
			    & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
	    }
	}
      break;

    case ASHIFTRT:
    case LSHIFTRT:
    case ASHIFT:
    case ROTATE:
      /* The nonzero bits are in two classes: any bits within MODE
	 that aren't in GET_MODE (x) are always significant.  The rest of the
	 nonzero bits are those that are significant in the operand of
	 the shift when shifted the appropriate number of bits.  This
	 shows that high-order bits are cleared by the right shift and
	 low-order bits by left shifts.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
	{
	  enum machine_mode inner_mode = GET_MODE (x);
	  unsigned int width = GET_MODE_BITSIZE (inner_mode);
	  int count = INTVAL (XEXP (x, 1));
	  unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
	  unsigned HOST_WIDE_INT op_nonzero =
	    cached_nonzero_bits (XEXP (x, 0), mode,
				 known_x, known_mode, known_ret);
	  unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
	  unsigned HOST_WIDE_INT outer = 0;

	  if (mode_width > width)
	    outer = (op_nonzero & nonzero & ~mode_mask);

	  if (code == LSHIFTRT)
	    inner >>= count;
	  else if (code == ASHIFTRT)
	    {
	      inner >>= count;

	      /* If the sign bit may have been nonzero before the shift, we
		 need to mark all the places it could have been copied to
		 by the shift as possibly nonzero.  */
	      if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
		inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
	    }
	  else if (code == ASHIFT)
	    inner <<= count;
	  else
	    inner = ((inner << (count % width)
		      | (inner >> (width - (count % width)))) & mode_mask);

	  nonzero &= (outer | inner);
	}
      break;

    case FFS:
    case POPCOUNT:
      /* This is at most the number of bits in the mode.  */
      nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
      break;

    case CLZ:
      /* If CLZ has a known value at zero, then the nonzero bits are
	 that value, plus the number of bits in the mode minus one.  */
      if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
	nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
      else
	nonzero = -1;
      break;

    case CTZ:
      /* If CTZ has a known value at zero, then the nonzero bits are
	 that value, plus the number of bits in the mode minus one.  */
      if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
	nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
      else
	nonzero = -1;
      break;

    case PARITY:
      nonzero = 1;
      break;

    case IF_THEN_ELSE:
      {
	unsigned HOST_WIDE_INT nonzero_true =
	  cached_nonzero_bits (XEXP (x, 1), mode,
			       known_x, known_mode, known_ret);

	/* Don't call nonzero_bits for the second time if it cannot change
	   anything.  */
	if ((nonzero & nonzero_true) != nonzero)
	  nonzero &= nonzero_true
		     | cached_nonzero_bits (XEXP (x, 2), mode,
					    known_x, known_mode, known_ret);
      }
      break;

    default:
      break;
    }

  return nonzero;
}
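/* For illustration (hypothetical operand): by the shift handling above,
   nonzero_bits ((lshiftrt:SI (reg:SI R) (const_int 28)), SImode) is 0xf:
   whatever R holds, only the four low-order bits of the result can be
   nonzero after the right shift.  */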
/* See the macro definition above.  */
#undef cached_num_sign_bit_copies

/* The function cached_num_sign_bit_copies is a wrapper around
   num_sign_bit_copies1.  It avoids exponential behavior in
   num_sign_bit_copies1 when X has identical subexpressions on the
   first or the second level.  */

static unsigned int
cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
			    enum machine_mode known_mode,
			    unsigned int known_ret)
{
  if (x == known_x && mode == known_mode)
    return known_ret;

  /* Try to find identical subexpressions.  If found call
     num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
     the precomputed value for the subexpression as KNOWN_RET.  */

  if (ARITHMETIC_P (x))
    {
      rtx x0 = XEXP (x, 0);
      rtx x1 = XEXP (x, 1);

      /* Check the first level.  */
      if (x0 == x1)
	return
	  num_sign_bit_copies1 (x, mode, x0, mode,
				cached_num_sign_bit_copies (x0, mode, known_x,
							    known_mode,
							    known_ret));

      /* Check the second level.  */
      if (ARITHMETIC_P (x0)
	  && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
	return
	  num_sign_bit_copies1 (x, mode, x1, mode,
				cached_num_sign_bit_copies (x1, mode, known_x,
							    known_mode,
							    known_ret));

      if (ARITHMETIC_P (x1)
	  && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
	return
	  num_sign_bit_copies1 (x, mode, x0, mode,
				cached_num_sign_bit_copies (x0, mode, known_x,
							    known_mode,
							    known_ret));
    }

  return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
}

/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static unsigned int
num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
		      enum machine_mode known_mode,
		      unsigned int known_ret)
{
  enum rtx_code code = GET_CODE (x);
  unsigned int bitwidth = GET_MODE_BITSIZE (mode);
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
    return 1;

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
    {
      num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
					 known_x, known_mode, known_ret);
      return MAX (1,
		  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
    }

  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* If this machine does not do all register operations on the entire
	 register and MODE is wider than the mode of X, we can say nothing
	 at all about the high-order bits.  */
      return 1;
#else
      /* Likewise on machines that do, if the mode of the object is smaller
	 than a word and loads of that size don't sign extend, we can say
	 nothing about the high order bits.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP
	  && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif
	  )
	return 1;
#endif
    }

  switch (code)
    {
    case REG:

#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend signed and this is a pointer in Pmode, say that
	 all the bits above ptr_mode are known to be sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
	  && REG_POINTER (x))
	return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
#endif

      {
	unsigned int copies_for_hook = 1, copies = 1;
	rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
						     known_mode, known_ret,
						     &copies_for_hook);

	if (new)
	  copies = cached_num_sign_bit_copies (new, mode, known_x,
					       known_mode, known_ret);

	if (copies > 1 || copies_for_hook > 1)
	  return MAX (copies, copies_for_hook);

	/* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
      }
      break;

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
	return MAX (1, ((int) bitwidth
			- (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
#endif
      break;

    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
	 and we are looking at it in a wider mode, we know that at least the
	 high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
	{
	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
					     known_x, known_mode, known_ret);
	  return MAX ((int) bitwidth
		      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
		      num0);
	}

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
	{
	  num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
					     known_x, known_mode, known_ret);
	  return MAX (1, (num0
			  - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
				   - bitwidth)));
	}

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
	 affect the entire register, just look inside.  Note that we are
	 passing MODE to the recursive call, so the number of sign bit copies
	 will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
	 reload for the inner part, it may be loaded from the stack, and
	 then we lose all sign bit copies that existed before the store
	 to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
	   > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
	  && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
	  && MEM_P (SUBREG_REG (x)))
	return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
					   known_x, known_mode, known_ret);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
	      + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
					    known_x, known_mode, known_ret));

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
					 known_x, known_mode, known_ret);
      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
				    - bitwidth)));

    case NOT:
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);

    case ROTATE:       case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
	 of sign bit copies, we can just subtract that amount from the
	 number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) >= 0
	  && INTVAL (XEXP (x, 1)) < (int) bitwidth)
	{
	  num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					     known_x, known_mode, known_ret);
	  return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
				 : (int) bitwidth - INTVAL (XEXP (x, 1))));
	}
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
	 is known to be positive, the number of sign bit copies is the
	 same as that of the input.  Finally, if the input has just one bit
	 that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
	return bitwidth;

      if (num0 > 1
	  && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
	num0--;

      return num0;

    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
	 MIN and MAX operations always return one of the operands.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
	 if we are subtracting 1 from a positive number, there will not
	 be such a carry.  Furthermore, if the positive number is known to
	 be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
	  && bitwidth <= HOST_BITS_PER_WIDE_INT)
	{
	  nonzero = nonzero_bits (XEXP (x, 0), mode);
	  if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
	    return (nonzero == 1 || nonzero == 0 ? bitwidth
		    : bitwidth - floor_log2 (nonzero) - 1);
	}

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);
      result = MAX (1, MIN (num0, num1) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is an addition or subtraction
	 to a pointer in Pmode, all the bits above ptr_mode are known to be
	 sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
	  && (code == PLUS || code == MINUS)
	  && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
	result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
			     - GET_MODE_BITSIZE (ptr_mode) + 1),
		      result);
#endif
      return result;

    case MULT:
      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms is known
	 to be positive, we must allow for an additional bit since negating
	 a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (((nonzero_bits (XEXP (x, 0), mode)
		    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
		  && ((nonzero_bits (XEXP (x, 1), mode)
		       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
	result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
	 has the high bit set, we know nothing about the number of sign
	 bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
	return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
		& ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	return 1;
      else
	return cached_num_sign_bit_copies (XEXP (x, 0), mode,
					   known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  */
      return cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
	 the case where the divisor is negative, in which case we have
	 to add 1.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					   known_x, known_mode, known_ret);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return result;

    case MOD:
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					   known_x, known_mode, known_ret);
      if (result > 1
	  && (bitwidth > HOST_BITS_PER_WIDE_INT
	      || (nonzero_bits (XEXP (x, 1), mode)
		  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
	result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
	 sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
	  && INTVAL (XEXP (x, 1)) > 0)
	num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
	  || INTVAL (XEXP (x, 1)) < 0
	  || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
	return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
					 known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
					 known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
					 known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* If the constant is negative, take its 1's complement and remask.
	 Then see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
	  && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
	nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
	 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
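/* For illustration (hypothetical operand): by the SIGN_EXTEND case above,
   num_sign_bit_copies ((sign_extend:DI (reg:SI R)), DImode) is at least
   64 - 32 + 1 == 33 when nothing else is known about R, since the high
   33 bits of the extension all equal the sign bit of R.  */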
/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
	{
	  rtx x = XVECEXP (pat, 0, i);
	  if (GET_CODE (x) == SET)
	    {
	      if (set)
		return 0;
	      set = x;
	    }
	}
      if (!set)
	return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}
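/* For example, for a pattern (set (reg:SI D) (mult:SI (reg:SI A) (reg:SI B)))
   on a target whose rtx_costs hook declines to override the defaults,
   insn_rtx_cost returns rtx_cost of the MULT source, i.e. COSTS_N_INSNS (5)
   (hypothetical operands; the exact value is target-dependent).  */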
/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands; (cc0) will have been replaced.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to canonicalizing
   it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
   and at INSN.  */

rtx
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
			rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx prev = insn;
  rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  enum machine_mode mode;

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.c  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
	  || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
	 && op1 == CONST0_RTX (GET_MODE (op0))
	 && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

#ifdef HAVE_cc0
      /* If comparison with cc0, import actual comparison from compare
	 insn.  */
      if (op0 == cc0_rtx)
	{
	  if ((prev = prev_nonnote_insn (prev)) == 0
	      || !NONJUMP_INSN_P (prev)
	      || (set = single_set (prev)) == 0
	      || SET_DEST (set) != cc0_rtx)
	    return 0;

	  op0 = SET_SRC (set);
	  op1 = CONST0_RTX (GET_MODE (op0));
	  if (earliest)
	    *earliest = prev;
	}
#endif

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
	{
	  op1 = XEXP (op0, 1);
	  op0 = XEXP (op0, 0);
	  continue;
	}
      else if (!REG_P (op0))
	break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
	 stop if it isn't a single set or if it has a REG_INC note because
	 we don't want to bother dealing with it.  */

      if ((prev = prev_nonnote_insn (prev)) == 0
	  || !NONJUMP_INSN_P (prev)
	  || FIND_REG_INC_NOTE (prev, NULL_RTX))
	break;

      set = set_of (op0, prev);

      if (set
	  && (GET_CODE (set) != SET
	      || !rtx_equal_p (SET_DEST (set), op0)))
	break;

      /* If this is setting OP0, get what it sets it to if it looks
	 relevant.  */
      if (set)
	{
	  enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
	  REAL_VALUE_TYPE fsfv;
#endif

	  /* ??? We may not combine comparisons done in a CCmode with
	     comparisons not done in a CCmode.  This is to aid targets
	     like Alpha that have an IEEE compliant EQ instruction, and
	     a non-IEEE compliant BEQ instruction.  The use of CCmode is
	     actually artificial, simply to prevent the combination, but
	     should not affect other platforms.

	     However, we must allow VOIDmode comparisons to match either
	     CCmode or non-CCmode comparison, because some ports have
	     modeless comparisons inside branch patterns.

	     ??? This mode check should perhaps look more like the mode check
	     in simplify_comparison in combine.  */

	  if ((GET_CODE (SET_SRC (set)) == COMPARE
	       || (((code == NE
		     || (code == LT
			 && GET_MODE_CLASS (inner_mode) == MODE_INT
			 && (GET_MODE_BITSIZE (inner_mode)
			     <= HOST_BITS_PER_WIDE_INT)
			 && (STORE_FLAG_VALUE
			     & ((HOST_WIDE_INT) 1
				<< (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		     || (code == LT
			 && SCALAR_FLOAT_MODE_P (inner_mode)
			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
			     REAL_VALUE_NEGATIVE (fsfv)))
#endif
		     ))
		   && COMPARISON_P (SET_SRC (set))))
	      && (((GET_MODE_CLASS (mode) == MODE_CC)
		   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
		  || mode == VOIDmode || inner_mode == VOIDmode))
	    x = SET_SRC (set);
	  else if (((code == EQ
		     || (code == GE
			 && (GET_MODE_BITSIZE (inner_mode)
			     <= HOST_BITS_PER_WIDE_INT)
			 && GET_MODE_CLASS (inner_mode) == MODE_INT
			 && (STORE_FLAG_VALUE
			     & ((HOST_WIDE_INT) 1
				<< (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
		     || (code == GE
			 && SCALAR_FLOAT_MODE_P (inner_mode)
			 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
			     REAL_VALUE_NEGATIVE (fsfv)))
#endif
		     ))
		   && COMPARISON_P (SET_SRC (set))
		   && (((GET_MODE_CLASS (mode) == MODE_CC)
			== (GET_MODE_CLASS (inner_mode) == MODE_CC))
		       || mode == VOIDmode || inner_mode == VOIDmode))
	    {
	      reverse_code = 1;
	      x = SET_SRC (set);
	    }
	  else
	    break;
	}

      else if (reg_set_p (op0, prev))
	/* If this sets OP0, but not directly, we have to give up.  */
	break;

      if (x)
	{
	  /* If the caller is expecting the condition to be valid at INSN,
	     make sure X doesn't change before INSN.  */
	  if (valid_at_insn_p)
	    if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
	      break;
	  if (COMPARISON_P (x))
	    code = GET_CODE (x);
	  if (reverse_code)
	    {
	      code = reversed_comparison_code (x, prev);
	      if (code == UNKNOWN)
		return 0;
	      reverse_code = 0;
	    }

	  op0 = XEXP (x, 0), op1 = XEXP (x, 1);
	  if (earliest)
	    *earliest = prev;
	}
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && GET_CODE (op1) == CONST_INT
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
	= (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
	{
	case LE:
	  if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
	    code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
	  break;

	/* When cross-compiling, const_val might be sign-extended from
	   BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
	case GE:
	  if ((HOST_WIDE_INT) (const_val & max_val)
	      != (((HOST_WIDE_INT) 1
		   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
	    code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
	  break;

	case LEU:
	  if (uconst_val < max_val)
	    code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
	  break;

	case GEU:
	  if (uconst_val != 0)
	    code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
	  break;

	default:
	  break;
	}
    }

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
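/* For example (hypothetical operands), a condition (le:SI (reg:SI R)
   (const_int 4)) is canonicalized by the switch above into
   (lt:SI (reg:SI R) (const_int 5)), and a constant first operand is
   always moved to the second position beforehand.  */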
/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
				 allow_cc_mode, valid_at_insn_p);
}

/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }
}