/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "coretypes.h"
#include "hard-reg-set.h"
#include "insn-config.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);
/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

rtx_unstable_p (rtx x)
  RTX_CODE code = GET_CODE (x);

      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)

      if (MEM_VOLATILE_P (x))

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      if (rtx_unstable_p (XEXP (x, i)))
    else if (fmt[i] == 'E')
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
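/* Usage sketch (illustrative, not part of the original file): a caller
   deciding whether a value loaded from memory can be reused at a later
   program point might combine the MEM's volatility with the stability
   of its address.  "mem" is a hypothetical (mem ...) rtx.

       int reusable = !MEM_VOLATILE_P (mem)
                      && !rtx_unstable_p (XEXP (mem, 0));
*/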
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

rtx_varies_p (rtx x, int for_alias)
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
  /* Note that we have to test for the actual rtx used for the frame
     and arg pointers and not just the register number in case we have
     eliminated the frame and/or arg pointer and are using it
     for pseudos.  */
175 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
176 /* The arg pointer varies if it is not a fixed register. */
177 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
179 if (x
== pic_offset_table_rtx
180 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
181 /* ??? When call-clobbered, the value is stable modulo the restore
182 that must happen after a call. This currently screws up
183 local-alloc into believing that the restore is not needed, so we
184 must return 0 only if we are called from alias analysis. */
192 /* The operand 0 of a LO_SUM is considered constant
193 (in fact it is related specifically to operand 1)
194 during alias analysis. */
195 return (! for_alias
&& rtx_varies_p (XEXP (x
, 0), for_alias
))
196 || rtx_varies_p (XEXP (x
, 1), for_alias
);
199 if (MEM_VOLATILE_P (x
))
208 fmt
= GET_RTX_FORMAT (code
);
209 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
212 if (rtx_varies_p (XEXP (x
, i
), for_alias
))
215 else if (fmt
[i
] == 'E')
218 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
219 if (rtx_varies_p (XVECEXP (x
, i
, j
), for_alias
))
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */
232 rtx_addr_can_trap_p_1 (rtx x
, enum machine_mode mode
, bool unaligned_mems
)
234 enum rtx_code code
= GET_CODE (x
);
239 return SYMBOL_REF_WEAK (x
);
245 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
246 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
247 || x
== stack_pointer_rtx
248 /* The arg pointer varies if it is not a fixed register. */
249 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
251 /* All of the virtual frame registers are stack references. */
252 if (REGNO (x
) >= FIRST_VIRTUAL_REGISTER
253 && REGNO (x
) <= LAST_VIRTUAL_REGISTER
)
258 return rtx_addr_can_trap_p_1 (XEXP (x
, 0), mode
, unaligned_mems
);
  /* An address is assumed not to trap if:
     - it is an address that can't trap plus a constant integer,
       with the proper remainder modulo the mode size if we are
       considering unaligned memory references.  */
265 if (!rtx_addr_can_trap_p_1 (XEXP (x
, 0), mode
, unaligned_mems
)
266 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
268 HOST_WIDE_INT offset
;
270 if (!STRICT_ALIGNMENT
272 || GET_MODE_SIZE (mode
) == 0)
275 offset
= INTVAL (XEXP (x
, 1));
277 #ifdef SPARC_STACK_BOUNDARY_HACK
278 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
279 the real alignment of %sp. However, when it does this, the
280 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
281 if (SPARC_STACK_BOUNDARY_HACK
282 && (XEXP (x
, 0) == stack_pointer_rtx
283 || XEXP (x
, 0) == hard_frame_pointer_rtx
))
284 offset
-= STACK_POINTER_OFFSET
;
287 return offset
% GET_MODE_SIZE (mode
) != 0;
290 /* - or it is the pic register plus a constant. */
291 if (XEXP (x
, 0) == pic_offset_table_rtx
&& CONSTANT_P (XEXP (x
, 1)))
298 return rtx_addr_can_trap_p_1 (XEXP (x
, 1), mode
, unaligned_mems
);
305 return rtx_addr_can_trap_p_1 (XEXP (x
, 0), mode
, unaligned_mems
);
  /* If it isn't one of the cases above, it can cause a trap.  */
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

rtx_addr_can_trap_p (rtx x)
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
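/* Usage sketch (illustrative, not part of the original file): a pass
   that wants to evaluate a load speculatively could require that its
   address cannot trap.  "mem" is a hypothetical MEM rtx.

       if (MEM_P (mem)
           && !MEM_VOLATILE_P (mem)
           && !rtx_addr_can_trap_p (XEXP (mem, 0)))
         speculation_is_safe = 1;
*/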
323 /* Return true if X is an address that is known to not be zero. */
326 nonzero_address_p (rtx x
)
328 enum rtx_code code
= GET_CODE (x
);
333 return !SYMBOL_REF_WEAK (x
);
339 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
340 if (x
== frame_pointer_rtx
|| x
== hard_frame_pointer_rtx
341 || x
== stack_pointer_rtx
342 || (x
== arg_pointer_rtx
&& fixed_regs
[ARG_POINTER_REGNUM
]))
344 /* All of the virtual frame registers are stack references. */
345 if (REGNO (x
) >= FIRST_VIRTUAL_REGISTER
346 && REGNO (x
) <= LAST_VIRTUAL_REGISTER
)
351 return nonzero_address_p (XEXP (x
, 0));
354 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
356 /* Pointers aren't allowed to wrap. If we've got a register
357 that is known to be a pointer, and a positive offset, then
358 the composite can't be zero. */
359 if (INTVAL (XEXP (x
, 1)) > 0
360 && REG_P (XEXP (x
, 0))
361 && REG_POINTER (XEXP (x
, 0)))
364 return nonzero_address_p (XEXP (x
, 0));
366 /* Handle PIC references. */
367 else if (XEXP (x
, 0) == pic_offset_table_rtx
368 && CONSTANT_P (XEXP (x
, 1)))
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
376 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
377 && INTVAL (XEXP (x
, 1)) > 0)
379 return nonzero_address_p (XEXP (x
, 0));
382 /* Similarly. Further, the offset is always positive. */
389 return nonzero_address_p (XEXP (x
, 0));
392 return nonzero_address_p (XEXP (x
, 1));
  /* If it isn't one of the cases above, it might be zero.  */
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */
409 rtx_addr_varies_p (rtx x
, int for_alias
)
420 return GET_MODE (x
) == BLKmode
|| rtx_varies_p (XEXP (x
, 0), for_alias
);
422 fmt
= GET_RTX_FORMAT (code
);
423 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
426 if (rtx_addr_varies_p (XEXP (x
, i
), for_alias
))
429 else if (fmt
[i
] == 'E')
432 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
433 if (rtx_addr_varies_p (XVECEXP (x
, i
, j
), for_alias
))
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */
445 get_integer_term (rtx x
)
447 if (GET_CODE (x
) == CONST
)
450 if (GET_CODE (x
) == MINUS
451 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
452 return - INTVAL (XEXP (x
, 1));
453 if (GET_CODE (x
) == PLUS
454 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
455 return INTVAL (XEXP (x
, 1));
/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */
464 get_related_value (rtx x
)
466 if (GET_CODE (x
) != CONST
)
469 if (GET_CODE (x
) == PLUS
470 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
472 else if (GET_CODE (x
) == MINUS
473 && GET_CODE (XEXP (x
, 1)) == CONST_INT
)
478 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
479 a global register. */
482 global_reg_mentioned_p_1 (rtx
*loc
, void *data ATTRIBUTE_UNUSED
)
490 switch (GET_CODE (x
))
493 if (REG_P (SUBREG_REG (x
)))
495 if (REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
496 && global_regs
[subreg_regno (x
)])
504 if (regno
< FIRST_PSEUDO_REGISTER
&& global_regs
[regno
])
518 /* A non-constant call might use a global register. */
528 /* Returns nonzero if X mentions a global register. */
531 global_reg_mentioned_p (rtx x
)
537 if (! CONST_OR_PURE_CALL_P (x
))
539 x
= CALL_INSN_FUNCTION_USAGE (x
);
547 return for_each_rtx (&x
, global_reg_mentioned_p_1
, NULL
);
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */
554 count_occurrences (rtx x
, rtx find
, int count_dest
)
558 const char *format_ptr
;
579 if (MEM_P (find
) && rtx_equal_p (x
, find
))
584 if (SET_DEST (x
) == find
&& ! count_dest
)
585 return count_occurrences (SET_SRC (x
), find
, count_dest
);
592 format_ptr
= GET_RTX_FORMAT (code
);
595 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
597 switch (*format_ptr
++)
600 count
+= count_occurrences (XEXP (x
, i
), find
, count_dest
);
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
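/* Usage sketch (illustrative, not part of the original file): counting
   how many times a register is referenced by an insn's pattern while
   ignoring its uses as a store destination.  "insn" and "reg" are
   hypothetical.

       int uses = count_occurrences (PATTERN (insn), reg, 0);
*/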
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */
617 reg_mentioned_p (rtx reg
, rtx in
)
629 if (GET_CODE (in
) == LABEL_REF
)
630 return reg
== XEXP (in
, 0);
632 code
= GET_CODE (in
);
636 /* Compare registers by number. */
638 return REG_P (reg
) && REGNO (in
) == REGNO (reg
);
640 /* These codes have no constituent expressions
650 /* These are kept unique for a given value. */
657 if (GET_CODE (reg
) == code
&& rtx_equal_p (reg
, in
))
660 fmt
= GET_RTX_FORMAT (code
);
662 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
667 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; j
--)
668 if (reg_mentioned_p (reg
, XVECEXP (in
, i
, j
)))
    else if (fmt[i] == 'e'
             && reg_mentioned_p (reg, XEXP (in, i)))
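/* Usage sketch (illustrative, not part of the original file): testing
   whether a particular hard register is mentioned anywhere in an insn.
   RETURN_REGNO is a hypothetical hard register number.

       rtx ret_reg = gen_rtx_REG (SImode, RETURN_REGNO);
       if (reg_mentioned_p (ret_reg, PATTERN (insn)))
         uses_return_reg = 1;
*/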
678 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
679 no CODE_LABEL insn. */
682 no_labels_between_p (rtx beg
, rtx end
)
687 for (p
= NEXT_INSN (beg
); p
!= end
; p
= NEXT_INSN (p
))
693 /* Nonzero if register REG is used in an insn between
694 FROM_INSN and TO_INSN (exclusive of those two). */
697 reg_used_between_p (rtx reg
, rtx from_insn
, rtx to_insn
)
701 if (from_insn
== to_insn
)
704 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
706 && (reg_overlap_mentioned_p (reg
, PATTERN (insn
))
707 || (CALL_P (insn
) && find_reg_fusage (insn
, USE
, reg
))))
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */
717 reg_referenced_p (rtx x
, rtx body
)
721 switch (GET_CODE (body
))
724 if (reg_overlap_mentioned_p (x
, SET_SRC (body
)))
727 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
728 of a REG that occupies all of the REG, the insn references X if
729 it is mentioned in the destination. */
730 if (GET_CODE (SET_DEST (body
)) != CC0
731 && GET_CODE (SET_DEST (body
)) != PC
732 && !REG_P (SET_DEST (body
))
733 && ! (GET_CODE (SET_DEST (body
)) == SUBREG
734 && REG_P (SUBREG_REG (SET_DEST (body
)))
735 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body
))))
736 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)
737 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body
)))
738 + (UNITS_PER_WORD
- 1)) / UNITS_PER_WORD
)))
739 && reg_overlap_mentioned_p (x
, SET_DEST (body
)))
744 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
745 if (reg_overlap_mentioned_p (x
, ASM_OPERANDS_INPUT (body
, i
)))
752 return reg_overlap_mentioned_p (x
, body
);
755 return reg_overlap_mentioned_p (x
, TRAP_CONDITION (body
));
758 return reg_overlap_mentioned_p (x
, XEXP (body
, 0));
761 case UNSPEC_VOLATILE
:
762 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
763 if (reg_overlap_mentioned_p (x
, XVECEXP (body
, 0, i
)))
768 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
769 if (reg_referenced_p (x
, XVECEXP (body
, 0, i
)))
774 if (MEM_P (XEXP (body
, 0)))
775 if (reg_overlap_mentioned_p (x
, XEXP (XEXP (body
, 0), 0)))
780 if (reg_overlap_mentioned_p (x
, COND_EXEC_TEST (body
)))
782 return reg_referenced_p (x
, COND_EXEC_CODE (body
));
789 /* Nonzero if register REG is set or clobbered in an insn between
790 FROM_INSN and TO_INSN (exclusive of those two). */
793 reg_set_between_p (rtx reg
, rtx from_insn
, rtx to_insn
)
797 if (from_insn
== to_insn
)
800 for (insn
= NEXT_INSN (from_insn
); insn
!= to_insn
; insn
= NEXT_INSN (insn
))
801 if (INSN_P (insn
) && reg_set_p (reg
, insn
))
806 /* Internals of reg_set_between_p. */
808 reg_set_p (rtx reg
, rtx insn
)
810 /* We can be passed an insn or part of one. If we are passed an insn,
811 check if a side-effect of the insn clobbers REG. */
813 && (FIND_REG_INC_NOTE (insn
, reg
)
816 && REGNO (reg
) < FIRST_PSEUDO_REGISTER
817 && TEST_HARD_REG_BIT (regs_invalidated_by_call
,
820 || find_reg_fusage (insn
, CLOBBER
, reg
)))))
823 return set_of (reg
, insn
) != NULL_RTX
;
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */
831 modified_between_p (rtx x
, rtx start
, rtx end
)
833 enum rtx_code code
= GET_CODE (x
);
856 if (modified_between_p (XEXP (x
, 0), start
, end
))
858 if (MEM_READONLY_P (x
))
860 for (insn
= NEXT_INSN (start
); insn
!= end
; insn
= NEXT_INSN (insn
))
861 if (memory_modified_in_insn_p (x
, insn
))
867 return reg_set_between_p (x
, start
, end
);
873 fmt
= GET_RTX_FORMAT (code
);
874 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
876 if (fmt
[i
] == 'e' && modified_between_p (XEXP (x
, i
), start
, end
))
879 else if (fmt
[i
] == 'E')
880 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
881 if (modified_between_p (XVECEXP (x
, i
, j
), start
, end
))
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */
893 modified_in_p (rtx x
, rtx insn
)
895 enum rtx_code code
= GET_CODE (x
);
914 if (modified_in_p (XEXP (x
, 0), insn
))
916 if (MEM_READONLY_P (x
))
918 if (memory_modified_in_insn_p (x
, insn
))
924 return reg_set_p (x
, insn
);
930 fmt
= GET_RTX_FORMAT (code
);
931 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
933 if (fmt
[i
] == 'e' && modified_in_p (XEXP (x
, i
), insn
))
936 else if (fmt
[i
] == 'E')
937 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
938 if (modified_in_p (XVECEXP (x
, i
, j
), insn
))
945 /* Helper function for set_of. */
953 set_of_1 (rtx x
, rtx pat
, void *data1
)
955 struct set_of_data
*data
= (struct set_of_data
*) (data1
);
956 if (rtx_equal_p (x
, data
->pat
)
957 || (!MEM_P (x
) && reg_overlap_mentioned_p (data
->pat
, x
)))
/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
964 set_of (rtx pat
, rtx insn
)
966 struct set_of_data data
;
967 data
.found
= NULL_RTX
;
969 note_stores (INSN_P (insn
) ? PATTERN (insn
) : insn
, set_of_1
, &data
);
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */
978 single_set_2 (rtx insn
, rtx pat
)
981 int set_verified
= 1;
984 if (GET_CODE (pat
) == PARALLEL
)
986 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
988 rtx sub
= XVECEXP (pat
, 0, i
);
989 switch (GET_CODE (sub
))
          /* We can consider insns having multiple sets, where all
             but one are dead, as single set insns.  In the common case
             only one set is present in the pattern, so we want
             to avoid checking for REG_UNUSED notes unless necessary.

             When we reach a set the first time, we just expect it to be
             the single set we are looking for; only when more
             sets are found in the insn do we check them.  */
1006 if (find_reg_note (insn
, REG_UNUSED
, SET_DEST (set
))
1007 && !side_effects_p (set
))
            set = sub, set_verified = 0;
          else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                   || side_effects_p (sub))
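/* Usage sketch (illustrative, not part of the original file): most
   callers reach this code through the single_set wrapper and then
   inspect the SET it returns.  "insn" is a hypothetical insn.

       rtx set = single_set (insn);
       if (set != NULL_RTX
           && REG_P (SET_DEST (set))
           && !side_effects_p (SET_SRC (set)))
         is_simple_reg_set = 1;
*/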
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */
1031 multiple_sets (rtx insn
)
1036 /* INSN must be an insn. */
1037 if (! INSN_P (insn
))
1040 /* Only a PARALLEL can have multiple SETs. */
1041 if (GET_CODE (PATTERN (insn
)) == PARALLEL
)
1043 for (i
= 0, found
= 0; i
< XVECLEN (PATTERN (insn
), 0); i
++)
1044 if (GET_CODE (XVECEXP (PATTERN (insn
), 0, i
)) == SET
)
1046 /* If we have already found a SET, then return now. */
1054 /* Either zero or one SET. */
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */
1062 set_noop_p (rtx set
)
1064 rtx src
= SET_SRC (set
);
1065 rtx dst
= SET_DEST (set
);
1067 if (dst
== pc_rtx
&& src
== pc_rtx
)
1070 if (MEM_P (dst
) && MEM_P (src
))
1071 return rtx_equal_p (dst
, src
) && !side_effects_p (dst
);
1073 if (GET_CODE (dst
) == ZERO_EXTRACT
)
1074 return rtx_equal_p (XEXP (dst
, 0), src
)
1075 && ! BYTES_BIG_ENDIAN
&& XEXP (dst
, 2) == const0_rtx
1076 && !side_effects_p (src
);
1078 if (GET_CODE (dst
) == STRICT_LOW_PART
)
1079 dst
= XEXP (dst
, 0);
1081 if (GET_CODE (src
) == SUBREG
&& GET_CODE (dst
) == SUBREG
)
1083 if (SUBREG_BYTE (src
) != SUBREG_BYTE (dst
))
1085 src
= SUBREG_REG (src
);
1086 dst
= SUBREG_REG (dst
);
1089 return (REG_P (src
) && REG_P (dst
)
1090 && REGNO (src
) == REGNO (dst
));
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */
1097 noop_move_p (rtx insn
)
1099 rtx pat
= PATTERN (insn
);
1101 if (INSN_CODE (insn
) == NOOP_MOVE_INSN_CODE
)
1104 /* Insns carrying these notes are useful later on. */
1105 if (find_reg_note (insn
, REG_EQUAL
, NULL_RTX
))
  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
1110 if (find_reg_note (insn
, REG_RETVAL
, NULL_RTX
))
1113 if (GET_CODE (pat
) == SET
&& set_noop_p (pat
))
1116 if (GET_CODE (pat
) == PARALLEL
)
1119 /* If nothing but SETs of registers to themselves,
1120 this insn can also be deleted. */
1121 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
1123 rtx tem
= XVECEXP (pat
, 0, i
);
          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
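/* Usage sketch (illustrative, not part of the original file): a cleanup
   pass could remove moves that have no effect.  "insn" is a hypothetical
   insn in the stream; delete_insn is the usual insn-removal routine.

       if (INSN_P (insn) && noop_move_p (insn))
         delete_insn (insn);
*/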
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be used.  */
1147 find_last_value (rtx x
, rtx
*pinsn
, rtx valid_to
, int allow_hwreg
)
1151 for (p
= PREV_INSN (*pinsn
); p
&& !LABEL_P (p
);
1155 rtx set
= single_set (p
);
1156 rtx note
= find_reg_note (p
, REG_EQUAL
, NULL_RTX
);
1158 if (set
&& rtx_equal_p (x
, SET_DEST (set
)))
1160 rtx src
= SET_SRC (set
);
1162 if (note
&& GET_CODE (XEXP (note
, 0)) != EXPR_LIST
)
1163 src
= XEXP (note
, 0);
1165 if ((valid_to
== NULL_RTX
1166 || ! modified_between_p (src
, PREV_INSN (p
), valid_to
))
1167 /* Reject hard registers because we don't usually want
1168 to use them; we'd rather use a pseudo. */
1170 && REGNO (src
) < FIRST_PSEUDO_REGISTER
) || allow_hwreg
))
1177 /* If set in non-simple way, we don't have a value. */
1178 if (reg_set_p (x
, p
))
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */
1193 refers_to_regno_p (unsigned int regno
, unsigned int endregno
, rtx x
,
1197 unsigned int x_regno
;
1202 /* The contents of a REG_NONNEG note is always zero, so we must come here
1203 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1207 code
= GET_CODE (x
);
1212 x_regno
= REGNO (x
);
      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
1217 if ((x_regno
== STACK_POINTER_REGNUM
1218 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1219 || x_regno
== ARG_POINTER_REGNUM
1221 || x_regno
== FRAME_POINTER_REGNUM
)
1222 && regno
>= FIRST_VIRTUAL_REGISTER
&& regno
<= LAST_VIRTUAL_REGISTER
)
1225 return (endregno
> x_regno
1226 && regno
< x_regno
+ (x_regno
< FIRST_PSEUDO_REGISTER
1227 ? hard_regno_nregs
[x_regno
][GET_MODE (x
)]
1231 /* If this is a SUBREG of a hard reg, we can see exactly which
1232 registers are being modified. Otherwise, handle normally. */
1233 if (REG_P (SUBREG_REG (x
))
1234 && REGNO (SUBREG_REG (x
)) < FIRST_PSEUDO_REGISTER
)
1236 unsigned int inner_regno
= subreg_regno (x
);
1237 unsigned int inner_endregno
1238 = inner_regno
+ (inner_regno
< FIRST_PSEUDO_REGISTER
1239 ? hard_regno_nregs
[inner_regno
][GET_MODE (x
)] : 1);
1241 return endregno
> inner_regno
&& regno
< inner_endregno
;
1247 if (&SET_DEST (x
) != loc
1248 /* Note setting a SUBREG counts as referring to the REG it is in for
1249 a pseudo but not for hard registers since we can
1250 treat each word individually. */
1251 && ((GET_CODE (SET_DEST (x
)) == SUBREG
1252 && loc
!= &SUBREG_REG (SET_DEST (x
))
1253 && REG_P (SUBREG_REG (SET_DEST (x
)))
1254 && REGNO (SUBREG_REG (SET_DEST (x
))) >= FIRST_PSEUDO_REGISTER
1255 && refers_to_regno_p (regno
, endregno
,
1256 SUBREG_REG (SET_DEST (x
)), loc
))
1257 || (!REG_P (SET_DEST (x
))
1258 && refers_to_regno_p (regno
, endregno
, SET_DEST (x
), loc
))))
1261 if (code
== CLOBBER
|| loc
== &SET_SRC (x
))
1270 /* X does not match, so try its subexpressions. */
1272 fmt
= GET_RTX_FORMAT (code
);
1273 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1275 if (fmt
[i
] == 'e' && loc
!= &XEXP (x
, i
))
1283 if (refers_to_regno_p (regno
, endregno
, XEXP (x
, i
), loc
))
1286 else if (fmt
[i
] == 'E')
1289 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
1290 if (loc
!= &XVECEXP (x
, i
, j
)
1291 && refers_to_regno_p (regno
, endregno
, XVECEXP (x
, i
, j
), loc
))
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */
1305 reg_overlap_mentioned_p (rtx x
, rtx in
)
1307 unsigned int regno
, endregno
;
1309 /* If either argument is a constant, then modifying X can not
1310 affect IN. Here we look at IN, we can profitably combine
1311 CONSTANT_P (x) with the switch statement below. */
1312 if (CONSTANT_P (in
))
1316 switch (GET_CODE (x
))
1318 case STRICT_LOW_PART
:
1321 /* Overly conservative. */
1326 regno
= REGNO (SUBREG_REG (x
));
1327 if (regno
< FIRST_PSEUDO_REGISTER
)
1328 regno
= subreg_regno (x
);
1334 endregno
= regno
+ (regno
< FIRST_PSEUDO_REGISTER
1335 ? hard_regno_nregs
[regno
][GET_MODE (x
)] : 1);
1336 return refers_to_regno_p (regno
, endregno
, in
, (rtx
*) 0);
1346 fmt
= GET_RTX_FORMAT (GET_CODE (in
));
1347 for (i
= GET_RTX_LENGTH (GET_CODE (in
)) - 1; i
>= 0; i
--)
1350 if (reg_overlap_mentioned_p (x
, XEXP (in
, i
)))
1353 else if (fmt
[i
] == 'E')
1356 for (j
= XVECLEN (in
, i
) - 1; j
>= 0; --j
)
1357 if (reg_overlap_mentioned_p (x
, XVECEXP (in
, i
, j
)))
1367 return reg_mentioned_p (x
, in
);
1373 /* If any register in here refers to it we return true. */
1374 for (i
= XVECLEN (x
, 0) - 1; i
>= 0; i
--)
1375 if (XEXP (XVECEXP (x
, 0, i
), 0) != 0
1376 && reg_overlap_mentioned_p (XEXP (XVECEXP (x
, 0, i
), 0), in
))
1382 gcc_assert (CONSTANT_P (x
));
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */
1397 note_stores (rtx x
, void (*fun
) (rtx
, rtx
, void *), void *data
)
1401 if (GET_CODE (x
) == COND_EXEC
)
1402 x
= COND_EXEC_CODE (x
);
1404 if (GET_CODE (x
) == SET
|| GET_CODE (x
) == CLOBBER
)
1406 rtx dest
= SET_DEST (x
);
1408 while ((GET_CODE (dest
) == SUBREG
1409 && (!REG_P (SUBREG_REG (dest
))
1410 || REGNO (SUBREG_REG (dest
)) >= FIRST_PSEUDO_REGISTER
))
1411 || GET_CODE (dest
) == ZERO_EXTRACT
1412 || GET_CODE (dest
) == STRICT_LOW_PART
)
1413 dest
= XEXP (dest
, 0);
1415 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1416 each of whose first operand is a register. */
1417 if (GET_CODE (dest
) == PARALLEL
)
1419 for (i
= XVECLEN (dest
, 0) - 1; i
>= 0; i
--)
1420 if (XEXP (XVECEXP (dest
, 0, i
), 0) != 0)
1421 (*fun
) (XEXP (XVECEXP (dest
, 0, i
), 0), x
, data
);
1424 (*fun
) (dest
, x
, data
);
  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
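/* Usage sketch (illustrative, not part of the original file): a typical
   note_stores callback records which hard registers a pattern sets or
   clobbers.  The callback receives the stored-to rtx and the SET/CLOBBER
   performing the store; the HARD_REG_SET is passed through DATA.

       static void
       record_hard_reg_store (rtx dest, rtx set ATTRIBUTE_UNUSED, void *data)
       {
         HARD_REG_SET *stored = (HARD_REG_SET *) data;
         if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
           SET_HARD_REG_BIT (*stored, REGNO (dest));
       }

       note_stores (PATTERN (insn), record_hard_reg_store, &stored_regs);
*/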
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */
1442 note_uses (rtx
*pbody
, void (*fun
) (rtx
*, void *), void *data
)
1447 switch (GET_CODE (body
))
1450 (*fun
) (&COND_EXEC_TEST (body
), data
);
1451 note_uses (&COND_EXEC_CODE (body
), fun
, data
);
1455 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1456 note_uses (&XVECEXP (body
, 0, i
), fun
, data
);
1460 (*fun
) (&XEXP (body
, 0), data
);
1464 for (i
= ASM_OPERANDS_INPUT_LENGTH (body
) - 1; i
>= 0; i
--)
1465 (*fun
) (&ASM_OPERANDS_INPUT (body
, i
), data
);
1469 (*fun
) (&TRAP_CONDITION (body
), data
);
1473 (*fun
) (&XEXP (body
, 0), data
);
1477 case UNSPEC_VOLATILE
:
1478 for (i
= XVECLEN (body
, 0) - 1; i
>= 0; i
--)
1479 (*fun
) (&XVECEXP (body
, 0, i
), data
);
1483 if (MEM_P (XEXP (body
, 0)))
1484 (*fun
) (&XEXP (XEXP (body
, 0), 0), data
);
1489 rtx dest
= SET_DEST (body
);
1491 /* For sets we replace everything in source plus registers in memory
1492 expression in store and operands of a ZERO_EXTRACT. */
1493 (*fun
) (&SET_SRC (body
), data
);
1495 if (GET_CODE (dest
) == ZERO_EXTRACT
)
1497 (*fun
) (&XEXP (dest
, 1), data
);
1498 (*fun
) (&XEXP (dest
, 2), data
);
1501 while (GET_CODE (dest
) == SUBREG
|| GET_CODE (dest
) == STRICT_LOW_PART
)
1502 dest
= XEXP (dest
, 0);
1505 (*fun
) (&XEXP (dest
, 0), data
);
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
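/* Usage sketch (illustrative, not part of the original file): because a
   note_uses callback receives a pointer to each used expression, it can
   rewrite uses in place.  "old_reg" and "new_reg" are hypothetical, and a
   real pass would validate the change afterwards.

       static void
       rewrite_use (rtx *loc, void *data)
       {
         rtx *pair = (rtx *) data;
         if (rtx_equal_p (*loc, pair[0]))
           *loc = pair[1];
       }

       rtx pair[2] = { old_reg, new_reg };
       note_uses (&PATTERN (insn), rewrite_use, pair);
*/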
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */
1534 dead_or_set_p (rtx insn
, rtx x
)
1536 unsigned int regno
, last_regno
;
1539 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1540 if (GET_CODE (x
) == CC0
)
1543 gcc_assert (REG_P (x
));
1546 last_regno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
1547 : regno
+ hard_regno_nregs
[regno
][GET_MODE (x
)] - 1);
1549 for (i
= regno
; i
<= last_regno
; i
++)
1550 if (! dead_or_set_regno_p (insn
, i
))
1556 /* Return TRUE iff DEST is a register or subreg of a register and
1557 doesn't change the number of words of the inner register, and any
1558 part of the register is TEST_REGNO. */
1561 covers_regno_no_parallel_p (rtx dest
, unsigned int test_regno
)
1563 unsigned int regno
, endregno
;
1565 if (GET_CODE (dest
) == SUBREG
1566 && (((GET_MODE_SIZE (GET_MODE (dest
))
1567 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)
1568 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest
)))
1569 + UNITS_PER_WORD
- 1) / UNITS_PER_WORD
)))
1570 dest
= SUBREG_REG (dest
);
1575 regno
= REGNO (dest
);
1576 endregno
= (regno
>= FIRST_PSEUDO_REGISTER
? regno
+ 1
1577 : regno
+ hard_regno_nregs
[regno
][GET_MODE (dest
)]);
1578 return (test_regno
>= regno
&& test_regno
< endregno
);
1581 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1582 any member matches the covers_regno_no_parallel_p criteria. */
1585 covers_regno_p (rtx dest
, unsigned int test_regno
)
1587 if (GET_CODE (dest
) == PARALLEL
)
1589 /* Some targets place small structures in registers for return
1590 values of functions, and those registers are wrapped in
1591 PARALLELs that we may see as the destination of a SET. */
1594 for (i
= XVECLEN (dest
, 0) - 1; i
>= 0; i
--)
1596 rtx inner
= XEXP (XVECEXP (dest
, 0, i
), 0);
1597 if (inner
!= NULL_RTX
1598 && covers_regno_no_parallel_p (inner
, test_regno
))
1605 return covers_regno_no_parallel_p (dest
, test_regno
);
1608 /* Utility function for dead_or_set_p to check an individual register. Also
1609 called from flow.c. */
1612 dead_or_set_regno_p (rtx insn
, unsigned int test_regno
)
1616 /* See if there is a death note for something that includes TEST_REGNO. */
1617 if (find_regno_note (insn
, REG_DEAD
, test_regno
))
1621 && find_regno_fusage (insn
, CLOBBER
, test_regno
))
1624 pattern
= PATTERN (insn
);
1626 if (GET_CODE (pattern
) == COND_EXEC
)
1627 pattern
= COND_EXEC_CODE (pattern
);
1629 if (GET_CODE (pattern
) == SET
)
1630 return covers_regno_p (SET_DEST (pattern
), test_regno
);
1631 else if (GET_CODE (pattern
) == PARALLEL
)
1635 for (i
= XVECLEN (pattern
, 0) - 1; i
>= 0; i
--)
1637 rtx body
= XVECEXP (pattern
, 0, i
);
1639 if (GET_CODE (body
) == COND_EXEC
)
1640 body
= COND_EXEC_CODE (body
);
1642 if ((GET_CODE (body
) == SET
|| GET_CODE (body
) == CLOBBER
)
1643 && covers_regno_p (SET_DEST (body
), test_regno
))
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */
1655 find_reg_note (rtx insn
, enum reg_note kind
, rtx datum
)
1659 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1660 if (! INSN_P (insn
))
1664 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1665 if (REG_NOTE_KIND (link
) == kind
)
1670 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
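/* Usage sketch (illustrative, not part of the original file): looking up
   a REG_EQUAL note to learn the value an insn is known to compute.
   "insn" is a hypothetical insn.

       rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
       if (note != NULL_RTX)
         known_value = XEXP (note, 0);
*/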
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */
1682 find_regno_note (rtx insn
, enum reg_note kind
, unsigned int regno
)
1686 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1687 if (! INSN_P (insn
))
1690 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1691 if (REG_NOTE_KIND (link
) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
1694 && REG_P (XEXP (link
, 0))
1695 && REGNO (XEXP (link
, 0)) <= regno
1696 && ((REGNO (XEXP (link
, 0))
1697 + (REGNO (XEXP (link
, 0)) >= FIRST_PSEUDO_REGISTER
? 1
1698 : hard_regno_nregs
[REGNO (XEXP (link
, 0))]
1699 [GET_MODE (XEXP (link
, 0))]))
1705 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1709 find_reg_equal_equiv_note (rtx insn
)
1715 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1716 if (REG_NOTE_KIND (link
) == REG_EQUAL
1717 || REG_NOTE_KIND (link
) == REG_EQUIV
)
1719 if (single_set (insn
) == 0)
1726 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1727 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1730 find_reg_fusage (rtx insn
, enum rtx_code code
, rtx datum
)
1732 /* If it's not a CALL_INSN, it can't possibly have a
1733 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1743 for (link
= CALL_INSN_FUNCTION_USAGE (insn
);
1745 link
= XEXP (link
, 1))
1746 if (GET_CODE (XEXP (link
, 0)) == code
1747 && rtx_equal_p (datum
, XEXP (XEXP (link
, 0), 0)))
1752 unsigned int regno
= REGNO (datum
);
1754 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1755 to pseudo registers, so don't bother checking. */
1757 if (regno
< FIRST_PSEUDO_REGISTER
)
1759 unsigned int end_regno
1760 = regno
+ hard_regno_nregs
[regno
][GET_MODE (datum
)];
1763 for (i
= regno
; i
< end_regno
; i
++)
1764 if (find_regno_fusage (insn
, code
, i
))
1772 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1773 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1776 find_regno_fusage (rtx insn
, enum rtx_code code
, unsigned int regno
)
1780 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1781 to pseudo registers, so don't bother checking. */
1783 if (regno
>= FIRST_PSEUDO_REGISTER
1787 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
1789 unsigned int regnote
;
1792 if (GET_CODE (op
= XEXP (link
, 0)) == code
1793 && REG_P (reg
= XEXP (op
, 0))
1794 && (regnote
= REGNO (reg
)) <= regno
1795 && regnote
+ hard_regno_nregs
[regnote
][GET_MODE (reg
)] > regno
)
1802 /* Return true if INSN is a call to a pure function. */
1805 pure_call_p (rtx insn
)
1809 if (!CALL_P (insn
) || ! CONST_OR_PURE_CALL_P (insn
))
1812 /* Look for the note that differentiates const and pure functions. */
1813 for (link
= CALL_INSN_FUNCTION_USAGE (insn
); link
; link
= XEXP (link
, 1))
1817 if (GET_CODE (u
= XEXP (link
, 0)) == USE
1818 && MEM_P (m
= XEXP (u
, 0)) && GET_MODE (m
) == BLKmode
1819 && GET_CODE (XEXP (m
, 0)) == SCRATCH
)
1826 /* Remove register note NOTE from the REG_NOTES of INSN. */
1829 remove_note (rtx insn
, rtx note
)
1833 if (note
== NULL_RTX
)
1836 if (REG_NOTES (insn
) == note
)
1838 REG_NOTES (insn
) = XEXP (note
, 1);
1842 for (link
= REG_NOTES (insn
); link
; link
= XEXP (link
, 1))
1843 if (XEXP (link
, 1) == note
)
1845 XEXP (link
, 1) = XEXP (note
, 1);
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */
1857 in_expr_list_p (rtx listp
, rtx node
)
1861 for (x
= listp
; x
; x
= XEXP (x
, 1))
1862 if (node
== XEXP (x
, 0))
1868 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1869 remove that entry from the list if it is found.
1871 A simple equality test is used to determine if NODE matches. */
1874 remove_node_from_expr_list (rtx node
, rtx
*listp
)
1877 rtx prev
= NULL_RTX
;
1881 if (node
== XEXP (temp
, 0))
1883 /* Splice the node out of the list. */
1885 XEXP (prev
, 1) = XEXP (temp
, 1);
1887 *listp
= XEXP (temp
, 1);
1893 temp
= XEXP (temp
, 1);
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no other
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */
1903 volatile_insn_p (rtx x
)
1907 code
= GET_CODE (x
);
1927 case UNSPEC_VOLATILE
:
1928 /* case TRAP_IF: This isn't clear yet. */
1933 if (MEM_VOLATILE_P (x
))
1940 /* Recursively scan the operands of this expression. */
1943 const char *fmt
= GET_RTX_FORMAT (code
);
1946 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
1950 if (volatile_insn_p (XEXP (x
, i
)))
1953 else if (fmt
[i
] == 'E')
1956 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
1957 if (volatile_insn_p (XVECEXP (x
, i
, j
)))
1965 /* Nonzero if X contains any volatile memory references
1966 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
1969 volatile_refs_p (rtx x
)
1973 code
= GET_CODE (x
);
1991 case UNSPEC_VOLATILE
:
1997 if (MEM_VOLATILE_P (x
))
2004 /* Recursively scan the operands of this expression. */
2007 const char *fmt
= GET_RTX_FORMAT (code
);
2010 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2014 if (volatile_refs_p (XEXP (x
, i
)))
2017 else if (fmt
[i
] == 'E')
2020 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2021 if (volatile_refs_p (XVECEXP (x
, i
, j
)))
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */
2033 side_effects_p (rtx x
)
2037 code
= GET_CODE (x
);
2055 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2056 when some combination can't be done. If we see one, don't think
2057 that we can simplify the expression. */
2058 return (GET_MODE (x
) != VOIDmode
);
2067 case UNSPEC_VOLATILE
:
2068 /* case TRAP_IF: This isn't clear yet. */
2074 if (MEM_VOLATILE_P (x
))
2081 /* Recursively scan the operands of this expression. */
2084 const char *fmt
= GET_RTX_FORMAT (code
);
2087 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2091 if (side_effects_p (XEXP (x
, i
)))
2094 else if (fmt
[i
] == 'E')
2097 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2098 if (side_effects_p (XVECEXP (x
, i
, j
)))
/* Return nonzero if evaluating rtx X might cause a trap.  UNALIGNED_MEMS
   controls whether nonzero is returned for unaligned memory accesses on
   strict alignment machines.  */
2111 may_trap_p_1 (rtx x
, bool unaligned_mems
)
2119 code
= GET_CODE (x
);
2122 /* Handle these cases quickly. */
2136 case UNSPEC_VOLATILE
:
2141 return MEM_VOLATILE_P (x
);
2143 /* Memory ref can trap unless it's a static var or a stack slot. */
2145 if (MEM_NOTRAP_P (x
)
2146 && (!STRICT_ALIGNMENT
|| !unaligned_mems
))
2149 rtx_addr_can_trap_p_1 (XEXP (x
, 0), GET_MODE (x
), unaligned_mems
);
2151 /* Division by a non-constant might trap. */
2156 if (HONOR_SNANS (GET_MODE (x
)))
2158 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
)
2159 return flag_trapping_math
;
2160 if (!CONSTANT_P (XEXP (x
, 1)) || (XEXP (x
, 1) == const0_rtx
))
2165 /* An EXPR_LIST is used to represent a function call. This
2166 certainly may trap. */
2175 /* Some floating point comparisons may trap. */
2176 if (!flag_trapping_math
)
2178 /* ??? There is no machine independent way to check for tests that trap
2179 when COMPARE is used, though many targets do make this distinction.
2180 For instance, sparc uses CCFPE for compares which generate exceptions
2181 and CCFP for compares which do not generate exceptions. */
2182 if (HONOR_NANS (GET_MODE (x
)))
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
2186 if (HONOR_NANS (GET_MODE (XEXP (x
, 0)))
2187 || HONOR_NANS (GET_MODE (XEXP (x
, 1))))
2193 if (HONOR_SNANS (GET_MODE (x
)))
2195 /* Often comparison is CC mode, so check operand modes. */
2196 if (HONOR_SNANS (GET_MODE (XEXP (x
, 0)))
2197 || HONOR_SNANS (GET_MODE (XEXP (x
, 1))))
2202 /* Conversion of floating point might trap. */
2203 if (flag_trapping_math
&& HONOR_NANS (GET_MODE (XEXP (x
, 0))))
2210 /* These operations don't trap even with floating point. */
2214 /* Any floating arithmetic may trap. */
2215 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_FLOAT
2216 && flag_trapping_math
)
2220 fmt
= GET_RTX_FORMAT (code
);
2221 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2225 if (may_trap_p_1 (XEXP (x
, i
), unaligned_mems
))
2228 else if (fmt
[i
] == 'E')
2231 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2232 if (may_trap_p_1 (XVECEXP (x
, i
, j
), unaligned_mems
))
2239 /* Return nonzero if evaluating rtx X might cause a trap. */
2244 return may_trap_p_1 (x
, false);
/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)
      {
        if (s->d == 1)
          return *s->ip;
        else
          return *s->cp;
      }

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   verify may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      if ((reg:SI) == 1)
        (set (reg:SI) (mem:SI (%fp - 7)))
      else
        (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */
2288 may_trap_or_fault_p (rtx x
)
2290 return may_trap_p_1 (x
, true);
2293 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2294 i.e., an inequality. */
2297 inequality_comparisons_p (rtx x
)
2301 enum rtx_code code
= GET_CODE (x
);
2331 len
= GET_RTX_LENGTH (code
);
2332 fmt
= GET_RTX_FORMAT (code
);
2334 for (i
= 0; i
< len
; i
++)
2338 if (inequality_comparisons_p (XEXP (x
, i
)))
2341 else if (fmt
[i
] == 'E')
2344 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2345 if (inequality_comparisons_p (XVECEXP (x
, i
, j
)))
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */
2360 replace_rtx (rtx x
, rtx from
, rtx to
)
  /* The following prevents looping when we change a MEM inside a
     CONST_DOUBLE onto the same CONST_DOUBLE.  */
2367 if (x
!= 0 && GET_CODE (x
) == CONST_DOUBLE
)
2373 /* Allow this function to make replacements in EXPR_LISTs. */
2377 if (GET_CODE (x
) == SUBREG
)
2379 rtx
new = replace_rtx (SUBREG_REG (x
), from
, to
);
2381 if (GET_CODE (new) == CONST_INT
)
2383 x
= simplify_subreg (GET_MODE (x
), new,
2384 GET_MODE (SUBREG_REG (x
)),
2389 SUBREG_REG (x
) = new;
2393 else if (GET_CODE (x
) == ZERO_EXTEND
)
2395 rtx
new = replace_rtx (XEXP (x
, 0), from
, to
);
2397 if (GET_CODE (new) == CONST_INT
)
2399 x
= simplify_unary_operation (ZERO_EXTEND
, GET_MODE (x
),
2400 new, GET_MODE (XEXP (x
, 0)));
2409 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2410 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2413 XEXP (x
, i
) = replace_rtx (XEXP (x
, i
), from
, to
);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
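/* Usage sketch (illustrative, not part of the original file): replacing
   one register by another throughout a pattern.  Since replace_rtx
   modifies the expression it is given, callers that must preserve the
   original usually copy it first.

       rtx pat = copy_rtx (PATTERN (insn));
       pat = replace_rtx (pat, old_reg, new_reg);
*/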
/* Throughout the rtx X, replace many registers according to REG_MAP.
   Return the replacement for X (which may be X with altered contents).
   REG_MAP[R] is the replacement for register R, or 0 for don't replace.
   NREGS is the length of REG_MAP; regs >= NREGS are not mapped.

   We only support REG_MAP entries of REG or SUBREG.  Also, hard registers
   should not be mapped to pseudos or vice versa since validate_change
   is not called.

   If REPLACE_DEST is 1, replacements are also done in destinations;
   otherwise, only sources are replaced.  */
2435 replace_regs (rtx x
, rtx
*reg_map
, unsigned int nregs
, int replace_dest
)
2444 code
= GET_CODE (x
);
2459 /* Verify that the register has an entry before trying to access it. */
2460 if (REGNO (x
) < nregs
&& reg_map
[REGNO (x
)] != 0)
2462 /* SUBREGs can't be shared. Always return a copy to ensure that if
2463 this replacement occurs more than once then each instance will
2464 get distinct rtx. */
2465 if (GET_CODE (reg_map
[REGNO (x
)]) == SUBREG
)
2466 return copy_rtx (reg_map
[REGNO (x
)]);
2467 return reg_map
[REGNO (x
)];
2472 /* Prevent making nested SUBREGs. */
2473 if (REG_P (SUBREG_REG (x
)) && REGNO (SUBREG_REG (x
)) < nregs
2474 && reg_map
[REGNO (SUBREG_REG (x
))] != 0
2475 && GET_CODE (reg_map
[REGNO (SUBREG_REG (x
))]) == SUBREG
)
2477 rtx map_val
= reg_map
[REGNO (SUBREG_REG (x
))];
2478 return simplify_gen_subreg (GET_MODE (x
), map_val
,
2479 GET_MODE (SUBREG_REG (x
)),
2486 SET_DEST (x
) = replace_regs (SET_DEST (x
), reg_map
, nregs
, 0);
2488 else if (MEM_P (SET_DEST (x
))
2489 || GET_CODE (SET_DEST (x
)) == STRICT_LOW_PART
)
2490 /* Even if we are not to replace destinations, replace register if it
2491 is CONTAINED in destination (destination is memory or
2492 STRICT_LOW_PART). */
2493 XEXP (SET_DEST (x
), 0) = replace_regs (XEXP (SET_DEST (x
), 0),
2495 else if (GET_CODE (SET_DEST (x
)) == ZERO_EXTRACT
)
2496 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2499 SET_SRC (x
) = replace_regs (SET_SRC (x
), reg_map
, nregs
, 0);
2506 fmt
= GET_RTX_FORMAT (code
);
2507 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2510 XEXP (x
, i
) = replace_regs (XEXP (x
, i
), reg_map
, nregs
, replace_dest
);
2511 else if (fmt
[i
] == 'E')
2514 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2515 XVECEXP (x
, i
, j
) = replace_regs (XVECEXP (x
, i
, j
), reg_map
,
2516 nregs
, replace_dest
);
2522 /* Replace occurrences of the old label in *X with the new one.
2523 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2526 replace_label (rtx
*x
, void *data
)
2529 rtx old_label
= ((replace_label_data
*) data
)->r1
;
2530 rtx new_label
= ((replace_label_data
*) data
)->r2
;
2531 bool update_label_nuses
= ((replace_label_data
*) data
)->update_label_nuses
;
2536 if (GET_CODE (l
) == SYMBOL_REF
2537 && CONSTANT_POOL_ADDRESS_P (l
))
2539 rtx c
= get_pool_constant (l
);
2540 if (rtx_referenced_p (old_label
, c
))
2543 replace_label_data
*d
= (replace_label_data
*) data
;
2545 /* Create a copy of constant C; replace the label inside
2546 but do not update LABEL_NUSES because uses in constant pool
2548 new_c
= copy_rtx (c
);
2549 d
->update_label_nuses
= false;
2550 for_each_rtx (&new_c
, replace_label
, data
);
2551 d
->update_label_nuses
= update_label_nuses
;
2553 /* Add the new constant NEW_C to constant pool and replace
2554 the old reference to constant by new reference. */
2555 new_l
= XEXP (force_const_mem (get_pool_mode (l
), new_c
), 0);
2556 *x
= replace_rtx (l
, l
, new_l
);
2561 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2562 field. This is not handled by for_each_rtx because it doesn't
2563 handle unprinted ('0') fields. */
2564 if (JUMP_P (l
) && JUMP_LABEL (l
) == old_label
)
2565 JUMP_LABEL (l
) = new_label
;
2567 if ((GET_CODE (l
) == LABEL_REF
2568 || GET_CODE (l
) == INSN_LIST
)
2569 && XEXP (l
, 0) == old_label
)
2571 XEXP (l
, 0) = new_label
;
2572 if (update_label_nuses
)
2574 ++LABEL_NUSES (new_label
);
2575 --LABEL_NUSES (old_label
);
2583 /* When *BODY is equal to X or X is directly referenced by *BODY
2584 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2585 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2588 rtx_referenced_p_1 (rtx
*body
, void *x
)
2592 if (*body
== NULL_RTX
)
2593 return y
== NULL_RTX
;
2595 /* Return true if a label_ref *BODY refers to label Y. */
2596 if (GET_CODE (*body
) == LABEL_REF
&& LABEL_P (y
))
2597 return XEXP (*body
, 0) == y
;
2599 /* If *BODY is a reference to pool constant traverse the constant. */
2600 if (GET_CODE (*body
) == SYMBOL_REF
2601 && CONSTANT_POOL_ADDRESS_P (*body
))
2602 return rtx_referenced_p (y
, get_pool_constant (*body
));
2604 /* By default, compare the RTL expressions. */
2605 return rtx_equal_p (*body
, y
);
2608 /* Return true if X is referenced in BODY. */
2611 rtx_referenced_p (rtx x
, rtx body
)
2613 return for_each_rtx (&body
, rtx_referenced_p_1
, x
);
2616 /* If INSN is a tablejump return true and store the label (before jump table) to
2617 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2620 tablejump_p (rtx insn
, rtx
*labelp
, rtx
*tablep
)
2625 && (label
= JUMP_LABEL (insn
)) != NULL_RTX
2626 && (table
= next_active_insn (label
)) != NULL_RTX
2628 && (GET_CODE (PATTERN (table
)) == ADDR_VEC
2629 || GET_CODE (PATTERN (table
)) == ADDR_DIFF_VEC
))
2640 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2641 constant that is not in the constant pool and not in the condition
2642 of an IF_THEN_ELSE. */
2645 computed_jump_p_1 (rtx x
)
2647 enum rtx_code code
= GET_CODE (x
);
2666 return ! (GET_CODE (XEXP (x
, 0)) == SYMBOL_REF
2667 && CONSTANT_POOL_ADDRESS_P (XEXP (x
, 0)));
2670 return (computed_jump_p_1 (XEXP (x
, 1))
2671 || computed_jump_p_1 (XEXP (x
, 2)));
2677 fmt
= GET_RTX_FORMAT (code
);
2678 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
2681 && computed_jump_p_1 (XEXP (x
, i
)))
2684 else if (fmt
[i
] == 'E')
2685 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
2686 if (computed_jump_p_1 (XVECEXP (x
, i
, j
)))
2693 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2695 Tablejumps and casesi insns are not considered indirect jumps;
2696 we can recognize them by a (use (label_ref)). */
2699 computed_jump_p (rtx insn
)
2704 rtx pat
= PATTERN (insn
);
2706 if (find_reg_note (insn
, REG_LABEL
, NULL_RTX
))
2708 else if (GET_CODE (pat
) == PARALLEL
)
2710 int len
= XVECLEN (pat
, 0);
2711 int has_use_labelref
= 0;
2713 for (i
= len
- 1; i
>= 0; i
--)
2714 if (GET_CODE (XVECEXP (pat
, 0, i
)) == USE
2715 && (GET_CODE (XEXP (XVECEXP (pat
, 0, i
), 0))
2717 has_use_labelref
= 1;
2719 if (! has_use_labelref
)
2720 for (i
= len
- 1; i
>= 0; i
--)
2721 if (GET_CODE (XVECEXP (pat
, 0, i
)) == SET
2722 && SET_DEST (XVECEXP (pat
, 0, i
)) == pc_rtx
2723 && computed_jump_p_1 (SET_SRC (XVECEXP (pat
, 0, i
))))
2726 else if (GET_CODE (pat
) == SET
2727 && SET_DEST (pat
) == pc_rtx
2728 && computed_jump_p_1 (SET_SRC (pat
)))
2734 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2735 calls. Processes the subexpressions of EXP and passes them to F. */
2737 for_each_rtx_1 (rtx exp
, int n
, rtx_function f
, void *data
)
2740 const char *format
= GET_RTX_FORMAT (GET_CODE (exp
));
2743 for (; format
[n
] != '\0'; n
++)
2750 result
= (*f
) (x
, data
);
2752 /* Do not traverse sub-expressions. */
2754 else if (result
!= 0)
2755 /* Stop the traversal. */
2759 /* There are no sub-expressions. */
2762 i
= non_rtx_starting_operands
[GET_CODE (*x
)];
2765 result
= for_each_rtx_1 (*x
, i
, f
, data
);
2773 if (XVEC (exp
, n
) == 0)
2775 for (j
= 0; j
< XVECLEN (exp
, n
); ++j
)
2778 x
= &XVECEXP (exp
, n
, j
);
2779 result
= (*f
) (x
, data
);
2781 /* Do not traverse sub-expressions. */
2783 else if (result
!= 0)
2784 /* Stop the traversal. */
2788 /* There are no sub-expressions. */
2791 i
= non_rtx_starting_operands
[GET_CODE (*x
)];
2794 result
= for_each_rtx_1 (*x
, i
, f
, data
);
2802 /* Nothing to do. */
/* Traverse X via depth-first search, calling F for each
   sub-expression (including X itself).  F is also passed the DATA.
   If F returns -1, do not traverse sub-expressions, but continue
   traversing the rest of the tree.  If F ever returns any other
   nonzero value, stop the traversal, and return the value returned
   by F.  Otherwise, return 0.  This function does not traverse inside
   tree structure that contains RTX_EXPRs, or into sub-expressions
   whose format code is `0' since it is not known whether or not those
   codes are actually RTL.

   This routine is very general, and could (should?) be used to
   implement many of the other routines in this file.  */
2824 for_each_rtx (rtx
*x
, rtx_function f
, void *data
)
2830 result
= (*f
) (x
, data
);
2832 /* Do not traverse sub-expressions. */
2834 else if (result
!= 0)
2835 /* Stop the traversal. */
2839 /* There are no sub-expressions. */
  i = non_rtx_starting_operands[GET_CODE (*x)];

  return for_each_rtx_1 (*x, i, f, data);
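/* Usage sketch (illustrative, not part of the original file): a
   for_each_rtx callback that counts MEM subexpressions.  Returning -1
   from the callback would skip the subtree; any other nonzero value
   would stop the walk.

       static int
       count_mems_1 (rtx *loc, void *data)
       {
         if (*loc != NULL_RTX && MEM_P (*loc))
           ++*(int *) data;
         return 0;
       }

       int n_mems = 0;
       for_each_rtx (&PATTERN (insn), count_mems_1, &n_mems);
*/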
2850 /* Searches X for any reference to REGNO, returning the rtx of the
2851 reference found if any. Otherwise, returns NULL_RTX. */
2854 regno_use_in (unsigned int regno
, rtx x
)
2860 if (REG_P (x
) && REGNO (x
) == regno
)
2863 fmt
= GET_RTX_FORMAT (GET_CODE (x
));
2864 for (i
= GET_RTX_LENGTH (GET_CODE (x
)) - 1; i
>= 0; i
--)
2868 if ((tem
= regno_use_in (regno
, XEXP (x
, i
))))
2871 else if (fmt
[i
] == 'E')
2872 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
2873 if ((tem
= regno_use_in (regno
, XVECEXP (x
, i
, j
))))
/* Return a value indicating whether OP, an operand of a commutative
   operation, is preferred as the first or second operand.  The higher
   the value, the stronger the preference for being the first operand.
   We use negative values to indicate a preference for the first operand
   and positive values for the second operand.  */
2887 commutative_operand_precedence (rtx op
)
2889 enum rtx_code code
= GET_CODE (op
);
2891 /* Constants always come the second operand. Prefer "nice" constants. */
2892 if (code
== CONST_INT
)
2894 if (code
== CONST_DOUBLE
)
2896 op
= avoid_constant_pool_reference (op
);
2897 code
= GET_CODE (op
);
2899 switch (GET_RTX_CLASS (code
))
2902 if (code
== CONST_INT
)
2904 if (code
== CONST_DOUBLE
)
2909 /* SUBREGs of objects should come second. */
2910 if (code
== SUBREG
&& OBJECT_P (SUBREG_REG (op
)))
2913 if (!CONSTANT_P (op
))
2916 /* As for RTX_CONST_OBJ. */
2920 /* Complex expressions should be the first, so decrease priority
2924 case RTX_COMM_ARITH
:
2925 /* Prefer operands that are themselves commutative to be first.
2926 This helps to make things linear. In particular,
2927 (and (and (reg) (reg)) (not (reg))) is canonical. */
2931 /* If only one operand is a binary expression, it will be the first
2932 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2933 is canonical, although it will usually be further simplified. */
2937 /* Then prefer NEG and NOT. */
2938 if (code
== NEG
|| code
== NOT
)
/* Return 1 iff it is necessary to swap operands of commutative operation
   in order to canonicalize expression.  */
int
swap_commutative_operands_p (rtx x, rtx y)
{
  return (commutative_operand_precedence (x)
	  < commutative_operand_precedence (y));
}
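
/* Worked example (illustrative, not from the original sources): with
   X = (reg:SI 1) and Y = (const_int 4), the constant gets a negative
   precedence while the register ranks higher, so
   swap_commutative_operands_p (X, Y) is false and the canonical form
   of the sum keeps the register first: (plus:SI (reg:SI 1) (const_int 4)).
   A swap is requested only when X ranks strictly below Y.  */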
/* Return 1 if X is an autoincrement side effect and the register is
   not the stack pointer.  */
2961 switch (GET_CODE (x
))
2969 /* There are no REG_INC notes for SP. */
2970 if (XEXP (x
, 0) != stack_pointer_rtx
)
/* Return 1 if the sequence of instructions beginning with FROM and up
   to and including TO is safe to move.  If NEW_TO is non-NULL, and
   the sequence is not already safe to move, but can be easily
   extended to a sequence which is safe, then NEW_TO will point to the
   end of the extended sequence.

   For now, this function only checks that the region contains whole
   exception regions, but it could be extended to check additional
   conditions as well.  */
2989 insns_safe_to_move_p (rtx from
, rtx to
, rtx
*new_to
)
2991 int eh_region_count
= 0;
2995 /* By default, assume the end of the region will be what was
3004 switch (NOTE_LINE_NUMBER (r
))
3006 case NOTE_INSN_EH_REGION_BEG
:
3010 case NOTE_INSN_EH_REGION_END
:
3011 if (eh_region_count
== 0)
	    /* This sequence of instructions contains the end of
	       an exception region, but not the beginning.  Moving
	       it will cause chaos.  */
3025 /* If we've passed TO, and we see a non-note instruction, we
3026 can't extend the sequence to a movable sequence. */
3032 /* It's OK to move the sequence if there were matched sets of
3033 exception region notes. */
3034 return eh_region_count
== 0;
3039 /* It's OK to move the sequence if there were matched sets of
3040 exception region notes. */
3041 if (past_to_p
&& eh_region_count
== 0)
3047 /* Go to the next instruction. */
/* Return nonzero if IN contains a piece of rtl that has the
   address LOC.  */

int
loc_mentioned_in_p (rtx *loc, rtx in)
{
  enum rtx_code code = GET_CODE (in);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, j;

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (loc == &in->u.fld[i].rt_rtx)
	return 1;
      if (fmt[i] == 'e')
	{
	  if (loc_mentioned_in_p (loc, XEXP (in, i)))
	    return 1;
	}
      else if (fmt[i] == 'E')
	for (j = XVECLEN (in, i) - 1; j >= 0; j--)
	  if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
	    return 1;
    }
  return 0;
}
/* Helper function for subreg_lsb.  Given a subreg's OUTER_MODE, INNER_MODE,
   and SUBREG_BYTE, return the bit offset where the subreg begins
   (counting from the least significant bit of the operand).  */
unsigned int
subreg_lsb_1 (enum machine_mode outer_mode,
	      enum machine_mode inner_mode,
	      unsigned int subreg_byte)
{
  unsigned int bitpos;
  unsigned int byte;
  unsigned int word;

  /* A paradoxical subreg begins at bit position 0.  */
  if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
    return 0;

  if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
    /* If the subreg crosses a word boundary ensure that
       it also begins and ends on a word boundary.  */
    gcc_assert (!((subreg_byte % UNITS_PER_WORD
		   + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
		  && (subreg_byte % UNITS_PER_WORD
		      || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));

  if (WORDS_BIG_ENDIAN)
    word = (GET_MODE_SIZE (inner_mode)
	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
  else
    word = subreg_byte / UNITS_PER_WORD;
  bitpos = word * BITS_PER_WORD;

  if (BYTES_BIG_ENDIAN)
    byte = (GET_MODE_SIZE (inner_mode)
	    - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
  else
    byte = subreg_byte % UNITS_PER_WORD;
  bitpos += byte * BITS_PER_UNIT;

  return bitpos;
}
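
/* Worked example (illustrative, not from the original sources): assume
   BITS_PER_UNIT == 8 and UNITS_PER_WORD == 4.  On a little-endian
   target (neither WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN), a
   (subreg:QI (reg:SI) 2) gives word == 0 and byte == 2, so the subreg
   starts at bit 16.  On a fully big-endian target the same SUBREG_BYTE
   selects byte == (4 - (2 + 1)) % 4 == 1, so the subreg starts at
   bit 8.  */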
/* Given a subreg X, return the bit offset where the subreg begins
   (counting from the least significant bit of the reg).  */

unsigned int
subreg_lsb (rtx x)
{
  return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
		       SUBREG_BYTE (x));
}
/* This function returns the regno offset of a subreg expression.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - The regno offset which would be used.  */
unsigned int
subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
		     unsigned int offset, enum machine_mode ymode)
{
  int nregs_xmode, nregs_ymode, nregs_xmode_unit_int;
  int mode_multiple, nregs_multiple;
  int y_offset;
  enum machine_mode xmode_unit, xmode_unit_int;

  gcc_assert (xregno < FIRST_PSEUDO_REGISTER);

  if (GET_MODE_INNER (xmode) == VOIDmode)
    xmode_unit = xmode;
  else
    xmode_unit = GET_MODE_INNER (xmode);

  if (FLOAT_MODE_P (xmode_unit))
    {
      xmode_unit_int = int_mode_for_mode (xmode_unit);
      if (xmode_unit_int == BLKmode)
	/* It's probably bad to be here; a port should have an integer mode
	   that's the same size as anything of which it takes a SUBREG.  */
	xmode_unit_int = xmode_unit;
    }
  else
    xmode_unit_int = xmode_unit;

  nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];

  /* Adjust nregs_xmode to allow for 'holes'.  */
  if (nregs_xmode_unit_int != hard_regno_nregs[xregno][xmode_unit])
    nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
  else
    nregs_xmode = hard_regno_nregs[xregno][xmode];

  nregs_ymode = hard_regno_nregs[xregno][ymode];

  /* If this is a big endian paradoxical subreg, which uses more actual
     hard registers than the original register, we must return a negative
     offset so that we find the proper highpart of the register.  */
  if (offset == 0
      && nregs_ymode > nregs_xmode
      && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
	  ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
    return nregs_xmode - nregs_ymode;

  if (offset == 0 || nregs_xmode == nregs_ymode)
    return 0;

  /* Size of ymode must not be greater than the size of xmode.  */
  mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
  gcc_assert (mode_multiple != 0);

  y_offset = offset / GET_MODE_SIZE (ymode);
  nregs_multiple = nregs_xmode / nregs_ymode;
  return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
}
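
/* Worked example (illustrative, not from the original sources): on a
   port where SImode needs one hard register and DImode needs two,
   subreg_regno_offset (R, DImode, 4, SImode) computes
   nregs_xmode == 2, nregs_ymode == 1, mode_multiple == 2,
   nregs_multiple == 2 and y_offset == 1, and therefore returns 1:
   the subreg names the second hard register of the DImode pair.  */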
/* This function returns true when the offset is representable via
   subreg_offset in the given regno.
   xregno - A regno of an inner hard subreg_reg (or what will become one).
   xmode  - The mode of xregno.
   offset - The byte offset.
   ymode  - The mode of a top level SUBREG (or what may become one).
   RETURN - Whether the offset is representable.  */
3203 subreg_offset_representable_p (unsigned int xregno
, enum machine_mode xmode
,
3204 unsigned int offset
, enum machine_mode ymode
)
3206 int nregs_xmode
, nregs_ymode
, nregs_xmode_unit
, nregs_xmode_unit_int
;
3207 int mode_multiple
, nregs_multiple
;
3209 enum machine_mode xmode_unit
, xmode_unit_int
;
3211 gcc_assert (xregno
< FIRST_PSEUDO_REGISTER
);
3213 if (GET_MODE_INNER (xmode
) == VOIDmode
)
3216 xmode_unit
= GET_MODE_INNER (xmode
);
3218 if (FLOAT_MODE_P (xmode_unit
))
3220 xmode_unit_int
= int_mode_for_mode (xmode_unit
);
3221 if (xmode_unit_int
== BLKmode
)
3222 /* It's probably bad to be here; a port should have an integer mode
3223 that's the same size as anything of which it takes a SUBREG. */
3224 xmode_unit_int
= xmode_unit
;
3227 xmode_unit_int
= xmode_unit
;
3229 nregs_xmode_unit
= hard_regno_nregs
[xregno
][xmode_unit
];
3230 nregs_xmode_unit_int
= hard_regno_nregs
[xregno
][xmode_unit_int
];
3232 /* If there are holes in a non-scalar mode in registers, we expect
3233 that it is made up of its units concatenated together. */
3234 if (nregs_xmode_unit
!= nregs_xmode_unit_int
)
3236 gcc_assert (nregs_xmode_unit
* GET_MODE_NUNITS (xmode
)
3237 == hard_regno_nregs
[xregno
][xmode
]);
      /* You can only ask for a SUBREG of a value with holes in the middle
	 if you don't cross the holes.  (Such a SUBREG should be done by
	 picking a different register class, or doing it in memory if
	 necessary.)  An example of a value with holes is XCmode on 32-bit
	 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
	 3 for each part, but in memory it's two 128-bit parts.
	 Padding is assumed to be at the end (not necessarily the 'high part').  */
3247 if (nregs_xmode_unit
!= nregs_xmode_unit_int
3248 && (offset
/ GET_MODE_SIZE (xmode_unit_int
) + 1
3249 < GET_MODE_NUNITS (xmode
))
3250 && (offset
/ GET_MODE_SIZE (xmode_unit_int
)
3251 != ((offset
+ GET_MODE_SIZE (ymode
) - 1)
3252 / GET_MODE_SIZE (xmode_unit_int
))))
3255 nregs_xmode
= nregs_xmode_unit_int
* GET_MODE_NUNITS (xmode
);
3258 nregs_xmode
= hard_regno_nregs
[xregno
][xmode
];
3260 nregs_ymode
= hard_regno_nregs
[xregno
][ymode
];
3262 /* Paradoxical subregs are otherwise valid. */
3264 && nregs_ymode
> nregs_xmode
3265 && (GET_MODE_SIZE (ymode
) > UNITS_PER_WORD
3266 ? WORDS_BIG_ENDIAN
: BYTES_BIG_ENDIAN
))
3269 /* Lowpart subregs are otherwise valid. */
3270 if (offset
== subreg_lowpart_offset (ymode
, xmode
))
3273 /* This should always pass, otherwise we don't know how to verify
3274 the constraint. These conditions may be relaxed but
3275 subreg_regno_offset would need to be redesigned. */
3276 gcc_assert ((GET_MODE_SIZE (xmode
) % GET_MODE_SIZE (ymode
)) == 0);
3277 gcc_assert ((nregs_xmode
% nregs_ymode
) == 0);
3279 /* The XMODE value can be seen as a vector of NREGS_XMODE
3280 values. The subreg must represent a lowpart of given field.
3281 Compute what field it is. */
3282 offset
-= subreg_lowpart_offset (ymode
,
3283 mode_for_size (GET_MODE_BITSIZE (xmode
)
3287 /* Size of ymode must not be greater than the size of xmode. */
3288 mode_multiple
= GET_MODE_SIZE (xmode
) / GET_MODE_SIZE (ymode
);
3289 gcc_assert (mode_multiple
!= 0);
3291 y_offset
= offset
/ GET_MODE_SIZE (ymode
);
3292 nregs_multiple
= nregs_xmode
/ nregs_ymode
;
3294 gcc_assert ((offset
% GET_MODE_SIZE (ymode
)) == 0);
3295 gcc_assert ((mode_multiple
% nregs_multiple
) == 0);
3297 return (!(y_offset
% (mode_multiple
/ nregs_multiple
)));
/* Return the final regno that a subreg expression refers to.  */

unsigned int
subreg_regno (rtx x)
{
  unsigned int ret;
  rtx subreg = SUBREG_REG (x);
  int regno = REGNO (subreg);

  ret = regno + subreg_regno_offset (regno,
				     GET_MODE (subreg),
				     SUBREG_BYTE (x),
				     GET_MODE (x));
  return ret;
}
3315 struct parms_set_data
3321 /* Helper function for noticing stores to parameter registers. */
3323 parms_set (rtx x
, rtx pat ATTRIBUTE_UNUSED
, void *data
)
3325 struct parms_set_data
*d
= data
;
3326 if (REG_P (x
) && REGNO (x
) < FIRST_PSEUDO_REGISTER
3327 && TEST_HARD_REG_BIT (d
->regs
, REGNO (x
)))
3329 CLEAR_HARD_REG_BIT (d
->regs
, REGNO (x
));
/* Look backward for first parameter to be loaded.
   Note that loads of all parameters will not necessarily be
   found if CSE has eliminated some of them (e.g., an argument
   to the outer function is passed down as a parameter).
   Do not skip BOUNDARY.  */
3340 find_first_parameter_load (rtx call_insn
, rtx boundary
)
3342 struct parms_set_data parm
;
3343 rtx p
, before
, first_set
;
3345 /* Since different machines initialize their parameter registers
3346 in different orders, assume nothing. Collect the set of all
3347 parameter registers. */
3348 CLEAR_HARD_REG_SET (parm
.regs
);
3350 for (p
= CALL_INSN_FUNCTION_USAGE (call_insn
); p
; p
= XEXP (p
, 1))
3351 if (GET_CODE (XEXP (p
, 0)) == USE
3352 && REG_P (XEXP (XEXP (p
, 0), 0)))
3354 gcc_assert (REGNO (XEXP (XEXP (p
, 0), 0)) < FIRST_PSEUDO_REGISTER
);
	/* We only care about registers which can hold function
	   arguments.  */
3358 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p
, 0), 0))))
3361 SET_HARD_REG_BIT (parm
.regs
, REGNO (XEXP (XEXP (p
, 0), 0)));
3365 first_set
= call_insn
;
3367 /* Search backward for the first set of a register in this set. */
3368 while (parm
.nregs
&& before
!= boundary
)
3370 before
= PREV_INSN (before
);
3372 /* It is possible that some loads got CSEed from one call to
3373 another. Stop in that case. */
3374 if (CALL_P (before
))
      /* Our caller needs to either ensure that we will find all sets
	 (in case the code has not been optimized yet), or take care of
	 possible labels by setting BOUNDARY to the preceding
	 CODE_LABEL.  */
3381 if (LABEL_P (before
))
3383 gcc_assert (before
== boundary
);
3387 if (INSN_P (before
))
3389 int nregs_old
= parm
.nregs
;
3390 note_stores (PATTERN (before
), parms_set
, &parm
);
3391 /* If we found something that did not set a parameter reg,
3392 we're done. Do not keep going, as that might result
3393 in hoisting an insn before the setting of a pseudo
3394 that is used by the hoisted insn. */
3395 if (nregs_old
!= parm
.nregs
)
/* Return true if we should avoid inserting code between INSN and preceding
   call instruction.  */
3408 keep_with_call_p (rtx insn
)
3412 if (INSN_P (insn
) && (set
= single_set (insn
)) != NULL
)
3414 if (REG_P (SET_DEST (set
))
3415 && REGNO (SET_DEST (set
)) < FIRST_PSEUDO_REGISTER
3416 && fixed_regs
[REGNO (SET_DEST (set
))]
3417 && general_operand (SET_SRC (set
), VOIDmode
))
3419 if (REG_P (SET_SRC (set
))
3420 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set
)))
3421 && REG_P (SET_DEST (set
))
3422 && REGNO (SET_DEST (set
)) >= FIRST_PSEUDO_REGISTER
)
3424 /* There may be a stack pop just after the call and before the store
3425 of the return register. Search for the actual store when deciding
3426 if we can break or not. */
3427 if (SET_DEST (set
) == stack_pointer_rtx
)
3429 rtx i2
= next_nonnote_insn (insn
);
3430 if (i2
&& keep_with_call_p (i2
))
/* Return true if LABEL is a target of JUMP_INSN.  This applies only
   to non-complex jumps.  That is, direct unconditional, conditional,
   and tablejumps, but not computed jumps or returns.  It also does
   not apply to the fallthru case of a conditional jump.  */
3443 label_is_jump_target_p (rtx label
, rtx jump_insn
)
3445 rtx tmp
= JUMP_LABEL (jump_insn
);
3450 if (tablejump_p (jump_insn
, NULL
, &tmp
))
3452 rtvec vec
= XVEC (PATTERN (tmp
),
3453 GET_CODE (PATTERN (tmp
)) == ADDR_DIFF_VEC
);
3454 int i
, veclen
= GET_NUM_ELEM (vec
);
3456 for (i
= 0; i
< veclen
; ++i
)
3457 if (XEXP (RTVEC_ELT (vec
, i
), 0) == label
)
/* Return an estimate of the cost of computing rtx X.
   One use is in cse, to decide which expression to keep in the hash table.
   Another is in rtl generation, to pick the cheapest way to multiply.
   Other uses like the latter are expected in the future.  */
3471 rtx_cost (rtx x
, enum rtx_code outer_code ATTRIBUTE_UNUSED
)
3481 /* Compute the default costs of certain things.
3482 Note that targetm.rtx_costs can override the defaults. */
3484 code
= GET_CODE (x
);
3488 total
= COSTS_N_INSNS (5);
3494 total
= COSTS_N_INSNS (7);
3497 /* Used in loop.c and combine.c as a marker. */
3501 total
= COSTS_N_INSNS (1);
3511 /* If we can't tie these modes, make this expensive. The larger
3512 the mode, the more expensive it is. */
3513 if (! MODES_TIEABLE_P (GET_MODE (x
), GET_MODE (SUBREG_REG (x
))))
3514 return COSTS_N_INSNS (2
3515 + GET_MODE_SIZE (GET_MODE (x
)) / UNITS_PER_WORD
);
3519 if (targetm
.rtx_costs (x
, code
, outer_code
, &total
))
3524 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3525 which is already in total. */
3527 fmt
= GET_RTX_FORMAT (code
);
3528 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3530 total
+= rtx_cost (XEXP (x
, i
), code
);
3531 else if (fmt
[i
] == 'E')
3532 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3533 total
+= rtx_cost (XVECEXP (x
, i
, j
), code
);
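
/* Example (illustrative, not from the original sources): for an rtx
   such as (plus:SI (mult:SI (reg:SI 1) (reg:SI 2)) (reg:SI 3)), the
   returned estimate is the default cost of the outer code plus the
   recursively summed costs of its operands, unless targetm.rtx_costs
   recognizes the whole expression (for instance, a target with a
   multiply-add instruction can report a single combined cost).  */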
/* Return the cost of address expression X.
   Expect that X is a properly formed address reference.  */
3542 address_cost (rtx x
, enum machine_mode mode
)
3544 /* We may be asked for cost of various unusual addresses, such as operands
3545 of push instruction. It is not worthwhile to complicate writing
3546 of the target hook by such cases. */
3548 if (!memory_address_p (mode
, x
))
3551 return targetm
.address_cost (x
);
3554 /* If the target doesn't override, compute the cost as with arithmetic. */
3557 default_address_cost (rtx x
)
3559 return rtx_cost (x
, MEM
);
3563 unsigned HOST_WIDE_INT
3564 nonzero_bits (rtx x
, enum machine_mode mode
)
3566 return cached_nonzero_bits (x
, mode
, NULL_RTX
, VOIDmode
, 0);
3570 num_sign_bit_copies (rtx x
, enum machine_mode mode
)
3572 return cached_num_sign_bit_copies (x
, mode
, NULL_RTX
, VOIDmode
, 0);
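
/* Examples (illustrative, not from the original sources): for
   X = (and:SI (reg:SI 1) (const_int 255)), nonzero_bits (X, SImode)
   is at most 0xff, because the AND clears every higher bit.  For
   X = (sign_extend:SI (reg:QI 1)), num_sign_bit_copies (X, SImode) is
   at least 25, since the upper 24 bits replicate the QImode sign bit
   and the sign bit itself counts as one copy.  */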
/* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
   It avoids exponential behavior in nonzero_bits1 when X has
   identical subexpressions on the first or the second level.  */
3579 static unsigned HOST_WIDE_INT
3580 cached_nonzero_bits (rtx x
, enum machine_mode mode
, rtx known_x
,
3581 enum machine_mode known_mode
,
3582 unsigned HOST_WIDE_INT known_ret
)
3584 if (x
== known_x
&& mode
== known_mode
)
3587 /* Try to find identical subexpressions. If found call
3588 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3589 precomputed value for the subexpression as KNOWN_RET. */
3591 if (ARITHMETIC_P (x
))
3593 rtx x0
= XEXP (x
, 0);
3594 rtx x1
= XEXP (x
, 1);
3596 /* Check the first level. */
3598 return nonzero_bits1 (x
, mode
, x0
, mode
,
3599 cached_nonzero_bits (x0
, mode
, known_x
,
3600 known_mode
, known_ret
));
3602 /* Check the second level. */
3603 if (ARITHMETIC_P (x0
)
3604 && (x1
== XEXP (x0
, 0) || x1
== XEXP (x0
, 1)))
3605 return nonzero_bits1 (x
, mode
, x1
, mode
,
3606 cached_nonzero_bits (x1
, mode
, known_x
,
3607 known_mode
, known_ret
));
3609 if (ARITHMETIC_P (x1
)
3610 && (x0
== XEXP (x1
, 0) || x0
== XEXP (x1
, 1)))
3611 return nonzero_bits1 (x
, mode
, x0
, mode
,
3612 cached_nonzero_bits (x0
, mode
, known_x
,
3613 known_mode
, known_ret
));
3616 return nonzero_bits1 (x
, mode
, known_x
, known_mode
, known_ret
);
/* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
   We don't let nonzero_bits recur into num_sign_bit_copies, because that
   is less useful.  We can't allow both, because that results in exponential
   run time recursion.  There is a nullstone testcase that triggered
   this.  This macro avoids accidental uses of num_sign_bit_copies.  */
#define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
/* Given an expression, X, compute which bits in X can be nonzero.
   We don't care about bits outside of those defined in MODE.

   For most X this is simply GET_MODE_MASK (MODE), but if X is an
   arithmetic operation, we can do better.  */
3632 static unsigned HOST_WIDE_INT
3633 nonzero_bits1 (rtx x
, enum machine_mode mode
, rtx known_x
,
3634 enum machine_mode known_mode
,
3635 unsigned HOST_WIDE_INT known_ret
)
3637 unsigned HOST_WIDE_INT nonzero
= GET_MODE_MASK (mode
);
3638 unsigned HOST_WIDE_INT inner_nz
;
3640 unsigned int mode_width
= GET_MODE_BITSIZE (mode
);
3642 /* For floating-point values, assume all bits are needed. */
3643 if (FLOAT_MODE_P (GET_MODE (x
)) || FLOAT_MODE_P (mode
))
3646 /* If X is wider than MODE, use its mode instead. */
3647 if (GET_MODE_BITSIZE (GET_MODE (x
)) > mode_width
)
3649 mode
= GET_MODE (x
);
3650 nonzero
= GET_MODE_MASK (mode
);
3651 mode_width
= GET_MODE_BITSIZE (mode
);
3654 if (mode_width
> HOST_BITS_PER_WIDE_INT
)
3655 /* Our only callers in this case look for single bit values. So
3656 just return the mode mask. Those tests will then be false. */
3659 #ifndef WORD_REGISTER_OPERATIONS
3660 /* If MODE is wider than X, but both are a single word for both the host
3661 and target machines, we can compute this from which bits of the
3662 object might be nonzero in its own mode, taking into account the fact
3663 that on many CISC machines, accessing an object in a wider mode
3664 causes the high-order bits to become undefined. So they are
3665 not known to be zero. */
3667 if (GET_MODE (x
) != VOIDmode
&& GET_MODE (x
) != mode
3668 && GET_MODE_BITSIZE (GET_MODE (x
)) <= BITS_PER_WORD
3669 && GET_MODE_BITSIZE (GET_MODE (x
)) <= HOST_BITS_PER_WIDE_INT
3670 && GET_MODE_BITSIZE (mode
) > GET_MODE_BITSIZE (GET_MODE (x
)))
3672 nonzero
&= cached_nonzero_bits (x
, GET_MODE (x
),
3673 known_x
, known_mode
, known_ret
);
3674 nonzero
|= GET_MODE_MASK (mode
) & ~GET_MODE_MASK (GET_MODE (x
));
3679 code
= GET_CODE (x
);
3683 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3684 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3685 all the bits above ptr_mode are known to be zero. */
3686 if (POINTERS_EXTEND_UNSIGNED
&& GET_MODE (x
) == Pmode
3688 nonzero
&= GET_MODE_MASK (ptr_mode
);
3691 /* Include declared information about alignment of pointers. */
3692 /* ??? We don't properly preserve REG_POINTER changes across
3693 pointer-to-integer casts, so we can't trust it except for
3694 things that we know must be pointers. See execute/960116-1.c. */
3695 if ((x
== stack_pointer_rtx
3696 || x
== frame_pointer_rtx
3697 || x
== arg_pointer_rtx
)
3698 && REGNO_POINTER_ALIGN (REGNO (x
)))
3700 unsigned HOST_WIDE_INT alignment
3701 = REGNO_POINTER_ALIGN (REGNO (x
)) / BITS_PER_UNIT
;
3703 #ifdef PUSH_ROUNDING
3704 /* If PUSH_ROUNDING is defined, it is possible for the
3705 stack to be momentarily aligned only to that amount,
3706 so we pick the least alignment. */
3707 if (x
== stack_pointer_rtx
&& PUSH_ARGS
)
3708 alignment
= MIN ((unsigned HOST_WIDE_INT
) PUSH_ROUNDING (1),
3712 nonzero
&= ~(alignment
- 1);
3716 unsigned HOST_WIDE_INT nonzero_for_hook
= nonzero
;
3717 rtx
new = rtl_hooks
.reg_nonzero_bits (x
, mode
, known_x
,
3718 known_mode
, known_ret
,
3722 nonzero_for_hook
&= cached_nonzero_bits (new, mode
, known_x
,
3723 known_mode
, known_ret
);
3725 return nonzero_for_hook
;
3729 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3730 /* If X is negative in MODE, sign-extend the value. */
3731 if (INTVAL (x
) > 0 && mode_width
< BITS_PER_WORD
3732 && 0 != (INTVAL (x
) & ((HOST_WIDE_INT
) 1 << (mode_width
- 1))))
3733 return (INTVAL (x
) | ((HOST_WIDE_INT
) (-1) << mode_width
));
3739 #ifdef LOAD_EXTEND_OP
3740 /* In many, if not most, RISC machines, reading a byte from memory
3741 zeros the rest of the register. Noticing that fact saves a lot
3742 of extra zero-extends. */
3743 if (LOAD_EXTEND_OP (GET_MODE (x
)) == ZERO_EXTEND
)
3744 nonzero
&= GET_MODE_MASK (GET_MODE (x
));
3749 case UNEQ
: case LTGT
:
3750 case GT
: case GTU
: case UNGT
:
3751 case LT
: case LTU
: case UNLT
:
3752 case GE
: case GEU
: case UNGE
:
3753 case LE
: case LEU
: case UNLE
:
3754 case UNORDERED
: case ORDERED
:
3755 /* If this produces an integer result, we know which bits are set.
3756 Code here used to clear bits outside the mode of X, but that is
3758 /* Mind that MODE is the mode the caller wants to look at this
3759 operation in, and not the actual operation mode. We can wind
3760 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3761 that describes the results of a vector compare. */
3762 if (GET_MODE_CLASS (GET_MODE (x
)) == MODE_INT
3763 && mode_width
<= HOST_BITS_PER_WIDE_INT
)
3764 nonzero
= STORE_FLAG_VALUE
;
3769 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3770 and num_sign_bit_copies. */
3771 if (num_sign_bit_copies (XEXP (x
, 0), GET_MODE (x
))
3772 == GET_MODE_BITSIZE (GET_MODE (x
)))
3776 if (GET_MODE_SIZE (GET_MODE (x
)) < mode_width
)
3777 nonzero
|= (GET_MODE_MASK (mode
) & ~GET_MODE_MASK (GET_MODE (x
)));
3782 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3783 and num_sign_bit_copies. */
3784 if (num_sign_bit_copies (XEXP (x
, 0), GET_MODE (x
))
3785 == GET_MODE_BITSIZE (GET_MODE (x
)))
3791 nonzero
&= (cached_nonzero_bits (XEXP (x
, 0), mode
,
3792 known_x
, known_mode
, known_ret
)
3793 & GET_MODE_MASK (mode
));
3797 nonzero
&= cached_nonzero_bits (XEXP (x
, 0), mode
,
3798 known_x
, known_mode
, known_ret
);
3799 if (GET_MODE (XEXP (x
, 0)) != VOIDmode
)
3800 nonzero
&= GET_MODE_MASK (GET_MODE (XEXP (x
, 0)));
3804 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3805 Otherwise, show all the bits in the outer mode but not the inner
3807 inner_nz
= cached_nonzero_bits (XEXP (x
, 0), mode
,
3808 known_x
, known_mode
, known_ret
);
3809 if (GET_MODE (XEXP (x
, 0)) != VOIDmode
)
3811 inner_nz
&= GET_MODE_MASK (GET_MODE (XEXP (x
, 0)));
3813 & (((HOST_WIDE_INT
) 1
3814 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0))) - 1))))
3815 inner_nz
|= (GET_MODE_MASK (mode
)
3816 & ~GET_MODE_MASK (GET_MODE (XEXP (x
, 0))));
3819 nonzero
&= inner_nz
;
3823 nonzero
&= cached_nonzero_bits (XEXP (x
, 0), mode
,
3824 known_x
, known_mode
, known_ret
)
3825 & cached_nonzero_bits (XEXP (x
, 1), mode
,
3826 known_x
, known_mode
, known_ret
);
3830 case UMIN
: case UMAX
: case SMIN
: case SMAX
:
3832 unsigned HOST_WIDE_INT nonzero0
=
3833 cached_nonzero_bits (XEXP (x
, 0), mode
,
3834 known_x
, known_mode
, known_ret
);
3836 /* Don't call nonzero_bits for the second time if it cannot change
3838 if ((nonzero
& nonzero0
) != nonzero
)
3840 | cached_nonzero_bits (XEXP (x
, 1), mode
,
3841 known_x
, known_mode
, known_ret
);
3845 case PLUS
: case MINUS
:
3847 case DIV
: case UDIV
:
3848 case MOD
: case UMOD
:
3849 /* We can apply the rules of arithmetic to compute the number of
3850 high- and low-order zero bits of these operations. We start by
3851 computing the width (position of the highest-order nonzero bit)
3852 and the number of low-order zero bits for each value. */
3854 unsigned HOST_WIDE_INT nz0
=
3855 cached_nonzero_bits (XEXP (x
, 0), mode
,
3856 known_x
, known_mode
, known_ret
);
3857 unsigned HOST_WIDE_INT nz1
=
3858 cached_nonzero_bits (XEXP (x
, 1), mode
,
3859 known_x
, known_mode
, known_ret
);
3860 int sign_index
= GET_MODE_BITSIZE (GET_MODE (x
)) - 1;
3861 int width0
= floor_log2 (nz0
) + 1;
3862 int width1
= floor_log2 (nz1
) + 1;
3863 int low0
= floor_log2 (nz0
& -nz0
);
3864 int low1
= floor_log2 (nz1
& -nz1
);
3865 HOST_WIDE_INT op0_maybe_minusp
3866 = (nz0
& ((HOST_WIDE_INT
) 1 << sign_index
));
3867 HOST_WIDE_INT op1_maybe_minusp
3868 = (nz1
& ((HOST_WIDE_INT
) 1 << sign_index
));
3869 unsigned int result_width
= mode_width
;
3875 result_width
= MAX (width0
, width1
) + 1;
3876 result_low
= MIN (low0
, low1
);
3879 result_low
= MIN (low0
, low1
);
3882 result_width
= width0
+ width1
;
3883 result_low
= low0
+ low1
;
3888 if (! op0_maybe_minusp
&& ! op1_maybe_minusp
)
3889 result_width
= width0
;
3894 result_width
= width0
;
3899 if (! op0_maybe_minusp
&& ! op1_maybe_minusp
)
3900 result_width
= MIN (width0
, width1
);
3901 result_low
= MIN (low0
, low1
);
3906 result_width
= MIN (width0
, width1
);
3907 result_low
= MIN (low0
, low1
);
3913 if (result_width
< mode_width
)
3914 nonzero
&= ((HOST_WIDE_INT
) 1 << result_width
) - 1;
3917 nonzero
&= ~(((HOST_WIDE_INT
) 1 << result_low
) - 1);
3919 #ifdef POINTERS_EXTEND_UNSIGNED
3920 /* If pointers extend unsigned and this is an addition or subtraction
3921 to a pointer in Pmode, all the bits above ptr_mode are known to be
3923 if (POINTERS_EXTEND_UNSIGNED
> 0 && GET_MODE (x
) == Pmode
3924 && (code
== PLUS
|| code
== MINUS
)
3925 && REG_P (XEXP (x
, 0)) && REG_POINTER (XEXP (x
, 0)))
3926 nonzero
&= GET_MODE_MASK (ptr_mode
);
3932 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
3933 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
)
3934 nonzero
&= ((HOST_WIDE_INT
) 1 << INTVAL (XEXP (x
, 1))) - 1;
3938 /* If this is a SUBREG formed for a promoted variable that has
3939 been zero-extended, we know that at least the high-order bits
3940 are zero, though others might be too. */
3942 if (SUBREG_PROMOTED_VAR_P (x
) && SUBREG_PROMOTED_UNSIGNED_P (x
) > 0)
3943 nonzero
= GET_MODE_MASK (GET_MODE (x
))
3944 & cached_nonzero_bits (SUBREG_REG (x
), GET_MODE (x
),
3945 known_x
, known_mode
, known_ret
);
3947 /* If the inner mode is a single word for both the host and target
3948 machines, we can compute this from which bits of the inner
3949 object might be nonzero. */
3950 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))) <= BITS_PER_WORD
3951 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
)))
3952 <= HOST_BITS_PER_WIDE_INT
))
3954 nonzero
&= cached_nonzero_bits (SUBREG_REG (x
), mode
,
3955 known_x
, known_mode
, known_ret
);
3957 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3958 /* If this is a typical RISC machine, we only have to worry
3959 about the way loads are extended. */
3960 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x
))) == SIGN_EXTEND
3962 & (((unsigned HOST_WIDE_INT
) 1
3963 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))) - 1))))
3965 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x
))) != ZERO_EXTEND
)
3966 || !MEM_P (SUBREG_REG (x
)))
3969 /* On many CISC machines, accessing an object in a wider mode
3970 causes the high-order bits to become undefined. So they are
3971 not known to be zero. */
3972 if (GET_MODE_SIZE (GET_MODE (x
))
3973 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
3974 nonzero
|= (GET_MODE_MASK (GET_MODE (x
))
3975 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x
))));
3984 /* The nonzero bits are in two classes: any bits within MODE
3985 that aren't in GET_MODE (x) are always significant. The rest of the
3986 nonzero bits are those that are significant in the operand of
3987 the shift when shifted the appropriate number of bits. This
3988 shows that high-order bits are cleared by the right shift and
3989 low-order bits by left shifts. */
3990 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
3991 && INTVAL (XEXP (x
, 1)) >= 0
3992 && INTVAL (XEXP (x
, 1)) < HOST_BITS_PER_WIDE_INT
)
3994 enum machine_mode inner_mode
= GET_MODE (x
);
3995 unsigned int width
= GET_MODE_BITSIZE (inner_mode
);
3996 int count
= INTVAL (XEXP (x
, 1));
3997 unsigned HOST_WIDE_INT mode_mask
= GET_MODE_MASK (inner_mode
);
3998 unsigned HOST_WIDE_INT op_nonzero
=
3999 cached_nonzero_bits (XEXP (x
, 0), mode
,
4000 known_x
, known_mode
, known_ret
);
4001 unsigned HOST_WIDE_INT inner
= op_nonzero
& mode_mask
;
4002 unsigned HOST_WIDE_INT outer
= 0;
4004 if (mode_width
> width
)
4005 outer
= (op_nonzero
& nonzero
& ~mode_mask
);
4007 if (code
== LSHIFTRT
)
4009 else if (code
== ASHIFTRT
)
4013 /* If the sign bit may have been nonzero before the shift, we
4014 need to mark all the places it could have been copied to
4015 by the shift as possibly nonzero. */
4016 if (inner
& ((HOST_WIDE_INT
) 1 << (width
- 1 - count
)))
4017 inner
|= (((HOST_WIDE_INT
) 1 << count
) - 1) << (width
- count
);
4019 else if (code
== ASHIFT
)
4022 inner
= ((inner
<< (count
% width
)
4023 | (inner
>> (width
- (count
% width
)))) & mode_mask
);
4025 nonzero
&= (outer
| inner
);
4031 /* This is at most the number of bits in the mode. */
4032 nonzero
= ((HOST_WIDE_INT
) 2 << (floor_log2 (mode_width
))) - 1;
4036 /* If CLZ has a known value at zero, then the nonzero bits are
4037 that value, plus the number of bits in the mode minus one. */
4038 if (CLZ_DEFINED_VALUE_AT_ZERO (mode
, nonzero
))
4039 nonzero
|= ((HOST_WIDE_INT
) 1 << (floor_log2 (mode_width
))) - 1;
4045 /* If CTZ has a known value at zero, then the nonzero bits are
4046 that value, plus the number of bits in the mode minus one. */
4047 if (CTZ_DEFINED_VALUE_AT_ZERO (mode
, nonzero
))
4048 nonzero
|= ((HOST_WIDE_INT
) 1 << (floor_log2 (mode_width
))) - 1;
4059 unsigned HOST_WIDE_INT nonzero_true
=
4060 cached_nonzero_bits (XEXP (x
, 1), mode
,
4061 known_x
, known_mode
, known_ret
);
4063 /* Don't call nonzero_bits for the second time if it cannot change
4065 if ((nonzero
& nonzero_true
) != nonzero
)
4066 nonzero
&= nonzero_true
4067 | cached_nonzero_bits (XEXP (x
, 2), mode
,
4068 known_x
, known_mode
, known_ret
);
4079 /* See the macro definition above. */
4080 #undef cached_num_sign_bit_copies
/* The function cached_num_sign_bit_copies is a wrapper around
   num_sign_bit_copies1.  It avoids exponential behavior in
   num_sign_bit_copies1 when X has identical subexpressions on the
   first or the second level.  */
4089 cached_num_sign_bit_copies (rtx x
, enum machine_mode mode
, rtx known_x
,
4090 enum machine_mode known_mode
,
4091 unsigned int known_ret
)
4093 if (x
== known_x
&& mode
== known_mode
)
4096 /* Try to find identical subexpressions. If found call
4097 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4098 the precomputed value for the subexpression as KNOWN_RET. */
4100 if (ARITHMETIC_P (x
))
4102 rtx x0
= XEXP (x
, 0);
4103 rtx x1
= XEXP (x
, 1);
4105 /* Check the first level. */
4108 num_sign_bit_copies1 (x
, mode
, x0
, mode
,
4109 cached_num_sign_bit_copies (x0
, mode
, known_x
,
4113 /* Check the second level. */
4114 if (ARITHMETIC_P (x0
)
4115 && (x1
== XEXP (x0
, 0) || x1
== XEXP (x0
, 1)))
4117 num_sign_bit_copies1 (x
, mode
, x1
, mode
,
4118 cached_num_sign_bit_copies (x1
, mode
, known_x
,
4122 if (ARITHMETIC_P (x1
)
4123 && (x0
== XEXP (x1
, 0) || x0
== XEXP (x1
, 1)))
4125 num_sign_bit_copies1 (x
, mode
, x0
, mode
,
4126 cached_num_sign_bit_copies (x0
, mode
, known_x
,
4131 return num_sign_bit_copies1 (x
, mode
, known_x
, known_mode
, known_ret
);
/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */
4140 num_sign_bit_copies1 (rtx x
, enum machine_mode mode
, rtx known_x
,
4141 enum machine_mode known_mode
,
4142 unsigned int known_ret
)
4144 enum rtx_code code
= GET_CODE (x
);
4145 unsigned int bitwidth
= GET_MODE_BITSIZE (mode
);
4146 int num0
, num1
, result
;
4147 unsigned HOST_WIDE_INT nonzero
;
4149 /* If we weren't given a mode, use the mode of X. If the mode is still
4150 VOIDmode, we don't know anything. Likewise if one of the modes is
4153 if (mode
== VOIDmode
)
4154 mode
= GET_MODE (x
);
4156 if (mode
== VOIDmode
|| FLOAT_MODE_P (mode
) || FLOAT_MODE_P (GET_MODE (x
)))
4159 /* For a smaller object, just ignore the high bits. */
4160 if (bitwidth
< GET_MODE_BITSIZE (GET_MODE (x
)))
4162 num0
= cached_num_sign_bit_copies (x
, GET_MODE (x
),
4163 known_x
, known_mode
, known_ret
);
4165 num0
- (int) (GET_MODE_BITSIZE (GET_MODE (x
)) - bitwidth
));
4168 if (GET_MODE (x
) != VOIDmode
&& bitwidth
> GET_MODE_BITSIZE (GET_MODE (x
)))
4170 #ifndef WORD_REGISTER_OPERATIONS
4171 /* If this machine does not do all register operations on the entire
4172 register and MODE is wider than the mode of X, we can say nothing
4173 at all about the high-order bits. */
4176 /* Likewise on machines that do, if the mode of the object is smaller
4177 than a word and loads of that size don't sign extend, we can say
4178 nothing about the high order bits. */
4179 if (GET_MODE_BITSIZE (GET_MODE (x
)) < BITS_PER_WORD
4180 #ifdef LOAD_EXTEND_OP
4181 && LOAD_EXTEND_OP (GET_MODE (x
)) != SIGN_EXTEND
4192 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4193 /* If pointers extend signed and this is a pointer in Pmode, say that
4194 all the bits above ptr_mode are known to be sign bit copies. */
4195 if (! POINTERS_EXTEND_UNSIGNED
&& GET_MODE (x
) == Pmode
&& mode
== Pmode
4197 return GET_MODE_BITSIZE (Pmode
) - GET_MODE_BITSIZE (ptr_mode
) + 1;
4201 unsigned int copies_for_hook
= 1, copies
= 1;
4202 rtx
new = rtl_hooks
.reg_num_sign_bit_copies (x
, mode
, known_x
,
4203 known_mode
, known_ret
,
4207 copies
= cached_num_sign_bit_copies (new, mode
, known_x
,
4208 known_mode
, known_ret
);
4210 if (copies
> 1 || copies_for_hook
> 1)
4211 return MAX (copies
, copies_for_hook
);
4213 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4218 #ifdef LOAD_EXTEND_OP
4219 /* Some RISC machines sign-extend all loads of smaller than a word. */
4220 if (LOAD_EXTEND_OP (GET_MODE (x
)) == SIGN_EXTEND
)
4221 return MAX (1, ((int) bitwidth
4222 - (int) GET_MODE_BITSIZE (GET_MODE (x
)) + 1));
4227 /* If the constant is negative, take its 1's complement and remask.
4228 Then see how many zero bits we have. */
4229 nonzero
= INTVAL (x
) & GET_MODE_MASK (mode
);
4230 if (bitwidth
<= HOST_BITS_PER_WIDE_INT
4231 && (nonzero
& ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
4232 nonzero
= (~nonzero
) & GET_MODE_MASK (mode
);
4234 return (nonzero
== 0 ? bitwidth
: bitwidth
- floor_log2 (nonzero
) - 1);
4237 /* If this is a SUBREG for a promoted object that is sign-extended
4238 and we are looking at it in a wider mode, we know that at least the
4239 high-order bits are known to be sign bit copies. */
4241 if (SUBREG_PROMOTED_VAR_P (x
) && ! SUBREG_PROMOTED_UNSIGNED_P (x
))
4243 num0
= cached_num_sign_bit_copies (SUBREG_REG (x
), mode
,
4244 known_x
, known_mode
, known_ret
);
4245 return MAX ((int) bitwidth
4246 - (int) GET_MODE_BITSIZE (GET_MODE (x
)) + 1,
4250 /* For a smaller object, just ignore the high bits. */
4251 if (bitwidth
<= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
))))
4253 num0
= cached_num_sign_bit_copies (SUBREG_REG (x
), VOIDmode
,
4254 known_x
, known_mode
, known_ret
);
4255 return MAX (1, (num0
4256 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x
)))
4260 #ifdef WORD_REGISTER_OPERATIONS
4261 #ifdef LOAD_EXTEND_OP
4262 /* For paradoxical SUBREGs on machines where all register operations
4263 affect the entire register, just look inside. Note that we are
4264 passing MODE to the recursive call, so the number of sign bit copies
4265 will remain relative to that mode, not the inner mode. */
4267 /* This works only if loads sign extend. Otherwise, if we get a
4268 reload for the inner part, it may be loaded from the stack, and
4269 then we lose all sign bit copies that existed before the store
4272 if ((GET_MODE_SIZE (GET_MODE (x
))
4273 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x
))))
4274 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x
))) == SIGN_EXTEND
4275 && MEM_P (SUBREG_REG (x
)))
4276 return cached_num_sign_bit_copies (SUBREG_REG (x
), mode
,
4277 known_x
, known_mode
, known_ret
);
4283 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
)
4284 return MAX (1, (int) bitwidth
- INTVAL (XEXP (x
, 1)));
4288 return (bitwidth
- GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
4289 + cached_num_sign_bit_copies (XEXP (x
, 0), VOIDmode
,
4290 known_x
, known_mode
, known_ret
));
4293 /* For a smaller object, just ignore the high bits. */
4294 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), VOIDmode
,
4295 known_x
, known_mode
, known_ret
);
4296 return MAX (1, (num0
- (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x
, 0)))
4300 return cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4301 known_x
, known_mode
, known_ret
);
4303 case ROTATE
: case ROTATERT
:
4304 /* If we are rotating left by a number of bits less than the number
4305 of sign bit copies, we can just subtract that amount from the
4307 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4308 && INTVAL (XEXP (x
, 1)) >= 0
4309 && INTVAL (XEXP (x
, 1)) < (int) bitwidth
)
4311 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4312 known_x
, known_mode
, known_ret
);
4313 return MAX (1, num0
- (code
== ROTATE
? INTVAL (XEXP (x
, 1))
4314 : (int) bitwidth
- INTVAL (XEXP (x
, 1))));
4319 /* In general, this subtracts one sign bit copy. But if the value
4320 is known to be positive, the number of sign bit copies is the
4321 same as that of the input. Finally, if the input has just one bit
4322 that might be nonzero, all the bits are copies of the sign bit. */
4323 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4324 known_x
, known_mode
, known_ret
);
4325 if (bitwidth
> HOST_BITS_PER_WIDE_INT
)
4326 return num0
> 1 ? num0
- 1 : 1;
4328 nonzero
= nonzero_bits (XEXP (x
, 0), mode
);
4333 && (((HOST_WIDE_INT
) 1 << (bitwidth
- 1)) & nonzero
))
4338 case IOR
: case AND
: case XOR
:
4339 case SMIN
: case SMAX
: case UMIN
: case UMAX
:
4340 /* Logical operations will preserve the number of sign-bit copies.
4341 MIN and MAX operations always return one of the operands. */
4342 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4343 known_x
, known_mode
, known_ret
);
4344 num1
= cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4345 known_x
, known_mode
, known_ret
);
4346 return MIN (num0
, num1
);
4348 case PLUS
: case MINUS
:
4349 /* For addition and subtraction, we can have a 1-bit carry. However,
4350 if we are subtracting 1 from a positive number, there will not
4351 be such a carry. Furthermore, if the positive number is known to
4352 be 0 or 1, we know the result is either -1 or 0. */
4354 if (code
== PLUS
&& XEXP (x
, 1) == constm1_rtx
4355 && bitwidth
<= HOST_BITS_PER_WIDE_INT
)
4357 nonzero
= nonzero_bits (XEXP (x
, 0), mode
);
4358 if ((((HOST_WIDE_INT
) 1 << (bitwidth
- 1)) & nonzero
) == 0)
4359 return (nonzero
== 1 || nonzero
== 0 ? bitwidth
4360 : bitwidth
- floor_log2 (nonzero
) - 1);
4363 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4364 known_x
, known_mode
, known_ret
);
4365 num1
= cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4366 known_x
, known_mode
, known_ret
);
4367 result
= MAX (1, MIN (num0
, num1
) - 1);
4369 #ifdef POINTERS_EXTEND_UNSIGNED
4370 /* If pointers extend signed and this is an addition or subtraction
4371 to a pointer in Pmode, all the bits above ptr_mode are known to be
4373 if (! POINTERS_EXTEND_UNSIGNED
&& GET_MODE (x
) == Pmode
4374 && (code
== PLUS
|| code
== MINUS
)
4375 && REG_P (XEXP (x
, 0)) && REG_POINTER (XEXP (x
, 0)))
4376 result
= MAX ((int) (GET_MODE_BITSIZE (Pmode
)
4377 - GET_MODE_BITSIZE (ptr_mode
) + 1),
      /* The number of bits of the product is the sum of the number of
	 bits of both terms.  However, unless one of the terms is known
	 to be positive, we must allow for an additional bit since negating
	 a negative number can remove one sign bit copy.  */
4388 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4389 known_x
, known_mode
, known_ret
);
4390 num1
= cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4391 known_x
, known_mode
, known_ret
);
4393 result
= bitwidth
- (bitwidth
- num0
) - (bitwidth
- num1
);
4395 && (bitwidth
> HOST_BITS_PER_WIDE_INT
4396 || (((nonzero_bits (XEXP (x
, 0), mode
)
4397 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
4398 && ((nonzero_bits (XEXP (x
, 1), mode
)
4399 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0))))
4402 return MAX (1, result
);
4405 /* The result must be <= the first operand. If the first operand
4406 has the high bit set, we know nothing about the number of sign
4408 if (bitwidth
> HOST_BITS_PER_WIDE_INT
)
4410 else if ((nonzero_bits (XEXP (x
, 0), mode
)
4411 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
4414 return cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4415 known_x
, known_mode
, known_ret
);
4418 /* The result must be <= the second operand. */
4419 return cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4420 known_x
, known_mode
, known_ret
);
4423 /* Similar to unsigned division, except that we have to worry about
4424 the case where the divisor is negative, in which case we have
4426 result
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4427 known_x
, known_mode
, known_ret
);
4429 && (bitwidth
> HOST_BITS_PER_WIDE_INT
4430 || (nonzero_bits (XEXP (x
, 1), mode
)
4431 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0))
4437 result
= cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4438 known_x
, known_mode
, known_ret
);
4440 && (bitwidth
> HOST_BITS_PER_WIDE_INT
4441 || (nonzero_bits (XEXP (x
, 1), mode
)
4442 & ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0))
4448 /* Shifts by a constant add to the number of bits equal to the
4450 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4451 known_x
, known_mode
, known_ret
);
4452 if (GET_CODE (XEXP (x
, 1)) == CONST_INT
4453 && INTVAL (XEXP (x
, 1)) > 0)
4454 num0
= MIN ((int) bitwidth
, num0
+ INTVAL (XEXP (x
, 1)));
4459 /* Left shifts destroy copies. */
4460 if (GET_CODE (XEXP (x
, 1)) != CONST_INT
4461 || INTVAL (XEXP (x
, 1)) < 0
4462 || INTVAL (XEXP (x
, 1)) >= (int) bitwidth
)
4465 num0
= cached_num_sign_bit_copies (XEXP (x
, 0), mode
,
4466 known_x
, known_mode
, known_ret
);
4467 return MAX (1, num0
- INTVAL (XEXP (x
, 1)));
4470 num0
= cached_num_sign_bit_copies (XEXP (x
, 1), mode
,
4471 known_x
, known_mode
, known_ret
);
4472 num1
= cached_num_sign_bit_copies (XEXP (x
, 2), mode
,
4473 known_x
, known_mode
, known_ret
);
4474 return MIN (num0
, num1
);
4476 case EQ
: case NE
: case GE
: case GT
: case LE
: case LT
:
4477 case UNEQ
: case LTGT
: case UNGE
: case UNGT
: case UNLE
: case UNLT
:
4478 case GEU
: case GTU
: case LEU
: case LTU
:
4479 case UNORDERED
: case ORDERED
:
4480 /* If the constant is negative, take its 1's complement and remask.
4481 Then see how many zero bits we have. */
4482 nonzero
= STORE_FLAG_VALUE
;
4483 if (bitwidth
<= HOST_BITS_PER_WIDE_INT
4484 && (nonzero
& ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))) != 0)
4485 nonzero
= (~nonzero
) & GET_MODE_MASK (mode
);
4487 return (nonzero
== 0 ? bitwidth
: bitwidth
- floor_log2 (nonzero
) - 1);
4493 /* If we haven't been able to figure it out by one of the above rules,
4494 see if some of the high-order bits are known to be zero. If so,
4495 count those bits and return one less than that amount. If we can't
4496 safely compute the mask for this mode, always return BITWIDTH. */
4498 bitwidth
= GET_MODE_BITSIZE (mode
);
4499 if (bitwidth
> HOST_BITS_PER_WIDE_INT
)
4502 nonzero
= nonzero_bits (x
, mode
);
4503 return nonzero
& ((HOST_WIDE_INT
) 1 << (bitwidth
- 1))
4504 ? 1 : bitwidth
- floor_log2 (nonzero
) - 1;
/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */
4511 insn_rtx_cost (rtx pat
)
4516 /* Extract the single set rtx from the instruction pattern.
4517 We can't use single_set since we only have the pattern. */
4518 if (GET_CODE (pat
) == SET
)
4520 else if (GET_CODE (pat
) == PARALLEL
)
4523 for (i
= 0; i
< XVECLEN (pat
, 0); i
++)
4525 rtx x
= XVECEXP (pat
, 0, i
);
4526 if (GET_CODE (x
) == SET
)
4539 cost
= rtx_cost (SET_SRC (set
), SET
);
4540 return cost
> 0 ? cost
: COSTS_N_INSNS (1);
/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands; (cc0) will have been replaced.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to canonizing it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
   and at INSN.  */
4571 canonicalize_condition (rtx insn
, rtx cond
, int reverse
, rtx
*earliest
,
4572 rtx want_reg
, int allow_cc_mode
, int valid_at_insn_p
)
4579 int reverse_code
= 0;
4580 enum machine_mode mode
;
4582 code
= GET_CODE (cond
);
4583 mode
= GET_MODE (cond
);
4584 op0
= XEXP (cond
, 0);
4585 op1
= XEXP (cond
, 1);
4588 code
= reversed_comparison_code (cond
, insn
);
4589 if (code
== UNKNOWN
)
4595 /* If we are comparing a register with zero, see if the register is set
4596 in the previous insn to a COMPARE or a comparison operation. Perform
4597 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4600 while ((GET_RTX_CLASS (code
) == RTX_COMPARE
4601 || GET_RTX_CLASS (code
) == RTX_COMM_COMPARE
)
4602 && op1
== CONST0_RTX (GET_MODE (op0
))
4605 /* Set nonzero when we find something of interest. */
4609 /* If comparison with cc0, import actual comparison from compare
4613 if ((prev
= prev_nonnote_insn (prev
)) == 0
4614 || !NONJUMP_INSN_P (prev
)
4615 || (set
= single_set (prev
)) == 0
4616 || SET_DEST (set
) != cc0_rtx
)
4619 op0
= SET_SRC (set
);
4620 op1
= CONST0_RTX (GET_MODE (op0
));
4626 /* If this is a COMPARE, pick up the two things being compared. */
4627 if (GET_CODE (op0
) == COMPARE
)
4629 op1
= XEXP (op0
, 1);
4630 op0
= XEXP (op0
, 0);
4633 else if (!REG_P (op0
))
4636 /* Go back to the previous insn. Stop if it is not an INSN. We also
4637 stop if it isn't a single set or if it has a REG_INC note because
4638 we don't want to bother dealing with it. */
4640 if ((prev
= prev_nonnote_insn (prev
)) == 0
4641 || !NONJUMP_INSN_P (prev
)
4642 || FIND_REG_INC_NOTE (prev
, NULL_RTX
))
4645 set
= set_of (op0
, prev
);
4648 && (GET_CODE (set
) != SET
4649 || !rtx_equal_p (SET_DEST (set
), op0
)))
4652 /* If this is setting OP0, get what it sets it to if it looks
4656 enum machine_mode inner_mode
= GET_MODE (SET_DEST (set
));
4657 #ifdef FLOAT_STORE_FLAG_VALUE
4658 REAL_VALUE_TYPE fsfv
;
4661 /* ??? We may not combine comparisons done in a CCmode with
4662 comparisons not done in a CCmode. This is to aid targets
4663 like Alpha that have an IEEE compliant EQ instruction, and
4664 a non-IEEE compliant BEQ instruction. The use of CCmode is
4665 actually artificial, simply to prevent the combination, but
4666 should not affect other platforms.
4668 However, we must allow VOIDmode comparisons to match either
4669 CCmode or non-CCmode comparison, because some ports have
4670 modeless comparisons inside branch patterns.
4672 ??? This mode check should perhaps look more like the mode check
4673 in simplify_comparison in combine. */
4675 if ((GET_CODE (SET_SRC (set
)) == COMPARE
4678 && GET_MODE_CLASS (inner_mode
) == MODE_INT
4679 && (GET_MODE_BITSIZE (inner_mode
)
4680 <= HOST_BITS_PER_WIDE_INT
)
4681 && (STORE_FLAG_VALUE
4682 & ((HOST_WIDE_INT
) 1
4683 << (GET_MODE_BITSIZE (inner_mode
) - 1))))
4684 #ifdef FLOAT_STORE_FLAG_VALUE
4686 && GET_MODE_CLASS (inner_mode
) == MODE_FLOAT
4687 && (fsfv
= FLOAT_STORE_FLAG_VALUE (inner_mode
),
4688 REAL_VALUE_NEGATIVE (fsfv
)))
4691 && COMPARISON_P (SET_SRC (set
))))
4692 && (((GET_MODE_CLASS (mode
) == MODE_CC
)
4693 == (GET_MODE_CLASS (inner_mode
) == MODE_CC
))
4694 || mode
== VOIDmode
|| inner_mode
== VOIDmode
))
4696 else if (((code
== EQ
4698 && (GET_MODE_BITSIZE (inner_mode
)
4699 <= HOST_BITS_PER_WIDE_INT
)
4700 && GET_MODE_CLASS (inner_mode
) == MODE_INT
4701 && (STORE_FLAG_VALUE
4702 & ((HOST_WIDE_INT
) 1
4703 << (GET_MODE_BITSIZE (inner_mode
) - 1))))
4704 #ifdef FLOAT_STORE_FLAG_VALUE
4706 && GET_MODE_CLASS (inner_mode
) == MODE_FLOAT
4707 && (fsfv
= FLOAT_STORE_FLAG_VALUE (inner_mode
),
4708 REAL_VALUE_NEGATIVE (fsfv
)))
4711 && COMPARISON_P (SET_SRC (set
))
4712 && (((GET_MODE_CLASS (mode
) == MODE_CC
)
4713 == (GET_MODE_CLASS (inner_mode
) == MODE_CC
))
4714 || mode
== VOIDmode
|| inner_mode
== VOIDmode
))
4724 else if (reg_set_p (op0
, prev
))
4725 /* If this sets OP0, but not directly, we have to give up. */
4730 /* If the caller is expecting the condition to be valid at INSN,
4731 make sure X doesn't change before INSN. */
4732 if (valid_at_insn_p
)
4733 if (modified_in_p (x
, prev
) || modified_between_p (x
, prev
, insn
))
4735 if (COMPARISON_P (x
))
4736 code
= GET_CODE (x
);
4739 code
= reversed_comparison_code (x
, prev
);
4740 if (code
== UNKNOWN
)
4745 op0
= XEXP (x
, 0), op1
= XEXP (x
, 1);
4751 /* If constant is first, put it last. */
4752 if (CONSTANT_P (op0
))
4753 code
= swap_condition (code
), tem
= op0
, op0
= op1
, op1
= tem
;
4755 /* If OP0 is the result of a comparison, we weren't able to find what
4756 was really being compared, so fail. */
4758 && GET_MODE_CLASS (GET_MODE (op0
)) == MODE_CC
)
4761 /* Canonicalize any ordered comparison with integers involving equality
4762 if we can do computations in the relevant mode and we do not
4765 if (GET_MODE_CLASS (GET_MODE (op0
)) != MODE_CC
4766 && GET_CODE (op1
) == CONST_INT
4767 && GET_MODE (op0
) != VOIDmode
4768 && GET_MODE_BITSIZE (GET_MODE (op0
)) <= HOST_BITS_PER_WIDE_INT
)
4770 HOST_WIDE_INT const_val
= INTVAL (op1
);
4771 unsigned HOST_WIDE_INT uconst_val
= const_val
;
4772 unsigned HOST_WIDE_INT max_val
4773 = (unsigned HOST_WIDE_INT
) GET_MODE_MASK (GET_MODE (op0
));
4778 if ((unsigned HOST_WIDE_INT
) const_val
!= max_val
>> 1)
4779 code
= LT
, op1
= gen_int_mode (const_val
+ 1, GET_MODE (op0
));
4782 /* When cross-compiling, const_val might be sign-extended from
4783 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
4785 if ((HOST_WIDE_INT
) (const_val
& max_val
)
4786 != (((HOST_WIDE_INT
) 1
4787 << (GET_MODE_BITSIZE (GET_MODE (op0
)) - 1))))
4788 code
= GT
, op1
= gen_int_mode (const_val
- 1, GET_MODE (op0
));
4792 if (uconst_val
< max_val
)
4793 code
= LTU
, op1
= gen_int_mode (uconst_val
+ 1, GET_MODE (op0
));
4797 if (uconst_val
!= 0)
4798 code
= GTU
, op1
= gen_int_mode (uconst_val
- 1, GET_MODE (op0
));
4806 /* Never return CC0; return zero instead. */
4810 return gen_rtx_fmt_ee (code
, VOIDmode
, op0
, op1
);
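
/* Worked example (illustrative, not from the original sources): asked
   to canonicalize (le (reg:SI 1) (const_int 4)), this returns
   (lt (reg:SI 1) (const_int 5)) as rule (4) above describes; likewise
   (leu ... (const_int 4)) becomes (ltu ... (const_int 5)), provided
   the constant is not already the largest value of the mode.  */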
/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */
4830 get_condition (rtx jump
, rtx
*earliest
, int allow_cc_mode
, int valid_at_insn_p
)
4836 /* If this is not a standard conditional jump, we can't parse it. */
4838 || ! any_condjump_p (jump
))
4840 set
= pc_set (jump
);
4842 cond
= XEXP (SET_SRC (set
), 0);
4844 /* If this branches to JUMP_LABEL when the condition is false, reverse
4847 = GET_CODE (XEXP (SET_SRC (set
), 2)) == LABEL_REF
4848 && XEXP (XEXP (SET_SRC (set
), 2), 0) == JUMP_LABEL (jump
);
4850 return canonicalize_condition (jump
, cond
, reverse
, earliest
, NULL_RTX
,
4851 allow_cc_mode
, valid_at_insn_p
);
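
/* Example (illustrative, not from the original sources): for a
   conditional jump whose pc_set is
     (set (pc) (if_then_else (ne (reg:CC 17) (const_int 0))
			     (label_ref 23) (pc)))
   get_condition returns the canonicalized NE condition.  Had the
   label_ref been in the "else" arm, the condition would first have
   been reversed before canonicalize_condition was applied.  */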
/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;

  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }