gcc/rtlanal.c  [official-gcc.git]
1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software
4 Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "toplev.h"
29 #include "rtl.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "target.h"
34 #include "output.h"
35 #include "tm_p.h"
36 #include "flags.h"
37 #include "real.h"
38 #include "regs.h"
39 #include "function.h"
41 /* Forward declarations */
42 static void set_of_1 (rtx, rtx, void *);
43 static bool covers_regno_p (rtx, unsigned int);
44 static bool covers_regno_no_parallel_p (rtx, unsigned int);
45 static int rtx_referenced_p_1 (rtx *, void *);
46 static int computed_jump_p_1 (rtx);
47 static void parms_set (rtx, rtx, void *);
49 static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
50 rtx, enum machine_mode,
51 unsigned HOST_WIDE_INT);
52 static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
53 enum machine_mode,
54 unsigned HOST_WIDE_INT);
55 static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
56 enum machine_mode,
57 unsigned int);
58 static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
59 enum machine_mode, unsigned int);
61 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
62 -1 if a code has no such operand. */
63 static int non_rtx_starting_operands[NUM_RTX_CODE];
65 /* Bit flags that specify the machine subtype we are compiling for.
66 Bits are tested using macros TARGET_... defined in the tm.h file
67 and set by `-m...' switches. Must be defined in rtlanal.c. */
69 int target_flags;
71 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
72 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
73 SIGN_EXTEND then while narrowing we also have to enforce the
74 representation and sign-extend the value to mode DESTINATION_REP.
76 If the value is already sign-extended to DESTINATION_REP mode we
77 can just switch to DESTINATION mode on it. For each pair of
78 integral modes SOURCE and DESTINATION, when truncating from SOURCE
79 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
80 contains the number of high-order bits in SOURCE that have to be
81 copies of the sign-bit so that we can do this mode-switch to
82 DESTINATION. */
84 static unsigned int
85 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
87 /* Return 1 if the value of X is unstable
88 (would be different at a different point in the program).
89 The frame pointer, arg pointer, etc. are considered stable
90 (within one function) and so is anything marked `unchanging'. */
92 int
93 rtx_unstable_p (rtx x)
95 RTX_CODE code = GET_CODE (x);
96 int i;
97 const char *fmt;
99 switch (code)
101 case MEM:
102 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
104 case CONST:
105 case CONST_INT:
106 case CONST_DOUBLE:
107 case CONST_VECTOR:
108 case SYMBOL_REF:
109 case LABEL_REF:
110 return 0;
112 case REG:
113 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
114 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
115 /* The arg pointer varies if it is not a fixed register. */
116 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
117 return 0;
118 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
119 /* ??? When call-clobbered, the value is stable modulo the restore
120 that must happen after a call. This currently screws up local-alloc
121 into believing that the restore is not needed. */
122 if (x == pic_offset_table_rtx)
123 return 0;
124 #endif
125 return 1;
127 case ASM_OPERANDS:
128 if (MEM_VOLATILE_P (x))
129 return 1;
131 /* Fall through. */
133 default:
134 break;
137 fmt = GET_RTX_FORMAT (code);
138 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
139 if (fmt[i] == 'e')
141 if (rtx_unstable_p (XEXP (x, i)))
142 return 1;
144 else if (fmt[i] == 'E')
146 int j;
147 for (j = 0; j < XVECLEN (x, i); j++)
148 if (rtx_unstable_p (XVECEXP (x, i, j)))
149 return 1;
152 return 0;
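/* Illustrative sketch only: a hypothetical caller could use rtx_unstable_p
   to decide whether an address computation may be reused across statements.
   `mem' and `reuse_address' are assumed names, not part of this file.

     rtx addr = XEXP (mem, 0);
     if (!rtx_unstable_p (addr))
       reuse_address (addr);
*/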
155 /* Return 1 if X has a value that can vary even between two
156 executions of the program. 0 means X can be compared reliably
157 against certain constants or near-constants.
158 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
159 zero, we are slightly more conservative.
160 The frame pointer and the arg pointer are considered constant. */
163 rtx_varies_p (rtx x, int for_alias)
165 RTX_CODE code;
166 int i;
167 const char *fmt;
169 if (!x)
170 return 0;
172 code = GET_CODE (x);
173 switch (code)
175 case MEM:
176 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
178 case CONST:
179 case CONST_INT:
180 case CONST_DOUBLE:
181 case CONST_VECTOR:
182 case SYMBOL_REF:
183 case LABEL_REF:
184 return 0;
186 case REG:
187 /* Note that we have to test for the actual rtx used for the frame
188 and arg pointers and not just the register number in case we have
189 eliminated the frame and/or arg pointer and are using it
190 for pseudos. */
191 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
192 /* The arg pointer varies if it is not a fixed register. */
193 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
194 return 0;
195 if (x == pic_offset_table_rtx
196 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
197 /* ??? When call-clobbered, the value is stable modulo the restore
198 that must happen after a call. This currently screws up
199 local-alloc into believing that the restore is not needed, so we
200 must return 0 only if we are called from alias analysis. */
201 && for_alias
202 #endif
204 return 0;
205 return 1;
207 case LO_SUM:
208 /* The operand 0 of a LO_SUM is considered constant
209 (in fact it is related specifically to operand 1)
210 during alias analysis. */
211 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
212 || rtx_varies_p (XEXP (x, 1), for_alias);
214 case ASM_OPERANDS:
215 if (MEM_VOLATILE_P (x))
216 return 1;
218 /* Fall through. */
220 default:
221 break;
224 fmt = GET_RTX_FORMAT (code);
225 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
226 if (fmt[i] == 'e')
228 if (rtx_varies_p (XEXP (x, i), for_alias))
229 return 1;
231 else if (fmt[i] == 'E')
233 int j;
234 for (j = 0; j < XVECLEN (x, i); j++)
235 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
236 return 1;
239 return 0;
242 /* Return nonzero if the use of X as an address in a MEM can cause a trap.
243 MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
244 whether nonzero is returned for unaligned memory accesses on strict
245 alignment machines. */
247 static int
248 rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
250 enum rtx_code code = GET_CODE (x);
252 switch (code)
254 case SYMBOL_REF:
255 return SYMBOL_REF_WEAK (x);
257 case LABEL_REF:
258 return 0;
260 case REG:
261 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
262 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
263 || x == stack_pointer_rtx
264 /* The arg pointer varies if it is not a fixed register. */
265 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
266 return 0;
267 /* All of the virtual frame registers are stack references. */
268 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
269 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
270 return 0;
271 return 1;
273 case CONST:
274 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
276 case PLUS:
277 /* An address is assumed not to trap if:
278 - it is an address that can't trap plus a constant integer,
279 with the proper remainder modulo the mode size if we are
280 considering unaligned memory references. */
281 if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
282 && GET_CODE (XEXP (x, 1)) == CONST_INT)
284 HOST_WIDE_INT offset;
286 if (!STRICT_ALIGNMENT
287 || !unaligned_mems
288 || GET_MODE_SIZE (mode) == 0)
289 return 0;
291 offset = INTVAL (XEXP (x, 1));
293 #ifdef SPARC_STACK_BOUNDARY_HACK
294 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
295 the real alignment of %sp. However, when it does this, the
296 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
297 if (SPARC_STACK_BOUNDARY_HACK
298 && (XEXP (x, 0) == stack_pointer_rtx
299 || XEXP (x, 0) == hard_frame_pointer_rtx))
300 offset -= STACK_POINTER_OFFSET;
301 #endif
303 return offset % GET_MODE_SIZE (mode) != 0;
306 /* - or it is the pic register plus a constant. */
307 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
308 return 0;
310 return 1;
312 case LO_SUM:
313 case PRE_MODIFY:
314 return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);
316 case PRE_DEC:
317 case PRE_INC:
318 case POST_DEC:
319 case POST_INC:
320 case POST_MODIFY:
321 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
323 default:
324 break;
 327   /* If it isn't one of the cases above, it can cause a trap.  */
328 return 1;
331 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
334 rtx_addr_can_trap_p (rtx x)
336 return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
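/* Illustrative sketch only: a hypothetical pass that wants to speculate a
   load above a branch might first check that the address cannot fault.
   `mem' is an assumed MEM rtx.

     if (!rtx_addr_can_trap_p (XEXP (mem, 0)))
       ;  /* the load cannot trap, speculation is address-safe */
*/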
339 /* Return true if X is an address that is known to not be zero. */
341 bool
342 nonzero_address_p (rtx x)
344 enum rtx_code code = GET_CODE (x);
346 switch (code)
348 case SYMBOL_REF:
349 return !SYMBOL_REF_WEAK (x);
351 case LABEL_REF:
352 return true;
354 case REG:
355 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
356 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
357 || x == stack_pointer_rtx
358 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
359 return true;
360 /* All of the virtual frame registers are stack references. */
361 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
362 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
363 return true;
364 return false;
366 case CONST:
367 return nonzero_address_p (XEXP (x, 0));
369 case PLUS:
370 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
371 return nonzero_address_p (XEXP (x, 0));
372 /* Handle PIC references. */
373 else if (XEXP (x, 0) == pic_offset_table_rtx
374 && CONSTANT_P (XEXP (x, 1)))
375 return true;
376 return false;
378 case PRE_MODIFY:
379 /* Similar to the above; allow positive offsets. Further, since
380 auto-inc is only allowed in memories, the register must be a
381 pointer. */
382 if (GET_CODE (XEXP (x, 1)) == CONST_INT
383 && INTVAL (XEXP (x, 1)) > 0)
384 return true;
385 return nonzero_address_p (XEXP (x, 0));
387 case PRE_INC:
388 /* Similarly. Further, the offset is always positive. */
389 return true;
391 case PRE_DEC:
392 case POST_DEC:
393 case POST_INC:
394 case POST_MODIFY:
395 return nonzero_address_p (XEXP (x, 0));
397 case LO_SUM:
398 return nonzero_address_p (XEXP (x, 1));
400 default:
401 break;
 404   /* If it isn't one of the cases above, it might be zero.  */
405 return false;
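/* Illustrative sketch only: a hypothetical simplification of a pointer
   test against zero.  `addr' is an assumed address rtx.

     if (nonzero_address_p (addr))
       ;  /* (ne addr (const_int 0)) is known to be true */
*/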
408 /* Return 1 if X refers to a memory location whose address
409 cannot be compared reliably with constant addresses,
410 or if X refers to a BLKmode memory object.
411 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
412 zero, we are slightly more conservative. */
415 rtx_addr_varies_p (rtx x, int for_alias)
417 enum rtx_code code;
418 int i;
419 const char *fmt;
421 if (x == 0)
422 return 0;
424 code = GET_CODE (x);
425 if (code == MEM)
426 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
428 fmt = GET_RTX_FORMAT (code);
429 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
430 if (fmt[i] == 'e')
432 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
433 return 1;
435 else if (fmt[i] == 'E')
437 int j;
438 for (j = 0; j < XVECLEN (x, i); j++)
439 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
440 return 1;
442 return 0;
445 /* Return the value of the integer term in X, if one is apparent;
446 otherwise return 0.
447 Only obvious integer terms are detected.
448 This is used in cse.c with the `related_value' field. */
450 HOST_WIDE_INT
451 get_integer_term (rtx x)
453 if (GET_CODE (x) == CONST)
454 x = XEXP (x, 0);
456 if (GET_CODE (x) == MINUS
457 && GET_CODE (XEXP (x, 1)) == CONST_INT)
458 return - INTVAL (XEXP (x, 1));
459 if (GET_CODE (x) == PLUS
460 && GET_CODE (XEXP (x, 1)) == CONST_INT)
461 return INTVAL (XEXP (x, 1));
462 return 0;
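/* Illustrative example (assumed construction, not part of this file):

     rtx x = gen_rtx_CONST (Pmode,
                            gen_rtx_PLUS (Pmode,
                                          gen_rtx_SYMBOL_REF (Pmode, "a"),
                                          GEN_INT (8)));
     get_integer_term (x);   -- returns 8; a MINUS form would return -8

   Anything without an obvious trailing CONST_INT yields 0.  */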
465 /* If X is a constant, return the value sans apparent integer term;
466 otherwise return 0.
467 Only obvious integer terms are detected. */
470 get_related_value (rtx x)
472 if (GET_CODE (x) != CONST)
473 return 0;
474 x = XEXP (x, 0);
475 if (GET_CODE (x) == PLUS
476 && GET_CODE (XEXP (x, 1)) == CONST_INT)
477 return XEXP (x, 0);
478 else if (GET_CODE (x) == MINUS
479 && GET_CODE (XEXP (x, 1)) == CONST_INT)
480 return XEXP (x, 0);
481 return 0;
484 /* Return the number of places FIND appears within X. If COUNT_DEST is
485 zero, we do not count occurrences inside the destination of a SET. */
488 count_occurrences (rtx x, rtx find, int count_dest)
490 int i, j;
491 enum rtx_code code;
492 const char *format_ptr;
493 int count;
495 if (x == find)
496 return 1;
498 code = GET_CODE (x);
500 switch (code)
502 case REG:
503 case CONST_INT:
504 case CONST_DOUBLE:
505 case CONST_VECTOR:
506 case SYMBOL_REF:
507 case CODE_LABEL:
508 case PC:
509 case CC0:
510 return 0;
512 case EXPR_LIST:
513 count = count_occurrences (XEXP (x, 0), find, count_dest);
514 if (XEXP (x, 1))
515 count += count_occurrences (XEXP (x, 1), find, count_dest);
516 return count;
518 case MEM:
519 if (MEM_P (find) && rtx_equal_p (x, find))
520 return 1;
521 break;
523 case SET:
524 if (SET_DEST (x) == find && ! count_dest)
525 return count_occurrences (SET_SRC (x), find, count_dest);
526 break;
528 default:
529 break;
532 format_ptr = GET_RTX_FORMAT (code);
533 count = 0;
535 for (i = 0; i < GET_RTX_LENGTH (code); i++)
537 switch (*format_ptr++)
539 case 'e':
540 count += count_occurrences (XEXP (x, i), find, count_dest);
541 break;
543 case 'E':
544 for (j = 0; j < XVECLEN (x, i); j++)
545 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
546 break;
549 return count;
552 /* Nonzero if register REG appears somewhere within IN.
553 Also works if REG is not a register; in this case it checks
554 for a subexpression of IN that is Lisp "equal" to REG. */
557 reg_mentioned_p (rtx reg, rtx in)
559 const char *fmt;
560 int i;
561 enum rtx_code code;
563 if (in == 0)
564 return 0;
566 if (reg == in)
567 return 1;
569 if (GET_CODE (in) == LABEL_REF)
570 return reg == XEXP (in, 0);
572 code = GET_CODE (in);
574 switch (code)
576 /* Compare registers by number. */
577 case REG:
578 return REG_P (reg) && REGNO (in) == REGNO (reg);
580 /* These codes have no constituent expressions
581 and are unique. */
582 case SCRATCH:
583 case CC0:
584 case PC:
585 return 0;
587 case CONST_INT:
588 case CONST_VECTOR:
589 case CONST_DOUBLE:
590 /* These are kept unique for a given value. */
591 return 0;
593 default:
594 break;
597 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
598 return 1;
600 fmt = GET_RTX_FORMAT (code);
602 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
604 if (fmt[i] == 'E')
606 int j;
607 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
608 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
609 return 1;
611 else if (fmt[i] == 'e'
612 && reg_mentioned_p (reg, XEXP (in, i)))
613 return 1;
615 return 0;
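/* Illustrative sketch only: a hypothetical renaming pass could check that
   a candidate register is nowhere mentioned in an insn before using it.

     if (!reg_mentioned_p (new_reg, PATTERN (insn)))
       ;  /* new_reg is free for use within this insn */
*/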
618 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
619 no CODE_LABEL insn. */
622 no_labels_between_p (rtx beg, rtx end)
624 rtx p;
625 if (beg == end)
626 return 0;
627 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
628 if (LABEL_P (p))
629 return 0;
630 return 1;
633 /* Nonzero if register REG is used in an insn between
634 FROM_INSN and TO_INSN (exclusive of those two). */
637 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
639 rtx insn;
641 if (from_insn == to_insn)
642 return 0;
644 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
645 if (INSN_P (insn)
646 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
647 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
648 return 1;
649 return 0;
652 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
653 is entirely replaced by a new value and the only use is as a SET_DEST,
654 we do not consider it a reference. */
657 reg_referenced_p (rtx x, rtx body)
659 int i;
661 switch (GET_CODE (body))
663 case SET:
664 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
665 return 1;
667 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
668 of a REG that occupies all of the REG, the insn references X if
669 it is mentioned in the destination. */
670 if (GET_CODE (SET_DEST (body)) != CC0
671 && GET_CODE (SET_DEST (body)) != PC
672 && !REG_P (SET_DEST (body))
673 && ! (GET_CODE (SET_DEST (body)) == SUBREG
674 && REG_P (SUBREG_REG (SET_DEST (body)))
675 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
676 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
677 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
678 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
679 && reg_overlap_mentioned_p (x, SET_DEST (body)))
680 return 1;
681 return 0;
683 case ASM_OPERANDS:
684 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
685 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
686 return 1;
687 return 0;
689 case CALL:
690 case USE:
691 case IF_THEN_ELSE:
692 return reg_overlap_mentioned_p (x, body);
694 case TRAP_IF:
695 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
697 case PREFETCH:
698 return reg_overlap_mentioned_p (x, XEXP (body, 0));
700 case UNSPEC:
701 case UNSPEC_VOLATILE:
702 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
703 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
704 return 1;
705 return 0;
707 case PARALLEL:
708 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
709 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
710 return 1;
711 return 0;
713 case CLOBBER:
714 if (MEM_P (XEXP (body, 0)))
715 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
716 return 1;
717 return 0;
719 case COND_EXEC:
720 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
721 return 1;
722 return reg_referenced_p (x, COND_EXEC_CODE (body));
724 default:
725 return 0;
729 /* Nonzero if register REG is set or clobbered in an insn between
730 FROM_INSN and TO_INSN (exclusive of those two). */
733 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
735 rtx insn;
737 if (from_insn == to_insn)
738 return 0;
740 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
741 if (INSN_P (insn) && reg_set_p (reg, insn))
742 return 1;
743 return 0;
746 /* Internals of reg_set_between_p. */
748 reg_set_p (rtx reg, rtx insn)
750 if (INSN_P (insn))
752 if (FIND_REG_INC_NOTE (insn, reg))
753 return 1;
754 if (CALL_P (insn))
756 if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
758 HARD_REG_SET clobbered_regs;
760 get_call_invalidated_used_regs (insn, &clobbered_regs, true);
761 if (TEST_HARD_REG_BIT (clobbered_regs, REGNO (reg)))
762 return 1;
764 if (MEM_P (reg) || find_reg_fusage (insn, CLOBBER, reg))
765 return 1;
769 return set_of (reg, insn) != NULL_RTX;
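/* Illustrative sketch only: combined with reg_mentioned_p this separates
   reads of REG in INSN from writes to it.  Names are assumed.

     if (reg_mentioned_p (reg, PATTERN (insn)) && !reg_set_p (reg, insn))
       ;  /* insn uses reg but does not modify it */
*/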
772 /* Similar to reg_set_between_p, but check all registers in X. Return 0
773 only if none of them are modified between START and END. Return 1 if
 774    X contains a MEM; this routine does use memory aliasing.  */
777 modified_between_p (rtx x, rtx start, rtx end)
779 enum rtx_code code = GET_CODE (x);
780 const char *fmt;
781 int i, j;
782 rtx insn;
784 if (start == end)
785 return 0;
787 switch (code)
789 case CONST_INT:
790 case CONST_DOUBLE:
791 case CONST_VECTOR:
792 case CONST:
793 case SYMBOL_REF:
794 case LABEL_REF:
795 return 0;
797 case PC:
798 case CC0:
799 return 1;
801 case MEM:
802 if (modified_between_p (XEXP (x, 0), start, end))
803 return 1;
804 if (MEM_READONLY_P (x))
805 return 0;
806 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
807 if (memory_modified_in_insn_p (x, insn))
808 return 1;
809 return 0;
810 break;
812 case REG:
813 return reg_set_between_p (x, start, end);
815 default:
816 break;
819 fmt = GET_RTX_FORMAT (code);
820 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
822 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
823 return 1;
825 else if (fmt[i] == 'E')
826 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
827 if (modified_between_p (XVECEXP (x, i, j), start, end))
828 return 1;
831 return 0;
834 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
835 of them are modified in INSN. Return 1 if X contains a MEM; this routine
836 does use memory aliasing. */
839 modified_in_p (rtx x, rtx insn)
841 enum rtx_code code = GET_CODE (x);
842 const char *fmt;
843 int i, j;
845 switch (code)
847 case CONST_INT:
848 case CONST_DOUBLE:
849 case CONST_VECTOR:
850 case CONST:
851 case SYMBOL_REF:
852 case LABEL_REF:
853 return 0;
855 case PC:
856 case CC0:
857 return 1;
859 case MEM:
860 if (modified_in_p (XEXP (x, 0), insn))
861 return 1;
862 if (MEM_READONLY_P (x))
863 return 0;
864 if (memory_modified_in_insn_p (x, insn))
865 return 1;
866 return 0;
867 break;
869 case REG:
870 return reg_set_p (x, insn);
872 default:
873 break;
876 fmt = GET_RTX_FORMAT (code);
877 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
879 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
880 return 1;
882 else if (fmt[i] == 'E')
883 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
884 if (modified_in_p (XVECEXP (x, i, j), insn))
885 return 1;
888 return 0;
891 /* Helper function for set_of. */
892 struct set_of_data
894 rtx found;
895 rtx pat;
898 static void
899 set_of_1 (rtx x, rtx pat, void *data1)
901 struct set_of_data *data = (struct set_of_data *) (data1);
902 if (rtx_equal_p (x, data->pat)
903 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
904 data->found = pat;
 907 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
908 (either directly or via STRICT_LOW_PART and similar modifiers). */
910 set_of (rtx pat, rtx insn)
912 struct set_of_data data;
913 data.found = NULL_RTX;
914 data.pat = pat;
915 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
916 return data.found;
919 /* Given an INSN, return a SET expression if this insn has only a single SET.
 920    It may also have CLOBBERs, USEs, or SETs whose outputs
921 will not be used, which we ignore. */
924 single_set_2 (rtx insn, rtx pat)
926 rtx set = NULL;
927 int set_verified = 1;
928 int i;
930 if (GET_CODE (pat) == PARALLEL)
932 for (i = 0; i < XVECLEN (pat, 0); i++)
934 rtx sub = XVECEXP (pat, 0, i);
935 switch (GET_CODE (sub))
937 case USE:
938 case CLOBBER:
939 break;
941 case SET:
942 /* We can consider insns having multiple sets, where all
943 but one are dead as single set insns. In common case
944 only single set is present in the pattern so we want
945 to avoid checking for REG_UNUSED notes unless necessary.
947 When we reach set first time, we just expect this is
948 the single set we are looking for and only when more
949 sets are found in the insn, we check them. */
950 if (!set_verified)
952 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
953 && !side_effects_p (set))
954 set = NULL;
955 else
956 set_verified = 1;
958 if (!set)
959 set = sub, set_verified = 0;
960 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
961 || side_effects_p (sub))
962 return NULL_RTX;
963 break;
965 default:
966 return NULL_RTX;
970 return set;
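/* Illustrative sketch only: most callers reach this through the single_set
   wrapper and then inspect the SET's operands.  `insn' is assumed.

     rtx set = single_set (insn);
     if (set != NULL_RTX)
       {
         rtx src = SET_SRC (set);
         rtx dest = SET_DEST (set);
         ...
       }
*/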
973 /* Given an INSN, return nonzero if it has more than one SET, else return
974 zero. */
977 multiple_sets (rtx insn)
979 int found;
980 int i;
982 /* INSN must be an insn. */
983 if (! INSN_P (insn))
984 return 0;
986 /* Only a PARALLEL can have multiple SETs. */
987 if (GET_CODE (PATTERN (insn)) == PARALLEL)
989 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
990 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
992 /* If we have already found a SET, then return now. */
993 if (found)
994 return 1;
995 else
996 found = 1;
1000 /* Either zero or one SET. */
1001 return 0;
1004 /* Return nonzero if the destination of SET equals the source
1005 and there are no side effects. */
1008 set_noop_p (rtx set)
1010 rtx src = SET_SRC (set);
1011 rtx dst = SET_DEST (set);
1013 if (dst == pc_rtx && src == pc_rtx)
1014 return 1;
1016 if (MEM_P (dst) && MEM_P (src))
1017 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1019 if (GET_CODE (dst) == ZERO_EXTRACT)
1020 return rtx_equal_p (XEXP (dst, 0), src)
1021 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1022 && !side_effects_p (src);
1024 if (GET_CODE (dst) == STRICT_LOW_PART)
1025 dst = XEXP (dst, 0);
1027 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1029 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1030 return 0;
1031 src = SUBREG_REG (src);
1032 dst = SUBREG_REG (dst);
1035 return (REG_P (src) && REG_P (dst)
1036 && REGNO (src) == REGNO (dst));
1039 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1040 value to itself. */
1043 noop_move_p (rtx insn)
1045 rtx pat = PATTERN (insn);
1047 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1048 return 1;
1050 /* Insns carrying these notes are useful later on. */
1051 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1052 return 0;
1054 /* For now treat an insn with a REG_RETVAL note as a
1055    special insn which should not be considered a no-op.  */
1056 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1057 return 0;
1059 if (GET_CODE (pat) == SET && set_noop_p (pat))
1060 return 1;
1062 if (GET_CODE (pat) == PARALLEL)
1064 int i;
1065 /* If nothing but SETs of registers to themselves,
1066 this insn can also be deleted. */
1067 for (i = 0; i < XVECLEN (pat, 0); i++)
1069 rtx tem = XVECEXP (pat, 0, i);
1071 if (GET_CODE (tem) == USE
1072 || GET_CODE (tem) == CLOBBER)
1073 continue;
1075 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1076 return 0;
1079 return 1;
1081 return 0;
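/* Illustrative sketch only: no-op moves found this way are typically
   removed outright, e.g. in a hypothetical cleanup loop:

     if (INSN_P (insn) && noop_move_p (insn))
       delete_insn (insn);
*/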
1085 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1086 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1087 If the object was modified, if we hit a partial assignment to X, or hit a
1088 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1089 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1090 be the src. */
1093 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1095 rtx p;
1097 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1098 p = PREV_INSN (p))
1099 if (INSN_P (p))
1101 rtx set = single_set (p);
1102 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1104 if (set && rtx_equal_p (x, SET_DEST (set)))
1106 rtx src = SET_SRC (set);
1108 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1109 src = XEXP (note, 0);
1111 if ((valid_to == NULL_RTX
1112 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1113 /* Reject hard registers because we don't usually want
1114 to use them; we'd rather use a pseudo. */
1115 && (! (REG_P (src)
1116 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1118 *pinsn = p;
1119 return src;
1123 /* If set in non-simple way, we don't have a value. */
1124 if (reg_set_p (x, p))
1125 break;
1128 return x;
1131 /* Return nonzero if register in range [REGNO, ENDREGNO)
1132 appears either explicitly or implicitly in X
1133 other than being stored into.
1135 References contained within the substructure at LOC do not count.
1136 LOC may be zero, meaning don't ignore anything. */
1139 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1140 rtx *loc)
1142 int i;
1143 unsigned int x_regno;
1144 RTX_CODE code;
1145 const char *fmt;
1147 repeat:
1148 /* The contents of a REG_NONNEG note is always zero, so we must come here
1149 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1150 if (x == 0)
1151 return 0;
1153 code = GET_CODE (x);
1155 switch (code)
1157 case REG:
1158 x_regno = REGNO (x);
1160     /* If we are modifying the stack, frame, or argument pointer, it will
1161 clobber a virtual register. In fact, we could be more precise,
1162 but it isn't worth it. */
1163 if ((x_regno == STACK_POINTER_REGNUM
1164 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1165 || x_regno == ARG_POINTER_REGNUM
1166 #endif
1167 || x_regno == FRAME_POINTER_REGNUM)
1168 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1169 return 1;
1171 return (endregno > x_regno
1172 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1173 ? hard_regno_nregs[x_regno][GET_MODE (x)]
1174 : 1));
1176 case SUBREG:
1177 /* If this is a SUBREG of a hard reg, we can see exactly which
1178 registers are being modified. Otherwise, handle normally. */
1179 if (REG_P (SUBREG_REG (x))
1180 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1182 unsigned int inner_regno = subreg_regno (x);
1183 unsigned int inner_endregno
1184 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1185 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
1187 return endregno > inner_regno && regno < inner_endregno;
1189 break;
1191 case CLOBBER:
1192 case SET:
1193 if (&SET_DEST (x) != loc
1194 /* Note setting a SUBREG counts as referring to the REG it is in for
1195 a pseudo but not for hard registers since we can
1196 treat each word individually. */
1197 && ((GET_CODE (SET_DEST (x)) == SUBREG
1198 && loc != &SUBREG_REG (SET_DEST (x))
1199 && REG_P (SUBREG_REG (SET_DEST (x)))
1200 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1201 && refers_to_regno_p (regno, endregno,
1202 SUBREG_REG (SET_DEST (x)), loc))
1203 || (!REG_P (SET_DEST (x))
1204 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1205 return 1;
1207 if (code == CLOBBER || loc == &SET_SRC (x))
1208 return 0;
1209 x = SET_SRC (x);
1210 goto repeat;
1212 default:
1213 break;
1216 /* X does not match, so try its subexpressions. */
1218 fmt = GET_RTX_FORMAT (code);
1219 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1221 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1223 if (i == 0)
1225 x = XEXP (x, 0);
1226 goto repeat;
1228 else
1229 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1230 return 1;
1232 else if (fmt[i] == 'E')
1234 int j;
1235 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1236 if (loc != &XVECEXP (x, i, j)
1237 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1238 return 1;
1241 return 0;
1244 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1245 we check if any register number in X conflicts with the relevant register
1246 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1247 contains a MEM (we don't bother checking for memory addresses that can't
1248    conflict because we expect this to be a rare case).  */
1251 reg_overlap_mentioned_p (rtx x, rtx in)
1253 unsigned int regno, endregno;
1255 /* If either argument is a constant, then modifying X can not
1256      affect IN.  Here we look at IN; we can profitably combine
1257 CONSTANT_P (x) with the switch statement below. */
1258 if (CONSTANT_P (in))
1259 return 0;
1261 recurse:
1262 switch (GET_CODE (x))
1264 case STRICT_LOW_PART:
1265 case ZERO_EXTRACT:
1266 case SIGN_EXTRACT:
1267 /* Overly conservative. */
1268 x = XEXP (x, 0);
1269 goto recurse;
1271 case SUBREG:
1272 regno = REGNO (SUBREG_REG (x));
1273 if (regno < FIRST_PSEUDO_REGISTER)
1274 regno = subreg_regno (x);
1275 goto do_reg;
1277 case REG:
1278 regno = REGNO (x);
1279 do_reg:
1280 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1281 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1282 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1284 case MEM:
1286 const char *fmt;
1287 int i;
1289 if (MEM_P (in))
1290 return 1;
1292 fmt = GET_RTX_FORMAT (GET_CODE (in));
1293 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1294 if (fmt[i] == 'e')
1296 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1297 return 1;
1299 else if (fmt[i] == 'E')
1301 int j;
1302 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1303 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1304 return 1;
1307 return 0;
1310 case SCRATCH:
1311 case PC:
1312 case CC0:
1313 return reg_mentioned_p (x, in);
1315 case PARALLEL:
1317 int i;
1319 /* If any register in here refers to it we return true. */
1320 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1321 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1322 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1323 return 1;
1324 return 0;
1327 default:
1328 gcc_assert (CONSTANT_P (x));
1329 return 0;
1333 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1334 (X would be the pattern of an insn).
1335 FUN receives two arguments:
1336 the REG, MEM, CC0 or PC being stored in or clobbered,
1337 the SET or CLOBBER rtx that does the store.
1339 If the item being stored in or clobbered is a SUBREG of a hard register,
1340 the SUBREG will be passed. */
1342 void
1343 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1345 int i;
1347 if (GET_CODE (x) == COND_EXEC)
1348 x = COND_EXEC_CODE (x);
1350 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1352 rtx dest = SET_DEST (x);
1354 while ((GET_CODE (dest) == SUBREG
1355 && (!REG_P (SUBREG_REG (dest))
1356 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1357 || GET_CODE (dest) == ZERO_EXTRACT
1358 || GET_CODE (dest) == STRICT_LOW_PART)
1359 dest = XEXP (dest, 0);
1361 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1362 each of whose first operand is a register. */
1363 if (GET_CODE (dest) == PARALLEL)
1365 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1366 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1367 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1369 else
1370 (*fun) (dest, x, data);
1373 else if (GET_CODE (x) == PARALLEL)
1374 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1375 note_stores (XVECEXP (x, 0, i), fun, data);
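/* Illustrative sketch only: a typical note_stores callback records which
   hard registers an insn sets or clobbers.  A complete version would mark
   all hard_regno_nregs registers of a multi-word hard register.

     static void
     record_hard_reg_store (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
     {
       HARD_REG_SET *stored = (HARD_REG_SET *) data;
       if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
         SET_HARD_REG_BIT (*stored, REGNO (dest));
     }

     ...
     note_stores (PATTERN (insn), record_hard_reg_store, &stored_regs);
*/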
1378 /* Like note_stores, but call FUN for each expression that is being
1379 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1380 FUN for each expression, not any interior subexpressions. FUN receives a
1381 pointer to the expression and the DATA passed to this function.
1383 Note that this is not quite the same test as that done in reg_referenced_p
1384 since that considers something as being referenced if it is being
1385 partially set, while we do not. */
1387 void
1388 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1390 rtx body = *pbody;
1391 int i;
1393 switch (GET_CODE (body))
1395 case COND_EXEC:
1396 (*fun) (&COND_EXEC_TEST (body), data);
1397 note_uses (&COND_EXEC_CODE (body), fun, data);
1398 return;
1400 case PARALLEL:
1401 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1402 note_uses (&XVECEXP (body, 0, i), fun, data);
1403 return;
1405 case SEQUENCE:
1406 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1407 note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1408 return;
1410 case USE:
1411 (*fun) (&XEXP (body, 0), data);
1412 return;
1414 case ASM_OPERANDS:
1415 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1416 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1417 return;
1419 case TRAP_IF:
1420 (*fun) (&TRAP_CONDITION (body), data);
1421 return;
1423 case PREFETCH:
1424 (*fun) (&XEXP (body, 0), data);
1425 return;
1427 case UNSPEC:
1428 case UNSPEC_VOLATILE:
1429 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1430 (*fun) (&XVECEXP (body, 0, i), data);
1431 return;
1433 case CLOBBER:
1434 if (MEM_P (XEXP (body, 0)))
1435 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1436 return;
1438 case SET:
1440 rtx dest = SET_DEST (body);
1442       /* For SETs we call FUN on everything in the source, on the address of
1443          a memory destination, and on the position operands of a ZERO_EXTRACT.  */
1444 (*fun) (&SET_SRC (body), data);
1446 if (GET_CODE (dest) == ZERO_EXTRACT)
1448 (*fun) (&XEXP (dest, 1), data);
1449 (*fun) (&XEXP (dest, 2), data);
1452 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1453 dest = XEXP (dest, 0);
1455 if (MEM_P (dest))
1456 (*fun) (&XEXP (dest, 0), data);
1458 return;
1460 default:
1461 /* All the other possibilities never store. */
1462 (*fun) (pbody, data);
1463 return;
1467 /* Return nonzero if X's old contents don't survive after INSN.
1468 This will be true if X is (cc0) or if X is a register and
1469 X dies in INSN or because INSN entirely sets X.
1471 "Entirely set" means set directly and not through a SUBREG, or
1472 ZERO_EXTRACT, so no trace of the old contents remains.
1473 Likewise, REG_INC does not count.
1475 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1476 but for this use that makes no difference, since regs don't overlap
1477 during their lifetimes. Therefore, this function may be used
1478 at any time after deaths have been computed (in flow.c).
1480 If REG is a hard reg that occupies multiple machine registers, this
1481 function will only return 1 if each of those registers will be replaced
1482 by INSN. */
1485 dead_or_set_p (rtx insn, rtx x)
1487 unsigned int regno, last_regno;
1488 unsigned int i;
1490 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1491 if (GET_CODE (x) == CC0)
1492 return 1;
1494 gcc_assert (REG_P (x));
1496 regno = REGNO (x);
1497 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1498 : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
1500 for (i = regno; i <= last_regno; i++)
1501 if (! dead_or_set_regno_p (insn, i))
1502 return 0;
1504 return 1;
1507 /* Return TRUE iff DEST is a register or subreg of a register and
1508 doesn't change the number of words of the inner register, and any
1509 part of the register is TEST_REGNO. */
1511 static bool
1512 covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
1514 unsigned int regno, endregno;
1516 if (GET_CODE (dest) == SUBREG
1517 && (((GET_MODE_SIZE (GET_MODE (dest))
1518 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1519 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1520 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1521 dest = SUBREG_REG (dest);
1523 if (!REG_P (dest))
1524 return false;
1526 regno = REGNO (dest);
1527 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1528 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1529 return (test_regno >= regno && test_regno < endregno);
1532 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1533 any member matches the covers_regno_no_parallel_p criteria. */
1535 static bool
1536 covers_regno_p (rtx dest, unsigned int test_regno)
1538 if (GET_CODE (dest) == PARALLEL)
1540 /* Some targets place small structures in registers for return
1541 values of functions, and those registers are wrapped in
1542 PARALLELs that we may see as the destination of a SET. */
1543 int i;
1545 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1547 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1548 if (inner != NULL_RTX
1549 && covers_regno_no_parallel_p (inner, test_regno))
1550 return true;
1553 return false;
1555 else
1556 return covers_regno_no_parallel_p (dest, test_regno);
1559 /* Utility function for dead_or_set_p to check an individual register. Also
1560 called from flow.c. */
1563 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1565 rtx pattern;
1567 /* See if there is a death note for something that includes TEST_REGNO. */
1568 if (find_regno_note (insn, REG_DEAD, test_regno))
1569 return 1;
1571 if (CALL_P (insn)
1572 && find_regno_fusage (insn, CLOBBER, test_regno))
1573 return 1;
1575 pattern = PATTERN (insn);
1577 if (GET_CODE (pattern) == COND_EXEC)
1578 pattern = COND_EXEC_CODE (pattern);
1580 if (GET_CODE (pattern) == SET)
1581 return covers_regno_p (SET_DEST (pattern), test_regno);
1582 else if (GET_CODE (pattern) == PARALLEL)
1584 int i;
1586 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1588 rtx body = XVECEXP (pattern, 0, i);
1590 if (GET_CODE (body) == COND_EXEC)
1591 body = COND_EXEC_CODE (body);
1593 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1594 && covers_regno_p (SET_DEST (body), test_regno))
1595 return 1;
1599 return 0;
1602 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1603 If DATUM is nonzero, look for one whose datum is DATUM. */
1606 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1608 rtx link;
1610 gcc_assert (insn);
1612 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1613 if (! INSN_P (insn))
1614 return 0;
1615 if (datum == 0)
1617 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1618 if (REG_NOTE_KIND (link) == kind)
1619 return link;
1620 return 0;
1623 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1624 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1625 return link;
1626 return 0;
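/* Illustrative sketch only: looking up a REG_EQUAL note and treating its
   datum as the known value of the insn's single set.  `insn' is assumed.

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     if (note && GET_CODE (XEXP (note, 0)) == CONST_INT)
       ;  /* XEXP (note, 0) gives the constant value stored */
*/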
1629 /* Return the reg-note of kind KIND in insn INSN which applies to register
1630 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1631 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1632 it might be the case that the note overlaps REGNO. */
1635 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1637 rtx link;
1639 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1640 if (! INSN_P (insn))
1641 return 0;
1643 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1644 if (REG_NOTE_KIND (link) == kind
1645 /* Verify that it is a register, so that scratch and MEM won't cause a
1646 problem here. */
1647 && REG_P (XEXP (link, 0))
1648 && REGNO (XEXP (link, 0)) <= regno
1649 && ((REGNO (XEXP (link, 0))
1650 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1651 : hard_regno_nregs[REGNO (XEXP (link, 0))]
1652 [GET_MODE (XEXP (link, 0))]))
1653 > regno))
1654 return link;
1655 return 0;
1658 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1659 has such a note. */
1662 find_reg_equal_equiv_note (rtx insn)
1664 rtx link;
1666 if (!INSN_P (insn))
1667 return 0;
1668 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1669 if (REG_NOTE_KIND (link) == REG_EQUAL
1670 || REG_NOTE_KIND (link) == REG_EQUIV)
1672 if (single_set (insn) == 0)
1673 return 0;
1674 return link;
1676 return NULL;
1679 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1680 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1683 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1685 /* If it's not a CALL_INSN, it can't possibly have a
1686 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1687 if (!CALL_P (insn))
1688 return 0;
1690 gcc_assert (datum);
1692 if (!REG_P (datum))
1694 rtx link;
1696 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1697 link;
1698 link = XEXP (link, 1))
1699 if (GET_CODE (XEXP (link, 0)) == code
1700 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1701 return 1;
1703 else
1705 unsigned int regno = REGNO (datum);
1707 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1708 to pseudo registers, so don't bother checking. */
1710 if (regno < FIRST_PSEUDO_REGISTER)
1712 unsigned int end_regno
1713 = regno + hard_regno_nregs[regno][GET_MODE (datum)];
1714 unsigned int i;
1716 for (i = regno; i < end_regno; i++)
1717 if (find_regno_fusage (insn, code, i))
1718 return 1;
1722 return 0;
1725 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1726 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1729 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
1731 rtx link;
1733 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1734 to pseudo registers, so don't bother checking. */
1736 if (regno >= FIRST_PSEUDO_REGISTER
1737 || !CALL_P (insn) )
1738 return 0;
1740 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1742 unsigned int regnote;
1743 rtx op, reg;
1745 if (GET_CODE (op = XEXP (link, 0)) == code
1746 && REG_P (reg = XEXP (op, 0))
1747 && (regnote = REGNO (reg)) <= regno
1748 && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
1749 return 1;
1752 return 0;
1755 /* Return true if INSN is a call to a pure function. */
1758 pure_call_p (rtx insn)
1760 rtx link;
1762 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1763 return 0;
1765 /* Look for the note that differentiates const and pure functions. */
1766 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1768 rtx u, m;
1770 if (GET_CODE (u = XEXP (link, 0)) == USE
1771 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1772 && GET_CODE (XEXP (m, 0)) == SCRATCH)
1773 return 1;
1776 return 0;
1779 /* Remove register note NOTE from the REG_NOTES of INSN. */
1781 void
1782 remove_note (rtx insn, rtx note)
1784 rtx link;
1786 if (note == NULL_RTX)
1787 return;
1789 if (REG_NOTES (insn) == note)
1791 REG_NOTES (insn) = XEXP (note, 1);
1792 return;
1795 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1796 if (XEXP (link, 1) == note)
1798 XEXP (link, 1) = XEXP (note, 1);
1799 return;
1802 gcc_unreachable ();
1805 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1806 return 1 if it is found. A simple equality test is used to determine if
1807 NODE matches. */
1810 in_expr_list_p (rtx listp, rtx node)
1812 rtx x;
1814 for (x = listp; x; x = XEXP (x, 1))
1815 if (node == XEXP (x, 0))
1816 return 1;
1818 return 0;
1821 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1822 remove that entry from the list if it is found.
1824 A simple equality test is used to determine if NODE matches. */
1826 void
1827 remove_node_from_expr_list (rtx node, rtx *listp)
1829 rtx temp = *listp;
1830 rtx prev = NULL_RTX;
1832 while (temp)
1834 if (node == XEXP (temp, 0))
1836 /* Splice the node out of the list. */
1837 if (prev)
1838 XEXP (prev, 1) = XEXP (temp, 1);
1839 else
1840 *listp = XEXP (temp, 1);
1842 return;
1845 prev = temp;
1846 temp = XEXP (temp, 1);
1850 /* Nonzero if X contains any volatile instructions. These are instructions
1851    which may cause unpredictable machine state, and thus no
1852 instructions should be moved or combined across them. This includes
1853 only volatile asms and UNSPEC_VOLATILE instructions. */
1856 volatile_insn_p (rtx x)
1858 RTX_CODE code;
1860 code = GET_CODE (x);
1861 switch (code)
1863 case LABEL_REF:
1864 case SYMBOL_REF:
1865 case CONST_INT:
1866 case CONST:
1867 case CONST_DOUBLE:
1868 case CONST_VECTOR:
1869 case CC0:
1870 case PC:
1871 case REG:
1872 case SCRATCH:
1873 case CLOBBER:
1874 case ADDR_VEC:
1875 case ADDR_DIFF_VEC:
1876 case CALL:
1877 case MEM:
1878 return 0;
1880 case UNSPEC_VOLATILE:
1881 /* case TRAP_IF: This isn't clear yet. */
1882 return 1;
1884 case ASM_INPUT:
1885 case ASM_OPERANDS:
1886 if (MEM_VOLATILE_P (x))
1887 return 1;
1889 default:
1890 break;
1893 /* Recursively scan the operands of this expression. */
1896 const char *fmt = GET_RTX_FORMAT (code);
1897 int i;
1899 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1901 if (fmt[i] == 'e')
1903 if (volatile_insn_p (XEXP (x, i)))
1904 return 1;
1906 else if (fmt[i] == 'E')
1908 int j;
1909 for (j = 0; j < XVECLEN (x, i); j++)
1910 if (volatile_insn_p (XVECEXP (x, i, j)))
1911 return 1;
1915 return 0;
1918 /* Nonzero if X contains any volatile memory references
1919 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
1922 volatile_refs_p (rtx x)
1924 RTX_CODE code;
1926 code = GET_CODE (x);
1927 switch (code)
1929 case LABEL_REF:
1930 case SYMBOL_REF:
1931 case CONST_INT:
1932 case CONST:
1933 case CONST_DOUBLE:
1934 case CONST_VECTOR:
1935 case CC0:
1936 case PC:
1937 case REG:
1938 case SCRATCH:
1939 case CLOBBER:
1940 case ADDR_VEC:
1941 case ADDR_DIFF_VEC:
1942 return 0;
1944 case UNSPEC_VOLATILE:
1945 return 1;
1947 case MEM:
1948 case ASM_INPUT:
1949 case ASM_OPERANDS:
1950 if (MEM_VOLATILE_P (x))
1951 return 1;
1953 default:
1954 break;
1957 /* Recursively scan the operands of this expression. */
1960 const char *fmt = GET_RTX_FORMAT (code);
1961 int i;
1963 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1965 if (fmt[i] == 'e')
1967 if (volatile_refs_p (XEXP (x, i)))
1968 return 1;
1970 else if (fmt[i] == 'E')
1972 int j;
1973 for (j = 0; j < XVECLEN (x, i); j++)
1974 if (volatile_refs_p (XVECEXP (x, i, j)))
1975 return 1;
1979 return 0;
1982 /* Similar to above, except that it also rejects register pre- and post-
1983 incrementing. */
1986 side_effects_p (rtx x)
1988 RTX_CODE code;
1990 code = GET_CODE (x);
1991 switch (code)
1993 case LABEL_REF:
1994 case SYMBOL_REF:
1995 case CONST_INT:
1996 case CONST:
1997 case CONST_DOUBLE:
1998 case CONST_VECTOR:
1999 case CC0:
2000 case PC:
2001 case REG:
2002 case SCRATCH:
2003 case ADDR_VEC:
2004 case ADDR_DIFF_VEC:
2005 return 0;
2007 case CLOBBER:
2008 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2009 when some combination can't be done. If we see one, don't think
2010 that we can simplify the expression. */
2011 return (GET_MODE (x) != VOIDmode);
2013 case PRE_INC:
2014 case PRE_DEC:
2015 case POST_INC:
2016 case POST_DEC:
2017 case PRE_MODIFY:
2018 case POST_MODIFY:
2019 case CALL:
2020 case UNSPEC_VOLATILE:
2021 /* case TRAP_IF: This isn't clear yet. */
2022 return 1;
2024 case MEM:
2025 case ASM_INPUT:
2026 case ASM_OPERANDS:
2027 if (MEM_VOLATILE_P (x))
2028 return 1;
2030 default:
2031 break;
2034 /* Recursively scan the operands of this expression. */
2037 const char *fmt = GET_RTX_FORMAT (code);
2038 int i;
2040 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2042 if (fmt[i] == 'e')
2044 if (side_effects_p (XEXP (x, i)))
2045 return 1;
2047 else if (fmt[i] == 'E')
2049 int j;
2050 for (j = 0; j < XVECLEN (x, i); j++)
2051 if (side_effects_p (XVECEXP (x, i, j)))
2052 return 1;
2056 return 0;
2059 enum may_trap_p_flags
2061 MTP_UNALIGNED_MEMS = 1,
2062 MTP_AFTER_MOVE = 2
2064 /* Return nonzero if evaluating rtx X might cause a trap.
2065 (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
2066 unaligned memory accesses on strict alignment machines. If
2067    (FLAGS & MTP_AFTER_MOVE) is true, returns nonzero even if the expression
2068 cannot trap at its current location, but it might become trapping if moved
2069 elsewhere. */
2071 static int
2072 may_trap_p_1 (rtx x, unsigned flags)
2074 int i;
2075 enum rtx_code code;
2076 const char *fmt;
2077 bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;
2079 if (x == 0)
2080 return 0;
2081 code = GET_CODE (x);
2082 switch (code)
2084 /* Handle these cases quickly. */
2085 case CONST_INT:
2086 case CONST_DOUBLE:
2087 case CONST_VECTOR:
2088 case SYMBOL_REF:
2089 case LABEL_REF:
2090 case CONST:
2091 case PC:
2092 case CC0:
2093 case REG:
2094 case SCRATCH:
2095 return 0;
2097 case ASM_INPUT:
2098 case UNSPEC_VOLATILE:
2099 case TRAP_IF:
2100 return 1;
2102 case ASM_OPERANDS:
2103 return MEM_VOLATILE_P (x);
2105 /* Memory ref can trap unless it's a static var or a stack slot. */
2106 case MEM:
2107 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2108               reference; moving it out of a condition might cause its address
2109               to become invalid.  */
2110 !(flags & MTP_AFTER_MOVE)
2111 && MEM_NOTRAP_P (x)
2112 && (!STRICT_ALIGNMENT || !unaligned_mems))
2113 return 0;
2114 return
2115 rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);
2117 /* Division by a non-constant might trap. */
2118 case DIV:
2119 case MOD:
2120 case UDIV:
2121 case UMOD:
2122 if (HONOR_SNANS (GET_MODE (x)))
2123 return 1;
2124 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2125 return flag_trapping_math;
2126 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2127 return 1;
2128 break;
2130 case EXPR_LIST:
2131 /* An EXPR_LIST is used to represent a function call. This
2132 certainly may trap. */
2133 return 1;
2135 case GE:
2136 case GT:
2137 case LE:
2138 case LT:
2139 case LTGT:
2140 case COMPARE:
2141 /* Some floating point comparisons may trap. */
2142 if (!flag_trapping_math)
2143 break;
2144 /* ??? There is no machine independent way to check for tests that trap
2145 when COMPARE is used, though many targets do make this distinction.
2146 For instance, sparc uses CCFPE for compares which generate exceptions
2147 and CCFP for compares which do not generate exceptions. */
2148 if (HONOR_NANS (GET_MODE (x)))
2149 return 1;
2150 /* But often the compare has some CC mode, so check operand
2151 modes as well. */
2152 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2153 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2154 return 1;
2155 break;
2157 case EQ:
2158 case NE:
2159 if (HONOR_SNANS (GET_MODE (x)))
2160 return 1;
2161 /* Often comparison is CC mode, so check operand modes. */
2162 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2163 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2164 return 1;
2165 break;
2167 case FIX:
2168 /* Conversion of floating point might trap. */
2169 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2170 return 1;
2171 break;
2173 case NEG:
2174 case ABS:
2175 case SUBREG:
2176 /* These operations don't trap even with floating point. */
2177 break;
2179 default:
2180 /* Any floating arithmetic may trap. */
2181 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2182 && flag_trapping_math)
2183 return 1;
2186 fmt = GET_RTX_FORMAT (code);
2187 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2189 if (fmt[i] == 'e')
2191 if (may_trap_p_1 (XEXP (x, i), flags))
2192 return 1;
2194 else if (fmt[i] == 'E')
2196 int j;
2197 for (j = 0; j < XVECLEN (x, i); j++)
2198 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2199 return 1;
2202 return 0;
2205 /* Return nonzero if evaluating rtx X might cause a trap. */
2208 may_trap_p (rtx x)
2210 return may_trap_p_1 (x, 0);
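/* Illustrative sketch only: a hypothetical hoisting pass would test both
   may_trap_p and side_effects_p before evaluating SRC unconditionally.

     if (!may_trap_p (src) && !side_effects_p (src))
       ;  /* safe to speculate this computation */
*/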
2213 /* Return nonzero if evaluating rtx X might cause a trap, when the expression
2214 is moved from its current location by some optimization. */
2217 may_trap_after_code_motion_p (rtx x)
2219 return may_trap_p_1 (x, MTP_AFTER_MOVE);
2222 /* Same as above, but additionally return nonzero if evaluating rtx X might
2223    cause a fault.  We define a fault for the purpose of this function as an
2224 erroneous execution condition that cannot be encountered during the normal
2225 execution of a valid program; the typical example is an unaligned memory
2226 access on a strict alignment machine. The compiler guarantees that it
2227 doesn't generate code that will fault from a valid program, but this
2228 guarantee doesn't mean anything for individual instructions. Consider
2229 the following example:
2231 struct S { int d; union { char *cp; int *ip; }; };
2233 int foo(struct S *s)
2235 if (s->d == 1)
2236 return *s->ip;
2237 else
2238 return *s->cp;
2241 on a strict alignment machine. In a valid program, foo will never be
2242 invoked on a structure for which d is equal to 1 and the underlying
2243 unique field of the union not aligned on a 4-byte boundary, but the
2244 expression *s->ip might cause a fault if considered individually.
2246 At the RTL level, potentially problematic expressions will almost always
2247 verify may_trap_p; for example, the above dereference can be emitted as
2248 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2249 However, suppose that foo is inlined in a caller that causes s->cp to
2250 point to a local character variable and guarantees that s->d is not set
2251 to 1; foo may have been effectively translated into pseudo-RTL as:
2253 if ((reg:SI) == 1)
2254 (set (reg:SI) (mem:SI (%fp - 7)))
2255 else
2256 (set (reg:QI) (mem:QI (%fp - 7)))
2258 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2259 memory reference to a stack slot, but it will certainly cause a fault
2260 on a strict alignment machine. */
2263 may_trap_or_fault_p (rtx x)
2265 return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
2268 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2269 i.e., an inequality. */
2272 inequality_comparisons_p (rtx x)
2274 const char *fmt;
2275 int len, i;
2276 enum rtx_code code = GET_CODE (x);
2278 switch (code)
2280 case REG:
2281 case SCRATCH:
2282 case PC:
2283 case CC0:
2284 case CONST_INT:
2285 case CONST_DOUBLE:
2286 case CONST_VECTOR:
2287 case CONST:
2288 case LABEL_REF:
2289 case SYMBOL_REF:
2290 return 0;
2292 case LT:
2293 case LTU:
2294 case GT:
2295 case GTU:
2296 case LE:
2297 case LEU:
2298 case GE:
2299 case GEU:
2300 return 1;
2302 default:
2303 break;
2306 len = GET_RTX_LENGTH (code);
2307 fmt = GET_RTX_FORMAT (code);
2309 for (i = 0; i < len; i++)
2311 if (fmt[i] == 'e')
2313 if (inequality_comparisons_p (XEXP (x, i)))
2314 return 1;
2316 else if (fmt[i] == 'E')
2318 int j;
2319 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2320 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2321 return 1;
2325 return 0;
2328 /* Replace any occurrence of FROM in X with TO. The function does
2329 not recurse into CONST_DOUBLEs when doing the replacement.
2331 Note that copying is not done so X must not be shared unless all copies
2332 are to be modified. */
2335 replace_rtx (rtx x, rtx from, rtx to)
2337 int i, j;
2338 const char *fmt;
2340 /* The following prevents loops from occurring when we change a MEM
2341 inside a CONST_DOUBLE into the same CONST_DOUBLE. */
2342 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2343 return x;
2345 if (x == from)
2346 return to;
2348 /* Allow this function to make replacements in EXPR_LISTs. */
2349 if (x == 0)
2350 return 0;
2352 if (GET_CODE (x) == SUBREG)
2354 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2356 if (GET_CODE (new) == CONST_INT)
2358 x = simplify_subreg (GET_MODE (x), new,
2359 GET_MODE (SUBREG_REG (x)),
2360 SUBREG_BYTE (x));
2361 gcc_assert (x);
2363 else
2364 SUBREG_REG (x) = new;
2366 return x;
2368 else if (GET_CODE (x) == ZERO_EXTEND)
2370 rtx new = replace_rtx (XEXP (x, 0), from, to);
2372 if (GET_CODE (new) == CONST_INT)
2374 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2375 new, GET_MODE (XEXP (x, 0)));
2376 gcc_assert (x);
2378 else
2379 XEXP (x, 0) = new;
2381 return x;
2384 fmt = GET_RTX_FORMAT (GET_CODE (x));
2385 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2387 if (fmt[i] == 'e')
2388 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2389 else if (fmt[i] == 'E')
2390 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2391 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2394 return x;
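/* Usage sketch (illustrative; OLD_REG and NEW_REG are hypothetical):
   because replace_rtx rewrites X in place, callers that work on shared
   RTL normally copy the expression first, and must use the returned
   value since the top-level expression itself may have been replaced:

     rtx pat = copy_rtx (PATTERN (insn));
     pat = replace_rtx (pat, old_reg, new_reg);  */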
2397 /* Replace occurrences of the old label in *X with the new one.
2398 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2401 replace_label (rtx *x, void *data)
2403 rtx l = *x;
2404 rtx old_label = ((replace_label_data *) data)->r1;
2405 rtx new_label = ((replace_label_data *) data)->r2;
2406 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2408 if (l == NULL_RTX)
2409 return 0;
2411 if (GET_CODE (l) == SYMBOL_REF
2412 && CONSTANT_POOL_ADDRESS_P (l))
2414 rtx c = get_pool_constant (l);
2415 if (rtx_referenced_p (old_label, c))
2417 rtx new_c, new_l;
2418 replace_label_data *d = (replace_label_data *) data;
2420 /* Create a copy of constant C; replace the label inside
2421 but do not update LABEL_NUSES because uses in constant pool
2422 are not counted. */
2423 new_c = copy_rtx (c);
2424 d->update_label_nuses = false;
2425 for_each_rtx (&new_c, replace_label, data);
2426 d->update_label_nuses = update_label_nuses;
2428 /* Add the new constant NEW_C to the constant pool and replace
2429 the old reference to the constant with the new reference. */
2430 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2431 *x = replace_rtx (l, l, new_l);
2433 return 0;
2436 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2437 field. This is not handled by for_each_rtx because it doesn't
2438 handle unprinted ('0') fields. */
2439 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2440 JUMP_LABEL (l) = new_label;
2442 if ((GET_CODE (l) == LABEL_REF
2443 || GET_CODE (l) == INSN_LIST)
2444 && XEXP (l, 0) == old_label)
2446 XEXP (l, 0) = new_label;
2447 if (update_label_nuses)
2449 ++LABEL_NUSES (new_label);
2450 --LABEL_NUSES (old_label);
2452 return 0;
2455 return 0;
2458 /* If *BODY is equal to X or X is directly referenced by *BODY,
2459 return nonzero, so that FOR_EACH_RTX stops traversing and returns
2460 nonzero too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2462 static int
2463 rtx_referenced_p_1 (rtx *body, void *x)
2465 rtx y = (rtx) x;
2467 if (*body == NULL_RTX)
2468 return y == NULL_RTX;
2470 /* Return true if a label_ref *BODY refers to label Y. */
2471 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2472 return XEXP (*body, 0) == y;
2474 /* If *BODY is a reference to pool constant traverse the constant. */
2475 if (GET_CODE (*body) == SYMBOL_REF
2476 && CONSTANT_POOL_ADDRESS_P (*body))
2477 return rtx_referenced_p (y, get_pool_constant (*body));
2479 /* By default, compare the RTL expressions. */
2480 return rtx_equal_p (*body, y);
2483 /* Return true if X is referenced in BODY. */
2486 rtx_referenced_p (rtx x, rtx body)
2488 return for_each_rtx (&body, rtx_referenced_p_1, x);
2491 /* If INSN is a tablejump, return true and store the label (which precedes the
2492 jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2494 bool
2495 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2497 rtx label, table;
2499 if (JUMP_P (insn)
2500 && (label = JUMP_LABEL (insn)) != NULL_RTX
2501 && (table = next_active_insn (label)) != NULL_RTX
2502 && JUMP_P (table)
2503 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2504 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2506 if (labelp)
2507 *labelp = label;
2508 if (tablep)
2509 *tablep = table;
2510 return true;
2512 return false;
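/* Usage sketch (illustrative): retrieve the dispatch table of a
   tablejump and count its cases.  The vector operand is 0 for an
   ADDR_VEC and 1 for an ADDR_DIFF_VEC, as in label_is_jump_target_p
   further below:

     rtx label, table;
     if (tablejump_p (insn, &label, &table))
       {
         rtx pat = PATTERN (table);
         int ncases = XVECLEN (pat, GET_CODE (pat) == ADDR_DIFF_VEC);
         ...
       }  */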
2515 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2516 constant that is not in the constant pool and not in the condition
2517 of an IF_THEN_ELSE. */
2519 static int
2520 computed_jump_p_1 (rtx x)
2522 enum rtx_code code = GET_CODE (x);
2523 int i, j;
2524 const char *fmt;
2526 switch (code)
2528 case LABEL_REF:
2529 case PC:
2530 return 0;
2532 case CONST:
2533 case CONST_INT:
2534 case CONST_DOUBLE:
2535 case CONST_VECTOR:
2536 case SYMBOL_REF:
2537 case REG:
2538 return 1;
2540 case MEM:
2541 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2542 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2544 case IF_THEN_ELSE:
2545 return (computed_jump_p_1 (XEXP (x, 1))
2546 || computed_jump_p_1 (XEXP (x, 2)));
2548 default:
2549 break;
2552 fmt = GET_RTX_FORMAT (code);
2553 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2555 if (fmt[i] == 'e'
2556 && computed_jump_p_1 (XEXP (x, i)))
2557 return 1;
2559 else if (fmt[i] == 'E')
2560 for (j = 0; j < XVECLEN (x, i); j++)
2561 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2562 return 1;
2565 return 0;
2568 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2570 Tablejumps and casesi insns are not considered indirect jumps;
2571 we can recognize them by a (use (label_ref)). */
2574 computed_jump_p (rtx insn)
2576 int i;
2577 if (JUMP_P (insn))
2579 rtx pat = PATTERN (insn);
2581 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2582 return 0;
2583 else if (GET_CODE (pat) == PARALLEL)
2585 int len = XVECLEN (pat, 0);
2586 int has_use_labelref = 0;
2588 for (i = len - 1; i >= 0; i--)
2589 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2590 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2591 == LABEL_REF))
2592 has_use_labelref = 1;
2594 if (! has_use_labelref)
2595 for (i = len - 1; i >= 0; i--)
2596 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2597 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2598 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2599 return 1;
2601 else if (GET_CODE (pat) == SET
2602 && SET_DEST (pat) == pc_rtx
2603 && computed_jump_p_1 (SET_SRC (pat)))
2604 return 1;
2606 return 0;
2609 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2610 calls. Processes the subexpressions of EXP and passes them to F. */
2611 static int
2612 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2614 int result, i, j;
2615 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2616 rtx *x;
2618 for (; format[n] != '\0'; n++)
2620 switch (format[n])
2622 case 'e':
2623 /* Call F on X. */
2624 x = &XEXP (exp, n);
2625 result = (*f) (x, data);
2626 if (result == -1)
2627 /* Do not traverse sub-expressions. */
2628 continue;
2629 else if (result != 0)
2630 /* Stop the traversal. */
2631 return result;
2633 if (*x == NULL_RTX)
2634 /* There are no sub-expressions. */
2635 continue;
2637 i = non_rtx_starting_operands[GET_CODE (*x)];
2638 if (i >= 0)
2640 result = for_each_rtx_1 (*x, i, f, data);
2641 if (result != 0)
2642 return result;
2644 break;
2646 case 'V':
2647 case 'E':
2648 if (XVEC (exp, n) == 0)
2649 continue;
2650 for (j = 0; j < XVECLEN (exp, n); ++j)
2652 /* Call F on X. */
2653 x = &XVECEXP (exp, n, j);
2654 result = (*f) (x, data);
2655 if (result == -1)
2656 /* Do not traverse sub-expressions. */
2657 continue;
2658 else if (result != 0)
2659 /* Stop the traversal. */
2660 return result;
2662 if (*x == NULL_RTX)
2663 /* There are no sub-expressions. */
2664 continue;
2666 i = non_rtx_starting_operands[GET_CODE (*x)];
2667 if (i >= 0)
2669 result = for_each_rtx_1 (*x, i, f, data);
2670 if (result != 0)
2671 return result;
2674 break;
2676 default:
2677 /* Nothing to do. */
2678 break;
2682 return 0;
2685 /* Traverse X via depth-first search, calling F for each
2686 sub-expression (including X itself). F is also passed the DATA.
2687 If F returns -1, do not traverse sub-expressions, but continue
2688 traversing the rest of the tree. If F ever returns any other
2689 nonzero value, stop the traversal, and return the value returned
2690 by F. Otherwise, return 0. This function does not traverse inside
2691 tree structure that contains RTX_EXPRs, or into sub-expressions
2692 whose format code is `0' since it is not known whether or not those
2693 codes are actually RTL.
2695 This routine is very general, and could (should?) be used to
2696 implement many of the other routines in this file. */
2699 for_each_rtx (rtx *x, rtx_function f, void *data)
2701 int result;
2702 int i;
2704 /* Call F on X. */
2705 result = (*f) (x, data);
2706 if (result == -1)
2707 /* Do not traverse sub-expressions. */
2708 return 0;
2709 else if (result != 0)
2710 /* Stop the traversal. */
2711 return result;
2713 if (*x == NULL_RTX)
2714 /* There are no sub-expressions. */
2715 return 0;
2717 i = non_rtx_starting_operands[GET_CODE (*x)];
2718 if (i < 0)
2719 return 0;
2721 return for_each_rtx_1 (*x, i, f, data);
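/* Usage sketch (illustrative; count_mems_1 is a hypothetical callback):
   count the MEM subexpressions of an insn pattern.  Returning 0
   continues the walk; returning -1 would skip the sub-expressions of
   the current rtx:

     static int
     count_mems_1 (rtx *x, void *data)
     {
       if (MEM_P (*x))
         (*(int *) data)++;
       return 0;
     }

     int n_mems = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &n_mems);  */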
2725 /* Searches X for any reference to REGNO, returning the rtx of the
2726 reference found if any. Otherwise, returns NULL_RTX. */
2729 regno_use_in (unsigned int regno, rtx x)
2731 const char *fmt;
2732 int i, j;
2733 rtx tem;
2735 if (REG_P (x) && REGNO (x) == regno)
2736 return x;
2738 fmt = GET_RTX_FORMAT (GET_CODE (x));
2739 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2741 if (fmt[i] == 'e')
2743 if ((tem = regno_use_in (regno, XEXP (x, i))))
2744 return tem;
2746 else if (fmt[i] == 'E')
2747 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2748 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2749 return tem;
2752 return NULL_RTX;
2755 /* Return a value indicating whether OP, an operand of a commutative
2756 operation, is preferred as the first or second operand. The higher
2757 the value, the stronger the preference for being the first operand.
2758 We use negative values to indicate a preference for the first operand
2759 and positive values for the second operand. */
2762 commutative_operand_precedence (rtx op)
2764 enum rtx_code code = GET_CODE (op);
2766 /* Constants always come second. Prefer "nice" constants. */
2767 if (code == CONST_INT)
2768 return -7;
2769 if (code == CONST_DOUBLE)
2770 return -6;
2771 op = avoid_constant_pool_reference (op);
2772 code = GET_CODE (op);
2774 switch (GET_RTX_CLASS (code))
2776 case RTX_CONST_OBJ:
2777 if (code == CONST_INT)
2778 return -5;
2779 if (code == CONST_DOUBLE)
2780 return -4;
2781 return -3;
2783 case RTX_EXTRA:
2784 /* SUBREGs of objects should come second. */
2785 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2786 return -2;
2788 if (!CONSTANT_P (op))
2789 return 0;
2790 else
2791 /* As for RTX_CONST_OBJ. */
2792 return -3;
2794 case RTX_OBJ:
2795 /* Complex expressions should come first, so decrease the priority
2796 of objects. */
2797 return -1;
2799 case RTX_COMM_ARITH:
2800 /* Prefer operands that are themselves commutative to be first.
2801 This helps to make things linear. In particular,
2802 (and (and (reg) (reg)) (not (reg))) is canonical. */
2803 return 4;
2805 case RTX_BIN_ARITH:
2806 /* If only one operand is a binary expression, it will be the first
2807 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2808 is canonical, although it will usually be further simplified. */
2809 return 2;
2811 case RTX_UNARY:
2812 /* Then prefer NEG and NOT. */
2813 if (code == NEG || code == NOT)
2814 return 1;
2816 default:
2817 return 0;
2821 /* Return 1 iff it is necessary to swap operands of commutative operation
2822 in order to canonicalize expression. */
2825 swap_commutative_operands_p (rtx x, rtx y)
2827 return (commutative_operand_precedence (x)
2828 < commutative_operand_precedence (y));
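/* Typical use (sketch; OP0 and OP1 are hypothetical operands):
   canonicalize the operand order of a commutative operation so that the
   operand with the higher precedence comes first.  After this, constants
   end up as the second operand, matching the preferences encoded in
   commutative_operand_precedence:

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }  */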
2831 /* Return 1 if X is an autoincrement side effect and the register is
2832 not the stack pointer. */
2834 auto_inc_p (rtx x)
2836 switch (GET_CODE (x))
2838 case PRE_INC:
2839 case POST_INC:
2840 case PRE_DEC:
2841 case POST_DEC:
2842 case PRE_MODIFY:
2843 case POST_MODIFY:
2844 /* There are no REG_INC notes for SP. */
2845 if (XEXP (x, 0) != stack_pointer_rtx)
2846 return 1;
2847 default:
2848 break;
2850 return 0;
2853 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2855 loc_mentioned_in_p (rtx *loc, rtx in)
2857 enum rtx_code code;
2858 const char *fmt;
2859 int i, j;
2861 if (!in)
2862 return 0;
2864 code = GET_CODE (in);
2865 fmt = GET_RTX_FORMAT (code);
2866 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2868 if (loc == &in->u.fld[i].rt_rtx)
2869 return 1;
2870 if (fmt[i] == 'e')
2872 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2873 return 1;
2875 else if (fmt[i] == 'E')
2876 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2877 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2878 return 1;
2880 return 0;
2883 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2884 and SUBREG_BYTE, return the bit offset where the subreg begins
2885 (counting from the least significant bit of the operand). */
2887 unsigned int
2888 subreg_lsb_1 (enum machine_mode outer_mode,
2889 enum machine_mode inner_mode,
2890 unsigned int subreg_byte)
2892 unsigned int bitpos;
2893 unsigned int byte;
2894 unsigned int word;
2896 /* A paradoxical subreg begins at bit position 0. */
2897 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2898 return 0;
2900 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2901 /* If the subreg crosses a word boundary ensure that
2902 it also begins and ends on a word boundary. */
2903 gcc_assert (!((subreg_byte % UNITS_PER_WORD
2904 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2905 && (subreg_byte % UNITS_PER_WORD
2906 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
2908 if (WORDS_BIG_ENDIAN)
2909 word = (GET_MODE_SIZE (inner_mode)
2910 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
2911 else
2912 word = subreg_byte / UNITS_PER_WORD;
2913 bitpos = word * BITS_PER_WORD;
2915 if (BYTES_BIG_ENDIAN)
2916 byte = (GET_MODE_SIZE (inner_mode)
2917 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
2918 else
2919 byte = subreg_byte % UNITS_PER_WORD;
2920 bitpos += byte * BITS_PER_UNIT;
2922 return bitpos;
2925 /* Given a subreg X, return the bit offset where the subreg begins
2926 (counting from the least significant bit of the reg). */
2928 unsigned int
2929 subreg_lsb (rtx x)
2931 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
2932 SUBREG_BYTE (x));
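/* Worked example (illustrative, assuming UNITS_PER_WORD == 4 and
   BITS_PER_WORD == 32): for (subreg:SI (reg:DI r) 4),

   on a little-endian target (WORDS_BIG_ENDIAN == BYTES_BIG_ENDIAN == 0):
     word = 4 / 4 = 1, bitpos = 32, byte = 4 % 4 = 0, so subreg_lsb == 32;
     the subreg selects the high-order half of the DImode value.
   On a big-endian target:
     word = (8 - (4 + 4)) / 4 = 0, byte = (8 - 8) % 4 = 0, so subreg_lsb == 0;
     byte offset 4 selects the low-order half there.  */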
2935 /* This function returns the regno offset of a subreg expression.
2936 xregno - A regno of an inner hard subreg_reg (or what will become one).
2937 xmode - The mode of xregno.
2938 offset - The byte offset.
2939 ymode - The mode of a top level SUBREG (or what may become one).
2940 RETURN - The regno offset which would be used. */
2941 unsigned int
2942 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
2943 unsigned int offset, enum machine_mode ymode)
2945 int nregs_xmode, nregs_ymode;
2946 int mode_multiple, nregs_multiple;
2947 int y_offset;
2949 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
2951 /* Adjust nregs_xmode to allow for 'holes'. */
2952 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
2953 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
2954 else
2955 nregs_xmode = hard_regno_nregs[xregno][xmode];
2957 nregs_ymode = hard_regno_nregs[xregno][ymode];
2959 /* If this is a big endian paradoxical subreg, which uses more actual
2960 hard registers than the original register, we must return a negative
2961 offset so that we find the proper highpart of the register. */
2962 if (offset == 0
2963 && nregs_ymode > nregs_xmode
2964 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2965 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
2966 return nregs_xmode - nregs_ymode;
2968 if (offset == 0 || nregs_xmode == nregs_ymode)
2969 return 0;
2971 /* Size of ymode must not be greater than the size of xmode. */
2972 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
2973 gcc_assert (mode_multiple != 0);
2975 y_offset = offset / GET_MODE_SIZE (ymode);
2976 nregs_multiple = nregs_xmode / nregs_ymode;
2977 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
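/* Worked example (illustrative, assuming a 32-bit target where DImode
   occupies two hard registers and SImode one):

     subreg_regno_offset (R, DImode, 4, SImode)

   gives nregs_xmode = 2, nregs_ymode = 1, mode_multiple = 8/4 = 2,
   nregs_multiple = 2 and y_offset = 4/4 = 1, so the result is
   (1 / (2/2)) * 1 = 1: the subreg refers to hard register R + 1,
   the register holding bytes 4..7 of the DImode value.  */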
2980 /* This function returns true when the offset is representable via
2981 subreg_offset in the given regno.
2982 xregno - A regno of an inner hard subreg_reg (or what will become one).
2983 xmode - The mode of xregno.
2984 offset - The byte offset.
2985 ymode - The mode of a top level SUBREG (or what may become one).
2986 RETURN - Whether the offset is representable. */
2987 bool
2988 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
2989 unsigned int offset, enum machine_mode ymode)
2991 int nregs_xmode, nregs_ymode;
2992 int mode_multiple, nregs_multiple;
2993 int y_offset;
2994 int regsize_xmode, regsize_ymode;
2996 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
2998 /* If there are holes in a non-scalar mode in registers, we expect
2999 that it is made up of its units concatenated together. */
3000 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3002 enum machine_mode xmode_unit;
3004 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3005 if (GET_MODE_INNER (xmode) == VOIDmode)
3006 xmode_unit = xmode;
3007 else
3008 xmode_unit = GET_MODE_INNER (xmode);
3009 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3010 gcc_assert (nregs_xmode
3011 == (GET_MODE_NUNITS (xmode)
3012 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3013 gcc_assert (hard_regno_nregs[xregno][xmode]
3014 == (hard_regno_nregs[xregno][xmode_unit]
3015 * GET_MODE_NUNITS (xmode)));
3017 /* You can only ask for a SUBREG of a value with holes in the middle
3018 if you don't cross the holes. (Such a SUBREG should be done by
3019 picking a different register class, or doing it in memory if
3020 necessary.) An example of a value with holes is XCmode on 32-bit
3021 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3022 3 for each part, but in memory it's two 128-bit parts.
3023 Padding is assumed to be at the end (not necessarily the 'high part')
3024 of each unit. */
3025 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3026 < GET_MODE_NUNITS (xmode))
3027 && (offset / GET_MODE_SIZE (xmode_unit)
3028 != ((offset + GET_MODE_SIZE (ymode) - 1)
3029 / GET_MODE_SIZE (xmode_unit))))
3030 return false;
3032 else
3033 nregs_xmode = hard_regno_nregs[xregno][xmode];
3035 nregs_ymode = hard_regno_nregs[xregno][ymode];
3037 /* Paradoxical subregs are otherwise valid. */
3038 if (offset == 0
3039 && nregs_ymode > nregs_xmode
3040 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3041 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3042 return true;
3044 /* If registers store different numbers of bits in the different
3045 modes, we cannot generally form this subreg. */
3046 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3047 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3048 if (regsize_xmode > regsize_ymode && nregs_ymode > 1)
3049 return false;
3050 if (regsize_ymode > regsize_xmode && nregs_xmode > 1)
3051 return false;
3053 /* Lowpart subregs are otherwise valid. */
3054 if (offset == subreg_lowpart_offset (ymode, xmode))
3055 return true;
3057 /* This should always pass, otherwise we don't know how to verify
3058 the constraint. These conditions may be relaxed but
3059 subreg_regno_offset would need to be redesigned. */
3060 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3061 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3063 /* The XMODE value can be seen as a vector of NREGS_XMODE
3064 values. The subreg must represent the lowpart of a given field.
3065 Compute what field it is. */
3066 offset -= subreg_lowpart_offset (ymode,
3067 mode_for_size (GET_MODE_BITSIZE (xmode)
3068 / nregs_xmode,
3069 MODE_INT, 0));
3071 /* Size of ymode must not be greater than the size of xmode. */
3072 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3073 gcc_assert (mode_multiple != 0);
3075 y_offset = offset / GET_MODE_SIZE (ymode);
3076 nregs_multiple = nregs_xmode / nregs_ymode;
3078 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3079 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3081 return (!(y_offset % (mode_multiple / nregs_multiple)));
3084 /* Return the final regno that a subreg expression refers to. */
3085 unsigned int
3086 subreg_regno (rtx x)
3088 unsigned int ret;
3089 rtx subreg = SUBREG_REG (x);
3090 int regno = REGNO (subreg);
3092 ret = regno + subreg_regno_offset (regno,
3093 GET_MODE (subreg),
3094 SUBREG_BYTE (x),
3095 GET_MODE (x));
3096 return ret;
3099 struct parms_set_data
3101 int nregs;
3102 HARD_REG_SET regs;
3105 /* Helper function for noticing stores to parameter registers. */
3106 static void
3107 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3109 struct parms_set_data *d = data;
3110 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3111 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3113 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3114 d->nregs--;
3118 /* Look backward for the first parameter to be loaded.
3119 Note that loads of all parameters will not necessarily be
3120 found if CSE has eliminated some of them (e.g., an argument
3121 to the outer function is passed down as a parameter).
3122 Do not skip BOUNDARY. */
3124 find_first_parameter_load (rtx call_insn, rtx boundary)
3126 struct parms_set_data parm;
3127 rtx p, before, first_set;
3129 /* Since different machines initialize their parameter registers
3130 in different orders, assume nothing. Collect the set of all
3131 parameter registers. */
3132 CLEAR_HARD_REG_SET (parm.regs);
3133 parm.nregs = 0;
3134 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3135 if (GET_CODE (XEXP (p, 0)) == USE
3136 && REG_P (XEXP (XEXP (p, 0), 0)))
3138 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3140 /* We only care about registers which can hold function
3141 arguments. */
3142 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3143 continue;
3145 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3146 parm.nregs++;
3148 before = call_insn;
3149 first_set = call_insn;
3151 /* Search backward for the first set of a register in this set. */
3152 while (parm.nregs && before != boundary)
3154 before = PREV_INSN (before);
3156 /* It is possible that some loads got CSEed from one call to
3157 another. Stop in that case. */
3158 if (CALL_P (before))
3159 break;
3161 /* Our caller must either ensure that we will find all sets
3162 (in case the code has not been optimized yet), or take care
3163 of possible labels by setting BOUNDARY to the preceding
3164 CODE_LABEL. */
3165 if (LABEL_P (before))
3167 gcc_assert (before == boundary);
3168 break;
3171 if (INSN_P (before))
3173 int nregs_old = parm.nregs;
3174 note_stores (PATTERN (before), parms_set, &parm);
3175 /* If we found something that did not set a parameter reg,
3176 we're done. Do not keep going, as that might result
3177 in hoisting an insn before the setting of a pseudo
3178 that is used by the hoisted insn. */
3179 if (nregs_old != parm.nregs)
3180 first_set = before;
3181 else
3182 break;
3185 return first_set;
3188 /* Return true if we should avoid inserting code between INSN and preceding
3189 call instruction. */
3191 bool
3192 keep_with_call_p (rtx insn)
3194 rtx set;
3196 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3198 if (REG_P (SET_DEST (set))
3199 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3200 && fixed_regs[REGNO (SET_DEST (set))]
3201 && general_operand (SET_SRC (set), VOIDmode))
3202 return true;
3203 if (REG_P (SET_SRC (set))
3204 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3205 && REG_P (SET_DEST (set))
3206 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3207 return true;
3208 /* There may be a stack pop just after the call and before the store
3209 of the return register. Search for the actual store when deciding
3210 if we can break or not. */
3211 if (SET_DEST (set) == stack_pointer_rtx)
3213 rtx i2 = next_nonnote_insn (insn);
3214 if (i2 && keep_with_call_p (i2))
3215 return true;
3218 return false;
3221 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3222 to non-complex jumps. That is, direct unconditional, conditional,
3223 and tablejumps, but not computed jumps or returns. It also does
3224 not apply to the fallthru case of a conditional jump. */
3226 bool
3227 label_is_jump_target_p (rtx label, rtx jump_insn)
3229 rtx tmp = JUMP_LABEL (jump_insn);
3231 if (label == tmp)
3232 return true;
3234 if (tablejump_p (jump_insn, NULL, &tmp))
3236 rtvec vec = XVEC (PATTERN (tmp),
3237 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3238 int i, veclen = GET_NUM_ELEM (vec);
3240 for (i = 0; i < veclen; ++i)
3241 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3242 return true;
3245 return false;
3249 /* Return an estimate of the cost of computing rtx X.
3250 One use is in cse, to decide which expression to keep in the hash table.
3251 Another is in rtl generation, to pick the cheapest way to multiply.
3252 Other uses like the latter are expected in the future. */
3255 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3257 int i, j;
3258 enum rtx_code code;
3259 const char *fmt;
3260 int total;
3262 if (x == 0)
3263 return 0;
3265 /* Compute the default costs of certain things.
3266 Note that targetm.rtx_costs can override the defaults. */
3268 code = GET_CODE (x);
3269 switch (code)
3271 case MULT:
3272 total = COSTS_N_INSNS (5);
3273 break;
3274 case DIV:
3275 case UDIV:
3276 case MOD:
3277 case UMOD:
3278 total = COSTS_N_INSNS (7);
3279 break;
3280 case USE:
3281 /* Used in combine.c as a marker. */
3282 total = 0;
3283 break;
3284 default:
3285 total = COSTS_N_INSNS (1);
3288 switch (code)
3290 case REG:
3291 return 0;
3293 case SUBREG:
3294 total = 0;
3295 /* If we can't tie these modes, make this expensive. The larger
3296 the mode, the more expensive it is. */
3297 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3298 return COSTS_N_INSNS (2
3299 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3300 break;
3302 default:
3303 if (targetm.rtx_costs (x, code, outer_code, &total))
3304 return total;
3305 break;
3308 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3309 which is already in total. */
3311 fmt = GET_RTX_FORMAT (code);
3312 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3313 if (fmt[i] == 'e')
3314 total += rtx_cost (XEXP (x, i), code);
3315 else if (fmt[i] == 'E')
3316 for (j = 0; j < XVECLEN (x, i); j++)
3317 total += rtx_cost (XVECEXP (x, i, j), code);
3319 return total;
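/* Usage sketch (illustrative; MULT_X and SHIFT_X are hypothetical rtxes
   for two equivalent forms): rtx_cost is typically used to compare
   alternatives, e.g. when deciding whether a multiplication is cheaper
   as a shift.  The second argument is the code of the containing
   expression, which lets targetm.rtx_costs take the context into account:

     if (rtx_cost (shift_x, SET) < rtx_cost (mult_x, SET))
       ...  prefer the shift form  ...  */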
3322 /* Return cost of address expression X.
3323 Expect that X is properly formed address reference. */
3326 address_cost (rtx x, enum machine_mode mode)
3328 /* We may be asked for the cost of various unusual addresses, such as the
3329 operands of push instructions. It is not worthwhile to complicate the
3330 target hook to handle such cases. */
3332 if (!memory_address_p (mode, x))
3333 return 1000;
3335 return targetm.address_cost (x);
3338 /* If the target doesn't override, compute the cost as with arithmetic. */
3341 default_address_cost (rtx x)
3343 return rtx_cost (x, MEM);
3347 unsigned HOST_WIDE_INT
3348 nonzero_bits (rtx x, enum machine_mode mode)
3350 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3353 unsigned int
3354 num_sign_bit_copies (rtx x, enum machine_mode mode)
3356 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
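/* Illustration of how the two queries relate: for
   (and:SI (reg:SI r) (const_int 255)), nonzero_bits returns a mask of at
   most 0xff, so the upper 24 bits are known to be zero; the fallback at
   the end of num_sign_bit_copies1 then reports at least
   32 - floor_log2 (0xff) - 1 = 24 sign bit copies for the same value in
   SImode.  */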
3359 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3360 It avoids exponential behavior in nonzero_bits1 when X has
3361 identical subexpressions on the first or the second level. */
3363 static unsigned HOST_WIDE_INT
3364 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3365 enum machine_mode known_mode,
3366 unsigned HOST_WIDE_INT known_ret)
3368 if (x == known_x && mode == known_mode)
3369 return known_ret;
3371 /* Try to find identical subexpressions. If found call
3372 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3373 precomputed value for the subexpression as KNOWN_RET. */
3375 if (ARITHMETIC_P (x))
3377 rtx x0 = XEXP (x, 0);
3378 rtx x1 = XEXP (x, 1);
3380 /* Check the first level. */
3381 if (x0 == x1)
3382 return nonzero_bits1 (x, mode, x0, mode,
3383 cached_nonzero_bits (x0, mode, known_x,
3384 known_mode, known_ret));
3386 /* Check the second level. */
3387 if (ARITHMETIC_P (x0)
3388 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3389 return nonzero_bits1 (x, mode, x1, mode,
3390 cached_nonzero_bits (x1, mode, known_x,
3391 known_mode, known_ret));
3393 if (ARITHMETIC_P (x1)
3394 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3395 return nonzero_bits1 (x, mode, x0, mode,
3396 cached_nonzero_bits (x0, mode, known_x,
3397 known_mode, known_ret));
3400 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3403 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3404 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3405 is less useful. We can't allow both, because that results in exponential
3406 run time recursion. There is a nullstone testcase that triggered
3407 this. This macro avoids accidental uses of num_sign_bit_copies. */
3408 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3410 /* Given an expression, X, compute which bits in X can be nonzero.
3411 We don't care about bits outside of those defined in MODE.
3413 For most X this is simply GET_MODE_MASK (MODE), but if X is
3414 an arithmetic operation, we can do better. */
3416 static unsigned HOST_WIDE_INT
3417 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3418 enum machine_mode known_mode,
3419 unsigned HOST_WIDE_INT known_ret)
3421 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3422 unsigned HOST_WIDE_INT inner_nz;
3423 enum rtx_code code;
3424 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3426 /* For floating-point values, assume all bits are needed. */
3427 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3428 return nonzero;
3430 /* If X is wider than MODE, use its mode instead. */
3431 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3433 mode = GET_MODE (x);
3434 nonzero = GET_MODE_MASK (mode);
3435 mode_width = GET_MODE_BITSIZE (mode);
3438 if (mode_width > HOST_BITS_PER_WIDE_INT)
3439 /* Our only callers in this case look for single bit values. So
3440 just return the mode mask. Those tests will then be false. */
3441 return nonzero;
3443 #ifndef WORD_REGISTER_OPERATIONS
3444 /* If MODE is wider than X, but both are a single word for both the host
3445 and target machines, we can compute this from which bits of the
3446 object might be nonzero in its own mode, taking into account the fact
3447 that on many CISC machines, accessing an object in a wider mode
3448 causes the high-order bits to become undefined. So they are
3449 not known to be zero. */
3451 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3452 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3453 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3454 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3456 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3457 known_x, known_mode, known_ret);
3458 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3459 return nonzero;
3461 #endif
3463 code = GET_CODE (x);
3464 switch (code)
3466 case REG:
3467 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3468 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3469 all the bits above ptr_mode are known to be zero. */
3470 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3471 && REG_POINTER (x))
3472 nonzero &= GET_MODE_MASK (ptr_mode);
3473 #endif
3475 /* Include declared information about alignment of pointers. */
3476 /* ??? We don't properly preserve REG_POINTER changes across
3477 pointer-to-integer casts, so we can't trust it except for
3478 things that we know must be pointers. See execute/960116-1.c. */
3479 if ((x == stack_pointer_rtx
3480 || x == frame_pointer_rtx
3481 || x == arg_pointer_rtx)
3482 && REGNO_POINTER_ALIGN (REGNO (x)))
3484 unsigned HOST_WIDE_INT alignment
3485 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3487 #ifdef PUSH_ROUNDING
3488 /* If PUSH_ROUNDING is defined, it is possible for the
3489 stack to be momentarily aligned only to that amount,
3490 so we pick the least alignment. */
3491 if (x == stack_pointer_rtx && PUSH_ARGS)
3492 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3493 alignment);
3494 #endif
3496 nonzero &= ~(alignment - 1);
3500 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3501 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3502 known_mode, known_ret,
3503 &nonzero_for_hook);
3505 if (new)
3506 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3507 known_mode, known_ret);
3509 return nonzero_for_hook;
3512 case CONST_INT:
3513 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3514 /* If X is negative in MODE, sign-extend the value. */
3515 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3516 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3517 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3518 #endif
3520 return INTVAL (x);
3522 case MEM:
3523 #ifdef LOAD_EXTEND_OP
3524 /* In many, if not most, RISC machines, reading a byte from memory
3525 zeros the rest of the register. Noticing that fact saves a lot
3526 of extra zero-extends. */
3527 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3528 nonzero &= GET_MODE_MASK (GET_MODE (x));
3529 #endif
3530 break;
3532 case EQ: case NE:
3533 case UNEQ: case LTGT:
3534 case GT: case GTU: case UNGT:
3535 case LT: case LTU: case UNLT:
3536 case GE: case GEU: case UNGE:
3537 case LE: case LEU: case UNLE:
3538 case UNORDERED: case ORDERED:
3539 /* If this produces an integer result, we know which bits are set.
3540 Code here used to clear bits outside the mode of X, but that is
3541 now done above. */
3542 /* Mind that MODE is the mode the caller wants to look at this
3543 operation in, and not the actual operation mode. We can wind
3544 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3545 that describes the results of a vector compare. */
3546 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3547 && mode_width <= HOST_BITS_PER_WIDE_INT)
3548 nonzero = STORE_FLAG_VALUE;
3549 break;
3551 case NEG:
3552 #if 0
3553 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3554 and num_sign_bit_copies. */
3555 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3556 == GET_MODE_BITSIZE (GET_MODE (x)))
3557 nonzero = 1;
3558 #endif
3560 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3561 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3562 break;
3564 case ABS:
3565 #if 0
3566 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3567 and num_sign_bit_copies. */
3568 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3569 == GET_MODE_BITSIZE (GET_MODE (x)))
3570 nonzero = 1;
3571 #endif
3572 break;
3574 case TRUNCATE:
3575 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3576 known_x, known_mode, known_ret)
3577 & GET_MODE_MASK (mode));
3578 break;
3580 case ZERO_EXTEND:
3581 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3582 known_x, known_mode, known_ret);
3583 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3584 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3585 break;
3587 case SIGN_EXTEND:
3588 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3589 Otherwise, show all the bits in the outer mode but not the inner
3590 may be nonzero. */
3591 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3592 known_x, known_mode, known_ret);
3593 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3595 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3596 if (inner_nz
3597 & (((HOST_WIDE_INT) 1
3598 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3599 inner_nz |= (GET_MODE_MASK (mode)
3600 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3603 nonzero &= inner_nz;
3604 break;
3606 case AND:
3607 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3608 known_x, known_mode, known_ret)
3609 & cached_nonzero_bits (XEXP (x, 1), mode,
3610 known_x, known_mode, known_ret);
3611 break;
3613 case XOR: case IOR:
3614 case UMIN: case UMAX: case SMIN: case SMAX:
3616 unsigned HOST_WIDE_INT nonzero0 =
3617 cached_nonzero_bits (XEXP (x, 0), mode,
3618 known_x, known_mode, known_ret);
3620 /* Don't call nonzero_bits for the second time if it cannot change
3621 anything. */
3622 if ((nonzero & nonzero0) != nonzero)
3623 nonzero &= nonzero0
3624 | cached_nonzero_bits (XEXP (x, 1), mode,
3625 known_x, known_mode, known_ret);
3627 break;
3629 case PLUS: case MINUS:
3630 case MULT:
3631 case DIV: case UDIV:
3632 case MOD: case UMOD:
3633 /* We can apply the rules of arithmetic to compute the number of
3634 high- and low-order zero bits of these operations. We start by
3635 computing the width (position of the highest-order nonzero bit)
3636 and the number of low-order zero bits for each value. */
3638 unsigned HOST_WIDE_INT nz0 =
3639 cached_nonzero_bits (XEXP (x, 0), mode,
3640 known_x, known_mode, known_ret);
3641 unsigned HOST_WIDE_INT nz1 =
3642 cached_nonzero_bits (XEXP (x, 1), mode,
3643 known_x, known_mode, known_ret);
3644 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3645 int width0 = floor_log2 (nz0) + 1;
3646 int width1 = floor_log2 (nz1) + 1;
3647 int low0 = floor_log2 (nz0 & -nz0);
3648 int low1 = floor_log2 (nz1 & -nz1);
3649 HOST_WIDE_INT op0_maybe_minusp
3650 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3651 HOST_WIDE_INT op1_maybe_minusp
3652 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3653 unsigned int result_width = mode_width;
3654 int result_low = 0;
3656 switch (code)
3658 case PLUS:
3659 result_width = MAX (width0, width1) + 1;
3660 result_low = MIN (low0, low1);
3661 break;
3662 case MINUS:
3663 result_low = MIN (low0, low1);
3664 break;
3665 case MULT:
3666 result_width = width0 + width1;
3667 result_low = low0 + low1;
3668 break;
3669 case DIV:
3670 if (width1 == 0)
3671 break;
3672 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3673 result_width = width0;
3674 break;
3675 case UDIV:
3676 if (width1 == 0)
3677 break;
3678 result_width = width0;
3679 break;
3680 case MOD:
3681 if (width1 == 0)
3682 break;
3683 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3684 result_width = MIN (width0, width1);
3685 result_low = MIN (low0, low1);
3686 break;
3687 case UMOD:
3688 if (width1 == 0)
3689 break;
3690 result_width = MIN (width0, width1);
3691 result_low = MIN (low0, low1);
3692 break;
3693 default:
3694 gcc_unreachable ();
3697 if (result_width < mode_width)
3698 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3700 if (result_low > 0)
3701 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3703 #ifdef POINTERS_EXTEND_UNSIGNED
3704 /* If pointers extend unsigned and this is an addition or subtraction
3705 to a pointer in Pmode, all the bits above ptr_mode are known to be
3706 zero. */
3707 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3708 && (code == PLUS || code == MINUS)
3709 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3710 nonzero &= GET_MODE_MASK (ptr_mode);
3711 #endif
3713 break;
3715 case ZERO_EXTRACT:
3716 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3717 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3718 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3719 break;
3721 case SUBREG:
3722 /* If this is a SUBREG formed for a promoted variable that has
3723 been zero-extended, we know that at least the high-order bits
3724 are zero, though others might be too. */
3726 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3727 nonzero = GET_MODE_MASK (GET_MODE (x))
3728 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3729 known_x, known_mode, known_ret);
3731 /* If the inner mode is a single word for both the host and target
3732 machines, we can compute this from which bits of the inner
3733 object might be nonzero. */
3734 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3735 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3736 <= HOST_BITS_PER_WIDE_INT))
3738 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3739 known_x, known_mode, known_ret);
3741 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3742 /* If this is a typical RISC machine, we only have to worry
3743 about the way loads are extended. */
3744 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3745 ? (((nonzero
3746 & (((unsigned HOST_WIDE_INT) 1
3747 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3748 != 0))
3749 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3750 || !MEM_P (SUBREG_REG (x)))
3751 #endif
3753 /* On many CISC machines, accessing an object in a wider mode
3754 causes the high-order bits to become undefined. So they are
3755 not known to be zero. */
3756 if (GET_MODE_SIZE (GET_MODE (x))
3757 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3758 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3759 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3762 break;
3764 case ASHIFTRT:
3765 case LSHIFTRT:
3766 case ASHIFT:
3767 case ROTATE:
3768 /* The nonzero bits are in two classes: any bits within MODE
3769 that aren't in GET_MODE (x) are always significant. The rest of the
3770 nonzero bits are those that are significant in the operand of
3771 the shift when shifted the appropriate number of bits. This
3772 shows that high-order bits are cleared by the right shift and
3773 low-order bits by left shifts. */
3774 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3775 && INTVAL (XEXP (x, 1)) >= 0
3776 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3778 enum machine_mode inner_mode = GET_MODE (x);
3779 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3780 int count = INTVAL (XEXP (x, 1));
3781 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3782 unsigned HOST_WIDE_INT op_nonzero =
3783 cached_nonzero_bits (XEXP (x, 0), mode,
3784 known_x, known_mode, known_ret);
3785 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3786 unsigned HOST_WIDE_INT outer = 0;
3788 if (mode_width > width)
3789 outer = (op_nonzero & nonzero & ~mode_mask);
3791 if (code == LSHIFTRT)
3792 inner >>= count;
3793 else if (code == ASHIFTRT)
3795 inner >>= count;
3797 /* If the sign bit may have been nonzero before the shift, we
3798 need to mark all the places it could have been copied to
3799 by the shift as possibly nonzero. */
3800 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3801 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3803 else if (code == ASHIFT)
3804 inner <<= count;
3805 else
3806 inner = ((inner << (count % width)
3807 | (inner >> (width - (count % width)))) & mode_mask);
3809 nonzero &= (outer | inner);
3811 break;
3813 case FFS:
3814 case POPCOUNT:
3815 /* This is at most the number of bits in the mode. */
3816 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3817 break;
3819 case CLZ:
3820 /* If CLZ has a known value at zero, then the nonzero bits are
3821 that value, plus the number of bits in the mode minus one. */
3822 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3823 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3824 else
3825 nonzero = -1;
3826 break;
3828 case CTZ:
3829 /* If CTZ has a known value at zero, then the nonzero bits are
3830 that value, plus the number of bits in the mode minus one. */
3831 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3832 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3833 else
3834 nonzero = -1;
3835 break;
3837 case PARITY:
3838 nonzero = 1;
3839 break;
3841 case IF_THEN_ELSE:
3843 unsigned HOST_WIDE_INT nonzero_true =
3844 cached_nonzero_bits (XEXP (x, 1), mode,
3845 known_x, known_mode, known_ret);
3847 /* Don't call nonzero_bits for the second time if it cannot change
3848 anything. */
3849 if ((nonzero & nonzero_true) != nonzero)
3850 nonzero &= nonzero_true
3851 | cached_nonzero_bits (XEXP (x, 2), mode,
3852 known_x, known_mode, known_ret);
3854 break;
3856 default:
3857 break;
3860 return nonzero;
3863 /* See the macro definition above. */
3864 #undef cached_num_sign_bit_copies
3867 /* The function cached_num_sign_bit_copies is a wrapper around
3868 num_sign_bit_copies1. It avoids exponential behavior in
3869 num_sign_bit_copies1 when X has identical subexpressions on the
3870 first or the second level. */
3872 static unsigned int
3873 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3874 enum machine_mode known_mode,
3875 unsigned int known_ret)
3877 if (x == known_x && mode == known_mode)
3878 return known_ret;
3880 /* Try to find identical subexpressions. If found call
3881 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3882 the precomputed value for the subexpression as KNOWN_RET. */
3884 if (ARITHMETIC_P (x))
3886 rtx x0 = XEXP (x, 0);
3887 rtx x1 = XEXP (x, 1);
3889 /* Check the first level. */
3890 if (x0 == x1)
3891 return
3892 num_sign_bit_copies1 (x, mode, x0, mode,
3893 cached_num_sign_bit_copies (x0, mode, known_x,
3894 known_mode,
3895 known_ret));
3897 /* Check the second level. */
3898 if (ARITHMETIC_P (x0)
3899 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3900 return
3901 num_sign_bit_copies1 (x, mode, x1, mode,
3902 cached_num_sign_bit_copies (x1, mode, known_x,
3903 known_mode,
3904 known_ret));
3906 if (ARITHMETIC_P (x1)
3907 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3908 return
3909 num_sign_bit_copies1 (x, mode, x0, mode,
3910 cached_num_sign_bit_copies (x0, mode, known_x,
3911 known_mode,
3912 known_ret));
3915 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
3918 /* Return the number of bits at the high-order end of X that are known to
3919 be equal to the sign bit. X will be used in mode MODE; if MODE is
3920 VOIDmode, X will be used in its own mode. The returned value will always
3921 be between 1 and the number of bits in MODE. */
3923 static unsigned int
3924 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
3925 enum machine_mode known_mode,
3926 unsigned int known_ret)
3928 enum rtx_code code = GET_CODE (x);
3929 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
3930 int num0, num1, result;
3931 unsigned HOST_WIDE_INT nonzero;
3933 /* If we weren't given a mode, use the mode of X. If the mode is still
3934 VOIDmode, we don't know anything. Likewise if one of the modes is
3935 floating-point. */
3937 if (mode == VOIDmode)
3938 mode = GET_MODE (x);
3940 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
3941 return 1;
3943 /* For a smaller object, just ignore the high bits. */
3944 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
3946 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
3947 known_x, known_mode, known_ret);
3948 return MAX (1,
3949 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
3952 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
3954 #ifndef WORD_REGISTER_OPERATIONS
3955 /* If this machine does not do all register operations on the entire
3956 register and MODE is wider than the mode of X, we can say nothing
3957 at all about the high-order bits. */
3958 return 1;
3959 #else
3960 /* Likewise on machines that do, if the mode of the object is smaller
3961 than a word and loads of that size don't sign extend, we can say
3962 nothing about the high order bits. */
3963 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
3964 #ifdef LOAD_EXTEND_OP
3965 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
3966 #endif
3968 return 1;
3969 #endif
3972 switch (code)
3974 case REG:
3976 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3977 /* If pointers extend signed and this is a pointer in Pmode, say that
3978 all the bits above ptr_mode are known to be sign bit copies. */
3979 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
3980 && REG_POINTER (x))
3981 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
3982 #endif
3985 unsigned int copies_for_hook = 1, copies = 1;
3986 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
3987 known_mode, known_ret,
3988 &copies_for_hook);
3990 if (new)
3991 copies = cached_num_sign_bit_copies (new, mode, known_x,
3992 known_mode, known_ret);
3994 if (copies > 1 || copies_for_hook > 1)
3995 return MAX (copies, copies_for_hook);
3997 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
3999 break;
4001 case MEM:
4002 #ifdef LOAD_EXTEND_OP
4003 /* Some RISC machines sign-extend all loads of smaller than a word. */
4004 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4005 return MAX (1, ((int) bitwidth
4006 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4007 #endif
4008 break;
4010 case CONST_INT:
4011 /* If the constant is negative, take its 1's complement and remask.
4012 Then see how many zero bits we have. */
4013 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4014 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4015 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4016 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4018 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4020 case SUBREG:
4021 /* If this is a SUBREG for a promoted object that is sign-extended
4022 and we are looking at it in a wider mode, we know that at least the
4023 high-order bits are known to be sign bit copies. */
4025 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4027 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4028 known_x, known_mode, known_ret);
4029 return MAX ((int) bitwidth
4030 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4031 num0);
4034 /* For a smaller object, just ignore the high bits. */
4035 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4037 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4038 known_x, known_mode, known_ret);
4039 return MAX (1, (num0
4040 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4041 - bitwidth)));
4044 #ifdef WORD_REGISTER_OPERATIONS
4045 #ifdef LOAD_EXTEND_OP
4046 /* For paradoxical SUBREGs on machines where all register operations
4047 affect the entire register, just look inside. Note that we are
4048 passing MODE to the recursive call, so the number of sign bit copies
4049 will remain relative to that mode, not the inner mode. */
4051 /* This works only if loads sign extend. Otherwise, if we get a
4052 reload for the inner part, it may be loaded from the stack, and
4053 then we lose all sign bit copies that existed before the store
4054 to the stack. */
4056 if ((GET_MODE_SIZE (GET_MODE (x))
4057 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4058 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4059 && MEM_P (SUBREG_REG (x)))
4060 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4061 known_x, known_mode, known_ret);
4062 #endif
4063 #endif
4064 break;
4066 case SIGN_EXTRACT:
4067 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4068 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4069 break;
4071 case SIGN_EXTEND:
4072 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4073 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4074 known_x, known_mode, known_ret));
4076 case TRUNCATE:
4077 /* For a smaller object, just ignore the high bits. */
4078 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4079 known_x, known_mode, known_ret);
4080 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4081 - bitwidth)));
4083 case NOT:
4084 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4085 known_x, known_mode, known_ret);
4087 case ROTATE: case ROTATERT:
4088 /* If we are rotating left by a number of bits less than the number
4089 of sign bit copies, we can just subtract that amount from the
4090 number. */
4091 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4092 && INTVAL (XEXP (x, 1)) >= 0
4093 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4095 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4096 known_x, known_mode, known_ret);
4097 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4098 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4100 break;
4102 case NEG:
4103 /* In general, this subtracts one sign bit copy. But if the value
4104 is known to be positive, the number of sign bit copies is the
4105 same as that of the input. Finally, if the input has just one bit
4106 that might be nonzero, all the bits are copies of the sign bit. */
4107 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4108 known_x, known_mode, known_ret);
4109 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4110 return num0 > 1 ? num0 - 1 : 1;
4112 nonzero = nonzero_bits (XEXP (x, 0), mode);
4113 if (nonzero == 1)
4114 return bitwidth;
4116 if (num0 > 1
4117 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4118 num0--;
4120 return num0;
4122 case IOR: case AND: case XOR:
4123 case SMIN: case SMAX: case UMIN: case UMAX:
4124 /* Logical operations will preserve the number of sign-bit copies.
4125 MIN and MAX operations always return one of the operands. */
4126 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4127 known_x, known_mode, known_ret);
4128 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4129 known_x, known_mode, known_ret);
4130 return MIN (num0, num1);
4132 case PLUS: case MINUS:
4133 /* For addition and subtraction, we can have a 1-bit carry. However,
4134 if we are subtracting 1 from a positive number, there will not
4135 be such a carry. Furthermore, if the positive number is known to
4136 be 0 or 1, we know the result is either -1 or 0. */
4138 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4139 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4141 nonzero = nonzero_bits (XEXP (x, 0), mode);
4142 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4143 return (nonzero == 1 || nonzero == 0 ? bitwidth
4144 : bitwidth - floor_log2 (nonzero) - 1);
4147 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4148 known_x, known_mode, known_ret);
4149 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4150 known_x, known_mode, known_ret);
4151 result = MAX (1, MIN (num0, num1) - 1);
4153 #ifdef POINTERS_EXTEND_UNSIGNED
4154 /* If pointers extend signed and this is an addition or subtraction
4155 to a pointer in Pmode, all the bits above ptr_mode are known to be
4156 sign bit copies. */
4157 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4158 && (code == PLUS || code == MINUS)
4159 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4160 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4161 - GET_MODE_BITSIZE (ptr_mode) + 1),
4162 result);
4163 #endif
4164 return result;
4166 case MULT:
4167 /* The number of bits of the product is the sum of the number of
4168 bits of both terms. However, unless one of the terms is known
4169 to be positive, we must allow for an additional bit since negating
4170 a negative number can remove one sign bit copy. */
4172 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4173 known_x, known_mode, known_ret);
4174 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4175 known_x, known_mode, known_ret);
4177 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4178 if (result > 0
4179 && (bitwidth > HOST_BITS_PER_WIDE_INT
4180 || (((nonzero_bits (XEXP (x, 0), mode)
4181 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4182 && ((nonzero_bits (XEXP (x, 1), mode)
4183 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4184 result--;
4186 return MAX (1, result);
4188 case UDIV:
4189 /* The result must be <= the first operand. If the first operand
4190 has the high bit set, we know nothing about the number of sign
4191 bit copies. */
4192 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4193 return 1;
4194 else if ((nonzero_bits (XEXP (x, 0), mode)
4195 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4196 return 1;
4197 else
4198 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4199 known_x, known_mode, known_ret);
4201 case UMOD:
4202 /* The result must be <= the second operand. */
4203 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4204 known_x, known_mode, known_ret);
4206 case DIV:
4207 /* Similar to unsigned division, except that we have to worry about
4208 the case where the divisor is negative, in which case we have
4209 to add 1. */
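/* E.g., in a 32-bit mode -2^28 has 4 sign bit copies, but (-2^28) / -1
   = 2^28 has only 3, so one copy is subtracted below whenever the divisor
   might be negative.  */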
4210 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4211 known_x, known_mode, known_ret);
4212 if (result > 1
4213 && (bitwidth > HOST_BITS_PER_WIDE_INT
4214 || (nonzero_bits (XEXP (x, 1), mode)
4215 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4216 result--;
4218 return result;
4220 case MOD:
4221 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4222 known_x, known_mode, known_ret);
4223 if (result > 1
4224 && (bitwidth > HOST_BITS_PER_WIDE_INT
4225 || (nonzero_bits (XEXP (x, 1), mode)
4226 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4227 result--;
4229 return result;
4231 case ASHIFTRT:
4232 /* An arithmetic right shift by a constant C adds C more bits equal to
4233 the sign bit, up to BITWIDTH. */
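/* E.g., an operand with 5 sign bit copies shifted right arithmetically by 3
   has MIN (bitwidth, 5 + 3) = 8 copies: the shift pulls three more
   duplicates of the sign bit into the high end.  */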
4234 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4235 known_x, known_mode, known_ret);
4236 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4237 && INTVAL (XEXP (x, 1)) > 0)
4238 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4240 return num0;
4242 case ASHIFT:
4243 /* Left shifts destroy copies. */
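/* E.g., in a 32-bit mode an operand with 5 sign bit copies fits in 28
   signed bits; shifted left by 3 it may need 31, so only
   MAX (1, 5 - 3) = 2 copies are guaranteed.  */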
4244 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4245 || INTVAL (XEXP (x, 1)) < 0
4246 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4247 return 1;
4249 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4250 known_x, known_mode, known_ret);
4251 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4253 case IF_THEN_ELSE:
4254 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4255 known_x, known_mode, known_ret);
4256 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4257 known_x, known_mode, known_ret);
4258 return MIN (num0, num1);
4260 case EQ: case NE: case GE: case GT: case LE: case LT:
4261 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4262 case GEU: case GTU: case LEU: case LTU:
4263 case UNORDERED: case ORDERED:
4264 /* If the constant is negative, take its 1's complement and remask.
4265 Then see how many zero bits we have. */
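/* E.g., with STORE_FLAG_VALUE == 1 the result is 0 or 1 and bitwidth - 1
   is returned; with STORE_FLAG_VALUE == -1 the complement below becomes 0
   and the full bitwidth is returned, since 0 and -1 are all sign bits.  */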
4266 nonzero = STORE_FLAG_VALUE;
4267 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4268 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4269 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4271 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4273 default:
4274 break;
4277 /* If we haven't been able to figure it out by one of the above rules,
4278 see if some of the high-order bits are known to be zero. If so,
4279 count those bits and return one less than that amount. If we can't
4280 safely compute the mask for this mode, always return BITWIDTH. */
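/* E.g., if only the low 8 bits can be nonzero in a 32-bit mode,
   floor_log2 (nonzero) is 7 and 32 - 7 - 1 = 24 high-order bits are zero,
   all of them copies of the (zero) sign bit.  */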
4282 bitwidth = GET_MODE_BITSIZE (mode);
4283 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4284 return 1;
4286 nonzero = nonzero_bits (x, mode);
4287 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4288 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4291 /* Calculate the rtx_cost of a single instruction. A return value of
4292 zero indicates an instruction pattern without a known cost. */
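/* For example, for the illustrative pattern
   (set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2)))
   the value returned is rtx_cost of the PLUS source, or COSTS_N_INSNS (1)
   when the target reports a cost of zero for it.  */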
4294 int
4295 insn_rtx_cost (rtx pat)
4297 int i, cost;
4298 rtx set;
4300 /* Extract the single set rtx from the instruction pattern.
4301 We can't use single_set since we only have the pattern. */
4302 if (GET_CODE (pat) == SET)
4303 set = pat;
4304 else if (GET_CODE (pat) == PARALLEL)
4306 set = NULL_RTX;
4307 for (i = 0; i < XVECLEN (pat, 0); i++)
4309 rtx x = XVECEXP (pat, 0, i);
4310 if (GET_CODE (x) == SET)
4312 if (set)
4313 return 0;
4314 set = x;
4317 if (!set)
4318 return 0;
4320 else
4321 return 0;
4323 cost = rtx_cost (SET_SRC (set), SET);
4324 return cost > 0 ? cost : COSTS_N_INSNS (1);
4327 /* Given an insn INSN and condition COND, return the condition in a
4328 canonical form to simplify testing by callers. Specifically:
4330 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4331 (2) Both operands will be machine operands; (cc0) will have been replaced.
4332 (3) If an operand is a constant, it will be the second operand.
4333 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4334 for GE, GEU, and LEU.
4336 If the condition cannot be understood, or is an inequality floating-point
4337 comparison which needs to be reversed, 0 will be returned.
4339 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4341 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4342 insn used in locating the condition was found. If a replacement test
4343 of the condition is desired, it should be placed in front of that
4344 insn and we will be sure that the inputs are still valid.
4346 If WANT_REG is nonzero, we wish the condition to be relative to that
4347 register, if possible. Therefore, do not canonicalize the condition
4348 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4349 to be a compare to a CC mode register.
4351 If VALID_AT_INSN_P, the condition must be valid both at *EARLIEST
4352 and at INSN. */
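/* For example (illustrative operands): (le (reg:SI 1) (const_int 4)) is
   returned as (lt (reg:SI 1) (const_int 5)), and (gt (const_int 0) (reg:SI 1))
   is first swapped to (lt (reg:SI 1) (const_int 0)).  */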
4354 rtx
4355 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4356 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4358 enum rtx_code code;
4359 rtx prev = insn;
4360 rtx set;
4361 rtx tem;
4362 rtx op0, op1;
4363 int reverse_code = 0;
4364 enum machine_mode mode;
4365 basic_block bb = BLOCK_FOR_INSN (insn);
4367 code = GET_CODE (cond);
4368 mode = GET_MODE (cond);
4369 op0 = XEXP (cond, 0);
4370 op1 = XEXP (cond, 1);
4372 if (reverse)
4373 code = reversed_comparison_code (cond, insn);
4374 if (code == UNKNOWN)
4375 return 0;
4377 if (earliest)
4378 *earliest = insn;
4380 /* If we are comparing a register with zero, see if the register is set
4381 in the previous insn to a COMPARE or a comparison operation. Perform
4382 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4383 in cse.c */
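/* For instance (illustrative registers, assuming the usual STORE_FLAG_VALUE
   of 1): if COND is (ne (reg:SI 100) (const_int 0)) and the previous insn is
   (set (reg:SI 100) (gt (reg:SI 101) (const_int 7))), the loop below replaces
   the condition by (gt (reg:SI 101) (const_int 7)).  */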
4385 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4386 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4387 && op1 == CONST0_RTX (GET_MODE (op0))
4388 && op0 != want_reg)
4390 /* Set nonzero when we find something of interest. */
4391 rtx x = 0;
4393 #ifdef HAVE_cc0
4394 /* If comparison with cc0, import actual comparison from compare
4395 insn. */
4396 if (op0 == cc0_rtx)
4398 if ((prev = prev_nonnote_insn (prev)) == 0
4399 || !NONJUMP_INSN_P (prev)
4400 || (set = single_set (prev)) == 0
4401 || SET_DEST (set) != cc0_rtx)
4402 return 0;
4404 op0 = SET_SRC (set);
4405 op1 = CONST0_RTX (GET_MODE (op0));
4406 if (earliest)
4407 *earliest = prev;
4409 #endif
4411 /* If this is a COMPARE, pick up the two things being compared. */
4412 if (GET_CODE (op0) == COMPARE)
4414 op1 = XEXP (op0, 1);
4415 op0 = XEXP (op0, 0);
4416 continue;
4418 else if (!REG_P (op0))
4419 break;
4421 /* Go back to the previous insn. Stop if it is not an INSN. We also
4422 stop if it isn't a single set or if it has a REG_INC note because
4423 we don't want to bother dealing with it. */
4425 if ((prev = prev_nonnote_insn (prev)) == 0
4426 || !NONJUMP_INSN_P (prev)
4427 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4428 /* In cfglayout mode, there do not have to be labels at the
4429 beginning of a block, or jumps at the end, so the previous
4430 conditions would not stop us when we reach the basic block boundary. */
4431 || BLOCK_FOR_INSN (prev) != bb)
4432 break;
4434 set = set_of (op0, prev);
4436 if (set
4437 && (GET_CODE (set) != SET
4438 || !rtx_equal_p (SET_DEST (set), op0)))
4439 break;
4441 /* If this is setting OP0, get what it sets it to if it looks
4442 relevant. */
4443 if (set)
4445 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4446 #ifdef FLOAT_STORE_FLAG_VALUE
4447 REAL_VALUE_TYPE fsfv;
4448 #endif
4450 /* ??? We may not combine comparisons done in a CCmode with
4451 comparisons not done in a CCmode. This is to aid targets
4452 like Alpha that have an IEEE compliant EQ instruction, and
4453 a non-IEEE compliant BEQ instruction. The use of CCmode is
4454 actually artificial, simply to prevent the combination, but
4455 should not affect other platforms.
4457 However, we must allow VOIDmode comparisons to match either
4458 CCmode or non-CCmode comparison, because some ports have
4459 modeless comparisons inside branch patterns.
4461 ??? This mode check should perhaps look more like the mode check
4462 in simplify_comparison in combine. */
4464 if ((GET_CODE (SET_SRC (set)) == COMPARE
4465 || (((code == NE
4466 || (code == LT
4467 && GET_MODE_CLASS (inner_mode) == MODE_INT
4468 && (GET_MODE_BITSIZE (inner_mode)
4469 <= HOST_BITS_PER_WIDE_INT)
4470 && (STORE_FLAG_VALUE
4471 & ((HOST_WIDE_INT) 1
4472 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4473 #ifdef FLOAT_STORE_FLAG_VALUE
4474 || (code == LT
4475 && SCALAR_FLOAT_MODE_P (inner_mode)
4476 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4477 REAL_VALUE_NEGATIVE (fsfv)))
4478 #endif
4480 && COMPARISON_P (SET_SRC (set))))
4481 && (((GET_MODE_CLASS (mode) == MODE_CC)
4482 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4483 || mode == VOIDmode || inner_mode == VOIDmode))
4484 x = SET_SRC (set);
4485 else if (((code == EQ
4486 || (code == GE
4487 && (GET_MODE_BITSIZE (inner_mode)
4488 <= HOST_BITS_PER_WIDE_INT)
4489 && GET_MODE_CLASS (inner_mode) == MODE_INT
4490 && (STORE_FLAG_VALUE
4491 & ((HOST_WIDE_INT) 1
4492 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4493 #ifdef FLOAT_STORE_FLAG_VALUE
4494 || (code == GE
4495 && SCALAR_FLOAT_MODE_P (inner_mode)
4496 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4497 REAL_VALUE_NEGATIVE (fsfv)))
4498 #endif
4500 && COMPARISON_P (SET_SRC (set))
4501 && (((GET_MODE_CLASS (mode) == MODE_CC)
4502 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4503 || mode == VOIDmode || inner_mode == VOIDmode))
4506 reverse_code = 1;
4507 x = SET_SRC (set);
4509 else
4510 break;
4513 else if (reg_set_p (op0, prev))
4514 /* If this sets OP0, but not directly, we have to give up. */
4515 break;
4517 if (x)
4519 /* If the caller is expecting the condition to be valid at INSN,
4520 make sure X doesn't change before INSN. */
4521 if (valid_at_insn_p)
4522 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4523 break;
4524 if (COMPARISON_P (x))
4525 code = GET_CODE (x);
4526 if (reverse_code)
4528 code = reversed_comparison_code (x, prev);
4529 if (code == UNKNOWN)
4530 return 0;
4531 reverse_code = 0;
4534 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4535 if (earliest)
4536 *earliest = prev;
4540 /* If constant is first, put it last. */
4541 if (CONSTANT_P (op0))
4542 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4544 /* If OP0 is the result of a comparison, we weren't able to find what
4545 was really being compared, so fail. */
4546 if (!allow_cc_mode
4547 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4548 return 0;
4550 /* Canonicalize any ordered comparison with integers involving equality
4551 if we can do computations in the relevant mode and we do not
4552 overflow. */
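/* E.g., (le X (const_int 7)) becomes (lt X (const_int 8)) and
   (geu X (const_int 5)) becomes (gtu X (const_int 4)).  */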
4554 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4555 && GET_CODE (op1) == CONST_INT
4556 && GET_MODE (op0) != VOIDmode
4557 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4559 HOST_WIDE_INT const_val = INTVAL (op1);
4560 unsigned HOST_WIDE_INT uconst_val = const_val;
4561 unsigned HOST_WIDE_INT max_val
4562 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4564 switch (code)
4566 case LE:
4567 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4568 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4569 break;
4571 /* When cross-compiling, const_val might be sign-extended from
4572 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT. */
4573 case GE:
4574 if ((HOST_WIDE_INT) (const_val & max_val)
4575 != (((HOST_WIDE_INT) 1
4576 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4577 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4578 break;
4580 case LEU:
4581 if (uconst_val < max_val)
4582 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4583 break;
4585 case GEU:
4586 if (uconst_val != 0)
4587 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4588 break;
4590 default:
4591 break;
4595 /* Never return CC0; return zero instead. */
4596 if (CC0_P (op0))
4597 return 0;
4599 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4602 /* Given a jump insn JUMP, return the condition that will cause it to branch
4603 to its JUMP_LABEL. If the condition cannot be understood, or is an
4604 inequality floating-point comparison which needs to be reversed, 0 will
4605 be returned.
4607 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4608 insn used in locating the condition was found. If a replacement test
4609 of the condition is desired, it should be placed in front of that
4610 insn and we will be sure that the inputs are still valid. If EARLIEST
4611 is null, the returned condition will be valid at INSN.
4613 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4614 compare CC mode register.
4616 VALID_AT_INSN_P is the same as for canonicalize_condition. */
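/* For example (illustrative uids): if the pc_set of JUMP is
   (set (pc) (if_then_else (lt (reg:SI 1) (const_int 0)) (label_ref 23) (pc))),
   the condition (lt (reg:SI 1) (const_int 0)) is canonicalized and returned;
   had the label_ref been in the other arm, the condition would be reversed
   first.  */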
4618 rtx
4619 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4621 rtx cond;
4622 int reverse;
4623 rtx set;
4625 /* If this is not a standard conditional jump, we can't parse it. */
4626 if (!JUMP_P (jump)
4627 || ! any_condjump_p (jump))
4628 return 0;
4629 set = pc_set (jump);
4631 cond = XEXP (SET_SRC (set), 0);
4633 /* If this branches to JUMP_LABEL when the condition is false, reverse
4634 the condition. */
4635 reverse
4636 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4637 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4639 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4640 allow_cc_mode, valid_at_insn_p);
4643 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4644 TARGET_MODE_REP_EXTENDED.
4646 Note that we assume that the property of
4647 TARGET_MODE_REP_EXTENDED (B, C) also holds for the integral modes
4648 narrower than mode B. I.e., if A is a mode narrower than B then in
4649 order to be able to operate on it in mode B, mode A needs to
4650 satisfy the requirements set by the representation of mode B. */
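/* For example, on a target such as 64-bit MIPS, where SImode values are
   kept sign-extended in DImode registers, TARGET_MODE_REP_EXTENDED (SImode,
   DImode) is SIGN_EXTEND and the entry [DImode][SImode] becomes 32: the top
   32 bits of the DImode representation must copy the SImode sign bit.  */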
4652 static void
4653 init_num_sign_bit_copies_in_rep (void)
4655 enum machine_mode mode, in_mode;
4657 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4658 in_mode = GET_MODE_WIDER_MODE (mode))
4659 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4660 mode = GET_MODE_WIDER_MODE (mode))
4662 enum machine_mode i;
4664 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4665 extends to the next widest mode. */
4666 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4667 || GET_MODE_WIDER_MODE (mode) == in_mode);
4669 /* We are in in_mode. Count how many bits outside of mode
4670 have to be copies of the sign-bit. */
4671 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4673 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4675 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4676 /* We can only check sign-bit copies starting from the
4677 top-bit. In order to be able to check the bits we
4678 have already seen we pretend that subsequent bits
4679 have to be sign-bit copies too. */
4680 || num_sign_bit_copies_in_rep [in_mode][mode])
4681 num_sign_bit_copies_in_rep [in_mode][mode]
4682 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4687 /* Suppose that truncation from the machine mode of X to MODE is not a
4688 no-op. See if there is anything special about X so that we can
4689 assume it already contains a truncated value of MODE. */
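/* For example, if the representation requires the top 32 bits of a DImode
   value to be SImode sign bit copies, X passes the table check below when
   num_sign_bit_copies (x, DImode) is at least 33.  */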
4691 bool
4692 truncated_to_mode (enum machine_mode mode, rtx x)
4694 /* This register has already been used in MODE without explicit
4695 truncation. */
4696 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4697 return true;
4699 /* See if we already satisfy the requirements of MODE. If yes we
4700 can just switch to MODE. */
4701 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4702 && (num_sign_bit_copies (x, GET_MODE (x))
4703 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4704 return true;
4706 return false;
4709 /* Initialize non_rtx_starting_operands, which is used to speed up
4710 for_each_rtx. */
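/* For example, SET has format "ee", so its entry is 0, while CONST_INT
   (format "w") has no rtx operands and gets -1.  */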
4711 void
4712 init_rtlanal (void)
4714 int i;
4715 for (i = 0; i < NUM_RTX_CODE; i++)
4717 const char *format = GET_RTX_FORMAT (i);
4718 const char *first = strpbrk (format, "eEV");
4719 non_rtx_starting_operands[i] = first ? first - format : -1;
4722 init_num_sign_bit_copies_in_rep ();
4725 /* Check whether this is a constant pool constant. */
4726 bool
4727 constant_pool_constant_p (rtx x)
4729 x = avoid_constant_pool_reference (x);
4730 return GET_CODE (x) == CONST_DOUBLE;