gcc/rtlanal.c
1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007 Free Software
4 Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "target.h"
33 #include "output.h"
34 #include "tm_p.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "regs.h"
38 #include "function.h"
39 #include "df.h"
40 #include "tree.h"
42 /* Information about a subreg of a hard register. */
43 struct subreg_info
45 /* Offset of first hard register involved in the subreg. */
46 int offset;
47 /* Number of hard registers involved in the subreg. */
48 int nregs;
49 /* Whether this subreg can be represented as a hard reg with the new
50 mode. */
51 bool representable_p;
54 /* Forward declarations */
55 static void set_of_1 (rtx, const_rtx, void *);
56 static bool covers_regno_p (const_rtx, unsigned int);
57 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
58 static int rtx_referenced_p_1 (rtx *, void *);
59 static int computed_jump_p_1 (const_rtx);
60 static void parms_set (rtx, const_rtx, void *);
61 static void subreg_get_info (unsigned int, enum machine_mode,
62 unsigned int, enum machine_mode,
63 struct subreg_info *);
65 static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
66 const_rtx, enum machine_mode,
67 unsigned HOST_WIDE_INT);
68 static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
69 const_rtx, enum machine_mode,
70 unsigned HOST_WIDE_INT);
71 static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
72 enum machine_mode,
73 unsigned int);
74 static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
75 enum machine_mode, unsigned int);
77 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
78 -1 if a code has no such operand. */
79 static int non_rtx_starting_operands[NUM_RTX_CODE];
81 /* Bit flags that specify the machine subtype we are compiling for.
82 Bits are tested using macros TARGET_... defined in the tm.h file
83 and set by `-m...' switches. Must be defined in rtlanal.c. */
85 int target_flags;
87 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
88 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
89 SIGN_EXTEND then while narrowing we also have to enforce the
90 representation and sign-extend the value to mode DESTINATION_REP.
92 If the value is already sign-extended to DESTINATION_REP mode we
93 can just switch to DESTINATION mode on it. For each pair of
94 integral modes SOURCE and DESTINATION, when truncating from SOURCE
95 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
96 contains the number of high-order bits in SOURCE that have to be
97 copies of the sign-bit so that we can do this mode-switch to
98 DESTINATION. */
100 static unsigned int
101 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
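/* Illustrative example (hypothetical target): if HImode values are kept
   sign-extended when held in SImode registers, i.e.
   TARGET_MODE_REP_EXTENDED (HImode, SImode) == SIGN_EXTEND, then truncating
   an SImode value to HImode is a pure mode switch only when all the SImode
   bits above the low HImode part are already copies of the sign bit;
   NUM_SIGN_BIT_COPIES_IN_REP[SImode][HImode] records that requirement.  */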
103 /* Return 1 if the value of X is unstable
104 (would be different at a different point in the program).
105 The frame pointer, arg pointer, etc. are considered stable
106 (within one function) and so is anything marked `unchanging'. */
109 rtx_unstable_p (const_rtx x)
111 const RTX_CODE code = GET_CODE (x);
112 int i;
113 const char *fmt;
115 switch (code)
117 case MEM:
118 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
120 case CONST:
121 case CONST_INT:
122 case CONST_DOUBLE:
123 case CONST_FIXED:
124 case CONST_VECTOR:
125 case SYMBOL_REF:
126 case LABEL_REF:
127 return 0;
129 case REG:
130 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
131 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
132 /* The arg pointer varies if it is not a fixed register. */
133 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
134 return 0;
135 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
136 /* ??? When call-clobbered, the value is stable modulo the restore
137 that must happen after a call. This currently screws up local-alloc
138 into believing that the restore is not needed. */
139 if (x == pic_offset_table_rtx)
140 return 0;
141 #endif
142 return 1;
144 case ASM_OPERANDS:
145 if (MEM_VOLATILE_P (x))
146 return 1;
148 /* Fall through. */
150 default:
151 break;
154 fmt = GET_RTX_FORMAT (code);
155 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
156 if (fmt[i] == 'e')
158 if (rtx_unstable_p (XEXP (x, i)))
159 return 1;
161 else if (fmt[i] == 'E')
163 int j;
164 for (j = 0; j < XVECLEN (x, i); j++)
165 if (rtx_unstable_p (XVECEXP (x, i, j)))
166 return 1;
169 return 0;
172 /* Return 1 if X has a value that can vary even between two
173 executions of the program. 0 means X can be compared reliably
174 against certain constants or near-constants.
175 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
176 zero, we are slightly more conservative.
177 The frame pointer and the arg pointer are considered constant. */
179 bool
180 rtx_varies_p (const_rtx x, bool for_alias)
182 RTX_CODE code;
183 int i;
184 const char *fmt;
186 if (!x)
187 return 0;
189 code = GET_CODE (x);
190 switch (code)
192 case MEM:
193 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
195 case CONST:
196 case CONST_INT:
197 case CONST_DOUBLE:
198 case CONST_FIXED:
199 case CONST_VECTOR:
200 case SYMBOL_REF:
201 case LABEL_REF:
202 return 0;
204 case REG:
205 /* Note that we have to test for the actual rtx used for the frame
206 and arg pointers and not just the register number in case we have
207 eliminated the frame and/or arg pointer and are using it
208 for pseudos. */
209 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
210 /* The arg pointer varies if it is not a fixed register. */
211 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
212 return 0;
213 if (x == pic_offset_table_rtx
214 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
215 /* ??? When call-clobbered, the value is stable modulo the restore
216 that must happen after a call. This currently screws up
217 local-alloc into believing that the restore is not needed, so we
218 must return 0 only if we are called from alias analysis. */
219 && for_alias
220 #endif
222 return 0;
223 return 1;
225 case LO_SUM:
226 /* The operand 0 of a LO_SUM is considered constant
227 (in fact it is related specifically to operand 1)
228 during alias analysis. */
229 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
230 || rtx_varies_p (XEXP (x, 1), for_alias);
232 case ASM_OPERANDS:
233 if (MEM_VOLATILE_P (x))
234 return 1;
236 /* Fall through. */
238 default:
239 break;
242 fmt = GET_RTX_FORMAT (code);
243 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
244 if (fmt[i] == 'e')
246 if (rtx_varies_p (XEXP (x, i), for_alias))
247 return 1;
249 else if (fmt[i] == 'E')
251 int j;
252 for (j = 0; j < XVECLEN (x, i); j++)
253 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
254 return 1;
257 return 0;
260 /* Return nonzero if the use of X as an address in a MEM can cause a trap.
261 MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
262 whether nonzero is returned for unaligned memory accesses on strict
263 alignment machines. */
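/* Illustrative example (hypothetical RTL): on a strict alignment machine
   with UNALIGNED_MEMS true, an address of the form
   (plus stack_pointer_rtx (const_int 2)) for an SImode access yields nonzero
   because the offset is not a multiple of GET_MODE_SIZE (SImode), while a
   weak SYMBOL_REF yields nonzero because the symbol may resolve to address
   zero.  */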
265 static int
266 rtx_addr_can_trap_p_1 (const_rtx x, enum machine_mode mode, bool unaligned_mems)
268 enum rtx_code code = GET_CODE (x);
270 switch (code)
272 case SYMBOL_REF:
273 return SYMBOL_REF_WEAK (x);
275 case LABEL_REF:
276 return 0;
278 case REG:
279 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
280 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
281 || x == stack_pointer_rtx
282 /* The arg pointer varies if it is not a fixed register. */
283 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
284 return 0;
285 /* All of the virtual frame registers are stack references. */
286 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
287 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
288 return 0;
289 return 1;
291 case CONST:
292 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
294 case PLUS:
295 /* An address is assumed not to trap if:
296 - it is an address that can't trap plus a constant integer,
297 with the proper remainder modulo the mode size if we are
298 considering unaligned memory references. */
299 if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
300 && GET_CODE (XEXP (x, 1)) == CONST_INT)
302 HOST_WIDE_INT offset;
304 if (!STRICT_ALIGNMENT
305 || !unaligned_mems
306 || GET_MODE_SIZE (mode) == 0)
307 return 0;
309 offset = INTVAL (XEXP (x, 1));
311 #ifdef SPARC_STACK_BOUNDARY_HACK
312 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
313 the real alignment of %sp. However, when it does this, the
314 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
315 if (SPARC_STACK_BOUNDARY_HACK
316 && (XEXP (x, 0) == stack_pointer_rtx
317 || XEXP (x, 0) == hard_frame_pointer_rtx))
318 offset -= STACK_POINTER_OFFSET;
319 #endif
321 return offset % GET_MODE_SIZE (mode) != 0;
324 /* - or it is the pic register plus a constant. */
325 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
326 return 0;
328 return 1;
330 case LO_SUM:
331 case PRE_MODIFY:
332 return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);
334 case PRE_DEC:
335 case PRE_INC:
336 case POST_DEC:
337 case POST_INC:
338 case POST_MODIFY:
339 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
341 default:
342 break;
345 /* If it isn't one of the cases above, it can cause a trap. */
346 return 1;
349 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
352 rtx_addr_can_trap_p (const_rtx x)
354 return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
357 /* Return true if X is an address that is known to not be zero. */
359 bool
360 nonzero_address_p (const_rtx x)
362 const enum rtx_code code = GET_CODE (x);
364 switch (code)
366 case SYMBOL_REF:
367 return !SYMBOL_REF_WEAK (x);
369 case LABEL_REF:
370 return true;
372 case REG:
373 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
374 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
375 || x == stack_pointer_rtx
376 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
377 return true;
378 /* All of the virtual frame registers are stack references. */
379 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
380 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
381 return true;
382 return false;
384 case CONST:
385 return nonzero_address_p (XEXP (x, 0));
387 case PLUS:
388 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
389 return nonzero_address_p (XEXP (x, 0));
390 /* Handle PIC references. */
391 else if (XEXP (x, 0) == pic_offset_table_rtx
392 && CONSTANT_P (XEXP (x, 1)))
393 return true;
394 return false;
396 case PRE_MODIFY:
397 /* Similar to the above; allow positive offsets. Further, since
398 auto-inc is only allowed in memories, the register must be a
399 pointer. */
400 if (GET_CODE (XEXP (x, 1)) == CONST_INT
401 && INTVAL (XEXP (x, 1)) > 0)
402 return true;
403 return nonzero_address_p (XEXP (x, 0));
405 case PRE_INC:
406 /* Similarly. Further, the offset is always positive. */
407 return true;
409 case PRE_DEC:
410 case POST_DEC:
411 case POST_INC:
412 case POST_MODIFY:
413 return nonzero_address_p (XEXP (x, 0));
415 case LO_SUM:
416 return nonzero_address_p (XEXP (x, 1));
418 default:
419 break;
422 /* If it isn't one of the cases above, it might be zero. */
423 return false;
426 /* Return 1 if X refers to a memory location whose address
427 cannot be compared reliably with constant addresses,
428 or if X refers to a BLKmode memory object.
429 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
430 zero, we are slightly more conservative. */
432 bool
433 rtx_addr_varies_p (const_rtx x, bool for_alias)
435 enum rtx_code code;
436 int i;
437 const char *fmt;
439 if (x == 0)
440 return 0;
442 code = GET_CODE (x);
443 if (code == MEM)
444 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
446 fmt = GET_RTX_FORMAT (code);
447 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
448 if (fmt[i] == 'e')
450 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
451 return 1;
453 else if (fmt[i] == 'E')
455 int j;
456 for (j = 0; j < XVECLEN (x, i); j++)
457 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
458 return 1;
460 return 0;
463 /* Return the value of the integer term in X, if one is apparent;
464 otherwise return 0.
465 Only obvious integer terms are detected.
466 This is used in cse.c with the `related_value' field. */
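/* Illustrative examples (hypothetical RTL): applied to
   (const (plus (symbol_ref "x") (const_int 4))) this returns 4, and applied
   to (minus (reg) (const_int 8)) it returns -8; anything without an obvious
   integer term yields 0.  */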
468 HOST_WIDE_INT
469 get_integer_term (const_rtx x)
471 if (GET_CODE (x) == CONST)
472 x = XEXP (x, 0);
474 if (GET_CODE (x) == MINUS
475 && GET_CODE (XEXP (x, 1)) == CONST_INT)
476 return - INTVAL (XEXP (x, 1));
477 if (GET_CODE (x) == PLUS
478 && GET_CODE (XEXP (x, 1)) == CONST_INT)
479 return INTVAL (XEXP (x, 1));
480 return 0;
483 /* If X is a constant, return the value sans apparent integer term;
484 otherwise return 0.
485 Only obvious integer terms are detected. */
488 get_related_value (const_rtx x)
490 if (GET_CODE (x) != CONST)
491 return 0;
492 x = XEXP (x, 0);
493 if (GET_CODE (x) == PLUS
494 && GET_CODE (XEXP (x, 1)) == CONST_INT)
495 return XEXP (x, 0);
496 else if (GET_CODE (x) == MINUS
497 && GET_CODE (XEXP (x, 1)) == CONST_INT)
498 return XEXP (x, 0);
499 return 0;
502 /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
503 to somewhere in the same object or object_block as SYMBOL. */
505 bool
506 offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
508 tree decl;
510 if (GET_CODE (symbol) != SYMBOL_REF)
511 return false;
513 if (offset == 0)
514 return true;
516 if (offset > 0)
518 if (CONSTANT_POOL_ADDRESS_P (symbol)
519 && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
520 return true;
522 decl = SYMBOL_REF_DECL (symbol);
523 if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
524 return true;
527 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
528 && SYMBOL_REF_BLOCK (symbol)
529 && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
530 && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
531 < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
532 return true;
534 return false;
537 /* Split X into a base and a constant offset, storing them in *BASE_OUT
538 and *OFFSET_OUT respectively. */
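/* Illustrative example (hypothetical RTL): for
   X = (const (plus (symbol_ref "s") (const_int 16))) this sets *BASE_OUT to
   the SYMBOL_REF and *OFFSET_OUT to (const_int 16); for a plain register or
   symbol it sets *BASE_OUT to X itself and *OFFSET_OUT to const0_rtx.  */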
540 void
541 split_const (rtx x, rtx *base_out, rtx *offset_out)
543 if (GET_CODE (x) == CONST)
545 x = XEXP (x, 0);
546 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
548 *base_out = XEXP (x, 0);
549 *offset_out = XEXP (x, 1);
550 return;
553 *base_out = x;
554 *offset_out = const0_rtx;
557 /* Return the number of places FIND appears within X. If COUNT_DEST is
558 zero, we do not count occurrences inside the destination of a SET. */
561 count_occurrences (const_rtx x, const_rtx find, int count_dest)
563 int i, j;
564 enum rtx_code code;
565 const char *format_ptr;
566 int count;
568 if (x == find)
569 return 1;
571 code = GET_CODE (x);
573 switch (code)
575 case REG:
576 case CONST_INT:
577 case CONST_DOUBLE:
578 case CONST_FIXED:
579 case CONST_VECTOR:
580 case SYMBOL_REF:
581 case CODE_LABEL:
582 case PC:
583 case CC0:
584 return 0;
586 case EXPR_LIST:
587 count = count_occurrences (XEXP (x, 0), find, count_dest);
588 if (XEXP (x, 1))
589 count += count_occurrences (XEXP (x, 1), find, count_dest);
590 return count;
592 case MEM:
593 if (MEM_P (find) && rtx_equal_p (x, find))
594 return 1;
595 break;
597 case SET:
598 if (SET_DEST (x) == find && ! count_dest)
599 return count_occurrences (SET_SRC (x), find, count_dest);
600 break;
602 default:
603 break;
606 format_ptr = GET_RTX_FORMAT (code);
607 count = 0;
609 for (i = 0; i < GET_RTX_LENGTH (code); i++)
611 switch (*format_ptr++)
613 case 'e':
614 count += count_occurrences (XEXP (x, i), find, count_dest);
615 break;
617 case 'E':
618 for (j = 0; j < XVECLEN (x, i); j++)
619 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
620 break;
623 return count;
627 /* Nonzero if register REG appears somewhere within IN.
628 Also works if REG is not a register; in this case it checks
629 for a subexpression of IN that is Lisp "equal" to REG. */
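/* Illustrative example (hypothetical RTL): with REG = (reg:SI 5),
   reg_mentioned_p returns 1 for
   IN = (set (reg:SI 6) (plus:SI (reg:SI 5) (const_int 1))), since registers
   are compared by register number; it also returns 1 if REG is, say, a
   SYMBOL_REF that appears (rtx_equal_p) anywhere within IN.  */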
632 reg_mentioned_p (const_rtx reg, const_rtx in)
634 const char *fmt;
635 int i;
636 enum rtx_code code;
638 if (in == 0)
639 return 0;
641 if (reg == in)
642 return 1;
644 if (GET_CODE (in) == LABEL_REF)
645 return reg == XEXP (in, 0);
647 code = GET_CODE (in);
649 switch (code)
651 /* Compare registers by number. */
652 case REG:
653 return REG_P (reg) && REGNO (in) == REGNO (reg);
655 /* These codes have no constituent expressions
656 and are unique. */
657 case SCRATCH:
658 case CC0:
659 case PC:
660 return 0;
662 case CONST_INT:
663 case CONST_VECTOR:
664 case CONST_DOUBLE:
665 case CONST_FIXED:
666 /* These are kept unique for a given value. */
667 return 0;
669 default:
670 break;
673 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
674 return 1;
676 fmt = GET_RTX_FORMAT (code);
678 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
680 if (fmt[i] == 'E')
682 int j;
683 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
684 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
685 return 1;
687 else if (fmt[i] == 'e'
688 && reg_mentioned_p (reg, XEXP (in, i)))
689 return 1;
691 return 0;
694 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
695 no CODE_LABEL insn. */
698 no_labels_between_p (const_rtx beg, const_rtx end)
700 rtx p;
701 if (beg == end)
702 return 0;
703 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
704 if (LABEL_P (p))
705 return 0;
706 return 1;
709 /* Nonzero if register REG is used in an insn between
710 FROM_INSN and TO_INSN (exclusive of those two). */
713 reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
715 rtx insn;
717 if (from_insn == to_insn)
718 return 0;
720 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
721 if (INSN_P (insn)
722 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
723 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
724 return 1;
725 return 0;
728 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
729 is entirely replaced by a new value and the only use is as a SET_DEST,
730 we do not consider it a reference. */
733 reg_referenced_p (const_rtx x, const_rtx body)
735 int i;
737 switch (GET_CODE (body))
739 case SET:
740 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
741 return 1;
743 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
744 of a REG that occupies all of the REG, the insn references X if
745 it is mentioned in the destination. */
746 if (GET_CODE (SET_DEST (body)) != CC0
747 && GET_CODE (SET_DEST (body)) != PC
748 && !REG_P (SET_DEST (body))
749 && ! (GET_CODE (SET_DEST (body)) == SUBREG
750 && REG_P (SUBREG_REG (SET_DEST (body)))
751 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
752 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
753 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
754 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
755 && reg_overlap_mentioned_p (x, SET_DEST (body)))
756 return 1;
757 return 0;
759 case ASM_OPERANDS:
760 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
761 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
762 return 1;
763 return 0;
765 case CALL:
766 case USE:
767 case IF_THEN_ELSE:
768 return reg_overlap_mentioned_p (x, body);
770 case TRAP_IF:
771 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
773 case PREFETCH:
774 return reg_overlap_mentioned_p (x, XEXP (body, 0));
776 case UNSPEC:
777 case UNSPEC_VOLATILE:
778 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
779 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
780 return 1;
781 return 0;
783 case PARALLEL:
784 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
785 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
786 return 1;
787 return 0;
789 case CLOBBER:
790 if (MEM_P (XEXP (body, 0)))
791 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
792 return 1;
793 return 0;
795 case COND_EXEC:
796 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
797 return 1;
798 return reg_referenced_p (x, COND_EXEC_CODE (body));
800 default:
801 return 0;
805 /* Nonzero if register REG is set or clobbered in an insn between
806 FROM_INSN and TO_INSN (exclusive of those two). */
809 reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
811 const_rtx insn;
813 if (from_insn == to_insn)
814 return 0;
816 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
817 if (INSN_P (insn) && reg_set_p (reg, insn))
818 return 1;
819 return 0;
822 /* Return nonzero if REG is set or clobbered by INSN itself (including by
823 a call or an auto-increment); this is the per-insn test used by
824 reg_set_between_p. */
824 reg_set_p (const_rtx reg, const_rtx insn)
826 if (INSN_P (insn))
828 if (FIND_REG_INC_NOTE (insn, reg))
829 return 1;
830 if (CALL_P (insn))
832 if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
834 HARD_REG_SET clobbered_regs;
836 get_call_invalidated_used_regs (insn, &clobbered_regs, true);
837 if (TEST_HARD_REG_BIT (clobbered_regs, REGNO (reg)))
838 return 1;
840 if (MEM_P (reg) || find_reg_fusage (insn, CLOBBER, reg))
841 return 1;
845 return set_of (reg, insn) != NULL_RTX;
848 /* Similar to reg_set_between_p, but check all registers in X. Return 0
849 only if none of them are modified between START and END. Return 1 if
850 X contains a MEM; this routine does use memory aliasing. */
853 modified_between_p (const_rtx x, const_rtx start, const_rtx end)
855 const enum rtx_code code = GET_CODE (x);
856 const char *fmt;
857 int i, j;
858 rtx insn;
860 if (start == end)
861 return 0;
863 switch (code)
865 case CONST_INT:
866 case CONST_DOUBLE:
867 case CONST_FIXED:
868 case CONST_VECTOR:
869 case CONST:
870 case SYMBOL_REF:
871 case LABEL_REF:
872 return 0;
874 case PC:
875 case CC0:
876 return 1;
878 case MEM:
879 if (modified_between_p (XEXP (x, 0), start, end))
880 return 1;
881 if (MEM_READONLY_P (x))
882 return 0;
883 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
884 if (memory_modified_in_insn_p (x, insn))
885 return 1;
886 return 0;
887 break;
889 case REG:
890 return reg_set_between_p (x, start, end);
892 default:
893 break;
896 fmt = GET_RTX_FORMAT (code);
897 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
899 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
900 return 1;
902 else if (fmt[i] == 'E')
903 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
904 if (modified_between_p (XVECEXP (x, i, j), start, end))
905 return 1;
908 return 0;
911 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
912 of them are modified in INSN. Return 1 if X contains a MEM; this routine
913 does use memory aliasing. */
916 modified_in_p (const_rtx x, const_rtx insn)
918 const enum rtx_code code = GET_CODE (x);
919 const char *fmt;
920 int i, j;
922 switch (code)
924 case CONST_INT:
925 case CONST_DOUBLE:
926 case CONST_FIXED:
927 case CONST_VECTOR:
928 case CONST:
929 case SYMBOL_REF:
930 case LABEL_REF:
931 return 0;
933 case PC:
934 case CC0:
935 return 1;
937 case MEM:
938 if (modified_in_p (XEXP (x, 0), insn))
939 return 1;
940 if (MEM_READONLY_P (x))
941 return 0;
942 if (memory_modified_in_insn_p (x, insn))
943 return 1;
944 return 0;
945 break;
947 case REG:
948 return reg_set_p (x, insn);
950 default:
951 break;
954 fmt = GET_RTX_FORMAT (code);
955 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
957 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
958 return 1;
960 else if (fmt[i] == 'E')
961 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
962 if (modified_in_p (XVECEXP (x, i, j), insn))
963 return 1;
966 return 0;
969 /* Helper function for set_of. */
970 struct set_of_data
972 const_rtx found;
973 const_rtx pat;
976 static void
977 set_of_1 (rtx x, const_rtx pat, void *data1)
979 struct set_of_data *const data = (struct set_of_data *) (data1);
980 if (rtx_equal_p (x, data->pat)
981 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
982 data->found = pat;
985 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
986 (either directly or via STRICT_LOW_PART and similar modifiers). */
987 const_rtx
988 set_of (const_rtx pat, const_rtx insn)
990 struct set_of_data data;
991 data.found = NULL_RTX;
992 data.pat = pat;
993 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
994 return data.found;
997 /* Given an INSN, return a SET expression if this insn has only a single SET.
998 It may also have CLOBBERs, USEs, or SETs whose outputs
999 will not be used, which we ignore. */
1002 single_set_2 (const_rtx insn, const_rtx pat)
1004 rtx set = NULL;
1005 int set_verified = 1;
1006 int i;
1008 if (GET_CODE (pat) == PARALLEL)
1010 for (i = 0; i < XVECLEN (pat, 0); i++)
1012 rtx sub = XVECEXP (pat, 0, i);
1013 switch (GET_CODE (sub))
1015 case USE:
1016 case CLOBBER:
1017 break;
1019 case SET:
1020 /* We can consider insns having multiple sets, where all
1021 but one are dead, as single set insns. In the common case
1022 only a single set is present in the pattern, so we want
1023 to avoid checking for REG_UNUSED notes unless necessary.
1025 When we reach a set for the first time, we just assume it is
1026 the single set we are looking for; only when more
1027 sets are found in the insn do we check them. */
1028 if (!set_verified)
1030 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1031 && !side_effects_p (set))
1032 set = NULL;
1033 else
1034 set_verified = 1;
1036 if (!set)
1037 set = sub, set_verified = 0;
1038 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1039 || side_effects_p (sub))
1040 return NULL_RTX;
1041 break;
1043 default:
1044 return NULL_RTX;
1048 return set;
1051 /* Given an INSN, return nonzero if it has more than one SET, else return
1052 zero. */
1055 multiple_sets (const_rtx insn)
1057 int found;
1058 int i;
1060 /* INSN must be an insn. */
1061 if (! INSN_P (insn))
1062 return 0;
1064 /* Only a PARALLEL can have multiple SETs. */
1065 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1067 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1068 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1070 /* If we have already found a SET, then return now. */
1071 if (found)
1072 return 1;
1073 else
1074 found = 1;
1078 /* Either zero or one SET. */
1079 return 0;
1082 /* Return nonzero if the destination of SET equals the source
1083 and there are no side effects. */
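/* Illustrative examples (hypothetical RTL): (set (reg:SI 3) (reg:SI 3)) and
   (set (pc) (pc)) are no-ops, while (set (reg:SI 3) (reg:SI 4)) is not.  */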
1086 set_noop_p (const_rtx set)
1088 rtx src = SET_SRC (set);
1089 rtx dst = SET_DEST (set);
1091 if (dst == pc_rtx && src == pc_rtx)
1092 return 1;
1094 if (MEM_P (dst) && MEM_P (src))
1095 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1097 if (GET_CODE (dst) == ZERO_EXTRACT)
1098 return rtx_equal_p (XEXP (dst, 0), src)
1099 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1100 && !side_effects_p (src);
1102 if (GET_CODE (dst) == STRICT_LOW_PART)
1103 dst = XEXP (dst, 0);
1105 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1107 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1108 return 0;
1109 src = SUBREG_REG (src);
1110 dst = SUBREG_REG (dst);
1113 return (REG_P (src) && REG_P (dst)
1114 && REGNO (src) == REGNO (dst));
1117 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1118 value to itself. */
1121 noop_move_p (const_rtx insn)
1123 rtx pat = PATTERN (insn);
1125 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1126 return 1;
1128 /* Insns carrying these notes are useful later on. */
1129 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1130 return 0;
1132 /* For now treat an insn with a REG_RETVAL note as a
1133 special insn which should not be considered a no-op. */
1134 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1135 return 0;
1137 if (GET_CODE (pat) == SET && set_noop_p (pat))
1138 return 1;
1140 if (GET_CODE (pat) == PARALLEL)
1142 int i;
1143 /* If nothing but SETs of registers to themselves,
1144 this insn can also be deleted. */
1145 for (i = 0; i < XVECLEN (pat, 0); i++)
1147 rtx tem = XVECEXP (pat, 0, i);
1149 if (GET_CODE (tem) == USE
1150 || GET_CODE (tem) == CLOBBER)
1151 continue;
1153 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1154 return 0;
1157 return 1;
1159 return 0;
1163 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1164 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1165 If the object was modified, if we hit a partial assignment to X, or hit a
1166 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1167 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1168 be the src. */
1171 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1173 rtx p;
1175 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1176 p = PREV_INSN (p))
1177 if (INSN_P (p))
1179 rtx set = single_set (p);
1180 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1182 if (set && rtx_equal_p (x, SET_DEST (set)))
1184 rtx src = SET_SRC (set);
1186 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1187 src = XEXP (note, 0);
1189 if ((valid_to == NULL_RTX
1190 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1191 /* Reject hard registers because we don't usually want
1192 to use them; we'd rather use a pseudo. */
1193 && (! (REG_P (src)
1194 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1196 *pinsn = p;
1197 return src;
1201 /* If set in non-simple way, we don't have a value. */
1202 if (reg_set_p (x, p))
1203 break;
1206 return x;
1209 /* Return nonzero if register in range [REGNO, ENDREGNO)
1210 appears either explicitly or implicitly in X
1211 other than being stored into.
1213 References contained within the substructure at LOC do not count.
1214 LOC may be zero, meaning don't ignore anything. */
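/* Illustrative example (hypothetical register numbers): a call such as
   refers_to_regno_p (2, 4, PATTERN (insn), NULL) asks whether hard register
   2 or 3 is referenced anywhere in the pattern other than as a plain store
   destination.  */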
1217 refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1218 rtx *loc)
1220 int i;
1221 unsigned int x_regno;
1222 RTX_CODE code;
1223 const char *fmt;
1225 repeat:
1226 /* The contents of a REG_NONNEG note are always zero, so we must come here
1227 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1228 if (x == 0)
1229 return 0;
1231 code = GET_CODE (x);
1233 switch (code)
1235 case REG:
1236 x_regno = REGNO (x);
1238 /* If we are modifying the stack, frame, or argument pointer, it will
1239 clobber a virtual register. In fact, we could be more precise,
1240 but it isn't worth it. */
1241 if ((x_regno == STACK_POINTER_REGNUM
1242 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1243 || x_regno == ARG_POINTER_REGNUM
1244 #endif
1245 || x_regno == FRAME_POINTER_REGNUM)
1246 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1247 return 1;
1249 return endregno > x_regno && regno < END_REGNO (x);
1251 case SUBREG:
1252 /* If this is a SUBREG of a hard reg, we can see exactly which
1253 registers are being modified. Otherwise, handle normally. */
1254 if (REG_P (SUBREG_REG (x))
1255 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1257 unsigned int inner_regno = subreg_regno (x);
1258 unsigned int inner_endregno
1259 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1260 ? subreg_nregs (x) : 1);
1262 return endregno > inner_regno && regno < inner_endregno;
1264 break;
1266 case CLOBBER:
1267 case SET:
1268 if (&SET_DEST (x) != loc
1269 /* Note setting a SUBREG counts as referring to the REG it is in for
1270 a pseudo but not for hard registers since we can
1271 treat each word individually. */
1272 && ((GET_CODE (SET_DEST (x)) == SUBREG
1273 && loc != &SUBREG_REG (SET_DEST (x))
1274 && REG_P (SUBREG_REG (SET_DEST (x)))
1275 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1276 && refers_to_regno_p (regno, endregno,
1277 SUBREG_REG (SET_DEST (x)), loc))
1278 || (!REG_P (SET_DEST (x))
1279 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1280 return 1;
1282 if (code == CLOBBER || loc == &SET_SRC (x))
1283 return 0;
1284 x = SET_SRC (x);
1285 goto repeat;
1287 default:
1288 break;
1291 /* X does not match, so try its subexpressions. */
1293 fmt = GET_RTX_FORMAT (code);
1294 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1296 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1298 if (i == 0)
1300 x = XEXP (x, 0);
1301 goto repeat;
1303 else
1304 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1305 return 1;
1307 else if (fmt[i] == 'E')
1309 int j;
1310 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1311 if (loc != &XVECEXP (x, i, j)
1312 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1313 return 1;
1316 return 0;
1319 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1320 we check if any register number in X conflicts with the relevant register
1321 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1322 contains a MEM (we don't bother checking for memory addresses that can't
1323 conflict because we expect this to be a rare case). */
1326 reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1328 unsigned int regno, endregno;
1330 /* If either argument is a constant, then modifying X cannot
1331 affect IN. Here we look at IN; we can profitably combine
1332 CONSTANT_P (x) with the switch statement below. */
1333 if (CONSTANT_P (in))
1334 return 0;
1336 recurse:
1337 switch (GET_CODE (x))
1339 case STRICT_LOW_PART:
1340 case ZERO_EXTRACT:
1341 case SIGN_EXTRACT:
1342 /* Overly conservative. */
1343 x = XEXP (x, 0);
1344 goto recurse;
1346 case SUBREG:
1347 regno = REGNO (SUBREG_REG (x));
1348 if (regno < FIRST_PSEUDO_REGISTER)
1349 regno = subreg_regno (x);
1350 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1351 ? subreg_nregs (x) : 1);
1352 goto do_reg;
1354 case REG:
1355 regno = REGNO (x);
1356 endregno = END_REGNO (x);
1357 do_reg:
1358 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1360 case MEM:
1362 const char *fmt;
1363 int i;
1365 if (MEM_P (in))
1366 return 1;
1368 fmt = GET_RTX_FORMAT (GET_CODE (in));
1369 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1370 if (fmt[i] == 'e')
1372 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1373 return 1;
1375 else if (fmt[i] == 'E')
1377 int j;
1378 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1379 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1380 return 1;
1383 return 0;
1386 case SCRATCH:
1387 case PC:
1388 case CC0:
1389 return reg_mentioned_p (x, in);
1391 case PARALLEL:
1393 int i;
1395 /* If any register in here refers to it we return true. */
1396 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1397 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1398 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1399 return 1;
1400 return 0;
1403 default:
1404 gcc_assert (CONSTANT_P (x));
1405 return 0;
1409 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1410 (X would be the pattern of an insn). DATA is an arbitrary pointer,
1411 ignored by note_stores, but passed to FUN.
1413 FUN receives three arguments:
1414 1. the REG, MEM, CC0 or PC being stored in or clobbered,
1415 2. the SET or CLOBBER rtx that does the store,
1416 3. the pointer DATA provided to note_stores.
1418 If the item being stored in or clobbered is a SUBREG of a hard register,
1419 the SUBREG will be passed. */
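/* Illustrative example of the callback protocol (hypothetical RTL and a
   hypothetical FUN): for X = (parallel [(set (reg:SI 0) (reg:SI 1))
   (clobber (reg:SI 2))]), note_stores calls FUN ((reg:SI 0), the SET, DATA)
   and FUN ((reg:SI 2), the CLOBBER, DATA); the source (reg:SI 1) is never
   passed to FUN.  */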
1421 void
1422 note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
1424 int i;
1426 if (GET_CODE (x) == COND_EXEC)
1427 x = COND_EXEC_CODE (x);
1429 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1431 rtx dest = SET_DEST (x);
1433 while ((GET_CODE (dest) == SUBREG
1434 && (!REG_P (SUBREG_REG (dest))
1435 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1436 || GET_CODE (dest) == ZERO_EXTRACT
1437 || GET_CODE (dest) == STRICT_LOW_PART)
1438 dest = XEXP (dest, 0);
1440 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1441 each of whose first operand is a register. */
1442 if (GET_CODE (dest) == PARALLEL)
1444 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1445 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1446 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1448 else
1449 (*fun) (dest, x, data);
1452 else if (GET_CODE (x) == PARALLEL)
1453 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1454 note_stores (XVECEXP (x, 0, i), fun, data);
1457 /* Like note_stores, but call FUN for each expression that is being
1458 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1459 FUN for each expression, not any interior subexpressions. FUN receives a
1460 pointer to the expression and the DATA passed to this function.
1462 Note that this is not quite the same test as that done in reg_referenced_p
1463 since that considers something as being referenced if it is being
1464 partially set, while we do not. */
1466 void
1467 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1469 rtx body = *pbody;
1470 int i;
1472 switch (GET_CODE (body))
1474 case COND_EXEC:
1475 (*fun) (&COND_EXEC_TEST (body), data);
1476 note_uses (&COND_EXEC_CODE (body), fun, data);
1477 return;
1479 case PARALLEL:
1480 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1481 note_uses (&XVECEXP (body, 0, i), fun, data);
1482 return;
1484 case SEQUENCE:
1485 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1486 note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1487 return;
1489 case USE:
1490 (*fun) (&XEXP (body, 0), data);
1491 return;
1493 case ASM_OPERANDS:
1494 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1495 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1496 return;
1498 case TRAP_IF:
1499 (*fun) (&TRAP_CONDITION (body), data);
1500 return;
1502 case PREFETCH:
1503 (*fun) (&XEXP (body, 0), data);
1504 return;
1506 case UNSPEC:
1507 case UNSPEC_VOLATILE:
1508 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1509 (*fun) (&XVECEXP (body, 0, i), data);
1510 return;
1512 case CLOBBER:
1513 if (MEM_P (XEXP (body, 0)))
1514 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1515 return;
1517 case SET:
1519 rtx dest = SET_DEST (body);
1521 /* For sets we replace everything in the source, plus the registers in any
1522 memory expression in the store destination and the operands of a ZERO_EXTRACT. */
1523 (*fun) (&SET_SRC (body), data);
1525 if (GET_CODE (dest) == ZERO_EXTRACT)
1527 (*fun) (&XEXP (dest, 1), data);
1528 (*fun) (&XEXP (dest, 2), data);
1531 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1532 dest = XEXP (dest, 0);
1534 if (MEM_P (dest))
1535 (*fun) (&XEXP (dest, 0), data);
1537 return;
1539 default:
1540 /* All the other possibilities never store. */
1541 (*fun) (pbody, data);
1542 return;
1546 /* Return nonzero if X's old contents don't survive after INSN.
1547 This will be true if X is (cc0) or if X is a register and
1548 X dies in INSN or INSN entirely sets X.
1550 "Entirely set" means set directly and not through a SUBREG, or
1551 ZERO_EXTRACT, so no trace of the old contents remains.
1552 Likewise, REG_INC does not count.
1554 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1555 but for this use that makes no difference, since regs don't overlap
1556 during their lifetimes. Therefore, this function may be used
1557 at any time after deaths have been computed.
1559 If REG is a hard reg that occupies multiple machine registers, this
1560 function will only return 1 if each of those registers will be replaced
1561 by INSN. */
1564 dead_or_set_p (const_rtx insn, const_rtx x)
1566 unsigned int regno, end_regno;
1567 unsigned int i;
1569 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1570 if (GET_CODE (x) == CC0)
1571 return 1;
1573 gcc_assert (REG_P (x));
1575 regno = REGNO (x);
1576 end_regno = END_REGNO (x);
1577 for (i = regno; i < end_regno; i++)
1578 if (! dead_or_set_regno_p (insn, i))
1579 return 0;
1581 return 1;
1584 /* Return TRUE iff DEST is a register, or a subreg of a register that
1585 doesn't change the number of words of the inner register, and
1586 TEST_REGNO is one of the registers it occupies. */
1588 static bool
1589 covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1591 unsigned int regno, endregno;
1593 if (GET_CODE (dest) == SUBREG
1594 && (((GET_MODE_SIZE (GET_MODE (dest))
1595 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1596 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1597 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1598 dest = SUBREG_REG (dest);
1600 if (!REG_P (dest))
1601 return false;
1603 regno = REGNO (dest);
1604 endregno = END_REGNO (dest);
1605 return (test_regno >= regno && test_regno < endregno);
1608 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1609 any member matches the covers_regno_no_parallel_p criteria. */
1611 static bool
1612 covers_regno_p (const_rtx dest, unsigned int test_regno)
1614 if (GET_CODE (dest) == PARALLEL)
1616 /* Some targets place small structures in registers for return
1617 values of functions, and those registers are wrapped in
1618 PARALLELs that we may see as the destination of a SET. */
1619 int i;
1621 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1623 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1624 if (inner != NULL_RTX
1625 && covers_regno_no_parallel_p (inner, test_regno))
1626 return true;
1629 return false;
1631 else
1632 return covers_regno_no_parallel_p (dest, test_regno);
1635 /* Utility function for dead_or_set_p to check an individual register. */
1638 dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1640 const_rtx pattern;
1642 /* See if there is a death note for something that includes TEST_REGNO. */
1643 if (find_regno_note (insn, REG_DEAD, test_regno))
1644 return 1;
1646 if (CALL_P (insn)
1647 && find_regno_fusage (insn, CLOBBER, test_regno))
1648 return 1;
1650 pattern = PATTERN (insn);
1652 if (GET_CODE (pattern) == COND_EXEC)
1653 pattern = COND_EXEC_CODE (pattern);
1655 if (GET_CODE (pattern) == SET)
1656 return covers_regno_p (SET_DEST (pattern), test_regno);
1657 else if (GET_CODE (pattern) == PARALLEL)
1659 int i;
1661 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1663 rtx body = XVECEXP (pattern, 0, i);
1665 if (GET_CODE (body) == COND_EXEC)
1666 body = COND_EXEC_CODE (body);
1668 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1669 && covers_regno_p (SET_DEST (body), test_regno))
1670 return 1;
1674 return 0;
1677 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1678 If DATUM is nonzero, look for one whose datum is DATUM. */
1681 find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1683 rtx link;
1685 gcc_assert (insn);
1687 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1688 if (! INSN_P (insn))
1689 return 0;
1690 if (datum == 0)
1692 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1693 if (REG_NOTE_KIND (link) == kind)
1694 return link;
1695 return 0;
1698 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1699 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1700 return link;
1701 return 0;
1704 /* Return the reg-note of kind KIND in insn INSN which applies to register
1705 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1706 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1707 it might be the case that the note overlaps REGNO. */
1710 find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1712 rtx link;
1714 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1715 if (! INSN_P (insn))
1716 return 0;
1718 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1719 if (REG_NOTE_KIND (link) == kind
1720 /* Verify that it is a register, so that scratch and MEM won't cause a
1721 problem here. */
1722 && REG_P (XEXP (link, 0))
1723 && REGNO (XEXP (link, 0)) <= regno
1724 && END_REGNO (XEXP (link, 0)) > regno)
1725 return link;
1726 return 0;
1729 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1730 has such a note. */
1733 find_reg_equal_equiv_note (const_rtx insn)
1735 rtx link;
1737 if (!INSN_P (insn))
1738 return 0;
1740 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1741 if (REG_NOTE_KIND (link) == REG_EQUAL
1742 || REG_NOTE_KIND (link) == REG_EQUIV)
1744 /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1745 insns that have multiple sets. Checking single_set to
1746 make sure of this is not the proper check, as explained
1747 in the comment in set_unique_reg_note.
1749 This should be changed into an assert. */
1750 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1751 return 0;
1752 return link;
1754 return NULL;
1757 /* Check whether INSN is a single_set whose source is known to be
1758 equivalent to a constant. Return that constant if so, otherwise
1759 return null. */
1762 find_constant_src (const_rtx insn)
1764 rtx note, set, x;
1766 set = single_set (insn);
1767 if (set)
1769 x = avoid_constant_pool_reference (SET_SRC (set));
1770 if (CONSTANT_P (x))
1771 return x;
1774 note = find_reg_equal_equiv_note (insn);
1775 if (note && CONSTANT_P (XEXP (note, 0)))
1776 return XEXP (note, 0);
1778 return NULL_RTX;
1781 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1782 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1785 find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1787 /* If it's not a CALL_INSN, it can't possibly have a
1788 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1789 if (!CALL_P (insn))
1790 return 0;
1792 gcc_assert (datum);
1794 if (!REG_P (datum))
1796 rtx link;
1798 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1799 link;
1800 link = XEXP (link, 1))
1801 if (GET_CODE (XEXP (link, 0)) == code
1802 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1803 return 1;
1805 else
1807 unsigned int regno = REGNO (datum);
1809 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1810 to pseudo registers, so don't bother checking. */
1812 if (regno < FIRST_PSEUDO_REGISTER)
1814 unsigned int end_regno = END_HARD_REGNO (datum);
1815 unsigned int i;
1817 for (i = regno; i < end_regno; i++)
1818 if (find_regno_fusage (insn, code, i))
1819 return 1;
1823 return 0;
1826 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1827 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1830 find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
1832 rtx link;
1834 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1835 to pseudo registers, so don't bother checking. */
1837 if (regno >= FIRST_PSEUDO_REGISTER
1838 || !CALL_P (insn) )
1839 return 0;
1841 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1843 rtx op, reg;
1845 if (GET_CODE (op = XEXP (link, 0)) == code
1846 && REG_P (reg = XEXP (op, 0))
1847 && REGNO (reg) <= regno
1848 && END_HARD_REGNO (reg) > regno)
1849 return 1;
1852 return 0;
1855 /* Return true if INSN is a call to a pure function. */
1858 pure_call_p (const_rtx insn)
1860 const_rtx link;
1862 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1863 return 0;
1865 /* Look for the note that differentiates const and pure functions. */
1866 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1868 rtx u, m;
1870 if (GET_CODE (u = XEXP (link, 0)) == USE
1871 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1872 && GET_CODE (XEXP (m, 0)) == SCRATCH)
1873 return 1;
1876 return 0;
1879 /* Remove register note NOTE from the REG_NOTES of INSN. */
1881 void
1882 remove_note (rtx insn, const_rtx note)
1884 rtx link;
1886 if (note == NULL_RTX)
1887 return;
1889 if (REG_NOTES (insn) == note)
1890 REG_NOTES (insn) = XEXP (note, 1);
1891 else
1892 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1893 if (XEXP (link, 1) == note)
1895 XEXP (link, 1) = XEXP (note, 1);
1896 break;
1899 switch (REG_NOTE_KIND (note))
1901 case REG_EQUAL:
1902 case REG_EQUIV:
1903 df_notes_rescan (insn);
1904 break;
1905 default:
1906 break;
1910 /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
1912 void
1913 remove_reg_equal_equiv_notes (rtx insn)
1915 rtx *loc;
1917 loc = &REG_NOTES (insn);
1918 while (*loc)
1920 enum reg_note kind = REG_NOTE_KIND (*loc);
1921 if (kind == REG_EQUAL || kind == REG_EQUIV)
1922 *loc = XEXP (*loc, 1);
1923 else
1924 loc = &XEXP (*loc, 1);
1928 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1929 return 1 if it is found. A simple equality test is used to determine if
1930 NODE matches. */
1933 in_expr_list_p (const_rtx listp, const_rtx node)
1935 const_rtx x;
1937 for (x = listp; x; x = XEXP (x, 1))
1938 if (node == XEXP (x, 0))
1939 return 1;
1941 return 0;
1944 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1945 remove that entry from the list if it is found.
1947 A simple equality test is used to determine if NODE matches. */
1949 void
1950 remove_node_from_expr_list (const_rtx node, rtx *listp)
1952 rtx temp = *listp;
1953 rtx prev = NULL_RTX;
1955 while (temp)
1957 if (node == XEXP (temp, 0))
1959 /* Splice the node out of the list. */
1960 if (prev)
1961 XEXP (prev, 1) = XEXP (temp, 1);
1962 else
1963 *listp = XEXP (temp, 1);
1965 return;
1968 prev = temp;
1969 temp = XEXP (temp, 1);
1973 /* Nonzero if X contains any volatile instructions. These are instructions
1974 which may cause unpredictable machine state, and thus no
1975 instructions should be moved or combined across them. This includes
1976 only volatile asms and UNSPEC_VOLATILE instructions. */
1979 volatile_insn_p (const_rtx x)
1981 const RTX_CODE code = GET_CODE (x);
1982 switch (code)
1984 case LABEL_REF:
1985 case SYMBOL_REF:
1986 case CONST_INT:
1987 case CONST:
1988 case CONST_DOUBLE:
1989 case CONST_FIXED:
1990 case CONST_VECTOR:
1991 case CC0:
1992 case PC:
1993 case REG:
1994 case SCRATCH:
1995 case CLOBBER:
1996 case ADDR_VEC:
1997 case ADDR_DIFF_VEC:
1998 case CALL:
1999 case MEM:
2000 return 0;
2002 case UNSPEC_VOLATILE:
2003 /* case TRAP_IF: This isn't clear yet. */
2004 return 1;
2006 case ASM_INPUT:
2007 case ASM_OPERANDS:
2008 if (MEM_VOLATILE_P (x))
2009 return 1;
2011 default:
2012 break;
2015 /* Recursively scan the operands of this expression. */
2018 const char *const fmt = GET_RTX_FORMAT (code);
2019 int i;
2021 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2023 if (fmt[i] == 'e')
2025 if (volatile_insn_p (XEXP (x, i)))
2026 return 1;
2028 else if (fmt[i] == 'E')
2030 int j;
2031 for (j = 0; j < XVECLEN (x, i); j++)
2032 if (volatile_insn_p (XVECEXP (x, i, j)))
2033 return 1;
2037 return 0;
2040 /* Nonzero if X contains any volatile memory references,
2041 UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions. */
2044 volatile_refs_p (const_rtx x)
2046 const RTX_CODE code = GET_CODE (x);
2047 switch (code)
2049 case LABEL_REF:
2050 case SYMBOL_REF:
2051 case CONST_INT:
2052 case CONST:
2053 case CONST_DOUBLE:
2054 case CONST_FIXED:
2055 case CONST_VECTOR:
2056 case CC0:
2057 case PC:
2058 case REG:
2059 case SCRATCH:
2060 case CLOBBER:
2061 case ADDR_VEC:
2062 case ADDR_DIFF_VEC:
2063 return 0;
2065 case UNSPEC_VOLATILE:
2066 return 1;
2068 case MEM:
2069 case ASM_INPUT:
2070 case ASM_OPERANDS:
2071 if (MEM_VOLATILE_P (x))
2072 return 1;
2074 default:
2075 break;
2078 /* Recursively scan the operands of this expression. */
2081 const char *const fmt = GET_RTX_FORMAT (code);
2082 int i;
2084 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2086 if (fmt[i] == 'e')
2088 if (volatile_refs_p (XEXP (x, i)))
2089 return 1;
2091 else if (fmt[i] == 'E')
2093 int j;
2094 for (j = 0; j < XVECLEN (x, i); j++)
2095 if (volatile_refs_p (XVECEXP (x, i, j)))
2096 return 1;
2100 return 0;
2103 /* Similar to above, except that it also rejects register pre- and post-
2104 incrementing. */
2107 side_effects_p (const_rtx x)
2109 const RTX_CODE code = GET_CODE (x);
2110 switch (code)
2112 case LABEL_REF:
2113 case SYMBOL_REF:
2114 case CONST_INT:
2115 case CONST:
2116 case CONST_DOUBLE:
2117 case CONST_FIXED:
2118 case CONST_VECTOR:
2119 case CC0:
2120 case PC:
2121 case REG:
2122 case SCRATCH:
2123 case ADDR_VEC:
2124 case ADDR_DIFF_VEC:
2125 return 0;
2127 case CLOBBER:
2128 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2129 when some combination can't be done. If we see one, don't think
2130 that we can simplify the expression. */
2131 return (GET_MODE (x) != VOIDmode);
2133 case PRE_INC:
2134 case PRE_DEC:
2135 case POST_INC:
2136 case POST_DEC:
2137 case PRE_MODIFY:
2138 case POST_MODIFY:
2139 case CALL:
2140 case UNSPEC_VOLATILE:
2141 /* case TRAP_IF: This isn't clear yet. */
2142 return 1;
2144 case MEM:
2145 case ASM_INPUT:
2146 case ASM_OPERANDS:
2147 if (MEM_VOLATILE_P (x))
2148 return 1;
2150 default:
2151 break;
2154 /* Recursively scan the operands of this expression. */
2157 const char *fmt = GET_RTX_FORMAT (code);
2158 int i;
2160 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2162 if (fmt[i] == 'e')
2164 if (side_effects_p (XEXP (x, i)))
2165 return 1;
2167 else if (fmt[i] == 'E')
2169 int j;
2170 for (j = 0; j < XVECLEN (x, i); j++)
2171 if (side_effects_p (XVECEXP (x, i, j)))
2172 return 1;
2176 return 0;
2179 enum may_trap_p_flags
2181 MTP_UNALIGNED_MEMS = 1,
2182 MTP_AFTER_MOVE = 2
2184 /* Return nonzero if evaluating rtx X might cause a trap.
2185 (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
2186 unaligned memory accesses on strict alignment machines. If
2187 (FLAGS & MTP_AFTER_MOVE) is true, returns nonzero even if the expression
2188 cannot trap at its current location, but it might become trapping if moved
2189 elsewhere. */
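/* Illustrative examples (hypothetical RTL): (div:SI (reg:SI 1) (reg:SI 2))
   may trap because the divisor is not a known nonzero constant, and a MEM
   whose address is a weak SYMBOL_REF may trap (unless MEM_NOTRAP_P is set)
   because the symbol may resolve to address zero.  */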
2192 may_trap_p_1 (const_rtx x, unsigned flags)
2194 int i;
2195 enum rtx_code code;
2196 const char *fmt;
2197 bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;
2199 if (x == 0)
2200 return 0;
2201 code = GET_CODE (x);
2202 switch (code)
2204 /* Handle these cases quickly. */
2205 case CONST_INT:
2206 case CONST_DOUBLE:
2207 case CONST_FIXED:
2208 case CONST_VECTOR:
2209 case SYMBOL_REF:
2210 case LABEL_REF:
2211 case CONST:
2212 case PC:
2213 case CC0:
2214 case REG:
2215 case SCRATCH:
2216 return 0;
2218 case UNSPEC:
2219 case UNSPEC_VOLATILE:
2220 return targetm.unspec_may_trap_p (x, flags);
2222 case ASM_INPUT:
2223 case TRAP_IF:
2224 return 1;
2226 case ASM_OPERANDS:
2227 return MEM_VOLATILE_P (x);
2229 /* Memory ref can trap unless it's a static var or a stack slot. */
2230 case MEM:
2231 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2232 reference; moving it out of a condition might cause its address
2233 to become invalid. */
2234 !(flags & MTP_AFTER_MOVE)
2235 && MEM_NOTRAP_P (x)
2236 && (!STRICT_ALIGNMENT || !unaligned_mems))
2237 return 0;
2238 return
2239 rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);
2241 /* Division by a non-constant might trap. */
2242 case DIV:
2243 case MOD:
2244 case UDIV:
2245 case UMOD:
2246 if (HONOR_SNANS (GET_MODE (x)))
2247 return 1;
2248 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2249 return flag_trapping_math;
2250 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2251 return 1;
2252 break;
2254 case EXPR_LIST:
2255 /* An EXPR_LIST is used to represent a function call. This
2256 certainly may trap. */
2257 return 1;
2259 case GE:
2260 case GT:
2261 case LE:
2262 case LT:
2263 case LTGT:
2264 case COMPARE:
2265 /* Some floating point comparisons may trap. */
2266 if (!flag_trapping_math)
2267 break;
2268 /* ??? There is no machine independent way to check for tests that trap
2269 when COMPARE is used, though many targets do make this distinction.
2270 For instance, sparc uses CCFPE for compares which generate exceptions
2271 and CCFP for compares which do not generate exceptions. */
2272 if (HONOR_NANS (GET_MODE (x)))
2273 return 1;
2274 /* But often the compare has some CC mode, so check operand
2275 modes as well. */
2276 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2277 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2278 return 1;
2279 break;
2281 case EQ:
2282 case NE:
2283 if (HONOR_SNANS (GET_MODE (x)))
2284 return 1;
2285 /* Often comparison is CC mode, so check operand modes. */
2286 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2287 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2288 return 1;
2289 break;
2291 case FIX:
2292 /* Conversion of floating point might trap. */
2293 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2294 return 1;
2295 break;
2297 case NEG:
2298 case ABS:
2299 case SUBREG:
2300 /* These operations don't trap even with floating point. */
2301 break;
2303 default:
2304 /* Any floating arithmetic may trap. */
2305 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2306 && flag_trapping_math)
2307 return 1;
2310 fmt = GET_RTX_FORMAT (code);
2311 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2313 if (fmt[i] == 'e')
2315 if (may_trap_p_1 (XEXP (x, i), flags))
2316 return 1;
2318 else if (fmt[i] == 'E')
2320 int j;
2321 for (j = 0; j < XVECLEN (x, i); j++)
2322 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2323 return 1;
2326 return 0;
2329 /* Return nonzero if evaluating rtx X might cause a trap. */
2332 may_trap_p (const_rtx x)
2334 return may_trap_p_1 (x, 0);
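/* A few illustrative results (a sketch only; pseudos 60 and 61 are
   hypothetical):

     (plus:SI (reg:SI 60) (const_int 8))  -> may_trap_p returns 0
     (udiv:SI (reg:SI 60) (reg:SI 61))    -> may_trap_p returns 1
     (mem:SI (reg:SI 60))                 -> may_trap_p returns 1, unless the
                                             MEM is marked MEM_NOTRAP_P.  */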
2337 /* Return nonzero if evaluating rtx X might cause a trap, when the expression
2338 is moved from its current location by some optimization. */
2341 may_trap_after_code_motion_p (const_rtx x)
2343 return may_trap_p_1 (x, MTP_AFTER_MOVE);
2346 /* Same as above, but additionally return nonzero if evaluating rtx X might
2347 cause a fault. We define a fault for the purpose of this function as an
2348 erroneous execution condition that cannot be encountered during the normal
2349 execution of a valid program; the typical example is an unaligned memory
2350 access on a strict alignment machine. The compiler guarantees that it
2351 doesn't generate code that will fault from a valid program, but this
2352 guarantee doesn't mean anything for individual instructions. Consider
2353 the following example:
2355 struct S { int d; union { char *cp; int *ip; }; };
2357 int foo(struct S *s)
2359 if (s->d == 1)
2360 return *s->ip;
2361 else
2362 return *s->cp;
2365 on a strict alignment machine. In a valid program, foo will never be
2366 invoked on a structure for which d is equal to 1 and the underlying
2367 unique field of the union is not aligned on a 4-byte boundary, but the
2368 expression *s->ip might cause a fault if considered individually.
2370 At the RTL level, potentially problematic expressions will almost always
2371 satisfy may_trap_p; for example, the above dereference can be emitted as
2372 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2373 However, suppose that foo is inlined in a caller that causes s->cp to
2374 point to a local character variable and guarantees that s->d is not set
2375 to 1; foo may have been effectively translated into pseudo-RTL as:
2377 if ((reg:SI) == 1)
2378 (set (reg:SI) (mem:SI (%fp - 7)))
2379 else
2380 (set (reg:QI) (mem:QI (%fp - 7)))
2382 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2383 memory reference to a stack slot, but it will certainly cause a fault
2384 on a strict alignment machine. */
2387 may_trap_or_fault_p (const_rtx x)
2389 return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
2392 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2393 i.e., an inequality. */
2396 inequality_comparisons_p (const_rtx x)
2398 const char *fmt;
2399 int len, i;
2400 const enum rtx_code code = GET_CODE (x);
2402 switch (code)
2404 case REG:
2405 case SCRATCH:
2406 case PC:
2407 case CC0:
2408 case CONST_INT:
2409 case CONST_DOUBLE:
2410 case CONST_FIXED:
2411 case CONST_VECTOR:
2412 case CONST:
2413 case LABEL_REF:
2414 case SYMBOL_REF:
2415 return 0;
2417 case LT:
2418 case LTU:
2419 case GT:
2420 case GTU:
2421 case LE:
2422 case LEU:
2423 case GE:
2424 case GEU:
2425 return 1;
2427 default:
2428 break;
2431 len = GET_RTX_LENGTH (code);
2432 fmt = GET_RTX_FORMAT (code);
2434 for (i = 0; i < len; i++)
2436 if (fmt[i] == 'e')
2438 if (inequality_comparisons_p (XEXP (x, i)))
2439 return 1;
2441 else if (fmt[i] == 'E')
2443 int j;
2444 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2445 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2446 return 1;
2450 return 0;
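/* For instance (a sketch; pseudo 60 is hypothetical):

     (eq:SI (reg:SI 60) (const_int 0))                   -> 0
     (ge:SI (reg:SI 60) (const_int 0))                   -> 1
     (set (reg:SI 60) (lt:SI (reg:SI 60) (const_int 0))) -> 1, found by the
                                                             recursive scan.  */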
2453 /* Replace any occurrence of FROM in X with TO. The function does
2454 not recurse into CONST_DOUBLE expressions for the replacement.
2456 Note that copying is not done, so X must not be shared unless all copies
2457 are to be modified. */
2460 replace_rtx (rtx x, rtx from, rtx to)
2462 int i, j;
2463 const char *fmt;
2465 /* The following prevents infinite loops when we replace a MEM inside a
2466 CONST_DOUBLE with the same CONST_DOUBLE. */
2467 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2468 return x;
2470 if (x == from)
2471 return to;
2473 /* Allow this function to make replacements in EXPR_LISTs. */
2474 if (x == 0)
2475 return 0;
2477 if (GET_CODE (x) == SUBREG)
2479 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2481 if (GET_CODE (new) == CONST_INT)
2483 x = simplify_subreg (GET_MODE (x), new,
2484 GET_MODE (SUBREG_REG (x)),
2485 SUBREG_BYTE (x));
2486 gcc_assert (x);
2488 else
2489 SUBREG_REG (x) = new;
2491 return x;
2493 else if (GET_CODE (x) == ZERO_EXTEND)
2495 rtx new = replace_rtx (XEXP (x, 0), from, to);
2497 if (GET_CODE (new) == CONST_INT)
2499 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2500 new, GET_MODE (XEXP (x, 0)));
2501 gcc_assert (x);
2503 else
2504 XEXP (x, 0) = new;
2506 return x;
2509 fmt = GET_RTX_FORMAT (GET_CODE (x));
2510 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2512 if (fmt[i] == 'e')
2513 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2514 else if (fmt[i] == 'E')
2515 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2516 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2519 return x;
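/* A minimal usage sketch (INSN, OLD_REG and NEW_REG are hypothetical); the
   copy_rtx call matters because replace_rtx modifies X destructively:

     rtx pat = copy_rtx (PATTERN (insn));
     pat = replace_rtx (pat, old_reg, new_reg);  */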
2522 /* Replace occurrences of the old label in *X with the new one.
2523 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2526 replace_label (rtx *x, void *data)
2528 rtx l = *x;
2529 rtx old_label = ((replace_label_data *) data)->r1;
2530 rtx new_label = ((replace_label_data *) data)->r2;
2531 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2533 if (l == NULL_RTX)
2534 return 0;
2536 if (GET_CODE (l) == SYMBOL_REF
2537 && CONSTANT_POOL_ADDRESS_P (l))
2539 rtx c = get_pool_constant (l);
2540 if (rtx_referenced_p (old_label, c))
2542 rtx new_c, new_l;
2543 replace_label_data *d = (replace_label_data *) data;
2545 /* Create a copy of constant C; replace the label inside
2546 but do not update LABEL_NUSES because uses in constant pool
2547 are not counted. */
2548 new_c = copy_rtx (c);
2549 d->update_label_nuses = false;
2550 for_each_rtx (&new_c, replace_label, data);
2551 d->update_label_nuses = update_label_nuses;
2553 /* Add the new constant NEW_C to the constant pool and replace
2554 the old reference to the constant with the new reference. */
2555 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2556 *x = replace_rtx (l, l, new_l);
2558 return 0;
2561 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2562 field. This is not handled by for_each_rtx because it doesn't
2563 handle unprinted ('0') fields. */
2564 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2565 JUMP_LABEL (l) = new_label;
2567 if ((GET_CODE (l) == LABEL_REF
2568 || GET_CODE (l) == INSN_LIST)
2569 && XEXP (l, 0) == old_label)
2571 XEXP (l, 0) = new_label;
2572 if (update_label_nuses)
2574 ++LABEL_NUSES (new_label);
2575 --LABEL_NUSES (old_label);
2577 return 0;
2580 return 0;
2583 /* When *BODY is equal to X or X is directly referenced by *BODY, return
2584 nonzero so that FOR_EACH_RTX stops traversing and returns nonzero too;
2585 otherwise FOR_EACH_RTX continues traversing *BODY. */
2587 static int
2588 rtx_referenced_p_1 (rtx *body, void *x)
2590 rtx y = (rtx) x;
2592 if (*body == NULL_RTX)
2593 return y == NULL_RTX;
2595 /* Return true if a label_ref *BODY refers to label Y. */
2596 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2597 return XEXP (*body, 0) == y;
2599 /* If *BODY is a reference to a pool constant, traverse the constant. */
2600 if (GET_CODE (*body) == SYMBOL_REF
2601 && CONSTANT_POOL_ADDRESS_P (*body))
2602 return rtx_referenced_p (y, get_pool_constant (*body));
2604 /* By default, compare the RTL expressions. */
2605 return rtx_equal_p (*body, y);
2608 /* Return true if X is referenced in BODY. */
2611 rtx_referenced_p (rtx x, rtx body)
2613 return for_each_rtx (&body, rtx_referenced_p_1, x);
2616 /* If INSN is a tablejump, return true and store the label (preceding the jump
2617 table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2619 bool
2620 tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
2622 rtx label, table;
2624 if (JUMP_P (insn)
2625 && (label = JUMP_LABEL (insn)) != NULL_RTX
2626 && (table = next_active_insn (label)) != NULL_RTX
2627 && JUMP_P (table)
2628 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2629 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2631 if (labelp)
2632 *labelp = label;
2633 if (tablep)
2634 *tablep = table;
2635 return true;
2637 return false;
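/* Schematically, the insn stream recognized here looks like this (a sketch;
   the insn and label numbers are arbitrary):

     (jump_insn 10 ... (set (pc) ...))      with JUMP_LABEL = code_label 11
     (code_label 11 ...)
     (jump_insn 12 ... (addr_vec:SI [(label_ref 20) (label_ref 30) ...]))

   in which case *LABELP is set to code_label 11 and *TABLEP to insn 12.  */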
2640 /* A subroutine of computed_jump_p. Return 1 if X contains a REG or MEM or a
2641 constant that is not in the constant pool and is not in the condition
2642 of an IF_THEN_ELSE. */
2644 static int
2645 computed_jump_p_1 (const_rtx x)
2647 const enum rtx_code code = GET_CODE (x);
2648 int i, j;
2649 const char *fmt;
2651 switch (code)
2653 case LABEL_REF:
2654 case PC:
2655 return 0;
2657 case CONST:
2658 case CONST_INT:
2659 case CONST_DOUBLE:
2660 case CONST_FIXED:
2661 case CONST_VECTOR:
2662 case SYMBOL_REF:
2663 case REG:
2664 return 1;
2666 case MEM:
2667 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2668 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2670 case IF_THEN_ELSE:
2671 return (computed_jump_p_1 (XEXP (x, 1))
2672 || computed_jump_p_1 (XEXP (x, 2)));
2674 default:
2675 break;
2678 fmt = GET_RTX_FORMAT (code);
2679 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2681 if (fmt[i] == 'e'
2682 && computed_jump_p_1 (XEXP (x, i)))
2683 return 1;
2685 else if (fmt[i] == 'E')
2686 for (j = 0; j < XVECLEN (x, i); j++)
2687 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2688 return 1;
2691 return 0;
2694 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2696 Tablejumps and casesi insns are not considered indirect jumps;
2697 we can recognize them by a (use (label_ref)). */
2700 computed_jump_p (const_rtx insn)
2702 int i;
2703 if (JUMP_P (insn))
2705 rtx pat = PATTERN (insn);
2707 /* If we have a JUMP_LABEL set, we're not a computed jump. */
2708 if (JUMP_LABEL (insn) != NULL)
2709 return 0;
2711 if (GET_CODE (pat) == PARALLEL)
2713 int len = XVECLEN (pat, 0);
2714 int has_use_labelref = 0;
2716 for (i = len - 1; i >= 0; i--)
2717 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2718 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2719 == LABEL_REF))
2720 has_use_labelref = 1;
2722 if (! has_use_labelref)
2723 for (i = len - 1; i >= 0; i--)
2724 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2725 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2726 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2727 return 1;
2729 else if (GET_CODE (pat) == SET
2730 && SET_DEST (pat) == pc_rtx
2731 && computed_jump_p_1 (SET_SRC (pat)))
2732 return 1;
2734 return 0;
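/* For example (a sketch; pseudo 70 and label 23 are hypothetical):

     (jump_insn ... (set (pc) (reg:SI 70))) with no JUMP_LABEL     -> 1
     (jump_insn ... (set (pc) (label_ref 23))) with JUMP_LABEL set -> 0
     a casesi/tablejump PARALLEL containing (use (label_ref ...))  -> 0  */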
2737 /* The optimized loop of for_each_rtx, which tries to avoid useless recursive
2738 calls. Processes the sub-expressions of EXP and passes them to F. */
2739 static int
2740 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2742 int result, i, j;
2743 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2744 rtx *x;
2746 for (; format[n] != '\0'; n++)
2748 switch (format[n])
2750 case 'e':
2751 /* Call F on X. */
2752 x = &XEXP (exp, n);
2753 result = (*f) (x, data);
2754 if (result == -1)
2755 /* Do not traverse sub-expressions. */
2756 continue;
2757 else if (result != 0)
2758 /* Stop the traversal. */
2759 return result;
2761 if (*x == NULL_RTX)
2762 /* There are no sub-expressions. */
2763 continue;
2765 i = non_rtx_starting_operands[GET_CODE (*x)];
2766 if (i >= 0)
2768 result = for_each_rtx_1 (*x, i, f, data);
2769 if (result != 0)
2770 return result;
2772 break;
2774 case 'V':
2775 case 'E':
2776 if (XVEC (exp, n) == 0)
2777 continue;
2778 for (j = 0; j < XVECLEN (exp, n); ++j)
2780 /* Call F on X. */
2781 x = &XVECEXP (exp, n, j);
2782 result = (*f) (x, data);
2783 if (result == -1)
2784 /* Do not traverse sub-expressions. */
2785 continue;
2786 else if (result != 0)
2787 /* Stop the traversal. */
2788 return result;
2790 if (*x == NULL_RTX)
2791 /* There are no sub-expressions. */
2792 continue;
2794 i = non_rtx_starting_operands[GET_CODE (*x)];
2795 if (i >= 0)
2797 result = for_each_rtx_1 (*x, i, f, data);
2798 if (result != 0)
2799 return result;
2802 break;
2804 default:
2805 /* Nothing to do. */
2806 break;
2810 return 0;
2813 /* Traverse X via depth-first search, calling F for each
2814 sub-expression (including X itself). F is also passed the DATA.
2815 If F returns -1, do not traverse sub-expressions, but continue
2816 traversing the rest of the tree. If F ever returns any other
2817 nonzero value, stop the traversal, and return the value returned
2818 by F. Otherwise, return 0. This function does not traverse inside
2819 tree structure that contains RTX_EXPRs, or into sub-expressions
2820 whose format code is `0' since it is not known whether or not those
2821 codes are actually RTL.
2823 This routine is very general, and could (should?) be used to
2824 implement many of the other routines in this file. */
2827 for_each_rtx (rtx *x, rtx_function f, void *data)
2829 int result;
2830 int i;
2832 /* Call F on X. */
2833 result = (*f) (x, data);
2834 if (result == -1)
2835 /* Do not traverse sub-expressions. */
2836 return 0;
2837 else if (result != 0)
2838 /* Stop the traversal. */
2839 return result;
2841 if (*x == NULL_RTX)
2842 /* There are no sub-expressions. */
2843 return 0;
2845 i = non_rtx_starting_operands[GET_CODE (*x)];
2846 if (i < 0)
2847 return 0;
2849 return for_each_rtx_1 (*x, i, f, data);
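/* A minimal callback sketch illustrating the protocol above (a hypothetical
   helper, not used elsewhere in this file); returning -1 skips the MEM's
   address, returning 0 continues the walk:

     static int
     count_mems_1 (rtx *x, void *data)
     {
       int *count = (int *) data;
       if (MEM_P (*x))
         {
           (*count)++;
           return -1;
         }
       return 0;
     }

   Typical use:  int n = 0;  for_each_rtx (&PATTERN (insn), count_mems_1, &n);  */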
2853 /* Searches X for any reference to REGNO, returning the rtx of the
2854 reference found if any. Otherwise, returns NULL_RTX. */
2857 regno_use_in (unsigned int regno, rtx x)
2859 const char *fmt;
2860 int i, j;
2861 rtx tem;
2863 if (REG_P (x) && REGNO (x) == regno)
2864 return x;
2866 fmt = GET_RTX_FORMAT (GET_CODE (x));
2867 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2869 if (fmt[i] == 'e')
2871 if ((tem = regno_use_in (regno, XEXP (x, i))))
2872 return tem;
2874 else if (fmt[i] == 'E')
2875 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2876 if ((tem = regno_use_in (regno, XVECEXP (x, i, j))))
2877 return tem;
2880 return NULL_RTX;
2883 /* Return a value indicating whether OP, an operand of a commutative
2884 operation, is preferred as the first or second operand. The higher
2885 the value, the stronger the preference for being the first operand.
2886 We use negative values to indicate a preference for the second operand
2887 and positive values for the first operand. */
2890 commutative_operand_precedence (rtx op)
2892 enum rtx_code code = GET_CODE (op);
2894 /* Constants always come second. Prefer "nice" constants. */
2895 if (code == CONST_INT)
2896 return -8;
2897 if (code == CONST_DOUBLE)
2898 return -7;
2899 if (code == CONST_FIXED)
2900 return -7;
2901 op = avoid_constant_pool_reference (op);
2902 code = GET_CODE (op);
2904 switch (GET_RTX_CLASS (code))
2906 case RTX_CONST_OBJ:
2907 if (code == CONST_INT)
2908 return -6;
2909 if (code == CONST_DOUBLE)
2910 return -5;
2911 if (code == CONST_FIXED)
2912 return -5;
2913 return -4;
2915 case RTX_EXTRA:
2916 /* SUBREGs of objects should come second. */
2917 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2918 return -3;
2919 return 0;
2921 case RTX_OBJ:
2922 /* Complex expressions should come first, so decrease the priority
2923 of objects. Prefer pointer objects over non-pointer objects. */
2924 if ((REG_P (op) && REG_POINTER (op))
2925 || (MEM_P (op) && MEM_POINTER (op)))
2926 return -1;
2927 return -2;
2929 case RTX_COMM_ARITH:
2930 /* Prefer operands that are themselves commutative to be first.
2931 This helps to make things linear. In particular,
2932 (and (and (reg) (reg)) (not (reg))) is canonical. */
2933 return 4;
2935 case RTX_BIN_ARITH:
2936 /* If only one operand is a binary expression, it will be the first
2937 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2938 is canonical, although it will usually be further simplified. */
2939 return 2;
2941 case RTX_UNARY:
2942 /* Then prefer NEG and NOT. */
2943 if (code == NEG || code == NOT)
2944 return 1;
2946 default:
2947 return 0;
2951 /* Return 1 iff it is necessary to swap the operands of a commutative operation
2952 in order to canonicalize the expression. */
2954 bool
2955 swap_commutative_operands_p (rtx x, rtx y)
2957 return (commutative_operand_precedence (x)
2958 < commutative_operand_precedence (y));
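/* For example, a CONST_INT has precedence -8 while a non-pointer REG has -2,
   so swap_commutative_operands_p (const_int, reg) is true and canonicalization
   produces (plus:SI (reg:SI 60) (const_int 4)) rather than
   (plus:SI (const_int 4) (reg:SI 60)) (pseudo 60 is a hypothetical example).  */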
2961 /* Return 1 if X is an autoincrement side effect and the register is
2962 not the stack pointer. */
2964 auto_inc_p (const_rtx x)
2966 switch (GET_CODE (x))
2968 case PRE_INC:
2969 case POST_INC:
2970 case PRE_DEC:
2971 case POST_DEC:
2972 case PRE_MODIFY:
2973 case POST_MODIFY:
2974 /* There are no REG_INC notes for SP. */
2975 if (XEXP (x, 0) != stack_pointer_rtx)
2976 return 1;
2977 default:
2978 break;
2980 return 0;
2983 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2985 loc_mentioned_in_p (rtx *loc, const_rtx in)
2987 enum rtx_code code;
2988 const char *fmt;
2989 int i, j;
2991 if (!in)
2992 return 0;
2994 code = GET_CODE (in);
2995 fmt = GET_RTX_FORMAT (code);
2996 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2998 if (loc == &in->u.fld[i].rt_rtx)
2999 return 1;
3000 if (fmt[i] == 'e')
3002 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3003 return 1;
3005 else if (fmt[i] == 'E')
3006 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3007 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3008 return 1;
3010 return 0;
3013 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3014 and SUBREG_BYTE, return the bit offset where the subreg begins
3015 (counting from the least significant bit of the operand). */
3017 unsigned int
3018 subreg_lsb_1 (enum machine_mode outer_mode,
3019 enum machine_mode inner_mode,
3020 unsigned int subreg_byte)
3022 unsigned int bitpos;
3023 unsigned int byte;
3024 unsigned int word;
3026 /* A paradoxical subreg begins at bit position 0. */
3027 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3028 return 0;
3030 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3031 /* If the subreg crosses a word boundary ensure that
3032 it also begins and ends on a word boundary. */
3033 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3034 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3035 && (subreg_byte % UNITS_PER_WORD
3036 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3038 if (WORDS_BIG_ENDIAN)
3039 word = (GET_MODE_SIZE (inner_mode)
3040 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3041 else
3042 word = subreg_byte / UNITS_PER_WORD;
3043 bitpos = word * BITS_PER_WORD;
3045 if (BYTES_BIG_ENDIAN)
3046 byte = (GET_MODE_SIZE (inner_mode)
3047 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3048 else
3049 byte = subreg_byte % UNITS_PER_WORD;
3050 bitpos += byte * BITS_PER_UNIT;
3052 return bitpos;
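/* As a worked example, assume UNITS_PER_WORD == 4 and a DImode inner value:
   for (subreg:SI (reg:DI) 4) this returns bit 32 when WORDS_BIG_ENDIAN and
   BYTES_BIG_ENDIAN are both false, but bit 0 when both are true, since on a
   big-endian target bytes 4-7 hold the low-order word.  */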
3055 /* Given a subreg X, return the bit offset where the subreg begins
3056 (counting from the least significant bit of the reg). */
3058 unsigned int
3059 subreg_lsb (const_rtx x)
3061 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3062 SUBREG_BYTE (x));
3065 /* Fill in information about a subreg of a hard register.
3066 xregno - A regno of an inner hard subreg_reg (or what will become one).
3067 xmode - The mode of xregno.
3068 offset - The byte offset.
3069 ymode - The mode of a top level SUBREG (or what may become one).
3070 info - Pointer to structure to fill in. */
3071 static void
3072 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3073 unsigned int offset, enum machine_mode ymode,
3074 struct subreg_info *info)
3076 int nregs_xmode, nregs_ymode;
3077 int mode_multiple, nregs_multiple;
3078 int offset_adj, y_offset, y_offset_adj;
3079 int regsize_xmode, regsize_ymode;
3080 bool rknown;
3082 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3084 rknown = false;
3086 /* If there are holes in a non-scalar mode in registers, we expect
3087 that it is made up of its units concatenated together. */
3088 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3090 enum machine_mode xmode_unit;
3092 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3093 if (GET_MODE_INNER (xmode) == VOIDmode)
3094 xmode_unit = xmode;
3095 else
3096 xmode_unit = GET_MODE_INNER (xmode);
3097 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3098 gcc_assert (nregs_xmode
3099 == (GET_MODE_NUNITS (xmode)
3100 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3101 gcc_assert (hard_regno_nregs[xregno][xmode]
3102 == (hard_regno_nregs[xregno][xmode_unit]
3103 * GET_MODE_NUNITS (xmode)));
3105 /* You can only ask for a SUBREG of a value with holes in the middle
3106 if you don't cross the holes. (Such a SUBREG should be done by
3107 picking a different register class, or doing it in memory if
3108 necessary.) An example of a value with holes is XCmode on 32-bit
3109 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3110 3 for each part, but in memory it's two 128-bit parts.
3111 Padding is assumed to be at the end (not necessarily the 'high part')
3112 of each unit. */
3113 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3114 < GET_MODE_NUNITS (xmode))
3115 && (offset / GET_MODE_SIZE (xmode_unit)
3116 != ((offset + GET_MODE_SIZE (ymode) - 1)
3117 / GET_MODE_SIZE (xmode_unit))))
3119 info->representable_p = false;
3120 rknown = true;
3123 else
3124 nregs_xmode = hard_regno_nregs[xregno][xmode];
3126 nregs_ymode = hard_regno_nregs[xregno][ymode];
3128 /* Paradoxical subregs are otherwise valid. */
3129 if (!rknown
3130 && offset == 0
3131 && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3133 info->representable_p = true;
3134 /* If this is a big endian paradoxical subreg, which uses more
3135 actual hard registers than the original register, we must
3136 return a negative offset so that we find the proper highpart
3137 of the register. */
3138 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3139 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3140 info->offset = nregs_xmode - nregs_ymode;
3141 else
3142 info->offset = 0;
3143 info->nregs = nregs_ymode;
3144 return;
3147 /* If registers store different numbers of bits in the different
3148 modes, we cannot generally form this subreg. */
3149 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3150 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3151 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3152 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3154 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3155 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3156 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3158 info->representable_p = false;
3159 info->nregs
3160 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3161 info->offset = offset / regsize_xmode;
3162 return;
3164 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3166 info->representable_p = false;
3167 info->nregs
3168 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3169 info->offset = offset / regsize_xmode;
3170 return;
3174 /* Lowpart subregs are otherwise valid. */
3175 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3177 info->representable_p = true;
3178 rknown = true;
3180 if (offset == 0 || nregs_xmode == nregs_ymode)
3182 info->offset = 0;
3183 info->nregs = nregs_ymode;
3184 return;
3188 /* This should always pass, otherwise we don't know how to verify
3189 the constraint. These conditions may be relaxed but
3190 subreg_regno_offset would need to be redesigned. */
3191 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3192 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3194 /* The XMODE value can be seen as a vector of NREGS_XMODE
3195 values. The subreg must represent a lowpart of a given field.
3196 Compute what field it is. */
3197 offset_adj = offset;
3198 offset_adj -= subreg_lowpart_offset (ymode,
3199 mode_for_size (GET_MODE_BITSIZE (xmode)
3200 / nregs_xmode,
3201 MODE_INT, 0));
3203 /* Size of ymode must not be greater than the size of xmode. */
3204 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3205 gcc_assert (mode_multiple != 0);
3207 y_offset = offset / GET_MODE_SIZE (ymode);
3208 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3209 nregs_multiple = nregs_xmode / nregs_ymode;
3211 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3212 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3214 if (!rknown)
3216 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3217 rknown = true;
3219 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3220 info->nregs = nregs_ymode;
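/* A worked example, assuming a little-endian 32-bit target where DImode
   occupies two hard registers and SImode one: for xmode == DImode,
   ymode == SImode and offset == 4, the subreg is not the lowpart,
   mode_multiple and nregs_multiple are both 2, and the result is
   representable_p == true, offset == 1, nregs == 1, i.e. the subreg names
   the second register of the pair.  */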
3223 /* This function returns the regno offset of a subreg expression.
3224 xregno - A regno of an inner hard subreg_reg (or what will become one).
3225 xmode - The mode of xregno.
3226 offset - The byte offset.
3227 ymode - The mode of a top level SUBREG (or what may become one).
3228 RETURN - The regno offset which would be used. */
3229 unsigned int
3230 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3231 unsigned int offset, enum machine_mode ymode)
3233 struct subreg_info info;
3234 subreg_get_info (xregno, xmode, offset, ymode, &info);
3235 return info.offset;
3238 /* This function returns true when the offset is representable via
3239 subreg_offset in the given regno.
3240 xregno - A regno of an inner hard subreg_reg (or what will become one).
3241 xmode - The mode of xregno.
3242 offset - The byte offset.
3243 ymode - The mode of a top level SUBREG (or what may become one).
3244 RETURN - Whether the offset is representable. */
3245 bool
3246 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3247 unsigned int offset, enum machine_mode ymode)
3249 struct subreg_info info;
3250 subreg_get_info (xregno, xmode, offset, ymode, &info);
3251 return info.representable_p;
3254 /* Return the final regno that a subreg expression refers to. */
3255 unsigned int
3256 subreg_regno (const_rtx x)
3258 unsigned int ret;
3259 rtx subreg = SUBREG_REG (x);
3260 int regno = REGNO (subreg);
3262 ret = regno + subreg_regno_offset (regno,
3263 GET_MODE (subreg),
3264 SUBREG_BYTE (x),
3265 GET_MODE (x));
3266 return ret;
3270 /* Return the number of registers that a subreg expression refers
3271 to. */
3272 unsigned int
3273 subreg_nregs (const_rtx x)
3275 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3278 /* Return the number of registers that the subreg expression X, whose inner
3279 register has number REGNO, refers to. This is a copy of subreg_nregs above,
3280 changed so that the regno can be passed in. */
3282 unsigned int
3283 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3285 struct subreg_info info;
3286 rtx subreg = SUBREG_REG (x);
3288 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3289 &info);
3290 return info.nregs;
3294 struct parms_set_data
3296 int nregs;
3297 HARD_REG_SET regs;
3300 /* Helper function for noticing stores to parameter registers. */
3301 static void
3302 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3304 struct parms_set_data *d = data;
3305 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3306 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3308 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3309 d->nregs--;
3313 /* Look backward for the first parameter to be loaded.
3314 Note that loads of all parameters will not necessarily be
3315 found if CSE has eliminated some of them (e.g., an argument
3316 to the outer function is passed down as a parameter).
3317 Do not skip BOUNDARY. */
3319 find_first_parameter_load (rtx call_insn, rtx boundary)
3321 struct parms_set_data parm;
3322 rtx p, before, first_set;
3324 /* Since different machines initialize their parameter registers
3325 in different orders, assume nothing. Collect the set of all
3326 parameter registers. */
3327 CLEAR_HARD_REG_SET (parm.regs);
3328 parm.nregs = 0;
3329 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3330 if (GET_CODE (XEXP (p, 0)) == USE
3331 && REG_P (XEXP (XEXP (p, 0), 0)))
3333 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3335 /* We only care about registers which can hold function
3336 arguments. */
3337 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3338 continue;
3340 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3341 parm.nregs++;
3343 before = call_insn;
3344 first_set = call_insn;
3346 /* Search backward for the first set of a register in this set. */
3347 while (parm.nregs && before != boundary)
3349 before = PREV_INSN (before);
3351 /* It is possible that some loads got CSEed from one call to
3352 another. Stop in that case. */
3353 if (CALL_P (before))
3354 break;
3356 /* Our caller must either ensure that we will find all sets
3357 (in case the code has not been optimized yet), or take care
3358 of possible labels by setting BOUNDARY to the preceding
3359 CODE_LABEL. */
3360 if (LABEL_P (before))
3362 gcc_assert (before == boundary);
3363 break;
3366 if (INSN_P (before))
3368 int nregs_old = parm.nregs;
3369 note_stores (PATTERN (before), parms_set, &parm);
3370 /* If we found something that did not set a parameter reg,
3371 we're done. Do not keep going, as that might result
3372 in hoisting an insn before the setting of a pseudo
3373 that is used by the hoisted insn. */
3374 if (nregs_old != parm.nregs)
3375 first_set = before;
3376 else
3377 break;
3380 return first_set;
3383 /* Return true if we should avoid inserting code between INSN and the preceding
3384 call instruction. */
3386 bool
3387 keep_with_call_p (const_rtx insn)
3389 rtx set;
3391 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3393 if (REG_P (SET_DEST (set))
3394 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3395 && fixed_regs[REGNO (SET_DEST (set))]
3396 && general_operand (SET_SRC (set), VOIDmode))
3397 return true;
3398 if (REG_P (SET_SRC (set))
3399 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3400 && REG_P (SET_DEST (set))
3401 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3402 return true;
3403 /* There may be a stack pop just after the call and before the store
3404 of the return register. Search for the actual store when deciding
3405 if we can break or not. */
3406 if (SET_DEST (set) == stack_pointer_rtx)
3408 /* This CONST_CAST is okay because next_nonnote_insn just
3409 returns its argument and we assign it to a const_rtx
3410 variable. */
3411 const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
3412 if (i2 && keep_with_call_p (i2))
3413 return true;
3416 return false;
3419 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3420 to non-complex jumps. That is, direct unconditional, conditional,
3421 and tablejumps, but not computed jumps or returns. It also does
3422 not apply to the fallthru case of a conditional jump. */
3424 bool
3425 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3427 rtx tmp = JUMP_LABEL (jump_insn);
3429 if (label == tmp)
3430 return true;
3432 if (tablejump_p (jump_insn, NULL, &tmp))
3434 rtvec vec = XVEC (PATTERN (tmp),
3435 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3436 int i, veclen = GET_NUM_ELEM (vec);
3438 for (i = 0; i < veclen; ++i)
3439 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3440 return true;
3443 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3444 return true;
3446 return false;
3450 /* Return an estimate of the cost of computing rtx X.
3451 One use is in cse, to decide which expression to keep in the hash table.
3452 Another is in rtl generation, to pick the cheapest way to multiply.
3453 Other uses like the latter are expected in the future. */
3456 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3458 int i, j;
3459 enum rtx_code code;
3460 const char *fmt;
3461 int total;
3463 if (x == 0)
3464 return 0;
3466 /* Compute the default costs of certain things.
3467 Note that targetm.rtx_costs can override the defaults. */
3469 code = GET_CODE (x);
3470 switch (code)
3472 case MULT:
3473 total = COSTS_N_INSNS (5);
3474 break;
3475 case DIV:
3476 case UDIV:
3477 case MOD:
3478 case UMOD:
3479 total = COSTS_N_INSNS (7);
3480 break;
3481 case USE:
3482 /* Used in combine.c as a marker. */
3483 total = 0;
3484 break;
3485 default:
3486 total = COSTS_N_INSNS (1);
3489 switch (code)
3491 case REG:
3492 return 0;
3494 case SUBREG:
3495 total = 0;
3496 /* If we can't tie these modes, make this expensive. The larger
3497 the mode, the more expensive it is. */
3498 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3499 return COSTS_N_INSNS (2
3500 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3501 break;
3503 default:
3504 if (targetm.rtx_costs (x, code, outer_code, &total))
3505 return total;
3506 break;
3509 /* Sum the costs of the sub-rtx's, plus the cost of this operation,
3510 which is already in TOTAL. */
3512 fmt = GET_RTX_FORMAT (code);
3513 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3514 if (fmt[i] == 'e')
3515 total += rtx_cost (XEXP (x, i), code);
3516 else if (fmt[i] == 'E')
3517 for (j = 0; j < XVECLEN (x, i); j++)
3518 total += rtx_cost (XVECEXP (x, i, j), code);
3520 return total;
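/* For instance, when the target's rtx_costs hook does not override the
   defaults above, (plus:SI (mult:SI (reg:SI 60) (reg:SI 61)) (reg:SI 62))
   costs COSTS_N_INSNS (1) for the PLUS plus COSTS_N_INSNS (5) for the MULT,
   with each REG contributing 0 (the pseudo numbers are hypothetical).  */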
3523 /* Return the cost of address expression X.
3524 Expect that X is a properly formed address reference. */
3527 address_cost (rtx x, enum machine_mode mode)
3529 /* We may be asked for the cost of various unusual addresses, such as operands
3530 of a push instruction. It is not worthwhile to complicate the
3531 target hook with such cases. */
3533 if (!memory_address_p (mode, x))
3534 return 1000;
3536 return targetm.address_cost (x);
3539 /* If the target doesn't override, compute the cost as with arithmetic. */
3542 default_address_cost (rtx x)
3544 return rtx_cost (x, MEM);
3548 unsigned HOST_WIDE_INT
3549 nonzero_bits (const_rtx x, enum machine_mode mode)
3551 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3554 unsigned int
3555 num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3557 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3560 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3561 It avoids exponential behavior in nonzero_bits1 when X has
3562 identical subexpressions on the first or the second level. */
3564 static unsigned HOST_WIDE_INT
3565 cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3566 enum machine_mode known_mode,
3567 unsigned HOST_WIDE_INT known_ret)
3569 if (x == known_x && mode == known_mode)
3570 return known_ret;
3572 /* Try to find identical subexpressions. If found call
3573 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3574 precomputed value for the subexpression as KNOWN_RET. */
3576 if (ARITHMETIC_P (x))
3578 rtx x0 = XEXP (x, 0);
3579 rtx x1 = XEXP (x, 1);
3581 /* Check the first level. */
3582 if (x0 == x1)
3583 return nonzero_bits1 (x, mode, x0, mode,
3584 cached_nonzero_bits (x0, mode, known_x,
3585 known_mode, known_ret));
3587 /* Check the second level. */
3588 if (ARITHMETIC_P (x0)
3589 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3590 return nonzero_bits1 (x, mode, x1, mode,
3591 cached_nonzero_bits (x1, mode, known_x,
3592 known_mode, known_ret));
3594 if (ARITHMETIC_P (x1)
3595 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3596 return nonzero_bits1 (x, mode, x0, mode,
3597 cached_nonzero_bits (x0, mode, known_x,
3598 known_mode, known_ret));
3601 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3604 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3605 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3606 is less useful. We can't allow both, because that results in exponential
3607 run time recursion. There is a nullstone testcase that triggered
3608 this. This macro avoids accidental uses of num_sign_bit_copies. */
3609 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3611 /* Given an expression, X, compute which bits in X can be nonzero.
3612 We don't care about bits outside of those defined in MODE.
3614 For most X this is simply GET_MODE_MASK (MODE), but if X is
3615 an arithmetic operation, we can do better. */
3617 static unsigned HOST_WIDE_INT
3618 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3619 enum machine_mode known_mode,
3620 unsigned HOST_WIDE_INT known_ret)
3622 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3623 unsigned HOST_WIDE_INT inner_nz;
3624 enum rtx_code code;
3625 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3627 /* For floating-point values, assume all bits are needed. */
3628 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3629 return nonzero;
3631 /* If X is wider than MODE, use its mode instead. */
3632 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3634 mode = GET_MODE (x);
3635 nonzero = GET_MODE_MASK (mode);
3636 mode_width = GET_MODE_BITSIZE (mode);
3639 if (mode_width > HOST_BITS_PER_WIDE_INT)
3640 /* Our only callers in this case look for single bit values. So
3641 just return the mode mask. Those tests will then be false. */
3642 return nonzero;
3644 #ifndef WORD_REGISTER_OPERATIONS
3645 /* If MODE is wider than X, but both are a single word for both the host
3646 and target machines, we can compute this from which bits of the
3647 object might be nonzero in its own mode, taking into account the fact
3648 that on many CISC machines, accessing an object in a wider mode
3649 causes the high-order bits to become undefined. So they are
3650 not known to be zero. */
3652 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3653 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3654 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3655 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3657 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3658 known_x, known_mode, known_ret);
3659 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3660 return nonzero;
3662 #endif
3664 code = GET_CODE (x);
3665 switch (code)
3667 case REG:
3668 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3669 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3670 all the bits above ptr_mode are known to be zero. */
3671 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3672 && REG_POINTER (x))
3673 nonzero &= GET_MODE_MASK (ptr_mode);
3674 #endif
3676 /* Include declared information about alignment of pointers. */
3677 /* ??? We don't properly preserve REG_POINTER changes across
3678 pointer-to-integer casts, so we can't trust it except for
3679 things that we know must be pointers. See execute/960116-1.c. */
3680 if ((x == stack_pointer_rtx
3681 || x == frame_pointer_rtx
3682 || x == arg_pointer_rtx)
3683 && REGNO_POINTER_ALIGN (REGNO (x)))
3685 unsigned HOST_WIDE_INT alignment
3686 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3688 #ifdef PUSH_ROUNDING
3689 /* If PUSH_ROUNDING is defined, it is possible for the
3690 stack to be momentarily aligned only to that amount,
3691 so we pick the least alignment. */
3692 if (x == stack_pointer_rtx && PUSH_ARGS)
3693 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3694 alignment);
3695 #endif
3697 nonzero &= ~(alignment - 1);
3701 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3702 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3703 known_mode, known_ret,
3704 &nonzero_for_hook);
3706 if (new)
3707 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3708 known_mode, known_ret);
3710 return nonzero_for_hook;
3713 case CONST_INT:
3714 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3715 /* If X is negative in MODE, sign-extend the value. */
3716 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3717 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3718 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3719 #endif
3721 return INTVAL (x);
3723 case MEM:
3724 #ifdef LOAD_EXTEND_OP
3725 /* On many, if not most, RISC machines, reading a byte from memory
3726 zeros the rest of the register. Noticing that fact saves a lot
3727 of extra zero-extends. */
3728 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3729 nonzero &= GET_MODE_MASK (GET_MODE (x));
3730 #endif
3731 break;
3733 case EQ: case NE:
3734 case UNEQ: case LTGT:
3735 case GT: case GTU: case UNGT:
3736 case LT: case LTU: case UNLT:
3737 case GE: case GEU: case UNGE:
3738 case LE: case LEU: case UNLE:
3739 case UNORDERED: case ORDERED:
3740 /* If this produces an integer result, we know which bits are set.
3741 Code here used to clear bits outside the mode of X, but that is
3742 now done above. */
3743 /* Mind that MODE is the mode the caller wants to look at this
3744 operation in, and not the actual operation mode. We can wind
3745 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3746 that describes the results of a vector compare. */
3747 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3748 && mode_width <= HOST_BITS_PER_WIDE_INT)
3749 nonzero = STORE_FLAG_VALUE;
3750 break;
3752 case NEG:
3753 #if 0
3754 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3755 and num_sign_bit_copies. */
3756 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3757 == GET_MODE_BITSIZE (GET_MODE (x)))
3758 nonzero = 1;
3759 #endif
3761 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3762 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3763 break;
3765 case ABS:
3766 #if 0
3767 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3768 and num_sign_bit_copies. */
3769 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3770 == GET_MODE_BITSIZE (GET_MODE (x)))
3771 nonzero = 1;
3772 #endif
3773 break;
3775 case TRUNCATE:
3776 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3777 known_x, known_mode, known_ret)
3778 & GET_MODE_MASK (mode));
3779 break;
3781 case ZERO_EXTEND:
3782 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3783 known_x, known_mode, known_ret);
3784 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3785 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3786 break;
3788 case SIGN_EXTEND:
3789 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3790 Otherwise, show that all the bits in the outer mode but not in the
3791 inner mode may be nonzero. */
3792 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3793 known_x, known_mode, known_ret);
3794 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3796 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3797 if (inner_nz
3798 & (((HOST_WIDE_INT) 1
3799 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3800 inner_nz |= (GET_MODE_MASK (mode)
3801 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3804 nonzero &= inner_nz;
3805 break;
3807 case AND:
3808 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3809 known_x, known_mode, known_ret)
3810 & cached_nonzero_bits (XEXP (x, 1), mode,
3811 known_x, known_mode, known_ret);
3812 break;
3814 case XOR: case IOR:
3815 case UMIN: case UMAX: case SMIN: case SMAX:
3817 unsigned HOST_WIDE_INT nonzero0 =
3818 cached_nonzero_bits (XEXP (x, 0), mode,
3819 known_x, known_mode, known_ret);
3821 /* Don't call nonzero_bits for the second time if it cannot change
3822 anything. */
3823 if ((nonzero & nonzero0) != nonzero)
3824 nonzero &= nonzero0
3825 | cached_nonzero_bits (XEXP (x, 1), mode,
3826 known_x, known_mode, known_ret);
3828 break;
3830 case PLUS: case MINUS:
3831 case MULT:
3832 case DIV: case UDIV:
3833 case MOD: case UMOD:
3834 /* We can apply the rules of arithmetic to compute the number of
3835 high- and low-order zero bits of these operations. We start by
3836 computing the width (position of the highest-order nonzero bit)
3837 and the number of low-order zero bits for each value. */
3839 unsigned HOST_WIDE_INT nz0 =
3840 cached_nonzero_bits (XEXP (x, 0), mode,
3841 known_x, known_mode, known_ret);
3842 unsigned HOST_WIDE_INT nz1 =
3843 cached_nonzero_bits (XEXP (x, 1), mode,
3844 known_x, known_mode, known_ret);
3845 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3846 int width0 = floor_log2 (nz0) + 1;
3847 int width1 = floor_log2 (nz1) + 1;
3848 int low0 = floor_log2 (nz0 & -nz0);
3849 int low1 = floor_log2 (nz1 & -nz1);
3850 HOST_WIDE_INT op0_maybe_minusp
3851 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3852 HOST_WIDE_INT op1_maybe_minusp
3853 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3854 unsigned int result_width = mode_width;
3855 int result_low = 0;
3857 switch (code)
3859 case PLUS:
3860 result_width = MAX (width0, width1) + 1;
3861 result_low = MIN (low0, low1);
3862 break;
3863 case MINUS:
3864 result_low = MIN (low0, low1);
3865 break;
3866 case MULT:
3867 result_width = width0 + width1;
3868 result_low = low0 + low1;
3869 break;
3870 case DIV:
3871 if (width1 == 0)
3872 break;
3873 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3874 result_width = width0;
3875 break;
3876 case UDIV:
3877 if (width1 == 0)
3878 break;
3879 result_width = width0;
3880 break;
3881 case MOD:
3882 if (width1 == 0)
3883 break;
3884 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3885 result_width = MIN (width0, width1);
3886 result_low = MIN (low0, low1);
3887 break;
3888 case UMOD:
3889 if (width1 == 0)
3890 break;
3891 result_width = MIN (width0, width1);
3892 result_low = MIN (low0, low1);
3893 break;
3894 default:
3895 gcc_unreachable ();
3898 if (result_width < mode_width)
3899 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3901 if (result_low > 0)
3902 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3904 #ifdef POINTERS_EXTEND_UNSIGNED
3905 /* If pointers extend unsigned and this is an addition or subtraction
3906 to a pointer in Pmode, all the bits above ptr_mode are known to be
3907 zero. */
3908 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3909 && (code == PLUS || code == MINUS)
3910 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3911 nonzero &= GET_MODE_MASK (ptr_mode);
3912 #endif
3914 break;
3916 case ZERO_EXTRACT:
3917 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3918 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3919 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3920 break;
3922 case SUBREG:
3923 /* If this is a SUBREG formed for a promoted variable that has
3924 been zero-extended, we know that at least the high-order bits
3925 are zero, though others might be too. */
3927 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3928 nonzero = GET_MODE_MASK (GET_MODE (x))
3929 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3930 known_x, known_mode, known_ret);
3932 /* If the inner mode is a single word for both the host and target
3933 machines, we can compute this from which bits of the inner
3934 object might be nonzero. */
3935 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3936 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3937 <= HOST_BITS_PER_WIDE_INT))
3939 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3940 known_x, known_mode, known_ret);
3942 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3943 /* If this is a typical RISC machine, we only have to worry
3944 about the way loads are extended. */
3945 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3946 ? (((nonzero
3947 & (((unsigned HOST_WIDE_INT) 1
3948 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3949 != 0))
3950 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3951 || !MEM_P (SUBREG_REG (x)))
3952 #endif
3954 /* On many CISC machines, accessing an object in a wider mode
3955 causes the high-order bits to become undefined. So they are
3956 not known to be zero. */
3957 if (GET_MODE_SIZE (GET_MODE (x))
3958 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3959 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3960 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3963 break;
3965 case ASHIFTRT:
3966 case LSHIFTRT:
3967 case ASHIFT:
3968 case ROTATE:
3969 /* The nonzero bits are in two classes: any bits within MODE
3970 that aren't in GET_MODE (x) are always significant. The rest of the
3971 nonzero bits are those that are significant in the operand of
3972 the shift when shifted the appropriate number of bits. This
3973 shows that high-order bits are cleared by the right shift and
3974 low-order bits by left shifts. */
3975 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3976 && INTVAL (XEXP (x, 1)) >= 0
3977 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3979 enum machine_mode inner_mode = GET_MODE (x);
3980 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3981 int count = INTVAL (XEXP (x, 1));
3982 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3983 unsigned HOST_WIDE_INT op_nonzero =
3984 cached_nonzero_bits (XEXP (x, 0), mode,
3985 known_x, known_mode, known_ret);
3986 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3987 unsigned HOST_WIDE_INT outer = 0;
3989 if (mode_width > width)
3990 outer = (op_nonzero & nonzero & ~mode_mask);
3992 if (code == LSHIFTRT)
3993 inner >>= count;
3994 else if (code == ASHIFTRT)
3996 inner >>= count;
3998 /* If the sign bit may have been nonzero before the shift, we
3999 need to mark all the places it could have been copied to
4000 by the shift as possibly nonzero. */
4001 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4002 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4004 else if (code == ASHIFT)
4005 inner <<= count;
4006 else
4007 inner = ((inner << (count % width)
4008 | (inner >> (width - (count % width)))) & mode_mask);
4010 nonzero &= (outer | inner);
4012 break;
4014 case FFS:
4015 case POPCOUNT:
4016 /* This is at most the number of bits in the mode. */
4017 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4018 break;
4020 case CLZ:
4021 /* If CLZ has a known value at zero, then the nonzero bits are
4022 that value, plus the number of bits in the mode minus one. */
4023 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4024 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4025 else
4026 nonzero = -1;
4027 break;
4029 case CTZ:
4030 /* If CTZ has a known value at zero, then the nonzero bits are
4031 that value, plus the number of bits in the mode minus one. */
4032 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4033 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4034 else
4035 nonzero = -1;
4036 break;
4038 case PARITY:
4039 nonzero = 1;
4040 break;
4042 case IF_THEN_ELSE:
4044 unsigned HOST_WIDE_INT nonzero_true =
4045 cached_nonzero_bits (XEXP (x, 1), mode,
4046 known_x, known_mode, known_ret);
4048 /* Don't call nonzero_bits for the second time if it cannot change
4049 anything. */
4050 if ((nonzero & nonzero_true) != nonzero)
4051 nonzero &= nonzero_true
4052 | cached_nonzero_bits (XEXP (x, 2), mode,
4053 known_x, known_mode, known_ret);
4055 break;
4057 default:
4058 break;
4061 return nonzero;
4064 /* See the macro definition above. */
4065 #undef cached_num_sign_bit_copies
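/* Two illustrative results for nonzero_bits, assuming the reg_nonzero_bits
   hook provides no extra information (pseudo 60 is hypothetical):

     nonzero_bits ((and:SI (reg:SI 60) (const_int 255)), SImode) == 0xff
     nonzero_bits ((zero_extend:SI (reg:QI 60)), SImode)         == 0xff  */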
4068 /* The function cached_num_sign_bit_copies is a wrapper around
4069 num_sign_bit_copies1. It avoids exponential behavior in
4070 num_sign_bit_copies1 when X has identical subexpressions on the
4071 first or the second level. */
4073 static unsigned int
4074 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4075 enum machine_mode known_mode,
4076 unsigned int known_ret)
4078 if (x == known_x && mode == known_mode)
4079 return known_ret;
4081 /* Try to find identical subexpressions. If found call
4082 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4083 the precomputed value for the subexpression as KNOWN_RET. */
4085 if (ARITHMETIC_P (x))
4087 rtx x0 = XEXP (x, 0);
4088 rtx x1 = XEXP (x, 1);
4090 /* Check the first level. */
4091 if (x0 == x1)
4092 return
4093 num_sign_bit_copies1 (x, mode, x0, mode,
4094 cached_num_sign_bit_copies (x0, mode, known_x,
4095 known_mode,
4096 known_ret));
4098 /* Check the second level. */
4099 if (ARITHMETIC_P (x0)
4100 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4101 return
4102 num_sign_bit_copies1 (x, mode, x1, mode,
4103 cached_num_sign_bit_copies (x1, mode, known_x,
4104 known_mode,
4105 known_ret));
4107 if (ARITHMETIC_P (x1)
4108 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4109 return
4110 num_sign_bit_copies1 (x, mode, x0, mode,
4111 cached_num_sign_bit_copies (x0, mode, known_x,
4112 known_mode,
4113 known_ret));
4116 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4119 /* Return the number of bits at the high-order end of X that are known to
4120 be equal to the sign bit. X will be used in mode MODE; if MODE is
4121 VOIDmode, X will be used in its own mode. The returned value will always
4122 be between 1 and the number of bits in MODE. */
4124 static unsigned int
4125 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4126 enum machine_mode known_mode,
4127 unsigned int known_ret)
4129 enum rtx_code code = GET_CODE (x);
4130 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4131 int num0, num1, result;
4132 unsigned HOST_WIDE_INT nonzero;
4134 /* If we weren't given a mode, use the mode of X. If the mode is still
4135 VOIDmode, we don't know anything. Likewise if one of the modes is
4136 floating-point. */
4138 if (mode == VOIDmode)
4139 mode = GET_MODE (x);
4141 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4142 return 1;
4144 /* For a smaller object, just ignore the high bits. */
4145 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4147 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4148 known_x, known_mode, known_ret);
4149 return MAX (1,
4150 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4153 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4155 #ifndef WORD_REGISTER_OPERATIONS
4156 /* If this machine does not do all register operations on the entire
4157 register and MODE is wider than the mode of X, we can say nothing
4158 at all about the high-order bits. */
4159 return 1;
4160 #else
4161 /* Likewise on machines that do, if the mode of the object is smaller
4162 than a word and loads of that size don't sign extend, we can say
4163 nothing about the high order bits. */
4164 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4165 #ifdef LOAD_EXTEND_OP
4166 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4167 #endif
4169 return 1;
4170 #endif
4173 switch (code)
4175 case REG:
4177 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4178 /* If pointers extend signed and this is a pointer in Pmode, say that
4179 all the bits above ptr_mode are known to be sign bit copies. */
4180 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4181 && REG_POINTER (x))
4182 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4183 #endif
4186 unsigned int copies_for_hook = 1, copies = 1;
4187 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4188 known_mode, known_ret,
4189 &copies_for_hook);
4191 if (new)
4192 copies = cached_num_sign_bit_copies (new, mode, known_x,
4193 known_mode, known_ret);
4195 if (copies > 1 || copies_for_hook > 1)
4196 return MAX (copies, copies_for_hook);
4198 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4200 break;
4202 case MEM:
4203 #ifdef LOAD_EXTEND_OP
4204 /* Some RISC machines sign-extend all loads of smaller than a word. */
4205 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4206 return MAX (1, ((int) bitwidth
4207 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4208 #endif
4209 break;
4211 case CONST_INT:
4212 /* If the constant is negative, take its 1's complement and remask.
4213 Then see how many zero bits we have. */
4214 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4215 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4216 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4217 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4219 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4221 case SUBREG:
4222 /* If this is a SUBREG for a promoted object that is sign-extended
4223 and we are looking at it in a wider mode, we know that at least the
4224 high-order bits are sign bit copies. */
4226 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4228 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4229 known_x, known_mode, known_ret);
4230 return MAX ((int) bitwidth
4231 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4232 num0);
4235 /* For a smaller object, just ignore the high bits. */
4236 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4238 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4239 known_x, known_mode, known_ret);
4240 return MAX (1, (num0
4241 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4242 - bitwidth)));
4245 #ifdef WORD_REGISTER_OPERATIONS
4246 #ifdef LOAD_EXTEND_OP
4247 /* For paradoxical SUBREGs on machines where all register operations
4248 affect the entire register, just look inside. Note that we are
4249 passing MODE to the recursive call, so the number of sign bit copies
4250 will remain relative to that mode, not the inner mode. */
4252 /* This works only if loads sign extend. Otherwise, if we get a
4253 reload for the inner part, it may be loaded from the stack, and
4254 then we lose all sign bit copies that existed before the store
4255 to the stack. */
4257 if ((GET_MODE_SIZE (GET_MODE (x))
4258 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4259 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4260 && MEM_P (SUBREG_REG (x)))
4261 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4262 known_x, known_mode, known_ret);
4263 #endif
4264 #endif
4265 break;
4267 case SIGN_EXTRACT:
4268 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4269 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4270 break;
4272 case SIGN_EXTEND:
4273 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4274 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4275 known_x, known_mode, known_ret));
4277 case TRUNCATE:
4278 /* For a smaller object, just ignore the high bits. */
4279 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4280 known_x, known_mode, known_ret);
4281 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4282 - bitwidth)));
4284 case NOT:
4285 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4286 known_x, known_mode, known_ret);
4288 case ROTATE: case ROTATERT:
4289 /* If we are rotating left by a number of bits less than the number
4290 of sign bit copies, we can just subtract that amount from the
4291 number. */
4292 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4293 && INTVAL (XEXP (x, 1)) >= 0
4294 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4296 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4297 known_x, known_mode, known_ret);
4298 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4299 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4301 break;
4303 case NEG:
4304 /* In general, this subtracts one sign bit copy. But if the value
4305 is known to be positive, the number of sign bit copies is the
4306 same as that of the input. Finally, if the input has just one bit
4307 that might be nonzero, all the bits are copies of the sign bit. */
4308 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4309 known_x, known_mode, known_ret);
4310 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4311 return num0 > 1 ? num0 - 1 : 1;
4313 nonzero = nonzero_bits (XEXP (x, 0), mode);
4314 if (nonzero == 1)
4315 return bitwidth;
4317 if (num0 > 1
4318 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4319 num0--;
4321 return num0;
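/* Editorial worked example, not part of rtlanal.c: with 8-bit values,
   -64 is 0xC0 and has two sign-bit copies, but its negation 64 is 0x40
   with only one, so NEG may cost a single copy.  Conversely, when the
   operand can only be 0 or 1 (nonzero == 1 above), the result is 0 or -1
   and every bit copies the sign bit, hence the early return of bitwidth.  */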
4323 case IOR: case AND: case XOR:
4324 case SMIN: case SMAX: case UMIN: case UMAX:
4325 /* Logical operations will preserve the number of sign-bit copies.
4326 MIN and MAX operations always return one of the operands. */
4327 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4328 known_x, known_mode, known_ret);
4329 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4330 known_x, known_mode, known_ret);
4332 /* If num1 is clearing some of the top bits then regardless of
4333 the other term, we are guaranteed to have at least that many
4334 high-order zero bits. */
4335 if (code == AND
4336 && num1 > 1
4337 && bitwidth <= HOST_BITS_PER_WIDE_INT
4338 && GET_CODE (XEXP (x, 1)) == CONST_INT
4339 && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4340 return num1;
4342 /* Similarly for IOR when setting high-order bits. */
4343 if (code == IOR
4344 && num1 > 1
4345 && bitwidth <= HOST_BITS_PER_WIDE_INT
4346 && GET_CODE (XEXP (x, 1)) == CONST_INT
4347 && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4348 return num1;
4350 return MIN (num0, num1);
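/* Editorial worked example, not part of rtlanal.c: with 8-bit values,
   x = 0x03 has six sign-bit copies and y = 0x0C has four; x | y = 0x0F,
   x ^ y = 0x0F and x & y = 0x00 all have at least MIN (6, 4) = 4 copies.
   The AND shortcut above fires when the constant operand itself clears the
   high bits: (x & 0x1F) always has at least three high-order zero bits,
   i.e. at least three sign-bit copies, whatever x is.  */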
4352 case PLUS: case MINUS:
4353 /* For addition and subtraction, we can have a 1-bit carry. However,
4354 if we are subtracting 1 from a positive number, there will not
4355 be such a carry. Furthermore, if the positive number is known to
4356 be 0 or 1, we know the result is either -1 or 0. */
4358 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4359 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4361 nonzero = nonzero_bits (XEXP (x, 0), mode);
4362 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4363 return (nonzero == 1 || nonzero == 0 ? bitwidth
4364 : bitwidth - floor_log2 (nonzero) - 1);
4367 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4368 known_x, known_mode, known_ret);
4369 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4370 known_x, known_mode, known_ret);
4371 result = MAX (1, MIN (num0, num1) - 1);
4373 #ifdef POINTERS_EXTEND_UNSIGNED
4374 /* If pointers extend signed and this is an addition or subtraction
4375 to a pointer in Pmode, all the bits above ptr_mode are known to be
4376 sign bit copies. */
4377 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4378 && (code == PLUS || code == MINUS)
4379 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4380 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4381 - GET_MODE_BITSIZE (ptr_mode) + 1),
4382 result);
4383 #endif
4384 return result;
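/* Editorial worked example, not part of rtlanal.c: addition can consume one
   sign-bit copy through the carry.  With 8-bit values, 0x1F has three copies,
   yet 0x1F + 0x1F = 0x3E has only MIN (3, 3) - 1 = 2.  The special case above
   covers x + (-1) where x is known to be 0 or 1: the result is -1 or 0, so
   every bit is a copy of the sign bit.  */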
4386 case MULT:
4387 /* The number of bits of the product is the sum of the number of
4388 bits of both terms. However, unless one of the terms is known
4389 to be positive, we must allow for an additional bit since negating
4390 a negative number can remove one sign bit copy. */
4392 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4393 known_x, known_mode, known_ret);
4394 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4395 known_x, known_mode, known_ret);
4397 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4398 if (result > 0
4399 && (bitwidth > HOST_BITS_PER_WIDE_INT
4400 || (((nonzero_bits (XEXP (x, 0), mode)
4401 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4402 && ((nonzero_bits (XEXP (x, 1), mode)
4403 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4404 result--;
4406 return MAX (1, result);
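/* Editorial worked example, not part of rtlanal.c: a factor with numN
   sign-bit copies needs only (bitwidth - numN) "value" bits, so the product
   needs at most (bitwidth - num0) + (bitwidth - num1) of them, leaving
   num0 + num1 - bitwidth sign-bit copies -- the formula above.  One more
   copy is given up unless a factor is known non-negative, since negating a
   negative number can consume a copy.  With 8-bit values, 3 (six copies)
   times 5 (five copies) is guaranteed at least 6 + 5 - 8 = 3 copies; the
   actual product, 15 = 0x0F, has four.  */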
4408 case UDIV:
4409 /* The result must be <= the first operand. If the first operand
4410 has the high bit set, we know nothing about the number of sign
4411 bit copies. */
4412 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4413 return 1;
4414 else if ((nonzero_bits (XEXP (x, 0), mode)
4415 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4416 return 1;
4417 else
4418 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4419 known_x, known_mode, known_ret);
4421 case UMOD:
4422 /* The result must be <= the second operand. */
4423 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4424 known_x, known_mode, known_ret);
4426 case DIV:
4427 /* Similar to unsigned division, except that we have to worry about
4428 the case where the divisor is negative, in which case we have
4429 to add 1. */
4430 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4431 known_x, known_mode, known_ret);
4432 if (result > 1
4433 && (bitwidth > HOST_BITS_PER_WIDE_INT
4434 || (nonzero_bits (XEXP (x, 1), mode)
4435 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4436 result--;
4438 return result;
4440 case MOD:
4441 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4442 known_x, known_mode, known_ret);
4443 if (result > 1
4444 && (bitwidth > HOST_BITS_PER_WIDE_INT
4445 || (nonzero_bits (XEXP (x, 1), mode)
4446 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4447 result--;
4449 return result;
4451 case ASHIFTRT:
4452 /* An arithmetic right shift by a constant adds that many bits that are
4453 equal to the sign bit. */
4454 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4455 known_x, known_mode, known_ret);
4456 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4457 && INTVAL (XEXP (x, 1)) > 0)
4458 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4460 return num0;
4462 case ASHIFT:
4463 /* Left shifts destroy copies. */
4464 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4465 || INTVAL (XEXP (x, 1)) < 0
4466 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4467 return 1;
4469 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4470 known_x, known_mode, known_ret);
4471 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
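/* Editorial worked example, not part of rtlanal.c: with 8-bit values,
   0x30 has two sign-bit copies; the arithmetic right shift 0x30 >> 2 yields
   0x0C with 2 + 2 = 4 copies, matching the ASHIFTRT rule.  A left shift
   discards copies instead: the ASHIFT rule credits MAX (1, 2 - 2) = 1 copy
   to 0x30 << 2 = 0xC0, a safe underestimate of its actual two.  */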
4473 case IF_THEN_ELSE:
4474 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4475 known_x, known_mode, known_ret);
4476 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4477 known_x, known_mode, known_ret);
4478 return MIN (num0, num1);
4480 case EQ: case NE: case GE: case GT: case LE: case LT:
4481 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4482 case GEU: case GTU: case LEU: case LTU:
4483 case UNORDERED: case ORDERED:
4484 /* A comparison yields either zero or STORE_FLAG_VALUE. If STORE_FLAG_VALUE
4485 is negative, take its 1's complement and remask; then count the zero bits. */
4486 nonzero = STORE_FLAG_VALUE;
4487 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4488 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4489 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4491 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4493 default:
4494 break;
4497 /* If we haven't been able to figure it out by one of the above rules,
4498 see if some of the high-order bits are known to be zero. If so,
4499 count those bits and return one less than that amount. If we can't
4500 safely compute the mask for this mode, always return BITWIDTH. */
4502 bitwidth = GET_MODE_BITSIZE (mode);
4503 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4504 return 1;
4506 nonzero = nonzero_bits (x, mode);
4507 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4508 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
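/* Editorial worked example, not part of rtlanal.c: the fallback above turns
   known-zero high bits into sign-bit copies.  If nonzero_bits reports
   0x0000ffff for a 32-bit value, floor_log2 is 15 and the function returns
   32 - 15 - 1 = 16: the sixteen high bits are known zero, and a zero sign
   bit makes each of them a copy of it.  */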
4511 /* Calculate the rtx_cost of a single instruction. A return value of
4512 zero indicates an instruction pattern without a known cost. */
4514 int
4515 insn_rtx_cost (rtx pat)
4517 int i, cost;
4518 rtx set;
4520 /* Extract the single set rtx from the instruction pattern.
4521 We can't use single_set since we only have the pattern. */
4522 if (GET_CODE (pat) == SET)
4523 set = pat;
4524 else if (GET_CODE (pat) == PARALLEL)
4526 set = NULL_RTX;
4527 for (i = 0; i < XVECLEN (pat, 0); i++)
4529 rtx x = XVECEXP (pat, 0, i);
4530 if (GET_CODE (x) == SET)
4532 if (set)
4533 return 0;
4534 set = x;
4537 if (!set)
4538 return 0;
4540 else
4541 return 0;
4543 cost = rtx_cost (SET_SRC (set), SET);
4544 return cost > 0 ? cost : COSTS_N_INSNS (1);
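/* Editorial sketch, not part of rtlanal.c: a typical caller hands this
   function the pattern of an insn and compares the result against a cost
   budget.  The helper below is hypothetical; PATTERN and COSTS_N_INSNS are
   the standard rtl macros, and a real caller would also treat a return
   value of 0 (unknown cost) specially.

     static bool
     insn_cheaper_than (rtx insn, int n_insns)
     {
       return insn_rtx_cost (PATTERN (insn)) <= COSTS_N_INSNS (n_insns);
     }
*/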
4547 /* Given an insn INSN and condition COND, return the condition in a
4548 canonical form to simplify testing by callers. Specifically:
4550 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4551 (2) Both operands will be machine operands; (cc0) will have been replaced.
4552 (3) If an operand is a constant, it will be the second operand.
4553 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4554 for GE, GEU, and LEU.
4556 If the condition cannot be understood, or is an inequality floating-point
4557 comparison which needs to be reversed, 0 will be returned.
4559 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4561 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4562 insn used in locating the condition was found. If a replacement test
4563 of the condition is desired, it should be placed in front of that
4564 insn and we will be sure that the inputs are still valid.
4566 If WANT_REG is nonzero, we wish the condition to be relative to that
4567 register, if possible. Therefore, do not canonicalize the condition
4568 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4569 to be a compare to a CC mode register.
4571 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4572 and at INSN. */
4574 rtx
4575 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4576 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4578 enum rtx_code code;
4579 rtx prev = insn;
4580 const_rtx set;
4581 rtx tem;
4582 rtx op0, op1;
4583 int reverse_code = 0;
4584 enum machine_mode mode;
4585 basic_block bb = BLOCK_FOR_INSN (insn);
4587 code = GET_CODE (cond);
4588 mode = GET_MODE (cond);
4589 op0 = XEXP (cond, 0);
4590 op1 = XEXP (cond, 1);
4592 if (reverse)
4593 code = reversed_comparison_code (cond, insn);
4594 if (code == UNKNOWN)
4595 return 0;
4597 if (earliest)
4598 *earliest = insn;
4600 /* If we are comparing a register with zero, see if the register is set
4601 in the previous insn to a COMPARE or a comparison operation. Perform
4602 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4603 in cse.c */
4605 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4606 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4607 && op1 == CONST0_RTX (GET_MODE (op0))
4608 && op0 != want_reg)
4610 /* Set nonzero when we find something of interest. */
4611 rtx x = 0;
4613 #ifdef HAVE_cc0
4614 /* If comparison with cc0, import actual comparison from compare
4615 insn. */
4616 if (op0 == cc0_rtx)
4618 if ((prev = prev_nonnote_insn (prev)) == 0
4619 || !NONJUMP_INSN_P (prev)
4620 || (set = single_set (prev)) == 0
4621 || SET_DEST (set) != cc0_rtx)
4622 return 0;
4624 op0 = SET_SRC (set);
4625 op1 = CONST0_RTX (GET_MODE (op0));
4626 if (earliest)
4627 *earliest = prev;
4629 #endif
4631 /* If this is a COMPARE, pick up the two things being compared. */
4632 if (GET_CODE (op0) == COMPARE)
4634 op1 = XEXP (op0, 1);
4635 op0 = XEXP (op0, 0);
4636 continue;
4638 else if (!REG_P (op0))
4639 break;
4641 /* Go back to the previous insn. Stop if it is not an INSN. We also
4642 stop if it isn't a single set or if it has a REG_INC note because
4643 we don't want to bother dealing with it. */
4645 if ((prev = prev_nonnote_insn (prev)) == 0
4646 || !NONJUMP_INSN_P (prev)
4647 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4648 /* In cfglayout mode, there do not have to be labels at the
4649 beginning of a block, or jumps at the end, so the previous
4650 conditions would not stop us when we reach bb boundary. */
4651 || BLOCK_FOR_INSN (prev) != bb)
4652 break;
4654 set = set_of (op0, prev);
4656 if (set
4657 && (GET_CODE (set) != SET
4658 || !rtx_equal_p (SET_DEST (set), op0)))
4659 break;
4661 /* If this is setting OP0, get what it sets it to if it looks
4662 relevant. */
4663 if (set)
4665 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4666 #ifdef FLOAT_STORE_FLAG_VALUE
4667 REAL_VALUE_TYPE fsfv;
4668 #endif
4670 /* ??? We may not combine comparisons done in a CCmode with
4671 comparisons not done in a CCmode. This is to aid targets
4672 like Alpha that have an IEEE compliant EQ instruction, and
4673 a non-IEEE compliant BEQ instruction. The use of CCmode is
4674 actually artificial, simply to prevent the combination, but
4675 should not affect other platforms.
4677 However, we must allow VOIDmode comparisons to match either
4678 CCmode or non-CCmode comparison, because some ports have
4679 modeless comparisons inside branch patterns.
4681 ??? This mode check should perhaps look more like the mode check
4682 in simplify_comparison in combine. */
4684 if ((GET_CODE (SET_SRC (set)) == COMPARE
4685 || (((code == NE
4686 || (code == LT
4687 && GET_MODE_CLASS (inner_mode) == MODE_INT
4688 && (GET_MODE_BITSIZE (inner_mode)
4689 <= HOST_BITS_PER_WIDE_INT)
4690 && (STORE_FLAG_VALUE
4691 & ((HOST_WIDE_INT) 1
4692 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4693 #ifdef FLOAT_STORE_FLAG_VALUE
4694 || (code == LT
4695 && SCALAR_FLOAT_MODE_P (inner_mode)
4696 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4697 REAL_VALUE_NEGATIVE (fsfv)))
4698 #endif
4700 && COMPARISON_P (SET_SRC (set))))
4701 && (((GET_MODE_CLASS (mode) == MODE_CC)
4702 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4703 || mode == VOIDmode || inner_mode == VOIDmode))
4704 x = SET_SRC (set);
4705 else if (((code == EQ
4706 || (code == GE
4707 && (GET_MODE_BITSIZE (inner_mode)
4708 <= HOST_BITS_PER_WIDE_INT)
4709 && GET_MODE_CLASS (inner_mode) == MODE_INT
4710 && (STORE_FLAG_VALUE
4711 & ((HOST_WIDE_INT) 1
4712 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4713 #ifdef FLOAT_STORE_FLAG_VALUE
4714 || (code == GE
4715 && SCALAR_FLOAT_MODE_P (inner_mode)
4716 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4717 REAL_VALUE_NEGATIVE (fsfv)))
4718 #endif
4720 && COMPARISON_P (SET_SRC (set))
4721 && (((GET_MODE_CLASS (mode) == MODE_CC)
4722 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4723 || mode == VOIDmode || inner_mode == VOIDmode))
4726 reverse_code = 1;
4727 x = SET_SRC (set);
4729 else
4730 break;
4733 else if (reg_set_p (op0, prev))
4734 /* If this sets OP0, but not directly, we have to give up. */
4735 break;
4737 if (x)
4739 /* If the caller is expecting the condition to be valid at INSN,
4740 make sure X doesn't change before INSN. */
4741 if (valid_at_insn_p)
4742 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4743 break;
4744 if (COMPARISON_P (x))
4745 code = GET_CODE (x);
4746 if (reverse_code)
4748 code = reversed_comparison_code (x, prev);
4749 if (code == UNKNOWN)
4750 return 0;
4751 reverse_code = 0;
4754 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4755 if (earliest)
4756 *earliest = prev;
4760 /* If constant is first, put it last. */
4761 if (CONSTANT_P (op0))
4762 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4764 /* If OP0 is the result of a comparison, we weren't able to find what
4765 was really being compared, so fail. */
4766 if (!allow_cc_mode
4767 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4768 return 0;
4770 /* Canonicalize any ordered comparison with integers involving equality
4771 if we can do computations in the relevant mode and we do not
4772 overflow. */
4774 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4775 && GET_CODE (op1) == CONST_INT
4776 && GET_MODE (op0) != VOIDmode
4777 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4779 HOST_WIDE_INT const_val = INTVAL (op1);
4780 unsigned HOST_WIDE_INT uconst_val = const_val;
4781 unsigned HOST_WIDE_INT max_val
4782 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4784 switch (code)
4786 case LE:
4787 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4788 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4789 break;
4791 /* When cross-compiling, const_val might be sign-extended from
4792 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
4793 case GE:
4794 if ((HOST_WIDE_INT) (const_val & max_val)
4795 != (((HOST_WIDE_INT) 1
4796 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4797 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4798 break;
4800 case LEU:
4801 if (uconst_val < max_val)
4802 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4803 break;
4805 case GEU:
4806 if (uconst_val != 0)
4807 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4808 break;
4810 default:
4811 break;
4815 /* Never return CC0; return zero instead. */
4816 if (CC0_P (op0))
4817 return 0;
4819 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
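/* Editorial illustration, not part of rtlanal.c: rule (4) in the comment
   above rewrites non-strict comparisons against constants as strict ones,
   e.g. (le x 7) becomes (lt x 8) and (geu x 5) becomes (gtu x 4).  The
   rewrite is only applied when the constant is not already at the end of
   its range (the max_val / uconst_val guards above).  A self-contained
   check of the signed case:  */

static int
le_equals_lt_succ (int x, int c)
{
  /* Assumes c < INT_MAX, mirroring the guard against the extreme value.  */
  return (x <= c) == (x < c + 1);	/* always true */
}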
4822 /* Given a jump insn JUMP, return the condition that will cause it to branch
4823 to its JUMP_LABEL. If the condition cannot be understood, or is an
4824 inequality floating-point comparison which needs to be reversed, 0 will
4825 be returned.
4827 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4828 insn used in locating the condition was found. If a replacement test
4829 of the condition is desired, it should be placed in front of that
4830 insn and we will be sure that the inputs are still valid. If EARLIEST
4831 is null, the returned condition will be valid at INSN.
4833 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4834 compare to a CC mode register.
4836 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4838 rtx
4839 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4841 rtx cond;
4842 int reverse;
4843 rtx set;
4845 /* If this is not a standard conditional jump, we can't parse it. */
4846 if (!JUMP_P (jump)
4847 || ! any_condjump_p (jump))
4848 return 0;
4849 set = pc_set (jump);
4851 cond = XEXP (SET_SRC (set), 0);
4853 /* If this branches to JUMP_LABEL when the condition is false, reverse
4854 the condition. */
4855 reverse
4856 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4857 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4859 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4860 allow_cc_mode, valid_at_insn_p);
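/* Editorial sketch, not part of rtlanal.c: a pass that wants to know whether
   a conditional jump tests a given register might use this function roughly
   as below.  The helper is hypothetical; get_condition and reg_mentioned_p
   are functions defined in this file.

     static bool
     jump_tests_reg_p (rtx jump, rtx reg)
     {
       rtx cond = get_condition (jump, NULL, 0, 0);
       return cond != 0 && reg_mentioned_p (reg, cond);
     }
*/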
4863 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4864 TARGET_MODE_REP_EXTENDED.
4866 Note that we assume that the property of
4867 TARGET_MODE_REP_EXTENDED (B, C) carries over to the integral modes
4868 narrower than mode B. I.e., if A is a mode narrower than B, then in
4869 order to be able to operate on it in mode B, mode A needs to
4870 satisfy the requirements set by the representation of mode B. */
4872 static void
4873 init_num_sign_bit_copies_in_rep (void)
4875 enum machine_mode mode, in_mode;
4877 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4878 in_mode = GET_MODE_WIDER_MODE (mode))
4879 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4880 mode = GET_MODE_WIDER_MODE (mode))
4882 enum machine_mode i;
4884 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4885 extends to the next widest mode. */
4886 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4887 || GET_MODE_WIDER_MODE (mode) == in_mode);
4889 /* We are in in_mode. Count how many bits outside of mode
4890 have to be copies of the sign-bit. */
4891 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4893 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4895 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4896 /* We can only check sign-bit copies starting from the
4897 top-bit. In order to be able to check the bits we
4898 have already seen we pretend that subsequent bits
4899 have to be sign-bit copies too. */
4900 || num_sign_bit_copies_in_rep [in_mode][mode])
4901 num_sign_bit_copies_in_rep [in_mode][mode]
4902 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
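/* Editorial worked example, not part of rtlanal.c: suppose the target
   reports SIGN_EXTEND for QImode-in-HImode and for HImode-in-SImode.  The
   loops above then accumulate, for in_mode = SImode and mode = QImode,
   (16 - 8) + (32 - 16) = 24, i.e. a QImode value held in an SImode register
   must have its top 24 bits equal to the sign bit.  */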
4907 /* Suppose that truncation from the machine mode of X to MODE is not a
4908 no-op. See if there is anything special about X so that we can
4909 assume it already contains a truncated value of MODE. */
4911 bool
4912 truncated_to_mode (enum machine_mode mode, const_rtx x)
4914 /* This register has already been used in MODE without explicit
4915 truncation. */
4916 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4917 return true;
4919 /* See if we already satisfy the requirements of MODE. If yes we
4920 can just switch to MODE. */
4921 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4922 && (num_sign_bit_copies (x, GET_MODE (x))
4923 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4924 return true;
4926 return false;
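/* Editorial worked example, not part of rtlanal.c: continuing the example
   above, with num_sign_bit_copies_in_rep[SImode][QImode] == 24 an SImode
   value X is considered already truncated to QImode when
   num_sign_bit_copies (X, SImode) >= 25 -- the 24 representation bits plus
   the QImode sign bit itself are all copies, so switching to QImode changes
   nothing.  */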
4929 /* Initialize non_rtx_starting_operands, which is used to speed up
4930 for_each_rtx. */
4931 void
4932 init_rtlanal (void)
4934 int i;
4935 for (i = 0; i < NUM_RTX_CODE; i++)
4937 const char *format = GET_RTX_FORMAT (i);
4938 const char *first = strpbrk (format, "eEV");
4939 non_rtx_starting_operands[i] = first ? first - format : -1;
4942 init_num_sign_bit_copies_in_rep ();
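/* Editorial illustration, not part of rtlanal.c: the table above records,
   for each rtx format string, where the first rtx-carrying operand
   ('e', 'E' or 'V') sits, so for_each_rtx can start there.  The same
   computation on a format string, as a self-contained C sketch (only
   <string.h> is needed):  */

#include <string.h>

static int
first_rtx_operand (const char *format)
{
  /* Index of the first 'e', 'E' or 'V' operand, or -1 if there is none.  */
  const char *first = strpbrk (format, "eEV");
  return first ? (int) (first - format) : -1;
}

/* first_rtx_operand ("ee") == 0   e.g. PLUS: operand 0 is already an rtx.
   first_rtx_operand ("w")  == -1  e.g. CONST_INT: no rtx operands at all.  */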
4945 /* Check whether this is a constant pool constant. */
4946 bool
4947 constant_pool_constant_p (rtx x)
4949 x = avoid_constant_pool_reference (x);
4950 return GET_CODE (x) == CONST_DOUBLE;