gcc/rtlanal.c
1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008 Free Software
4 Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "target.h"
33 #include "output.h"
34 #include "tm_p.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "regs.h"
38 #include "function.h"
39 #include "df.h"
40 #include "tree.h"
42 /* Information about a subreg of a hard register. */
43 struct subreg_info
45 /* Offset of first hard register involved in the subreg. */
46 int offset;
47 /* Number of hard registers involved in the subreg. */
48 int nregs;
49 /* Whether this subreg can be represented as a hard reg with the new
50 mode. */
51 bool representable_p;
54 /* Forward declarations */
55 static void set_of_1 (rtx, const_rtx, void *);
56 static bool covers_regno_p (const_rtx, unsigned int);
57 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
58 static int rtx_referenced_p_1 (rtx *, void *);
59 static int computed_jump_p_1 (const_rtx);
60 static void parms_set (rtx, const_rtx, void *);
61 static void subreg_get_info (unsigned int, enum machine_mode,
62 unsigned int, enum machine_mode,
63 struct subreg_info *);
65 static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
66 const_rtx, enum machine_mode,
67 unsigned HOST_WIDE_INT);
68 static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
69 const_rtx, enum machine_mode,
70 unsigned HOST_WIDE_INT);
71 static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
72 enum machine_mode,
73 unsigned int);
74 static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
75 enum machine_mode, unsigned int);
77 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
78 -1 if a code has no such operand. */
79 static int non_rtx_starting_operands[NUM_RTX_CODE];
81 /* Bit flags that specify the machine subtype we are compiling for.
82 Bits are tested using macros TARGET_... defined in the tm.h file
83 and set by `-m...' switches. Must be defined in rtlanal.c. */
85 int target_flags;
87 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
88 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
89 SIGN_EXTEND then while narrowing we also have to enforce the
90 representation and sign-extend the value to mode DESTINATION_REP.
92 If the value is already sign-extended to DESTINATION_REP mode we
93 can just switch to DESTINATION mode on it. For each pair of
94 integral modes SOURCE and DESTINATION, when truncating from SOURCE
95 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
96 contains the number of high-order bits in SOURCE that have to be
97 copies of the sign-bit so that we can do this mode-switch to
98 DESTINATION. */
100 static unsigned int
101 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
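/* For example, on a hypothetical target where TARGET_MODE_REP_EXTENDED
   (SImode, DImode) is SIGN_EXTEND, a DImode value can be switched to
   SImode without an explicit truncation only when its high-order bits
   are already copies of the SImode sign bit; the number of such bits
   required is what this table records.  */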
103 /* Return 1 if the value of X is unstable
104 (would be different at a different point in the program).
105 The frame pointer, arg pointer, etc. are considered stable
106 (within one function) and so is anything marked `unchanging'. */
109 rtx_unstable_p (const_rtx x)
111 const RTX_CODE code = GET_CODE (x);
112 int i;
113 const char *fmt;
115 switch (code)
117 case MEM:
118 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
120 case CONST:
121 case CONST_INT:
122 case CONST_DOUBLE:
123 case CONST_FIXED:
124 case CONST_VECTOR:
125 case SYMBOL_REF:
126 case LABEL_REF:
127 return 0;
129 case REG:
130 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
131 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
132 /* The arg pointer varies if it is not a fixed register. */
133 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
134 return 0;
135 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
136 /* ??? When call-clobbered, the value is stable modulo the restore
137 that must happen after a call. This currently screws up local-alloc
138 into believing that the restore is not needed. */
139 if (x == pic_offset_table_rtx)
140 return 0;
141 #endif
142 return 1;
144 case ASM_OPERANDS:
145 if (MEM_VOLATILE_P (x))
146 return 1;
148 /* Fall through. */
150 default:
151 break;
154 fmt = GET_RTX_FORMAT (code);
155 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
156 if (fmt[i] == 'e')
158 if (rtx_unstable_p (XEXP (x, i)))
159 return 1;
161 else if (fmt[i] == 'E')
163 int j;
164 for (j = 0; j < XVECLEN (x, i); j++)
165 if (rtx_unstable_p (XVECEXP (x, i, j)))
166 return 1;
169 return 0;
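/* Thus, for example, a pseudo register is always considered unstable
   here, while a MEM is stable only if it is MEM_READONLY_P and its
   address is itself stable.  */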
172 /* Return 1 if X has a value that can vary even between two
173 executions of the program. 0 means X can be compared reliably
174 against certain constants or near-constants.
175 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
176 zero, we are slightly more conservative.
177 The frame pointer and the arg pointer are considered constant. */
179 bool
180 rtx_varies_p (const_rtx x, bool for_alias)
182 RTX_CODE code;
183 int i;
184 const char *fmt;
186 if (!x)
187 return 0;
189 code = GET_CODE (x);
190 switch (code)
192 case MEM:
193 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
195 case CONST:
196 case CONST_INT:
197 case CONST_DOUBLE:
198 case CONST_FIXED:
199 case CONST_VECTOR:
200 case SYMBOL_REF:
201 case LABEL_REF:
202 return 0;
204 case REG:
205 /* Note that we have to test for the actual rtx used for the frame
206 and arg pointers and not just the register number in case we have
207 eliminated the frame and/or arg pointer and are using it
208 for pseudos. */
209 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
210 /* The arg pointer varies if it is not a fixed register. */
211 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
212 return 0;
213 if (x == pic_offset_table_rtx
214 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
215 /* ??? When call-clobbered, the value is stable modulo the restore
216 that must happen after a call. This currently screws up
217 local-alloc into believing that the restore is not needed, so we
218 must return 0 only if we are called from alias analysis. */
219 && for_alias
220 #endif
222 return 0;
223 return 1;
225 case LO_SUM:
226 /* The operand 0 of a LO_SUM is considered constant
227 (in fact it is related specifically to operand 1)
228 during alias analysis. */
229 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
230 || rtx_varies_p (XEXP (x, 1), for_alias);
232 case ASM_OPERANDS:
233 if (MEM_VOLATILE_P (x))
234 return 1;
236 /* Fall through. */
238 default:
239 break;
242 fmt = GET_RTX_FORMAT (code);
243 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
244 if (fmt[i] == 'e')
246 if (rtx_varies_p (XEXP (x, i), for_alias))
247 return 1;
249 else if (fmt[i] == 'E')
251 int j;
252 for (j = 0; j < XVECLEN (x, i); j++)
253 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
254 return 1;
257 return 0;
260 /* Return nonzero if the use of X as an address in a MEM can cause a trap.
261 MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
262 whether nonzero is returned for unaligned memory accesses on strict
263 alignment machines. */
265 static int
266 rtx_addr_can_trap_p_1 (const_rtx x, enum machine_mode mode, bool unaligned_mems)
268 enum rtx_code code = GET_CODE (x);
270 switch (code)
272 case SYMBOL_REF:
273 return SYMBOL_REF_WEAK (x);
275 case LABEL_REF:
276 return 0;
278 case REG:
279 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
280 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
281 || x == stack_pointer_rtx
282 /* The arg pointer varies if it is not a fixed register. */
283 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
284 return 0;
285 /* All of the virtual frame registers are stack references. */
286 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
287 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
288 return 0;
289 return 1;
291 case CONST:
292 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
294 case PLUS:
295 /* An address is assumed not to trap if:
296 - it is an address that can't trap plus a constant integer,
297 with the proper remainder modulo the mode size if we are
298 considering unaligned memory references. */
299 if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
300 && GET_CODE (XEXP (x, 1)) == CONST_INT)
302 HOST_WIDE_INT offset;
304 if (!STRICT_ALIGNMENT
305 || !unaligned_mems
306 || GET_MODE_SIZE (mode) == 0)
307 return 0;
309 offset = INTVAL (XEXP (x, 1));
311 #ifdef SPARC_STACK_BOUNDARY_HACK
312 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
313 the real alignment of %sp. However, when it does this, the
314 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
315 if (SPARC_STACK_BOUNDARY_HACK
316 && (XEXP (x, 0) == stack_pointer_rtx
317 || XEXP (x, 0) == hard_frame_pointer_rtx))
318 offset -= STACK_POINTER_OFFSET;
319 #endif
321 return offset % GET_MODE_SIZE (mode) != 0;
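/* E.g. on a strict-alignment target, a 4-byte access at sp+6 leaves a
   nonzero remainder modulo the mode size and is reported as possibly
   trapping, while sp+8 is not.  */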
324 /* - or it is the pic register plus a constant. */
325 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
326 return 0;
328 return 1;
330 case LO_SUM:
331 case PRE_MODIFY:
332 return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);
334 case PRE_DEC:
335 case PRE_INC:
336 case POST_DEC:
337 case POST_INC:
338 case POST_MODIFY:
339 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
341 default:
342 break;
345 /* If it isn't one of the cases above, it can cause a trap. */
346 return 1;
349 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
352 rtx_addr_can_trap_p (const_rtx x)
354 return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
357 /* Return true if X is an address that is known to not be zero. */
359 bool
360 nonzero_address_p (const_rtx x)
362 const enum rtx_code code = GET_CODE (x);
364 switch (code)
366 case SYMBOL_REF:
367 return !SYMBOL_REF_WEAK (x);
369 case LABEL_REF:
370 return true;
372 case REG:
373 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
374 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
375 || x == stack_pointer_rtx
376 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
377 return true;
378 /* All of the virtual frame registers are stack references. */
379 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
380 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
381 return true;
382 return false;
384 case CONST:
385 return nonzero_address_p (XEXP (x, 0));
387 case PLUS:
388 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
389 return nonzero_address_p (XEXP (x, 0));
390 /* Handle PIC references. */
391 else if (XEXP (x, 0) == pic_offset_table_rtx
392 && CONSTANT_P (XEXP (x, 1)))
393 return true;
394 return false;
396 case PRE_MODIFY:
397 /* Similar to the above; allow positive offsets. Further, since
398 auto-inc is only allowed in memories, the register must be a
399 pointer. */
400 if (GET_CODE (XEXP (x, 1)) == CONST_INT
401 && INTVAL (XEXP (x, 1)) > 0)
402 return true;
403 return nonzero_address_p (XEXP (x, 0));
405 case PRE_INC:
406 /* Similarly. Further, the offset is always positive. */
407 return true;
409 case PRE_DEC:
410 case POST_DEC:
411 case POST_INC:
412 case POST_MODIFY:
413 return nonzero_address_p (XEXP (x, 0));
415 case LO_SUM:
416 return nonzero_address_p (XEXP (x, 1));
418 default:
419 break;
422 /* If it isn't one of the cases above, it might be zero. */
423 return false;
426 /* Return 1 if X refers to a memory location whose address
427 cannot be compared reliably with constant addresses,
428 or if X refers to a BLKmode memory object.
429 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
430 zero, we are slightly more conservative. */
432 bool
433 rtx_addr_varies_p (const_rtx x, bool for_alias)
435 enum rtx_code code;
436 int i;
437 const char *fmt;
439 if (x == 0)
440 return 0;
442 code = GET_CODE (x);
443 if (code == MEM)
444 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
446 fmt = GET_RTX_FORMAT (code);
447 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
448 if (fmt[i] == 'e')
450 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
451 return 1;
453 else if (fmt[i] == 'E')
455 int j;
456 for (j = 0; j < XVECLEN (x, i); j++)
457 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
458 return 1;
460 return 0;
463 /* Return the value of the integer term in X, if one is apparent;
464 otherwise return 0.
465 Only obvious integer terms are detected.
466 This is used in cse.c with the `related_value' field. */
468 HOST_WIDE_INT
469 get_integer_term (const_rtx x)
471 if (GET_CODE (x) == CONST)
472 x = XEXP (x, 0);
474 if (GET_CODE (x) == MINUS
475 && GET_CODE (XEXP (x, 1)) == CONST_INT)
476 return - INTVAL (XEXP (x, 1));
477 if (GET_CODE (x) == PLUS
478 && GET_CODE (XEXP (x, 1)) == CONST_INT)
479 return INTVAL (XEXP (x, 1));
480 return 0;
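/* E.g. (plus (symbol_ref "x") (const_int 8)) yields 8 and
   (minus (symbol_ref "x") (const_int 8)) yields -8; anything else
   yields 0.  */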
483 /* If X is a constant, return the value sans apparent integer term;
484 otherwise return 0.
485 Only obvious integer terms are detected. */
488 get_related_value (const_rtx x)
490 if (GET_CODE (x) != CONST)
491 return 0;
492 x = XEXP (x, 0);
493 if (GET_CODE (x) == PLUS
494 && GET_CODE (XEXP (x, 1)) == CONST_INT)
495 return XEXP (x, 0);
496 else if (GET_CODE (x) == MINUS
497 && GET_CODE (XEXP (x, 1)) == CONST_INT)
498 return XEXP (x, 0);
499 return 0;
502 /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
503 to somewhere in the same object or object_block as SYMBOL. */
505 bool
506 offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
508 tree decl;
510 if (GET_CODE (symbol) != SYMBOL_REF)
511 return false;
513 if (offset == 0)
514 return true;
516 if (offset > 0)
518 if (CONSTANT_POOL_ADDRESS_P (symbol)
519 && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
520 return true;
522 decl = SYMBOL_REF_DECL (symbol);
523 if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
524 return true;
527 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
528 && SYMBOL_REF_BLOCK (symbol)
529 && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
530 && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
531 < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
532 return true;
534 return false;
537 /* Split X into a base and a constant offset, storing them in *BASE_OUT
538 and *OFFSET_OUT respectively. */
540 void
541 split_const (rtx x, rtx *base_out, rtx *offset_out)
543 if (GET_CODE (x) == CONST)
545 x = XEXP (x, 0);
546 if (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT)
548 *base_out = XEXP (x, 0);
549 *offset_out = XEXP (x, 1);
550 return;
553 *base_out = x;
554 *offset_out = const0_rtx;
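/* E.g. (const (plus (symbol_ref "x") (const_int 8))) splits into base
   (symbol_ref "x") and offset (const_int 8); any other X splits into
   base X and offset (const_int 0).  */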
557 /* Return the number of places FIND appears within X. If COUNT_DEST is
558 zero, we do not count occurrences inside the destination of a SET. */
561 count_occurrences (const_rtx x, const_rtx find, int count_dest)
563 int i, j;
564 enum rtx_code code;
565 const char *format_ptr;
566 int count;
568 if (x == find)
569 return 1;
571 code = GET_CODE (x);
573 switch (code)
575 case REG:
576 case CONST_INT:
577 case CONST_DOUBLE:
578 case CONST_FIXED:
579 case CONST_VECTOR:
580 case SYMBOL_REF:
581 case CODE_LABEL:
582 case PC:
583 case CC0:
584 return 0;
586 case EXPR_LIST:
587 count = count_occurrences (XEXP (x, 0), find, count_dest);
588 if (XEXP (x, 1))
589 count += count_occurrences (XEXP (x, 1), find, count_dest);
590 return count;
592 case MEM:
593 if (MEM_P (find) && rtx_equal_p (x, find))
594 return 1;
595 break;
597 case SET:
598 if (SET_DEST (x) == find && ! count_dest)
599 return count_occurrences (SET_SRC (x), find, count_dest);
600 break;
602 default:
603 break;
606 format_ptr = GET_RTX_FORMAT (code);
607 count = 0;
609 for (i = 0; i < GET_RTX_LENGTH (code); i++)
611 switch (*format_ptr++)
613 case 'e':
614 count += count_occurrences (XEXP (x, i), find, count_dest);
615 break;
617 case 'E':
618 for (j = 0; j < XVECLEN (x, i); j++)
619 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
620 break;
623 return count;
627 /* Nonzero if register REG appears somewhere within IN.
628 Also works if REG is not a register; in this case it checks
629 for a subexpression of IN that is Lisp "equal" to REG. */
632 reg_mentioned_p (const_rtx reg, const_rtx in)
634 const char *fmt;
635 int i;
636 enum rtx_code code;
638 if (in == 0)
639 return 0;
641 if (reg == in)
642 return 1;
644 if (GET_CODE (in) == LABEL_REF)
645 return reg == XEXP (in, 0);
647 code = GET_CODE (in);
649 switch (code)
651 /* Compare registers by number. */
652 case REG:
653 return REG_P (reg) && REGNO (in) == REGNO (reg);
655 /* These codes have no constituent expressions
656 and are unique. */
657 case SCRATCH:
658 case CC0:
659 case PC:
660 return 0;
662 case CONST_INT:
663 case CONST_VECTOR:
664 case CONST_DOUBLE:
665 case CONST_FIXED:
666 /* These are kept unique for a given value. */
667 return 0;
669 default:
670 break;
673 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
674 return 1;
676 fmt = GET_RTX_FORMAT (code);
678 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
680 if (fmt[i] == 'E')
682 int j;
683 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
684 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
685 return 1;
687 else if (fmt[i] == 'e'
688 && reg_mentioned_p (reg, XEXP (in, i)))
689 return 1;
691 return 0;
694 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
695 no CODE_LABEL insn. */
698 no_labels_between_p (const_rtx beg, const_rtx end)
700 rtx p;
701 if (beg == end)
702 return 0;
703 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
704 if (LABEL_P (p))
705 return 0;
706 return 1;
709 /* Nonzero if register REG is used in an insn between
710 FROM_INSN and TO_INSN (exclusive of those two). */
713 reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
715 rtx insn;
717 if (from_insn == to_insn)
718 return 0;
720 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
721 if (INSN_P (insn)
722 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
723 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
724 return 1;
725 return 0;
728 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
729 is entirely replaced by a new value and the only use is as a SET_DEST,
730 we do not consider it a reference. */
733 reg_referenced_p (const_rtx x, const_rtx body)
735 int i;
737 switch (GET_CODE (body))
739 case SET:
740 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
741 return 1;
743 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
744 of a REG that occupies all of the REG, the insn references X if
745 it is mentioned in the destination. */
746 if (GET_CODE (SET_DEST (body)) != CC0
747 && GET_CODE (SET_DEST (body)) != PC
748 && !REG_P (SET_DEST (body))
749 && ! (GET_CODE (SET_DEST (body)) == SUBREG
750 && REG_P (SUBREG_REG (SET_DEST (body)))
751 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
752 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
753 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
754 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
755 && reg_overlap_mentioned_p (x, SET_DEST (body)))
756 return 1;
757 return 0;
759 case ASM_OPERANDS:
760 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
761 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
762 return 1;
763 return 0;
765 case CALL:
766 case USE:
767 case IF_THEN_ELSE:
768 return reg_overlap_mentioned_p (x, body);
770 case TRAP_IF:
771 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
773 case PREFETCH:
774 return reg_overlap_mentioned_p (x, XEXP (body, 0));
776 case UNSPEC:
777 case UNSPEC_VOLATILE:
778 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
779 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
780 return 1;
781 return 0;
783 case PARALLEL:
784 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
785 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
786 return 1;
787 return 0;
789 case CLOBBER:
790 if (MEM_P (XEXP (body, 0)))
791 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
792 return 1;
793 return 0;
795 case COND_EXEC:
796 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
797 return 1;
798 return reg_referenced_p (x, COND_EXEC_CODE (body));
800 default:
801 return 0;
805 /* Nonzero if register REG is set or clobbered in an insn between
806 FROM_INSN and TO_INSN (exclusive of those two). */
809 reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
811 const_rtx insn;
813 if (from_insn == to_insn)
814 return 0;
816 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
817 if (INSN_P (insn) && reg_set_p (reg, insn))
818 return 1;
819 return 0;
822 /* Internals of reg_set_between_p. */
824 reg_set_p (const_rtx reg, const_rtx insn)
826 if (INSN_P (insn))
828 if (FIND_REG_INC_NOTE (insn, reg))
829 return 1;
830 if (CALL_P (insn))
832 if (REG_P (reg) && REGNO (reg) < FIRST_PSEUDO_REGISTER)
834 HARD_REG_SET clobbered_regs;
836 get_call_invalidated_used_regs (insn, &clobbered_regs, true);
837 if (TEST_HARD_REG_BIT (clobbered_regs, REGNO (reg)))
838 return 1;
840 if (MEM_P (reg) || find_reg_fusage (insn, CLOBBER, reg))
841 return 1;
845 return set_of (reg, insn) != NULL_RTX;
848 /* Similar to reg_set_between_p, but check all registers in X. Return 0
849 only if none of them are modified between START and END. Return 1 if
850 X contains a MEM; this routine does use memory aliasing. */
853 modified_between_p (const_rtx x, const_rtx start, const_rtx end)
855 const enum rtx_code code = GET_CODE (x);
856 const char *fmt;
857 int i, j;
858 rtx insn;
860 if (start == end)
861 return 0;
863 switch (code)
865 case CONST_INT:
866 case CONST_DOUBLE:
867 case CONST_FIXED:
868 case CONST_VECTOR:
869 case CONST:
870 case SYMBOL_REF:
871 case LABEL_REF:
872 return 0;
874 case PC:
875 case CC0:
876 return 1;
878 case MEM:
879 if (modified_between_p (XEXP (x, 0), start, end))
880 return 1;
881 if (MEM_READONLY_P (x))
882 return 0;
883 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
884 if (memory_modified_in_insn_p (x, insn))
885 return 1;
886 return 0;
887 break;
889 case REG:
890 return reg_set_between_p (x, start, end);
892 default:
893 break;
896 fmt = GET_RTX_FORMAT (code);
897 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
899 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
900 return 1;
902 else if (fmt[i] == 'E')
903 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
904 if (modified_between_p (XVECEXP (x, i, j), start, end))
905 return 1;
908 return 0;
911 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
912 of them are modified in INSN. Return 1 if X contains a MEM; this routine
913 does use memory aliasing. */
916 modified_in_p (const_rtx x, const_rtx insn)
918 const enum rtx_code code = GET_CODE (x);
919 const char *fmt;
920 int i, j;
922 switch (code)
924 case CONST_INT:
925 case CONST_DOUBLE:
926 case CONST_FIXED:
927 case CONST_VECTOR:
928 case CONST:
929 case SYMBOL_REF:
930 case LABEL_REF:
931 return 0;
933 case PC:
934 case CC0:
935 return 1;
937 case MEM:
938 if (modified_in_p (XEXP (x, 0), insn))
939 return 1;
940 if (MEM_READONLY_P (x))
941 return 0;
942 if (memory_modified_in_insn_p (x, insn))
943 return 1;
944 return 0;
945 break;
947 case REG:
948 return reg_set_p (x, insn);
950 default:
951 break;
954 fmt = GET_RTX_FORMAT (code);
955 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
957 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
958 return 1;
960 else if (fmt[i] == 'E')
961 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
962 if (modified_in_p (XVECEXP (x, i, j), insn))
963 return 1;
966 return 0;
969 /* Helper function for set_of. */
970 struct set_of_data
972 const_rtx found;
973 const_rtx pat;
976 static void
977 set_of_1 (rtx x, const_rtx pat, void *data1)
979 struct set_of_data *const data = (struct set_of_data *) (data1);
980 if (rtx_equal_p (x, data->pat)
981 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
982 data->found = pat;
985 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
986 (either directly or via STRICT_LOW_PART and similar modifiers). */
987 const_rtx
988 set_of (const_rtx pat, const_rtx insn)
990 struct set_of_data data;
991 data.found = NULL_RTX;
992 data.pat = pat;
993 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
994 return data.found;
997 /* Given an INSN, return a SET expression if this insn has only a single SET.
998 It may also have CLOBBERs, USEs, or SETs whose outputs
999 will not be used, which we ignore. */
1002 single_set_2 (const_rtx insn, const_rtx pat)
1004 rtx set = NULL;
1005 int set_verified = 1;
1006 int i;
1008 if (GET_CODE (pat) == PARALLEL)
1010 for (i = 0; i < XVECLEN (pat, 0); i++)
1012 rtx sub = XVECEXP (pat, 0, i);
1013 switch (GET_CODE (sub))
1015 case USE:
1016 case CLOBBER:
1017 break;
1019 case SET:
1020 /* We can consider insns having multiple sets, where all
1021 but one are dead as single set insns. In common case
1022 only single set is present in the pattern so we want
1023 to avoid checking for REG_UNUSED notes unless necessary.
1025 When we reach set first time, we just expect this is
1026 the single set we are looking for and only when more
1027 sets are found in the insn, we check them. */
1028 if (!set_verified)
1030 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1031 && !side_effects_p (set))
1032 set = NULL;
1033 else
1034 set_verified = 1;
1036 if (!set)
1037 set = sub, set_verified = 0;
1038 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1039 || side_effects_p (sub))
1040 return NULL_RTX;
1041 break;
1043 default:
1044 return NULL_RTX;
1048 return set;
1051 /* Given an INSN, return nonzero if it has more than one SET, else return
1052 zero. */
1055 multiple_sets (const_rtx insn)
1057 int found;
1058 int i;
1060 /* INSN must be an insn. */
1061 if (! INSN_P (insn))
1062 return 0;
1064 /* Only a PARALLEL can have multiple SETs. */
1065 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1067 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1068 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1070 /* If we have already found a SET, then return now. */
1071 if (found)
1072 return 1;
1073 else
1074 found = 1;
1078 /* Either zero or one SET. */
1079 return 0;
1082 /* Return nonzero if the destination of SET equals the source
1083 and there are no side effects. */
1086 set_noop_p (const_rtx set)
1088 rtx src = SET_SRC (set);
1089 rtx dst = SET_DEST (set);
1091 if (dst == pc_rtx && src == pc_rtx)
1092 return 1;
1094 if (MEM_P (dst) && MEM_P (src))
1095 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1097 if (GET_CODE (dst) == ZERO_EXTRACT)
1098 return rtx_equal_p (XEXP (dst, 0), src)
1099 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1100 && !side_effects_p (src);
1102 if (GET_CODE (dst) == STRICT_LOW_PART)
1103 dst = XEXP (dst, 0);
1105 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1107 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1108 return 0;
1109 src = SUBREG_REG (src);
1110 dst = SUBREG_REG (dst);
1113 return (REG_P (src) && REG_P (dst)
1114 && REGNO (src) == REGNO (dst));
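/* E.g. (set (reg:SI 100) (reg:SI 100)) is a no-op move, as is a copy
   between matching SUBREGs of the same register.  */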
1117 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1118 value to itself. */
1121 noop_move_p (const_rtx insn)
1123 rtx pat = PATTERN (insn);
1125 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1126 return 1;
1128 /* Insns carrying these notes are useful later on. */
1129 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1130 return 0;
1132 /* For now treat an insn with a REG_RETVAL note as a
1133 special insn which should not be considered a no-op. */
1134 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1135 return 0;
1137 if (GET_CODE (pat) == SET && set_noop_p (pat))
1138 return 1;
1140 if (GET_CODE (pat) == PARALLEL)
1142 int i;
1143 /* If nothing but SETs of registers to themselves,
1144 this insn can also be deleted. */
1145 for (i = 0; i < XVECLEN (pat, 0); i++)
1147 rtx tem = XVECEXP (pat, 0, i);
1149 if (GET_CODE (tem) == USE
1150 || GET_CODE (tem) == CLOBBER)
1151 continue;
1153 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1154 return 0;
1157 return 1;
1159 return 0;
1163 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1164 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1165 If the object was modified, if we hit a partial assignment to X, or hit a
1166 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1167 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1168 be the src. */
1171 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1173 rtx p;
1175 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1176 p = PREV_INSN (p))
1177 if (INSN_P (p))
1179 rtx set = single_set (p);
1180 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1182 if (set && rtx_equal_p (x, SET_DEST (set)))
1184 rtx src = SET_SRC (set);
1186 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1187 src = XEXP (note, 0);
1189 if ((valid_to == NULL_RTX
1190 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1191 /* Reject hard registers because we don't usually want
1192 to use them; we'd rather use a pseudo. */
1193 && (! (REG_P (src)
1194 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1196 *pinsn = p;
1197 return src;
1201 /* If set in non-simple way, we don't have a value. */
1202 if (reg_set_p (x, p))
1203 break;
1206 return x;
1209 /* Return nonzero if register in range [REGNO, ENDREGNO)
1210 appears either explicitly or implicitly in X
1211 other than being stored into.
1213 References contained within the substructure at LOC do not count.
1214 LOC may be zero, meaning don't ignore anything. */
1217 refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1218 rtx *loc)
1220 int i;
1221 unsigned int x_regno;
1222 RTX_CODE code;
1223 const char *fmt;
1225 repeat:
1226 /* The contents of a REG_NONNEG note are always zero, so we must come here
1227 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1228 if (x == 0)
1229 return 0;
1231 code = GET_CODE (x);
1233 switch (code)
1235 case REG:
1236 x_regno = REGNO (x);
1238 /* If we are modifying the stack, frame, or argument pointer, it will
1239 clobber a virtual register. In fact, we could be more precise,
1240 but it isn't worth it. */
1241 if ((x_regno == STACK_POINTER_REGNUM
1242 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1243 || x_regno == ARG_POINTER_REGNUM
1244 #endif
1245 || x_regno == FRAME_POINTER_REGNUM)
1246 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1247 return 1;
1249 return endregno > x_regno && regno < END_REGNO (x);
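/* The half-open ranges [REGNO, ENDREGNO) and [X_REGNO, END_REGNO (X))
   overlap exactly when each one starts below the other's end, which is
   what the test above checks.  */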
1251 case SUBREG:
1252 /* If this is a SUBREG of a hard reg, we can see exactly which
1253 registers are being modified. Otherwise, handle normally. */
1254 if (REG_P (SUBREG_REG (x))
1255 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1257 unsigned int inner_regno = subreg_regno (x);
1258 unsigned int inner_endregno
1259 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1260 ? subreg_nregs (x) : 1);
1262 return endregno > inner_regno && regno < inner_endregno;
1264 break;
1266 case CLOBBER:
1267 case SET:
1268 if (&SET_DEST (x) != loc
1269 /* Note setting a SUBREG counts as referring to the REG it is in for
1270 a pseudo but not for hard registers since we can
1271 treat each word individually. */
1272 && ((GET_CODE (SET_DEST (x)) == SUBREG
1273 && loc != &SUBREG_REG (SET_DEST (x))
1274 && REG_P (SUBREG_REG (SET_DEST (x)))
1275 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1276 && refers_to_regno_p (regno, endregno,
1277 SUBREG_REG (SET_DEST (x)), loc))
1278 || (!REG_P (SET_DEST (x))
1279 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1280 return 1;
1282 if (code == CLOBBER || loc == &SET_SRC (x))
1283 return 0;
1284 x = SET_SRC (x);
1285 goto repeat;
1287 default:
1288 break;
1291 /* X does not match, so try its subexpressions. */
1293 fmt = GET_RTX_FORMAT (code);
1294 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1296 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1298 if (i == 0)
1300 x = XEXP (x, 0);
1301 goto repeat;
1303 else
1304 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1305 return 1;
1307 else if (fmt[i] == 'E')
1309 int j;
1310 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1311 if (loc != &XVECEXP (x, i, j)
1312 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1313 return 1;
1316 return 0;
1319 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1320 we check if any register number in X conflicts with the relevant register
1321 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1322 contains a MEM (we don't bother checking for memory addresses that can't
1323 conflict because we expect this to be a rare case). */
1326 reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1328 unsigned int regno, endregno;
1330 /* If either argument is a constant, then modifying X can not
1331 affect IN. Here we look at IN; we could profitably combine
1332 CONSTANT_P (x) with the switch statement below. */
1333 if (CONSTANT_P (in))
1334 return 0;
1336 recurse:
1337 switch (GET_CODE (x))
1339 case STRICT_LOW_PART:
1340 case ZERO_EXTRACT:
1341 case SIGN_EXTRACT:
1342 /* Overly conservative. */
1343 x = XEXP (x, 0);
1344 goto recurse;
1346 case SUBREG:
1347 regno = REGNO (SUBREG_REG (x));
1348 if (regno < FIRST_PSEUDO_REGISTER)
1349 regno = subreg_regno (x);
1350 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1351 ? subreg_nregs (x) : 1);
1352 goto do_reg;
1354 case REG:
1355 regno = REGNO (x);
1356 endregno = END_REGNO (x);
1357 do_reg:
1358 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1360 case MEM:
1362 const char *fmt;
1363 int i;
1365 if (MEM_P (in))
1366 return 1;
1368 fmt = GET_RTX_FORMAT (GET_CODE (in));
1369 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1370 if (fmt[i] == 'e')
1372 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1373 return 1;
1375 else if (fmt[i] == 'E')
1377 int j;
1378 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1379 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1380 return 1;
1383 return 0;
1386 case SCRATCH:
1387 case PC:
1388 case CC0:
1389 return reg_mentioned_p (x, in);
1391 case PARALLEL:
1393 int i;
1395 /* If any register in here refers to it we return true. */
1396 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1397 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1398 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1399 return 1;
1400 return 0;
1403 default:
1404 gcc_assert (CONSTANT_P (x));
1405 return 0;
1409 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1410 (X would be the pattern of an insn). DATA is an arbitrary pointer,
1411 ignored by note_stores, but passed to FUN.
1413 FUN receives three arguments:
1414 1. the REG, MEM, CC0 or PC being stored in or clobbered,
1415 2. the SET or CLOBBER rtx that does the store,
1416 3. the pointer DATA provided to note_stores.
1418 If the item being stored in or clobbered is a SUBREG of a hard register,
1419 the SUBREG will be passed. */
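/* Typical use (the callback and data names here are only illustrative):

     static void record_dest (rtx dest, const_rtx setter, void *data);
     ...
     note_stores (PATTERN (insn), record_dest, &my_data);

   calls record_dest once for every REG, MEM, CC0 or PC stored into or
   clobbered by the insn pattern.  */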
1421 void
1422 note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
1424 int i;
1426 if (GET_CODE (x) == COND_EXEC)
1427 x = COND_EXEC_CODE (x);
1429 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1431 rtx dest = SET_DEST (x);
1433 while ((GET_CODE (dest) == SUBREG
1434 && (!REG_P (SUBREG_REG (dest))
1435 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1436 || GET_CODE (dest) == ZERO_EXTRACT
1437 || GET_CODE (dest) == STRICT_LOW_PART)
1438 dest = XEXP (dest, 0);
1440 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1441 each of whose first operand is a register. */
1442 if (GET_CODE (dest) == PARALLEL)
1444 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1445 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1446 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1448 else
1449 (*fun) (dest, x, data);
1452 else if (GET_CODE (x) == PARALLEL)
1453 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1454 note_stores (XVECEXP (x, 0, i), fun, data);
1457 /* Like note_stores, but call FUN for each expression that is being
1458 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1459 FUN for each expression, not any interior subexpressions. FUN receives a
1460 pointer to the expression and the DATA passed to this function.
1462 Note that this is not quite the same test as that done in reg_referenced_p
1463 since that considers something as being referenced if it is being
1464 partially set, while we do not. */
1466 void
1467 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1469 rtx body = *pbody;
1470 int i;
1472 switch (GET_CODE (body))
1474 case COND_EXEC:
1475 (*fun) (&COND_EXEC_TEST (body), data);
1476 note_uses (&COND_EXEC_CODE (body), fun, data);
1477 return;
1479 case PARALLEL:
1480 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1481 note_uses (&XVECEXP (body, 0, i), fun, data);
1482 return;
1484 case SEQUENCE:
1485 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1486 note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1487 return;
1489 case USE:
1490 (*fun) (&XEXP (body, 0), data);
1491 return;
1493 case ASM_OPERANDS:
1494 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1495 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1496 return;
1498 case TRAP_IF:
1499 (*fun) (&TRAP_CONDITION (body), data);
1500 return;
1502 case PREFETCH:
1503 (*fun) (&XEXP (body, 0), data);
1504 return;
1506 case UNSPEC:
1507 case UNSPEC_VOLATILE:
1508 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1509 (*fun) (&XVECEXP (body, 0, i), data);
1510 return;
1512 case CLOBBER:
1513 if (MEM_P (XEXP (body, 0)))
1514 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1515 return;
1517 case SET:
1519 rtx dest = SET_DEST (body);
1521 /* For a SET we visit the source, the operands of a ZERO_EXTRACT
1522 destination, and the address of a MEM destination. */
1523 (*fun) (&SET_SRC (body), data);
1525 if (GET_CODE (dest) == ZERO_EXTRACT)
1527 (*fun) (&XEXP (dest, 1), data);
1528 (*fun) (&XEXP (dest, 2), data);
1531 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1532 dest = XEXP (dest, 0);
1534 if (MEM_P (dest))
1535 (*fun) (&XEXP (dest, 0), data);
1537 return;
1539 default:
1540 /* All the other possibilities never store. */
1541 (*fun) (pbody, data);
1542 return;
1546 /* Return nonzero if X's old contents don't survive after INSN.
1547 This will be true if X is (cc0) or if X is a register and
1548 X dies in INSN or because INSN entirely sets X.
1550 "Entirely set" means set directly and not through a SUBREG, or
1551 ZERO_EXTRACT, so no trace of the old contents remains.
1552 Likewise, REG_INC does not count.
1554 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1555 but for this use that makes no difference, since regs don't overlap
1556 during their lifetimes. Therefore, this function may be used
1557 at any time after deaths have been computed.
1559 If REG is a hard reg that occupies multiple machine registers, this
1560 function will only return 1 if each of those registers will be replaced
1561 by INSN. */
1564 dead_or_set_p (const_rtx insn, const_rtx x)
1566 unsigned int regno, end_regno;
1567 unsigned int i;
1569 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1570 if (GET_CODE (x) == CC0)
1571 return 1;
1573 gcc_assert (REG_P (x));
1575 regno = REGNO (x);
1576 end_regno = END_REGNO (x);
1577 for (i = regno; i < end_regno; i++)
1578 if (! dead_or_set_regno_p (insn, i))
1579 return 0;
1581 return 1;
1584 /* Return TRUE iff DEST is a register or subreg of a register and
1585 doesn't change the number of words of the inner register, and any
1586 part of the register is TEST_REGNO. */
1588 static bool
1589 covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1591 unsigned int regno, endregno;
1593 if (GET_CODE (dest) == SUBREG
1594 && (((GET_MODE_SIZE (GET_MODE (dest))
1595 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1596 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1597 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1598 dest = SUBREG_REG (dest);
1600 if (!REG_P (dest))
1601 return false;
1603 regno = REGNO (dest);
1604 endregno = END_REGNO (dest);
1605 return (test_regno >= regno && test_regno < endregno);
1608 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1609 any member matches the covers_regno_no_parallel_p criteria. */
1611 static bool
1612 covers_regno_p (const_rtx dest, unsigned int test_regno)
1614 if (GET_CODE (dest) == PARALLEL)
1616 /* Some targets place small structures in registers for return
1617 values of functions, and those registers are wrapped in
1618 PARALLELs that we may see as the destination of a SET. */
1619 int i;
1621 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1623 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1624 if (inner != NULL_RTX
1625 && covers_regno_no_parallel_p (inner, test_regno))
1626 return true;
1629 return false;
1631 else
1632 return covers_regno_no_parallel_p (dest, test_regno);
1635 /* Utility function for dead_or_set_p to check an individual register. */
1638 dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1640 const_rtx pattern;
1642 /* See if there is a death note for something that includes TEST_REGNO. */
1643 if (find_regno_note (insn, REG_DEAD, test_regno))
1644 return 1;
1646 if (CALL_P (insn)
1647 && find_regno_fusage (insn, CLOBBER, test_regno))
1648 return 1;
1650 pattern = PATTERN (insn);
1652 if (GET_CODE (pattern) == COND_EXEC)
1653 pattern = COND_EXEC_CODE (pattern);
1655 if (GET_CODE (pattern) == SET)
1656 return covers_regno_p (SET_DEST (pattern), test_regno);
1657 else if (GET_CODE (pattern) == PARALLEL)
1659 int i;
1661 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1663 rtx body = XVECEXP (pattern, 0, i);
1665 if (GET_CODE (body) == COND_EXEC)
1666 body = COND_EXEC_CODE (body);
1668 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1669 && covers_regno_p (SET_DEST (body), test_regno))
1670 return 1;
1674 return 0;
1677 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1678 If DATUM is nonzero, look for one whose datum is DATUM. */
1681 find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1683 rtx link;
1685 gcc_assert (insn);
1687 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1688 if (! INSN_P (insn))
1689 return 0;
1690 if (datum == 0)
1692 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1693 if (REG_NOTE_KIND (link) == kind)
1694 return link;
1695 return 0;
1698 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1699 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1700 return link;
1701 return 0;
1704 /* Return the reg-note of kind KIND in insn INSN which applies to register
1705 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1706 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1707 it might be the case that the note overlaps REGNO. */
1710 find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1712 rtx link;
1714 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1715 if (! INSN_P (insn))
1716 return 0;
1718 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1719 if (REG_NOTE_KIND (link) == kind
1720 /* Verify that it is a register, so that scratch and MEM won't cause a
1721 problem here. */
1722 && REG_P (XEXP (link, 0))
1723 && REGNO (XEXP (link, 0)) <= regno
1724 && END_REGNO (XEXP (link, 0)) > regno)
1725 return link;
1726 return 0;
1729 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1730 has such a note. */
1733 find_reg_equal_equiv_note (const_rtx insn)
1735 rtx link;
1737 if (!INSN_P (insn))
1738 return 0;
1740 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1741 if (REG_NOTE_KIND (link) == REG_EQUAL
1742 || REG_NOTE_KIND (link) == REG_EQUIV)
1744 /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1745 insns that have multiple sets. Checking single_set to
1746 make sure of this is not the proper check, as explained
1747 in the comment in set_unique_reg_note.
1749 This should be changed into an assert. */
1750 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1751 return 0;
1752 return link;
1754 return NULL;
1757 /* Check whether INSN is a single_set whose source is known to be
1758 equivalent to a constant. Return that constant if so, otherwise
1759 return null. */
1762 find_constant_src (const_rtx insn)
1764 rtx note, set, x;
1766 set = single_set (insn);
1767 if (set)
1769 x = avoid_constant_pool_reference (SET_SRC (set));
1770 if (CONSTANT_P (x))
1771 return x;
1774 note = find_reg_equal_equiv_note (insn);
1775 if (note && CONSTANT_P (XEXP (note, 0)))
1776 return XEXP (note, 0);
1778 return NULL_RTX;
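/* E.g. for (set (reg:SI 100) (const_int 42)) this returns (const_int 42);
   an insn whose REG_EQUAL or REG_EQUIV note names a constant is handled
   the same way.  */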
1781 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1782 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1785 find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1787 /* If it's not a CALL_INSN, it can't possibly have a
1788 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1789 if (!CALL_P (insn))
1790 return 0;
1792 gcc_assert (datum);
1794 if (!REG_P (datum))
1796 rtx link;
1798 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1799 link;
1800 link = XEXP (link, 1))
1801 if (GET_CODE (XEXP (link, 0)) == code
1802 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1803 return 1;
1805 else
1807 unsigned int regno = REGNO (datum);
1809 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1810 to pseudo registers, so don't bother checking. */
1812 if (regno < FIRST_PSEUDO_REGISTER)
1814 unsigned int end_regno = END_HARD_REGNO (datum);
1815 unsigned int i;
1817 for (i = regno; i < end_regno; i++)
1818 if (find_regno_fusage (insn, code, i))
1819 return 1;
1823 return 0;
1826 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1827 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1830 find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
1832 rtx link;
1834 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1835 to pseudo registers, so don't bother checking. */
1837 if (regno >= FIRST_PSEUDO_REGISTER
1838 || !CALL_P (insn) )
1839 return 0;
1841 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1843 rtx op, reg;
1845 if (GET_CODE (op = XEXP (link, 0)) == code
1846 && REG_P (reg = XEXP (op, 0))
1847 && REGNO (reg) <= regno
1848 && END_HARD_REGNO (reg) > regno)
1849 return 1;
1852 return 0;
1856 /* Remove register note NOTE from the REG_NOTES of INSN. */
1858 void
1859 remove_note (rtx insn, const_rtx note)
1861 rtx link;
1863 if (note == NULL_RTX)
1864 return;
1866 if (REG_NOTES (insn) == note)
1867 REG_NOTES (insn) = XEXP (note, 1);
1868 else
1869 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1870 if (XEXP (link, 1) == note)
1872 XEXP (link, 1) = XEXP (note, 1);
1873 break;
1876 switch (REG_NOTE_KIND (note))
1878 case REG_EQUAL:
1879 case REG_EQUIV:
1880 df_notes_rescan (insn);
1881 break;
1882 default:
1883 break;
1887 /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
1889 void
1890 remove_reg_equal_equiv_notes (rtx insn)
1892 rtx *loc;
1894 loc = &REG_NOTES (insn);
1895 while (*loc)
1897 enum reg_note kind = REG_NOTE_KIND (*loc);
1898 if (kind == REG_EQUAL || kind == REG_EQUIV)
1899 *loc = XEXP (*loc, 1);
1900 else
1901 loc = &XEXP (*loc, 1);
1905 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1906 return 1 if it is found. A simple equality test is used to determine if
1907 NODE matches. */
1910 in_expr_list_p (const_rtx listp, const_rtx node)
1912 const_rtx x;
1914 for (x = listp; x; x = XEXP (x, 1))
1915 if (node == XEXP (x, 0))
1916 return 1;
1918 return 0;
1921 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1922 remove that entry from the list if it is found.
1924 A simple equality test is used to determine if NODE matches. */
1926 void
1927 remove_node_from_expr_list (const_rtx node, rtx *listp)
1929 rtx temp = *listp;
1930 rtx prev = NULL_RTX;
1932 while (temp)
1934 if (node == XEXP (temp, 0))
1936 /* Splice the node out of the list. */
1937 if (prev)
1938 XEXP (prev, 1) = XEXP (temp, 1);
1939 else
1940 *listp = XEXP (temp, 1);
1942 return;
1945 prev = temp;
1946 temp = XEXP (temp, 1);
1950 /* Nonzero if X contains any volatile instructions. These are instructions
1951 which may cause unpredictable machine state, and thus no
1952 instructions should be moved or combined across them. This includes
1953 only volatile asms and UNSPEC_VOLATILE instructions. */
1956 volatile_insn_p (const_rtx x)
1958 const RTX_CODE code = GET_CODE (x);
1959 switch (code)
1961 case LABEL_REF:
1962 case SYMBOL_REF:
1963 case CONST_INT:
1964 case CONST:
1965 case CONST_DOUBLE:
1966 case CONST_FIXED:
1967 case CONST_VECTOR:
1968 case CC0:
1969 case PC:
1970 case REG:
1971 case SCRATCH:
1972 case CLOBBER:
1973 case ADDR_VEC:
1974 case ADDR_DIFF_VEC:
1975 case CALL:
1976 case MEM:
1977 return 0;
1979 case UNSPEC_VOLATILE:
1980 /* case TRAP_IF: This isn't clear yet. */
1981 return 1;
1983 case ASM_INPUT:
1984 case ASM_OPERANDS:
1985 if (MEM_VOLATILE_P (x))
1986 return 1;
1988 default:
1989 break;
1992 /* Recursively scan the operands of this expression. */
1995 const char *const fmt = GET_RTX_FORMAT (code);
1996 int i;
1998 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2000 if (fmt[i] == 'e')
2002 if (volatile_insn_p (XEXP (x, i)))
2003 return 1;
2005 else if (fmt[i] == 'E')
2007 int j;
2008 for (j = 0; j < XVECLEN (x, i); j++)
2009 if (volatile_insn_p (XVECEXP (x, i, j)))
2010 return 1;
2014 return 0;
2017 /* Nonzero if X contains any volatile memory references
2018 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2021 volatile_refs_p (const_rtx x)
2023 const RTX_CODE code = GET_CODE (x);
2024 switch (code)
2026 case LABEL_REF:
2027 case SYMBOL_REF:
2028 case CONST_INT:
2029 case CONST:
2030 case CONST_DOUBLE:
2031 case CONST_FIXED:
2032 case CONST_VECTOR:
2033 case CC0:
2034 case PC:
2035 case REG:
2036 case SCRATCH:
2037 case CLOBBER:
2038 case ADDR_VEC:
2039 case ADDR_DIFF_VEC:
2040 return 0;
2042 case UNSPEC_VOLATILE:
2043 return 1;
2045 case MEM:
2046 case ASM_INPUT:
2047 case ASM_OPERANDS:
2048 if (MEM_VOLATILE_P (x))
2049 return 1;
2051 default:
2052 break;
2055 /* Recursively scan the operands of this expression. */
2058 const char *const fmt = GET_RTX_FORMAT (code);
2059 int i;
2061 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2063 if (fmt[i] == 'e')
2065 if (volatile_refs_p (XEXP (x, i)))
2066 return 1;
2068 else if (fmt[i] == 'E')
2070 int j;
2071 for (j = 0; j < XVECLEN (x, i); j++)
2072 if (volatile_refs_p (XVECEXP (x, i, j)))
2073 return 1;
2077 return 0;
2080 /* Similar to above, except that it also rejects register pre- and post-
2081 incrementing. */
2084 side_effects_p (const_rtx x)
2086 const RTX_CODE code = GET_CODE (x);
2087 switch (code)
2089 case LABEL_REF:
2090 case SYMBOL_REF:
2091 case CONST_INT:
2092 case CONST:
2093 case CONST_DOUBLE:
2094 case CONST_FIXED:
2095 case CONST_VECTOR:
2096 case CC0:
2097 case PC:
2098 case REG:
2099 case SCRATCH:
2100 case ADDR_VEC:
2101 case ADDR_DIFF_VEC:
2102 return 0;
2104 case CLOBBER:
2105 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2106 when some combination can't be done. If we see one, don't think
2107 that we can simplify the expression. */
2108 return (GET_MODE (x) != VOIDmode);
2110 case PRE_INC:
2111 case PRE_DEC:
2112 case POST_INC:
2113 case POST_DEC:
2114 case PRE_MODIFY:
2115 case POST_MODIFY:
2116 case CALL:
2117 case UNSPEC_VOLATILE:
2118 /* case TRAP_IF: This isn't clear yet. */
2119 return 1;
2121 case MEM:
2122 case ASM_INPUT:
2123 case ASM_OPERANDS:
2124 if (MEM_VOLATILE_P (x))
2125 return 1;
2127 default:
2128 break;
2131 /* Recursively scan the operands of this expression. */
2134 const char *fmt = GET_RTX_FORMAT (code);
2135 int i;
2137 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2139 if (fmt[i] == 'e')
2141 if (side_effects_p (XEXP (x, i)))
2142 return 1;
2144 else if (fmt[i] == 'E')
2146 int j;
2147 for (j = 0; j < XVECLEN (x, i); j++)
2148 if (side_effects_p (XVECEXP (x, i, j)))
2149 return 1;
2153 return 0;
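/* Compared with volatile_refs_p above, this also treats auto-increment
   and auto-decrement addresses, CALLs, and CLOBBERs with a non-VOID mode
   as side effects.  */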
2156 enum may_trap_p_flags
2158 MTP_UNALIGNED_MEMS = 1,
2159 MTP_AFTER_MOVE = 2
2161 /* Return nonzero if evaluating rtx X might cause a trap.
2162 (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
2163 unaligned memory accesses on strict alignment machines. If
2164 (FLAGS & MTP_AFTER_MOVE) is true, returns nonzero even if the expression
2165 cannot trap at its current location, but it might become trapping if moved
2166 elsewhere. */
2169 may_trap_p_1 (const_rtx x, unsigned flags)
2171 int i;
2172 enum rtx_code code;
2173 const char *fmt;
2174 bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;
2176 if (x == 0)
2177 return 0;
2178 code = GET_CODE (x);
2179 switch (code)
2181 /* Handle these cases quickly. */
2182 case CONST_INT:
2183 case CONST_DOUBLE:
2184 case CONST_FIXED:
2185 case CONST_VECTOR:
2186 case SYMBOL_REF:
2187 case LABEL_REF:
2188 case CONST:
2189 case PC:
2190 case CC0:
2191 case REG:
2192 case SCRATCH:
2193 return 0;
2195 case UNSPEC:
2196 case UNSPEC_VOLATILE:
2197 return targetm.unspec_may_trap_p (x, flags);
2199 case ASM_INPUT:
2200 case TRAP_IF:
2201 return 1;
2203 case ASM_OPERANDS:
2204 return MEM_VOLATILE_P (x);
2206 /* Memory ref can trap unless it's a static var or a stack slot. */
2207 case MEM:
2208 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2209 reference; moving it out of a condition might cause its address
2210 to become invalid. */
2211 !(flags & MTP_AFTER_MOVE)
2212 && MEM_NOTRAP_P (x)
2213 && (!STRICT_ALIGNMENT || !unaligned_mems))
2214 return 0;
2215 return
2216 rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);
2218 /* Division by a non-constant might trap. */
2219 case DIV:
2220 case MOD:
2221 case UDIV:
2222 case UMOD:
2223 if (HONOR_SNANS (GET_MODE (x)))
2224 return 1;
2225 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2226 return flag_trapping_math;
2227 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2228 return 1;
2229 break;
2231 case EXPR_LIST:
2232 /* An EXPR_LIST is used to represent a function call. This
2233 certainly may trap. */
2234 return 1;
2236 case GE:
2237 case GT:
2238 case LE:
2239 case LT:
2240 case LTGT:
2241 case COMPARE:
2242 /* Some floating point comparisons may trap. */
2243 if (!flag_trapping_math)
2244 break;
2245 /* ??? There is no machine independent way to check for tests that trap
2246 when COMPARE is used, though many targets do make this distinction.
2247 For instance, sparc uses CCFPE for compares which generate exceptions
2248 and CCFP for compares which do not generate exceptions. */
2249 if (HONOR_NANS (GET_MODE (x)))
2250 return 1;
2251 /* But often the compare has some CC mode, so check operand
2252 modes as well. */
2253 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2254 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2255 return 1;
2256 break;
2258 case EQ:
2259 case NE:
2260 if (HONOR_SNANS (GET_MODE (x)))
2261 return 1;
2262 /* Often comparison is CC mode, so check operand modes. */
2263 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2264 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2265 return 1;
2266 break;
2268 case FIX:
2269 /* Conversion of floating point might trap. */
2270 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2271 return 1;
2272 break;
2274 case NEG:
2275 case ABS:
2276 case SUBREG:
2277 /* These operations don't trap even with floating point. */
2278 break;
2280 default:
2281 /* Any floating arithmetic may trap. */
2282 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2283 && flag_trapping_math)
2284 return 1;
2287 fmt = GET_RTX_FORMAT (code);
2288 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2290 if (fmt[i] == 'e')
2292 if (may_trap_p_1 (XEXP (x, i), flags))
2293 return 1;
2295 else if (fmt[i] == 'E')
2297 int j;
2298 for (j = 0; j < XVECLEN (x, i); j++)
2299 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2300 return 1;
2303 return 0;
2306 /* Return nonzero if evaluating rtx X might cause a trap. */
2309 may_trap_p (const_rtx x)
2311 return may_trap_p_1 (x, 0);
2314 /* Return nonzero if evaluating rtx X might cause a trap, when the expression
2315 is moved from its current location by some optimization. */
2318 may_trap_after_code_motion_p (const_rtx x)
2320 return may_trap_p_1 (x, MTP_AFTER_MOVE);
2323 /* Same as above, but additionally return nonzero if evaluating rtx X might
2324 cause a fault. We define a fault for the purpose of this function as an
2325 erroneous execution condition that cannot be encountered during the normal
2326 execution of a valid program; the typical example is an unaligned memory
2327 access on a strict alignment machine. The compiler guarantees that it
2328 doesn't generate code that will fault from a valid program, but this
2329 guarantee doesn't mean anything for individual instructions. Consider
2330 the following example:
2332 struct S { int d; union { char *cp; int *ip; }; };
2334 int foo(struct S *s)
2336 if (s->d == 1)
2337 return *s->ip;
2338 else
2339 return *s->cp;
2342 on a strict alignment machine. In a valid program, foo will never be
2343 invoked on a structure for which d is equal to 1 and the underlying
2344 unique field of the union not aligned on a 4-byte boundary, but the
2345 expression *s->ip might cause a fault if considered individually.
2347 At the RTL level, potentially problematic expressions will almost always
2348 satisfy may_trap_p; for example, the above dereference can be emitted as
2349 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2350 However, suppose that foo is inlined in a caller that causes s->cp to
2351 point to a local character variable and guarantees that s->d is not set
2352 to 1; foo may have been effectively translated into pseudo-RTL as:
2354 if ((reg:SI) == 1)
2355 (set (reg:SI) (mem:SI (%fp - 7)))
2356 else
2357 (set (reg:QI) (mem:QI (%fp - 7)))
2359 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2360 memory reference to a stack slot, but it will certainly cause a fault
2361 on a strict alignment machine. */
2364 may_trap_or_fault_p (const_rtx x)
2366 return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
2369 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2370 i.e., an inequality. */
2373 inequality_comparisons_p (const_rtx x)
2375 const char *fmt;
2376 int len, i;
2377 const enum rtx_code code = GET_CODE (x);
2379 switch (code)
2381 case REG:
2382 case SCRATCH:
2383 case PC:
2384 case CC0:
2385 case CONST_INT:
2386 case CONST_DOUBLE:
2387 case CONST_FIXED:
2388 case CONST_VECTOR:
2389 case CONST:
2390 case LABEL_REF:
2391 case SYMBOL_REF:
2392 return 0;
2394 case LT:
2395 case LTU:
2396 case GT:
2397 case GTU:
2398 case LE:
2399 case LEU:
2400 case GE:
2401 case GEU:
2402 return 1;
2404 default:
2405 break;
2408 len = GET_RTX_LENGTH (code);
2409 fmt = GET_RTX_FORMAT (code);
2411 for (i = 0; i < len; i++)
2413 if (fmt[i] == 'e')
2415 if (inequality_comparisons_p (XEXP (x, i)))
2416 return 1;
2418 else if (fmt[i] == 'E')
2420 int j;
2421 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2422 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2423 return 1;
2427 return 0;
2430 /* Replace any occurrence of FROM in X with TO. The function does
2431 not recurse into a CONST_DOUBLE during the replacement.
2433 Note that copying is not done so X must not be shared unless all copies
2434 are to be modified. */
2437 replace_rtx (rtx x, rtx from, rtx to)
2439 int i, j;
2440 const char *fmt;
2442 /* The following prevents an infinite loop when we replace a MEM inside a
2443 CONST_DOUBLE with the same CONST_DOUBLE. */
2444 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2445 return x;
2447 if (x == from)
2448 return to;
2450 /* Allow this function to make replacements in EXPR_LISTs. */
2451 if (x == 0)
2452 return 0;
2454 if (GET_CODE (x) == SUBREG)
2456 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2458 if (GET_CODE (new) == CONST_INT)
2460 x = simplify_subreg (GET_MODE (x), new,
2461 GET_MODE (SUBREG_REG (x)),
2462 SUBREG_BYTE (x));
2463 gcc_assert (x);
2465 else
2466 SUBREG_REG (x) = new;
2468 return x;
2470 else if (GET_CODE (x) == ZERO_EXTEND)
2472 rtx new = replace_rtx (XEXP (x, 0), from, to);
2474 if (GET_CODE (new) == CONST_INT)
2476 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2477 new, GET_MODE (XEXP (x, 0)));
2478 gcc_assert (x);
2480 else
2481 XEXP (x, 0) = new;
2483 return x;
2486 fmt = GET_RTX_FORMAT (GET_CODE (x));
2487 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2489 if (fmt[i] == 'e')
2490 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2491 else if (fmt[i] == 'E')
2492 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2493 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2496 return x;
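/* Editorial usage note (illustrative, not part of the original source;
   NEW_PAT, PAT, OLD_REG and NEW_REG are placeholder names): since
   replace_rtx rewrites X destructively, callers that need to keep the
   original expression must hand it a fresh copy, e.g.

     new_pat = replace_rtx (copy_rtx (pat), old_reg, new_reg);

   Passing PAT itself is only safe when every sharer of PAT is meant to
   see the replacement.  */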
2499 /* Replace occurrences of the old label in *X with the new one.
2500 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2503 replace_label (rtx *x, void *data)
2505 rtx l = *x;
2506 rtx old_label = ((replace_label_data *) data)->r1;
2507 rtx new_label = ((replace_label_data *) data)->r2;
2508 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2510 if (l == NULL_RTX)
2511 return 0;
2513 if (GET_CODE (l) == SYMBOL_REF
2514 && CONSTANT_POOL_ADDRESS_P (l))
2516 rtx c = get_pool_constant (l);
2517 if (rtx_referenced_p (old_label, c))
2519 rtx new_c, new_l;
2520 replace_label_data *d = (replace_label_data *) data;
2522 /* Create a copy of constant C; replace the label inside
2523 but do not update LABEL_NUSES because uses in constant pool
2524 are not counted. */
2525 new_c = copy_rtx (c);
2526 d->update_label_nuses = false;
2527 for_each_rtx (&new_c, replace_label, data);
2528 d->update_label_nuses = update_label_nuses;
2530 /* Add the new constant NEW_C to the constant pool and replace
2531 the old reference to the constant with the new reference. */
2532 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2533 *x = replace_rtx (l, l, new_l);
2535 return 0;
2538 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2539 field. This is not handled by for_each_rtx because it doesn't
2540 handle unprinted ('0') fields. */
2541 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2542 JUMP_LABEL (l) = new_label;
2544 if ((GET_CODE (l) == LABEL_REF
2545 || GET_CODE (l) == INSN_LIST)
2546 && XEXP (l, 0) == old_label)
2548 XEXP (l, 0) = new_label;
2549 if (update_label_nuses)
2551 ++LABEL_NUSES (new_label);
2552 --LABEL_NUSES (old_label);
2554 return 0;
2557 return 0;
2560 /* Return nonzero when *BODY is equal to X or X is directly referenced by
2561 *BODY, so that FOR_EACH_RTX stops traversing and returns nonzero too;
2562 otherwise FOR_EACH_RTX continues traversing *BODY. */
2564 static int
2565 rtx_referenced_p_1 (rtx *body, void *x)
2567 rtx y = (rtx) x;
2569 if (*body == NULL_RTX)
2570 return y == NULL_RTX;
2572 /* Return true if a label_ref *BODY refers to label Y. */
2573 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2574 return XEXP (*body, 0) == y;
2576 /* If *BODY is a reference to pool constant traverse the constant. */
2577 if (GET_CODE (*body) == SYMBOL_REF
2578 && CONSTANT_POOL_ADDRESS_P (*body))
2579 return rtx_referenced_p (y, get_pool_constant (*body));
2581 /* By default, compare the RTL expressions. */
2582 return rtx_equal_p (*body, y);
2585 /* Return true if X is referenced in BODY. */
2588 rtx_referenced_p (rtx x, rtx body)
2590 return for_each_rtx (&body, rtx_referenced_p_1, x);
2593 /* If INSN is a tablejump, return true and store the label (which precedes
2594 the jump table) in *LABELP and the table in *TABLEP. LABELP and TABLEP may be NULL. */
2596 bool
2597 tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
2599 rtx label, table;
2601 if (JUMP_P (insn)
2602 && (label = JUMP_LABEL (insn)) != NULL_RTX
2603 && (table = next_active_insn (label)) != NULL_RTX
2604 && JUMP_P (table)
2605 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2606 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2608 if (labelp)
2609 *labelp = label;
2610 if (tablep)
2611 *tablep = table;
2612 return true;
2614 return false;
2617 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2618 constant that is not in the constant pool and not in the condition
2619 of an IF_THEN_ELSE. */
2621 static int
2622 computed_jump_p_1 (const_rtx x)
2624 const enum rtx_code code = GET_CODE (x);
2625 int i, j;
2626 const char *fmt;
2628 switch (code)
2630 case LABEL_REF:
2631 case PC:
2632 return 0;
2634 case CONST:
2635 case CONST_INT:
2636 case CONST_DOUBLE:
2637 case CONST_FIXED:
2638 case CONST_VECTOR:
2639 case SYMBOL_REF:
2640 case REG:
2641 return 1;
2643 case MEM:
2644 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2645 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2647 case IF_THEN_ELSE:
2648 return (computed_jump_p_1 (XEXP (x, 1))
2649 || computed_jump_p_1 (XEXP (x, 2)));
2651 default:
2652 break;
2655 fmt = GET_RTX_FORMAT (code);
2656 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2658 if (fmt[i] == 'e'
2659 && computed_jump_p_1 (XEXP (x, i)))
2660 return 1;
2662 else if (fmt[i] == 'E')
2663 for (j = 0; j < XVECLEN (x, i); j++)
2664 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2665 return 1;
2668 return 0;
2671 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2673 Tablejumps and casesi insns are not considered indirect jumps;
2674 we can recognize them by a (use (label_ref)). */
2677 computed_jump_p (const_rtx insn)
2679 int i;
2680 if (JUMP_P (insn))
2682 rtx pat = PATTERN (insn);
2684 /* If we have a JUMP_LABEL set, we're not a computed jump. */
2685 if (JUMP_LABEL (insn) != NULL)
2686 return 0;
2688 if (GET_CODE (pat) == PARALLEL)
2690 int len = XVECLEN (pat, 0);
2691 int has_use_labelref = 0;
2693 for (i = len - 1; i >= 0; i--)
2694 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2695 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2696 == LABEL_REF))
2697 has_use_labelref = 1;
2699 if (! has_use_labelref)
2700 for (i = len - 1; i >= 0; i--)
2701 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2702 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2703 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2704 return 1;
2706 else if (GET_CODE (pat) == SET
2707 && SET_DEST (pat) == pc_rtx
2708 && computed_jump_p_1 (SET_SRC (pat)))
2709 return 1;
2711 return 0;
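/* Editorial illustration (not part of the original source): under the test
   above, a pattern such as (set (pc) (reg:SI 100)) is a computed jump,
   whereas a tablejump emitted as
     (parallel [(set (pc) (mem:SI (plus (reg:SI 100) (reg:SI 101))))
                (use (label_ref 42))])
   is not, because the (use (label_ref ...)) marks it as table-driven and
   such insns normally also carry a JUMP_LABEL.  */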
2714 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2715 calls. Processes the subexpressions of EXP and passes them to F. */
2716 static int
2717 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2719 int result, i, j;
2720 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2721 rtx *x;
2723 for (; format[n] != '\0'; n++)
2725 switch (format[n])
2727 case 'e':
2728 /* Call F on X. */
2729 x = &XEXP (exp, n);
2730 result = (*f) (x, data);
2731 if (result == -1)
2732 /* Do not traverse sub-expressions. */
2733 continue;
2734 else if (result != 0)
2735 /* Stop the traversal. */
2736 return result;
2738 if (*x == NULL_RTX)
2739 /* There are no sub-expressions. */
2740 continue;
2742 i = non_rtx_starting_operands[GET_CODE (*x)];
2743 if (i >= 0)
2745 result = for_each_rtx_1 (*x, i, f, data);
2746 if (result != 0)
2747 return result;
2749 break;
2751 case 'V':
2752 case 'E':
2753 if (XVEC (exp, n) == 0)
2754 continue;
2755 for (j = 0; j < XVECLEN (exp, n); ++j)
2757 /* Call F on X. */
2758 x = &XVECEXP (exp, n, j);
2759 result = (*f) (x, data);
2760 if (result == -1)
2761 /* Do not traverse sub-expressions. */
2762 continue;
2763 else if (result != 0)
2764 /* Stop the traversal. */
2765 return result;
2767 if (*x == NULL_RTX)
2768 /* There are no sub-expressions. */
2769 continue;
2771 i = non_rtx_starting_operands[GET_CODE (*x)];
2772 if (i >= 0)
2774 result = for_each_rtx_1 (*x, i, f, data);
2775 if (result != 0)
2776 return result;
2779 break;
2781 default:
2782 /* Nothing to do. */
2783 break;
2787 return 0;
2790 /* Traverse X via depth-first search, calling F for each
2791 sub-expression (including X itself). F is also passed the DATA.
2792 If F returns -1, do not traverse sub-expressions, but continue
2793 traversing the rest of the tree. If F ever returns any other
2794 nonzero value, stop the traversal, and return the value returned
2795 by F. Otherwise, return 0. This function does not traverse inside
2796 tree structure that contains RTX_EXPRs, or into sub-expressions
2797 whose format code is `0' since it is not known whether or not those
2798 codes are actually RTL.
2800 This routine is very general, and could (should?) be used to
2801 implement many of the other routines in this file. */
2804 for_each_rtx (rtx *x, rtx_function f, void *data)
2806 int result;
2807 int i;
2809 /* Call F on X. */
2810 result = (*f) (x, data);
2811 if (result == -1)
2812 /* Do not traverse sub-expressions. */
2813 return 0;
2814 else if (result != 0)
2815 /* Stop the traversal. */
2816 return result;
2818 if (*x == NULL_RTX)
2819 /* There are no sub-expressions. */
2820 return 0;
2822 i = non_rtx_starting_operands[GET_CODE (*x)];
2823 if (i < 0)
2824 return 0;
2826 return for_each_rtx_1 (*x, i, f, data);
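#if 0
/* Editorial sketch, not part of the original source: a minimal for_each_rtx
   callback illustrating the return protocol documented above (-1 skips the
   sub-expressions of *LOC, any other nonzero value aborts the walk, 0
   continues).  It counts MEM sub-expressions of an insn pattern.  */
static int
count_mems_1 (rtx *loc, void *data)
{
  if (*loc && MEM_P (*loc))
    {
      ++*(int *) data;
      /* Do not look inside the address of this MEM.  */
      return -1;
    }
  /* Keep traversing.  */
  return 0;
}
/* Typical use:  int n = 0;  for_each_rtx (&PATTERN (insn), count_mems_1, &n);  */
#endif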
2830 /* Searches X for any reference to REGNO, returning the rtx of the
2831 reference found if any. Otherwise, returns NULL_RTX. */
2834 regno_use_in (unsigned int regno, rtx x)
2836 const char *fmt;
2837 int i, j;
2838 rtx tem;
2840 if (REG_P (x) && REGNO (x) == regno)
2841 return x;
2843 fmt = GET_RTX_FORMAT (GET_CODE (x));
2844 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2846 if (fmt[i] == 'e')
2848 if ((tem = regno_use_in (regno, XEXP (x, i))))
2849 return tem;
2851 else if (fmt[i] == 'E')
2852 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2853 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2854 return tem;
2857 return NULL_RTX;
2860 /* Return a value indicating whether OP, an operand of a commutative
2861 operation, is preferred as the first or second operand. The higher
2862 the value, the stronger the preference for being the first operand.
2863 We use positive values to indicate a preference for the first operand
2864 and negative values for the second operand. */
2867 commutative_operand_precedence (rtx op)
2869 enum rtx_code code = GET_CODE (op);
2871 /* Constants always come second. Prefer "nice" constants. */
2872 if (code == CONST_INT)
2873 return -8;
2874 if (code == CONST_DOUBLE)
2875 return -7;
2876 if (code == CONST_FIXED)
2877 return -7;
2878 op = avoid_constant_pool_reference (op);
2879 code = GET_CODE (op);
2881 switch (GET_RTX_CLASS (code))
2883 case RTX_CONST_OBJ:
2884 if (code == CONST_INT)
2885 return -6;
2886 if (code == CONST_DOUBLE)
2887 return -5;
2888 if (code == CONST_FIXED)
2889 return -5;
2890 return -4;
2892 case RTX_EXTRA:
2893 /* SUBREGs of objects should come second. */
2894 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2895 return -3;
2896 return 0;
2898 case RTX_OBJ:
2899 /* Complex expressions should come first, so decrease the priority
2900 of objects. Prefer pointer objects over non-pointer objects. */
2901 if ((REG_P (op) && REG_POINTER (op))
2902 || (MEM_P (op) && MEM_POINTER (op)))
2903 return -1;
2904 return -2;
2906 case RTX_COMM_ARITH:
2907 /* Prefer operands that are themselves commutative to be first.
2908 This helps to make things linear. In particular,
2909 (and (and (reg) (reg)) (not (reg))) is canonical. */
2910 return 4;
2912 case RTX_BIN_ARITH:
2913 /* If only one operand is a binary expression, it will be the first
2914 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2915 is canonical, although it will usually be further simplified. */
2916 return 2;
2918 case RTX_UNARY:
2919 /* Then prefer NEG and NOT. */
2920 if (code == NEG || code == NOT)
2921 return 1;
2923 default:
2924 return 0;
2928 /* Return 1 iff it is necessary to swap operands of commutative operation
2929 in order to canonicalize expression. */
2931 bool
2932 swap_commutative_operands_p (rtx x, rtx y)
2934 return (commutative_operand_precedence (x)
2935 < commutative_operand_precedence (y));
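/* Editorial illustration (not part of the original source): with the
   precedences above, swap_commutative_operands_p (const0_rtx, some_reg)
   is true (a CONST_INT scores -8 while a plain REG scores -2), so
   canonicalization places the register first, as in
   (plus:SI (reg:SI 100) (const_int 4)); similarly a commutative
   sub-expression (score 4) is placed before a NEG (score 1).  */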
2938 /* Return 1 if X is an autoincrement side effect and the register is
2939 not the stack pointer. */
2941 auto_inc_p (const_rtx x)
2943 switch (GET_CODE (x))
2945 case PRE_INC:
2946 case POST_INC:
2947 case PRE_DEC:
2948 case POST_DEC:
2949 case PRE_MODIFY:
2950 case POST_MODIFY:
2951 /* There are no REG_INC notes for SP. */
2952 if (XEXP (x, 0) != stack_pointer_rtx)
2953 return 1;
2954 default:
2955 break;
2957 return 0;
2960 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2962 loc_mentioned_in_p (rtx *loc, const_rtx in)
2964 enum rtx_code code;
2965 const char *fmt;
2966 int i, j;
2968 if (!in)
2969 return 0;
2971 code = GET_CODE (in);
2972 fmt = GET_RTX_FORMAT (code);
2973 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2975 if (fmt[i] == 'e')
2977 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
2978 return 1;
2980 else if (fmt[i] == 'E')
2981 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2982 if (loc == &XVECEXP (in, i, j)
2983 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2984 return 1;
2986 return 0;
2989 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2990 and SUBREG_BYTE, return the bit offset where the subreg begins
2991 (counting from the least significant bit of the operand). */
2993 unsigned int
2994 subreg_lsb_1 (enum machine_mode outer_mode,
2995 enum machine_mode inner_mode,
2996 unsigned int subreg_byte)
2998 unsigned int bitpos;
2999 unsigned int byte;
3000 unsigned int word;
3002 /* A paradoxical subreg begins at bit position 0. */
3003 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3004 return 0;
3006 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3007 /* If the subreg crosses a word boundary ensure that
3008 it also begins and ends on a word boundary. */
3009 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3010 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3011 && (subreg_byte % UNITS_PER_WORD
3012 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3014 if (WORDS_BIG_ENDIAN)
3015 word = (GET_MODE_SIZE (inner_mode)
3016 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3017 else
3018 word = subreg_byte / UNITS_PER_WORD;
3019 bitpos = word * BITS_PER_WORD;
3021 if (BYTES_BIG_ENDIAN)
3022 byte = (GET_MODE_SIZE (inner_mode)
3023 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3024 else
3025 byte = subreg_byte % UNITS_PER_WORD;
3026 bitpos += byte * BITS_PER_UNIT;
3028 return bitpos;
3031 /* Given a subreg X, return the bit offset where the subreg begins
3032 (counting from the least significant bit of the reg). */
3034 unsigned int
3035 subreg_lsb (const_rtx x)
3037 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3038 SUBREG_BYTE (x));
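/* Editorial illustration (not part of the original source), assuming
   UNITS_PER_WORD == 4 and BITS_PER_UNIT == 8: for (subreg:SI (reg:DI) 4)
   on a target with !WORDS_BIG_ENDIAN and !BYTES_BIG_ENDIAN, subreg_lsb_1
   computes word = 4 / 4 = 1 and byte = 4 % 4 = 0, so the subreg starts at
   bit 32 (the high half); with both WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN
   set, the same SUBREG_BYTE selects word (8 - (4 + 4)) / 4 = 0, i.e. bit 0,
   the low half of the DImode value.  */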
3041 /* Fill in information about a subreg of a hard register.
3042 xregno - A regno of an inner hard subreg_reg (or what will become one).
3043 xmode - The mode of xregno.
3044 offset - The byte offset.
3045 ymode - The mode of a top level SUBREG (or what may become one).
3046 info - Pointer to structure to fill in. */
3047 static void
3048 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3049 unsigned int offset, enum machine_mode ymode,
3050 struct subreg_info *info)
3052 int nregs_xmode, nregs_ymode;
3053 int mode_multiple, nregs_multiple;
3054 int offset_adj, y_offset, y_offset_adj;
3055 int regsize_xmode, regsize_ymode;
3056 bool rknown;
3058 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3060 rknown = false;
3062 /* If there are holes in a non-scalar mode in registers, we expect
3063 that it is made up of its units concatenated together. */
3064 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3066 enum machine_mode xmode_unit;
3068 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3069 if (GET_MODE_INNER (xmode) == VOIDmode)
3070 xmode_unit = xmode;
3071 else
3072 xmode_unit = GET_MODE_INNER (xmode);
3073 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3074 gcc_assert (nregs_xmode
3075 == (GET_MODE_NUNITS (xmode)
3076 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3077 gcc_assert (hard_regno_nregs[xregno][xmode]
3078 == (hard_regno_nregs[xregno][xmode_unit]
3079 * GET_MODE_NUNITS (xmode)));
3081 /* You can only ask for a SUBREG of a value with holes in the middle
3082 if you don't cross the holes. (Such a SUBREG should be done by
3083 picking a different register class, or doing it in memory if
3084 necessary.) An example of a value with holes is XCmode on 32-bit
3085 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3086 3 for each part, but in memory it's two 128-bit parts.
3087 Padding is assumed to be at the end (not necessarily the 'high part')
3088 of each unit. */
3089 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3090 < GET_MODE_NUNITS (xmode))
3091 && (offset / GET_MODE_SIZE (xmode_unit)
3092 != ((offset + GET_MODE_SIZE (ymode) - 1)
3093 / GET_MODE_SIZE (xmode_unit))))
3095 info->representable_p = false;
3096 rknown = true;
3099 else
3100 nregs_xmode = hard_regno_nregs[xregno][xmode];
3102 nregs_ymode = hard_regno_nregs[xregno][ymode];
3104 /* Paradoxical subregs are otherwise valid. */
3105 if (!rknown
3106 && offset == 0
3107 && GET_MODE_SIZE (ymode) > GET_MODE_SIZE (xmode))
3109 info->representable_p = true;
3110 /* If this is a big endian paradoxical subreg, which uses more
3111 actual hard registers than the original register, we must
3112 return a negative offset so that we find the proper highpart
3113 of the register. */
3114 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3115 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3116 info->offset = nregs_xmode - nregs_ymode;
3117 else
3118 info->offset = 0;
3119 info->nregs = nregs_ymode;
3120 return;
3123 /* If registers store different numbers of bits in the different
3124 modes, we cannot generally form this subreg. */
3125 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3126 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3127 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3128 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3130 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3131 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3132 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3134 info->representable_p = false;
3135 info->nregs
3136 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3137 info->offset = offset / regsize_xmode;
3138 return;
3140 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3142 info->representable_p = false;
3143 info->nregs
3144 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3145 info->offset = offset / regsize_xmode;
3146 return;
3150 /* Lowpart subregs are otherwise valid. */
3151 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3153 info->representable_p = true;
3154 rknown = true;
3156 if (offset == 0 || nregs_xmode == nregs_ymode)
3158 info->offset = 0;
3159 info->nregs = nregs_ymode;
3160 return;
3164 /* This should always pass, otherwise we don't know how to verify
3165 the constraint. These conditions may be relaxed but
3166 subreg_regno_offset would need to be redesigned. */
3167 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3168 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3170 /* The XMODE value can be seen as a vector of NREGS_XMODE
3171 values. The subreg must represent a lowpart of a given field.
3172 Compute what field it is. */
3173 offset_adj = offset;
3174 offset_adj -= subreg_lowpart_offset (ymode,
3175 mode_for_size (GET_MODE_BITSIZE (xmode)
3176 / nregs_xmode,
3177 MODE_INT, 0));
3179 /* Size of ymode must not be greater than the size of xmode. */
3180 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3181 gcc_assert (mode_multiple != 0);
3183 y_offset = offset / GET_MODE_SIZE (ymode);
3184 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3185 nregs_multiple = nregs_xmode / nregs_ymode;
3187 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3188 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3190 if (!rknown)
3192 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3193 rknown = true;
3195 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3196 info->nregs = nregs_ymode;
3199 /* This function returns the regno offset of a subreg expression.
3200 xregno - A regno of an inner hard subreg_reg (or what will become one).
3201 xmode - The mode of xregno.
3202 offset - The byte offset.
3203 ymode - The mode of a top level SUBREG (or what may become one).
3204 RETURN - The regno offset which would be used. */
3205 unsigned int
3206 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3207 unsigned int offset, enum machine_mode ymode)
3209 struct subreg_info info;
3210 subreg_get_info (xregno, xmode, offset, ymode, &info);
3211 return info.offset;
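/* Editorial illustration (not part of the original source), assuming a
   little-endian (!WORDS_BIG_ENDIAN, !BYTES_BIG_ENDIAN) target where SImode
   occupies one hard register and DImode occupies two consecutive ones:
   subreg_regno_offset (R, DImode, 0, SImode) is 0 and
   subreg_regno_offset (R, DImode, 4, SImode) is 1, so
   (subreg:SI (reg:DI R) 4) resolves through subreg_regno to hard register
   R + 1, the high half of the DImode value.  */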
3214 /* This function returns true when the offset is representable via
3215 subreg_offset in the given regno.
3216 xregno - A regno of an inner hard subreg_reg (or what will become one).
3217 xmode - The mode of xregno.
3218 offset - The byte offset.
3219 ymode - The mode of a top level SUBREG (or what may become one).
3220 RETURN - Whether the offset is representable. */
3221 bool
3222 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3223 unsigned int offset, enum machine_mode ymode)
3225 struct subreg_info info;
3226 subreg_get_info (xregno, xmode, offset, ymode, &info);
3227 return info.representable_p;
3230 /* Return the final regno that a subreg expression refers to. */
3231 unsigned int
3232 subreg_regno (const_rtx x)
3234 unsigned int ret;
3235 rtx subreg = SUBREG_REG (x);
3236 int regno = REGNO (subreg);
3238 ret = regno + subreg_regno_offset (regno,
3239 GET_MODE (subreg),
3240 SUBREG_BYTE (x),
3241 GET_MODE (x));
3242 return ret;
3246 /* Return the number of registers that a subreg expression refers
3247 to. */
3248 unsigned int
3249 subreg_nregs (const_rtx x)
3251 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3254 /* Return the number of registers that the subreg expression X, whose inner
3255 register has register number REGNO, refers to. This is a copy of
3256 subreg_nregs changed so that the regno can be passed in. */
3258 unsigned int
3259 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3261 struct subreg_info info;
3262 rtx subreg = SUBREG_REG (x);
3264 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3265 &info);
3266 return info.nregs;
3270 struct parms_set_data
3272 int nregs;
3273 HARD_REG_SET regs;
3276 /* Helper function for noticing stores to parameter registers. */
3277 static void
3278 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3280 struct parms_set_data *d = data;
3281 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3282 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3284 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3285 d->nregs--;
3289 /* Look backward for the first parameter to be loaded.
3290 Note that loads of all parameters will not necessarily be
3291 found if CSE has eliminated some of them (e.g., an argument
3292 to the outer function is passed down as a parameter).
3293 Do not skip BOUNDARY. */
3295 find_first_parameter_load (rtx call_insn, rtx boundary)
3297 struct parms_set_data parm;
3298 rtx p, before, first_set;
3300 /* Since different machines initialize their parameter registers
3301 in different orders, assume nothing. Collect the set of all
3302 parameter registers. */
3303 CLEAR_HARD_REG_SET (parm.regs);
3304 parm.nregs = 0;
3305 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3306 if (GET_CODE (XEXP (p, 0)) == USE
3307 && REG_P (XEXP (XEXP (p, 0), 0)))
3309 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3311 /* We only care about registers which can hold function
3312 arguments. */
3313 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3314 continue;
3316 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3317 parm.nregs++;
3319 before = call_insn;
3320 first_set = call_insn;
3322 /* Search backward for the first set of a register in this set. */
3323 while (parm.nregs && before != boundary)
3325 before = PREV_INSN (before);
3327 /* It is possible that some loads got CSEed from one call to
3328 another. Stop in that case. */
3329 if (CALL_P (before))
3330 break;
3332 /* Our caller must either ensure that we will find all sets
3333 (in case the code has not been optimized yet), or guard against
3334 possible labels by setting BOUNDARY to the preceding
3335 CODE_LABEL. */
3336 if (LABEL_P (before))
3338 gcc_assert (before == boundary);
3339 break;
3342 if (INSN_P (before))
3344 int nregs_old = parm.nregs;
3345 note_stores (PATTERN (before), parms_set, &parm);
3346 /* If we found something that did not set a parameter reg,
3347 we're done. Do not keep going, as that might result
3348 in hoisting an insn before the setting of a pseudo
3349 that is used by the hoisted insn. */
3350 if (nregs_old != parm.nregs)
3351 first_set = before;
3352 else
3353 break;
3356 return first_set;
3359 /* Return true if we should avoid inserting code between INSN and preceding
3360 call instruction. */
3362 bool
3363 keep_with_call_p (const_rtx insn)
3365 rtx set;
3367 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3369 if (REG_P (SET_DEST (set))
3370 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3371 && fixed_regs[REGNO (SET_DEST (set))]
3372 && general_operand (SET_SRC (set), VOIDmode))
3373 return true;
3374 if (REG_P (SET_SRC (set))
3375 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3376 && REG_P (SET_DEST (set))
3377 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3378 return true;
3379 /* There may be a stack pop just after the call and before the store
3380 of the return register. Search for the actual store when deciding
3381 whether we can break here. */
3382 if (SET_DEST (set) == stack_pointer_rtx)
3384 /* This CONST_CAST is okay because next_nonnote_insn just
3385 returns its argument and we assign it to a const_rtx
3386 variable. */
3387 const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
3388 if (i2 && keep_with_call_p (i2))
3389 return true;
3392 return false;
3395 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3396 to non-complex jumps. That is, direct unconditional, conditional,
3397 and tablejumps, but not computed jumps or returns. It also does
3398 not apply to the fallthru case of a conditional jump. */
3400 bool
3401 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3403 rtx tmp = JUMP_LABEL (jump_insn);
3405 if (label == tmp)
3406 return true;
3408 if (tablejump_p (jump_insn, NULL, &tmp))
3410 rtvec vec = XVEC (PATTERN (tmp),
3411 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3412 int i, veclen = GET_NUM_ELEM (vec);
3414 for (i = 0; i < veclen; ++i)
3415 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3416 return true;
3419 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3420 return true;
3422 return false;
3426 /* Return an estimate of the cost of computing rtx X.
3427 One use is in cse, to decide which expression to keep in the hash table.
3428 Another is in rtl generation, to pick the cheapest way to multiply.
3429 Other uses like the latter are expected in the future. */
3432 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3434 int i, j;
3435 enum rtx_code code;
3436 const char *fmt;
3437 int total;
3439 if (x == 0)
3440 return 0;
3442 /* Compute the default costs of certain things.
3443 Note that targetm.rtx_costs can override the defaults. */
3445 code = GET_CODE (x);
3446 switch (code)
3448 case MULT:
3449 total = COSTS_N_INSNS (5);
3450 break;
3451 case DIV:
3452 case UDIV:
3453 case MOD:
3454 case UMOD:
3455 total = COSTS_N_INSNS (7);
3456 break;
3457 case USE:
3458 /* Used in combine.c as a marker. */
3459 total = 0;
3460 break;
3461 default:
3462 total = COSTS_N_INSNS (1);
3465 switch (code)
3467 case REG:
3468 return 0;
3470 case SUBREG:
3471 total = 0;
3472 /* If we can't tie these modes, make this expensive. The larger
3473 the mode, the more expensive it is. */
3474 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3475 return COSTS_N_INSNS (2
3476 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3477 break;
3479 default:
3480 if (targetm.rtx_costs (x, code, outer_code, &total))
3481 return total;
3482 break;
3485 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3486 which is already in total. */
3488 fmt = GET_RTX_FORMAT (code);
3489 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3490 if (fmt[i] == 'e')
3491 total += rtx_cost (XEXP (x, i), code);
3492 else if (fmt[i] == 'E')
3493 for (j = 0; j < XVECLEN (x, i); j++)
3494 total += rtx_cost (XVECEXP (x, i, j), code);
3496 return total;
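/* Editorial illustration (not part of the original source): if the target's
   rtx_costs hook declines to override the defaults above, then for
   (plus:SI (mult:SI (reg:SI 100) (reg:SI 101)) (reg:SI 102)) rtx_cost sums
   COSTS_N_INSNS (1) for the PLUS and COSTS_N_INSNS (5) for the MULT, while
   the inner REGs contribute 0.  */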
3499 /* Return the cost of address expression X.
3500 Expect that X is a properly formed address reference. */
3503 address_cost (rtx x, enum machine_mode mode)
3505 /* We may be asked for the cost of various unusual addresses, such as operands
3506 of a push instruction. It is not worthwhile to complicate the
3507 target hook with such cases. */
3509 if (!memory_address_p (mode, x))
3510 return 1000;
3512 return targetm.address_cost (x);
3515 /* If the target doesn't override, compute the cost as with arithmetic. */
3518 default_address_cost (rtx x)
3520 return rtx_cost (x, MEM);
3524 unsigned HOST_WIDE_INT
3525 nonzero_bits (const_rtx x, enum machine_mode mode)
3527 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3530 unsigned int
3531 num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3533 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
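/* Editorial illustration (not part of the original source): for a pseudo
   about which nothing has been recorded,
   nonzero_bits ((and:SI (reg:SI 100) (const_int 255)), SImode) is at most
   255, because the AND case below intersects the nonzero bits of both
   operands, and num_sign_bit_copies ((sign_extend:SI (reg:QI 100)), SImode)
   is at least 25: the 24 bits added by the extension plus the copied sign
   bit itself.  */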
3536 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3537 It avoids exponential behavior in nonzero_bits1 when X has
3538 identical subexpressions on the first or the second level. */
3540 static unsigned HOST_WIDE_INT
3541 cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3542 enum machine_mode known_mode,
3543 unsigned HOST_WIDE_INT known_ret)
3545 if (x == known_x && mode == known_mode)
3546 return known_ret;
3548 /* Try to find identical subexpressions. If found call
3549 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3550 precomputed value for the subexpression as KNOWN_RET. */
3552 if (ARITHMETIC_P (x))
3554 rtx x0 = XEXP (x, 0);
3555 rtx x1 = XEXP (x, 1);
3557 /* Check the first level. */
3558 if (x0 == x1)
3559 return nonzero_bits1 (x, mode, x0, mode,
3560 cached_nonzero_bits (x0, mode, known_x,
3561 known_mode, known_ret));
3563 /* Check the second level. */
3564 if (ARITHMETIC_P (x0)
3565 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3566 return nonzero_bits1 (x, mode, x1, mode,
3567 cached_nonzero_bits (x1, mode, known_x,
3568 known_mode, known_ret));
3570 if (ARITHMETIC_P (x1)
3571 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3572 return nonzero_bits1 (x, mode, x0, mode,
3573 cached_nonzero_bits (x0, mode, known_x,
3574 known_mode, known_ret));
3577 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3580 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3581 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3582 is less useful. We can't allow both, because that results in exponential
3583 run time recursion. There is a nullstone testcase that triggered
3584 this. This macro avoids accidental uses of num_sign_bit_copies. */
3585 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3587 /* Given an expression, X, compute which bits in X can be nonzero.
3588 We don't care about bits outside of those defined in MODE.
3590 For most X this is simply GET_MODE_MASK (MODE), but if X is
3591 an arithmetic operation, we can do better. */
3593 static unsigned HOST_WIDE_INT
3594 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3595 enum machine_mode known_mode,
3596 unsigned HOST_WIDE_INT known_ret)
3598 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3599 unsigned HOST_WIDE_INT inner_nz;
3600 enum rtx_code code;
3601 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3603 /* For floating-point values, assume all bits are needed. */
3604 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3605 return nonzero;
3607 /* If X is wider than MODE, use its mode instead. */
3608 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3610 mode = GET_MODE (x);
3611 nonzero = GET_MODE_MASK (mode);
3612 mode_width = GET_MODE_BITSIZE (mode);
3615 if (mode_width > HOST_BITS_PER_WIDE_INT)
3616 /* Our only callers in this case look for single bit values. So
3617 just return the mode mask. Those tests will then be false. */
3618 return nonzero;
3620 #ifndef WORD_REGISTER_OPERATIONS
3621 /* If MODE is wider than X, but both are a single word for both the host
3622 and target machines, we can compute this from which bits of the
3623 object might be nonzero in its own mode, taking into account the fact
3624 that on many CISC machines, accessing an object in a wider mode
3625 causes the high-order bits to become undefined. So they are
3626 not known to be zero. */
3628 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3629 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3630 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3631 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3633 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3634 known_x, known_mode, known_ret);
3635 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3636 return nonzero;
3638 #endif
3640 code = GET_CODE (x);
3641 switch (code)
3643 case REG:
3644 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3645 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3646 all the bits above ptr_mode are known to be zero. */
3647 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3648 && REG_POINTER (x))
3649 nonzero &= GET_MODE_MASK (ptr_mode);
3650 #endif
3652 /* Include declared information about alignment of pointers. */
3653 /* ??? We don't properly preserve REG_POINTER changes across
3654 pointer-to-integer casts, so we can't trust it except for
3655 things that we know must be pointers. See execute/960116-1.c. */
3656 if ((x == stack_pointer_rtx
3657 || x == frame_pointer_rtx
3658 || x == arg_pointer_rtx)
3659 && REGNO_POINTER_ALIGN (REGNO (x)))
3661 unsigned HOST_WIDE_INT alignment
3662 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3664 #ifdef PUSH_ROUNDING
3665 /* If PUSH_ROUNDING is defined, it is possible for the
3666 stack to be momentarily aligned only to that amount,
3667 so we pick the least alignment. */
3668 if (x == stack_pointer_rtx && PUSH_ARGS)
3669 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3670 alignment);
3671 #endif
3673 nonzero &= ~(alignment - 1);
3677 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3678 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3679 known_mode, known_ret,
3680 &nonzero_for_hook);
3682 if (new)
3683 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3684 known_mode, known_ret);
3686 return nonzero_for_hook;
3689 case CONST_INT:
3690 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3691 /* If X is negative in MODE, sign-extend the value. */
3692 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3693 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3694 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3695 #endif
3697 return INTVAL (x);
3699 case MEM:
3700 #ifdef LOAD_EXTEND_OP
3701 /* In many, if not most, RISC machines, reading a byte from memory
3702 zeros the rest of the register. Noticing that fact saves a lot
3703 of extra zero-extends. */
3704 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3705 nonzero &= GET_MODE_MASK (GET_MODE (x));
3706 #endif
3707 break;
3709 case EQ: case NE:
3710 case UNEQ: case LTGT:
3711 case GT: case GTU: case UNGT:
3712 case LT: case LTU: case UNLT:
3713 case GE: case GEU: case UNGE:
3714 case LE: case LEU: case UNLE:
3715 case UNORDERED: case ORDERED:
3716 /* If this produces an integer result, we know which bits are set.
3717 Code here used to clear bits outside the mode of X, but that is
3718 now done above. */
3719 /* Mind that MODE is the mode the caller wants to look at this
3720 operation in, and not the actual operation mode. We can wind
3721 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3722 that describes the results of a vector compare. */
3723 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3724 && mode_width <= HOST_BITS_PER_WIDE_INT)
3725 nonzero = STORE_FLAG_VALUE;
3726 break;
3728 case NEG:
3729 #if 0
3730 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3731 and num_sign_bit_copies. */
3732 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3733 == GET_MODE_BITSIZE (GET_MODE (x)))
3734 nonzero = 1;
3735 #endif
3737 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3738 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3739 break;
3741 case ABS:
3742 #if 0
3743 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3744 and num_sign_bit_copies. */
3745 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3746 == GET_MODE_BITSIZE (GET_MODE (x)))
3747 nonzero = 1;
3748 #endif
3749 break;
3751 case TRUNCATE:
3752 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3753 known_x, known_mode, known_ret)
3754 & GET_MODE_MASK (mode));
3755 break;
3757 case ZERO_EXTEND:
3758 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3759 known_x, known_mode, known_ret);
3760 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3761 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3762 break;
3764 case SIGN_EXTEND:
3765 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3766 Otherwise, show all the bits in the outer mode but not the inner
3767 may be nonzero. */
3768 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3769 known_x, known_mode, known_ret);
3770 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3772 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3773 if (inner_nz
3774 & (((HOST_WIDE_INT) 1
3775 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3776 inner_nz |= (GET_MODE_MASK (mode)
3777 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3780 nonzero &= inner_nz;
3781 break;
3783 case AND:
3784 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3785 known_x, known_mode, known_ret)
3786 & cached_nonzero_bits (XEXP (x, 1), mode,
3787 known_x, known_mode, known_ret);
3788 break;
3790 case XOR: case IOR:
3791 case UMIN: case UMAX: case SMIN: case SMAX:
3793 unsigned HOST_WIDE_INT nonzero0 =
3794 cached_nonzero_bits (XEXP (x, 0), mode,
3795 known_x, known_mode, known_ret);
3797 /* Don't call nonzero_bits for the second time if it cannot change
3798 anything. */
3799 if ((nonzero & nonzero0) != nonzero)
3800 nonzero &= nonzero0
3801 | cached_nonzero_bits (XEXP (x, 1), mode,
3802 known_x, known_mode, known_ret);
3804 break;
3806 case PLUS: case MINUS:
3807 case MULT:
3808 case DIV: case UDIV:
3809 case MOD: case UMOD:
3810 /* We can apply the rules of arithmetic to compute the number of
3811 high- and low-order zero bits of these operations. We start by
3812 computing the width (position of the highest-order nonzero bit)
3813 and the number of low-order zero bits for each value. */
3815 unsigned HOST_WIDE_INT nz0 =
3816 cached_nonzero_bits (XEXP (x, 0), mode,
3817 known_x, known_mode, known_ret);
3818 unsigned HOST_WIDE_INT nz1 =
3819 cached_nonzero_bits (XEXP (x, 1), mode,
3820 known_x, known_mode, known_ret);
3821 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3822 int width0 = floor_log2 (nz0) + 1;
3823 int width1 = floor_log2 (nz1) + 1;
3824 int low0 = floor_log2 (nz0 & -nz0);
3825 int low1 = floor_log2 (nz1 & -nz1);
3826 HOST_WIDE_INT op0_maybe_minusp
3827 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3828 HOST_WIDE_INT op1_maybe_minusp
3829 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3830 unsigned int result_width = mode_width;
3831 int result_low = 0;
3833 switch (code)
3835 case PLUS:
3836 result_width = MAX (width0, width1) + 1;
3837 result_low = MIN (low0, low1);
3838 break;
3839 case MINUS:
3840 result_low = MIN (low0, low1);
3841 break;
3842 case MULT:
3843 result_width = width0 + width1;
3844 result_low = low0 + low1;
3845 break;
3846 case DIV:
3847 if (width1 == 0)
3848 break;
3849 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3850 result_width = width0;
3851 break;
3852 case UDIV:
3853 if (width1 == 0)
3854 break;
3855 result_width = width0;
3856 break;
3857 case MOD:
3858 if (width1 == 0)
3859 break;
3860 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3861 result_width = MIN (width0, width1);
3862 result_low = MIN (low0, low1);
3863 break;
3864 case UMOD:
3865 if (width1 == 0)
3866 break;
3867 result_width = MIN (width0, width1);
3868 result_low = MIN (low0, low1);
3869 break;
3870 default:
3871 gcc_unreachable ();
3874 if (result_width < mode_width)
3875 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3877 if (result_low > 0)
3878 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3880 #ifdef POINTERS_EXTEND_UNSIGNED
3881 /* If pointers extend unsigned and this is an addition or subtraction
3882 to a pointer in Pmode, all the bits above ptr_mode are known to be
3883 zero. */
3884 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3885 && (code == PLUS || code == MINUS)
3886 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3887 nonzero &= GET_MODE_MASK (ptr_mode);
3888 #endif
3890 break;
3892 case ZERO_EXTRACT:
3893 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3894 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3895 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3896 break;
3898 case SUBREG:
3899 /* If this is a SUBREG formed for a promoted variable that has
3900 been zero-extended, we know that at least the high-order bits
3901 are zero, though others might be too. */
3903 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3904 nonzero = GET_MODE_MASK (GET_MODE (x))
3905 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3906 known_x, known_mode, known_ret);
3908 /* If the inner mode is a single word for both the host and target
3909 machines, we can compute this from which bits of the inner
3910 object might be nonzero. */
3911 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3912 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3913 <= HOST_BITS_PER_WIDE_INT))
3915 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3916 known_x, known_mode, known_ret);
3918 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3919 /* If this is a typical RISC machine, we only have to worry
3920 about the way loads are extended. */
3921 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3922 ? (((nonzero
3923 & (((unsigned HOST_WIDE_INT) 1
3924 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3925 != 0))
3926 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3927 || !MEM_P (SUBREG_REG (x)))
3928 #endif
3930 /* On many CISC machines, accessing an object in a wider mode
3931 causes the high-order bits to become undefined. So they are
3932 not known to be zero. */
3933 if (GET_MODE_SIZE (GET_MODE (x))
3934 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3935 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3936 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3939 break;
3941 case ASHIFTRT:
3942 case LSHIFTRT:
3943 case ASHIFT:
3944 case ROTATE:
3945 /* The nonzero bits are in two classes: any bits within MODE
3946 that aren't in GET_MODE (x) are always significant. The rest of the
3947 nonzero bits are those that are significant in the operand of
3948 the shift when shifted the appropriate number of bits. This
3949 shows that high-order bits are cleared by the right shift and
3950 low-order bits by left shifts. */
3951 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3952 && INTVAL (XEXP (x, 1)) >= 0
3953 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3955 enum machine_mode inner_mode = GET_MODE (x);
3956 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3957 int count = INTVAL (XEXP (x, 1));
3958 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3959 unsigned HOST_WIDE_INT op_nonzero =
3960 cached_nonzero_bits (XEXP (x, 0), mode,
3961 known_x, known_mode, known_ret);
3962 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3963 unsigned HOST_WIDE_INT outer = 0;
3965 if (mode_width > width)
3966 outer = (op_nonzero & nonzero & ~mode_mask);
3968 if (code == LSHIFTRT)
3969 inner >>= count;
3970 else if (code == ASHIFTRT)
3972 inner >>= count;
3974 /* If the sign bit may have been nonzero before the shift, we
3975 need to mark all the places it could have been copied to
3976 by the shift as possibly nonzero. */
3977 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3978 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3980 else if (code == ASHIFT)
3981 inner <<= count;
3982 else
3983 inner = ((inner << (count % width)
3984 | (inner >> (width - (count % width)))) & mode_mask);
3986 nonzero &= (outer | inner);
3988 break;
3990 case FFS:
3991 case POPCOUNT:
3992 /* This is at most the number of bits in the mode. */
3993 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3994 break;
3996 case CLZ:
3997 /* If CLZ has a known value at zero, then the nonzero bits are
3998 that value, plus the number of bits in the mode minus one. */
3999 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4000 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4001 else
4002 nonzero = -1;
4003 break;
4005 case CTZ:
4006 /* If CTZ has a known value at zero, then the nonzero bits are
4007 that value, plus the number of bits in the mode minus one. */
4008 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4009 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4010 else
4011 nonzero = -1;
4012 break;
4014 case PARITY:
4015 nonzero = 1;
4016 break;
4018 case IF_THEN_ELSE:
4020 unsigned HOST_WIDE_INT nonzero_true =
4021 cached_nonzero_bits (XEXP (x, 1), mode,
4022 known_x, known_mode, known_ret);
4024 /* Don't call nonzero_bits for the second time if it cannot change
4025 anything. */
4026 if ((nonzero & nonzero_true) != nonzero)
4027 nonzero &= nonzero_true
4028 | cached_nonzero_bits (XEXP (x, 2), mode,
4029 known_x, known_mode, known_ret);
4031 break;
4033 default:
4034 break;
4037 return nonzero;
4040 /* See the macro definition above. */
4041 #undef cached_num_sign_bit_copies
4044 /* The function cached_num_sign_bit_copies is a wrapper around
4045 num_sign_bit_copies1. It avoids exponential behavior in
4046 num_sign_bit_copies1 when X has identical subexpressions on the
4047 first or the second level. */
4049 static unsigned int
4050 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4051 enum machine_mode known_mode,
4052 unsigned int known_ret)
4054 if (x == known_x && mode == known_mode)
4055 return known_ret;
4057 /* Try to find identical subexpressions. If found call
4058 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4059 the precomputed value for the subexpression as KNOWN_RET. */
4061 if (ARITHMETIC_P (x))
4063 rtx x0 = XEXP (x, 0);
4064 rtx x1 = XEXP (x, 1);
4066 /* Check the first level. */
4067 if (x0 == x1)
4068 return
4069 num_sign_bit_copies1 (x, mode, x0, mode,
4070 cached_num_sign_bit_copies (x0, mode, known_x,
4071 known_mode,
4072 known_ret));
4074 /* Check the second level. */
4075 if (ARITHMETIC_P (x0)
4076 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4077 return
4078 num_sign_bit_copies1 (x, mode, x1, mode,
4079 cached_num_sign_bit_copies (x1, mode, known_x,
4080 known_mode,
4081 known_ret));
4083 if (ARITHMETIC_P (x1)
4084 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4085 return
4086 num_sign_bit_copies1 (x, mode, x0, mode,
4087 cached_num_sign_bit_copies (x0, mode, known_x,
4088 known_mode,
4089 known_ret));
4092 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4095 /* Return the number of bits at the high-order end of X that are known to
4096 be equal to the sign bit. X will be used in mode MODE; if MODE is
4097 VOIDmode, X will be used in its own mode. The returned value will always
4098 be between 1 and the number of bits in MODE. */
4100 static unsigned int
4101 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4102 enum machine_mode known_mode,
4103 unsigned int known_ret)
4105 enum rtx_code code = GET_CODE (x);
4106 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4107 int num0, num1, result;
4108 unsigned HOST_WIDE_INT nonzero;
4110 /* If we weren't given a mode, use the mode of X. If the mode is still
4111 VOIDmode, we don't know anything. Likewise if one of the modes is
4112 floating-point. */
4114 if (mode == VOIDmode)
4115 mode = GET_MODE (x);
4117 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4118 return 1;
4120 /* For a smaller object, just ignore the high bits. */
4121 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4123 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4124 known_x, known_mode, known_ret);
4125 return MAX (1,
4126 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4129 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4131 #ifndef WORD_REGISTER_OPERATIONS
4132 /* If this machine does not do all register operations on the entire
4133 register and MODE is wider than the mode of X, we can say nothing
4134 at all about the high-order bits. */
4135 return 1;
4136 #else
4137 /* Likewise on machines that do, if the mode of the object is smaller
4138 than a word and loads of that size don't sign extend, we can say
4139 nothing about the high order bits. */
4140 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4141 #ifdef LOAD_EXTEND_OP
4142 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4143 #endif
4145 return 1;
4146 #endif
4149 switch (code)
4151 case REG:
4153 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4154 /* If pointers extend signed and this is a pointer in Pmode, say that
4155 all the bits above ptr_mode are known to be sign bit copies. */
4156 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4157 && REG_POINTER (x))
4158 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4159 #endif
4162 unsigned int copies_for_hook = 1, copies = 1;
4163 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4164 known_mode, known_ret,
4165 &copies_for_hook);
4167 if (new)
4168 copies = cached_num_sign_bit_copies (new, mode, known_x,
4169 known_mode, known_ret);
4171 if (copies > 1 || copies_for_hook > 1)
4172 return MAX (copies, copies_for_hook);
4174 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4176 break;
4178 case MEM:
4179 #ifdef LOAD_EXTEND_OP
4180 /* Some RISC machines sign-extend all loads of smaller than a word. */
4181 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4182 return MAX (1, ((int) bitwidth
4183 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4184 #endif
4185 break;
4187 case CONST_INT:
4188 /* If the constant is negative, take its 1's complement and remask.
4189 Then see how many zero bits we have. */
4190 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4191 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4192 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4193 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4195 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4197 case SUBREG:
4198 /* If this is a SUBREG for a promoted object that is sign-extended
4199 and we are looking at it in a wider mode, we know that at least the
4200 high-order bits are sign bit copies. */
4202 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4204 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4205 known_x, known_mode, known_ret);
4206 return MAX ((int) bitwidth
4207 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4208 num0);
4211 /* For a smaller object, just ignore the high bits. */
4212 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4214 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4215 known_x, known_mode, known_ret);
4216 return MAX (1, (num0
4217 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4218 - bitwidth)));
4221 #ifdef WORD_REGISTER_OPERATIONS
4222 #ifdef LOAD_EXTEND_OP
4223 /* For paradoxical SUBREGs on machines where all register operations
4224 affect the entire register, just look inside. Note that we are
4225 passing MODE to the recursive call, so the number of sign bit copies
4226 will remain relative to that mode, not the inner mode. */
4228 /* This works only if loads sign extend. Otherwise, if we get a
4229 reload for the inner part, it may be loaded from the stack, and
4230 then we lose all sign bit copies that existed before the store
4231 to the stack. */
4233 if ((GET_MODE_SIZE (GET_MODE (x))
4234 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4235 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4236 && MEM_P (SUBREG_REG (x)))
4237 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4238 known_x, known_mode, known_ret);
4239 #endif
4240 #endif
4241 break;
4243 case SIGN_EXTRACT:
4244 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4245 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4246 break;
4248 case SIGN_EXTEND:
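/* Sign extension makes every added high-order bit a copy of the inner
   sign bit: e.g. extending a 16-bit inner mode to a 32-bit mode
   guarantees at least 32 - 16 + 1 = 17 copies, plus whatever copies
   the inner value already had beyond its own sign bit.  */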
4249 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4250 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4251 known_x, known_mode, known_ret));
4253 case TRUNCATE:
4254 /* For a smaller object, just ignore the high bits. */
4255 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4256 known_x, known_mode, known_ret);
4257 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4258 - bitwidth)));
4260 case NOT:
4261 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4262 known_x, known_mode, known_ret);
4264 case ROTATE: case ROTATERT:
4265 /* If we are rotating left by a number of bits less than the number
4266 of sign bit copies, we can just subtract that amount from the
4267 number. */
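/* E.g., in a 32-bit mode, rotating a value that has at least 10 sign
   bit copies left by 3 leaves at least 7; ROTATERT by N is handled
   here as a left rotate by bitwidth - N.  */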
4268 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4269 && INTVAL (XEXP (x, 1)) >= 0
4270 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4272 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4273 known_x, known_mode, known_ret);
4274 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4275 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4277 break;
4279 case NEG:
4280 /* In general, this subtracts one sign bit copy. But if the value
4281 is known to be positive, the number of sign bit copies is the
4282 same as that of the input. Finally, if the input has just one bit
4283 that might be nonzero, all the bits are copies of the sign bit. */
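/* E.g., if nonzero_bits shows the operand can only be 0 or 1, the
   negation is 0 or -1, every bit of which copies the sign bit, so the
   full bitwidth is returned.  */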
4284 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4285 known_x, known_mode, known_ret);
4286 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4287 return num0 > 1 ? num0 - 1 : 1;
4289 nonzero = nonzero_bits (XEXP (x, 0), mode);
4290 if (nonzero == 1)
4291 return bitwidth;
4293 if (num0 > 1
4294 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4295 num0--;
4297 return num0;
4299 case IOR: case AND: case XOR:
4300 case SMIN: case SMAX: case UMIN: case UMAX:
4301 /* Logical operations will preserve the number of sign-bit copies.
4302 MIN and MAX operations always return one of the operands. */
4303 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4304 known_x, known_mode, known_ret);
4305 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4306 known_x, known_mode, known_ret);
4308 /* If num1 is clearing some of the top bits then regardless of
4309 the other term, we are guaranteed to have at least that many
4310 high-order zero bits. */
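/* E.g., (and:SI x (const_int 255)) on a target with 32-bit SImode:
   the constant has 32 - floor_log2 (255) - 1 = 24 sign bit copies,
   and since its sign bit is clear the result is guaranteed at least
   24 high-order zero bits.  */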
4311 if (code == AND
4312 && num1 > 1
4313 && bitwidth <= HOST_BITS_PER_WIDE_INT
4314 && GET_CODE (XEXP (x, 1)) == CONST_INT
4315 && !(INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4316 return num1;
4318 /* Similarly for IOR when setting high-order bits. */
4319 if (code == IOR
4320 && num1 > 1
4321 && bitwidth <= HOST_BITS_PER_WIDE_INT
4322 && GET_CODE (XEXP (x, 1)) == CONST_INT
4323 && (INTVAL (XEXP (x, 1)) & ((HOST_WIDE_INT) 1 << (bitwidth - 1))))
4324 return num1;
4326 return MIN (num0, num1);
4328 case PLUS: case MINUS:
4329 /* For addition and subtraction, we can have a 1-bit carry. However,
4330 if we are subtracting 1 from a positive number, there will not
4331 be such a carry. Furthermore, if the positive number is known to
4332 be 0 or 1, we know the result is either -1 or 0. */
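/* E.g., in a 32-bit mode, adding two values that each have at least
   20 sign bit copies (so each fits in 13 signed bits) yields a sum
   that fits in 14 signed bits, i.e. at least 19 copies, which is
   MIN (num0, num1) - 1.  */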
4334 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4335 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4337 nonzero = nonzero_bits (XEXP (x, 0), mode);
4338 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4339 return (nonzero == 1 || nonzero == 0 ? bitwidth
4340 : bitwidth - floor_log2 (nonzero) - 1);
4343 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4344 known_x, known_mode, known_ret);
4345 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4346 known_x, known_mode, known_ret);
4347 result = MAX (1, MIN (num0, num1) - 1);
4349 #ifdef POINTERS_EXTEND_UNSIGNED
4350 /* If pointers extend signed and this is an addition or subtraction
4351 to a pointer in Pmode, all the bits above ptr_mode are known to be
4352 sign bit copies. */
4353 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4354 && (code == PLUS || code == MINUS)
4355 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4356 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4357 - GET_MODE_BITSIZE (ptr_mode) + 1),
4358 result);
4359 #endif
4360 return result;
4362 case MULT:
4363 /* The number of bits of the product is the sum of the number of
4364 bits of both terms. However, unless one of the terms is known
4365 to be positive, we must allow for an additional bit since negating
4366 a negative number can remove one sign bit copy. */
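/* E.g., in a 32-bit mode, multiplying two values that each fit in 8
   signed bits (25 sign bit copies each) gives a product that fits in
   16 signed bits: 25 + 25 - 32 = 18 copies, minus one more when
   neither operand is known to be nonnegative, leaving 17.  */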
4368 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4369 known_x, known_mode, known_ret);
4370 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4371 known_x, known_mode, known_ret);
4373 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4374 if (result > 0
4375 && (bitwidth > HOST_BITS_PER_WIDE_INT
4376 || (((nonzero_bits (XEXP (x, 0), mode)
4377 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4378 && ((nonzero_bits (XEXP (x, 1), mode)
4379 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4380 result--;
4382 return MAX (1, result);
4384 case UDIV:
4385 /* The result must be <= the first operand. If the first operand
4386 has the high bit set, we know nothing about the number of sign
4387 bit copies. */
4388 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4389 return 1;
4390 else if ((nonzero_bits (XEXP (x, 0), mode)
4391 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4392 return 1;
4393 else
4394 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4395 known_x, known_mode, known_ret);
4397 case UMOD:
4398 /* The result must be <= the second operand. */
4399 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4400 known_x, known_mode, known_ret);
4402 case DIV:
4403 /* Similar to unsigned division, except that we have to worry about
4404 the case where the divisor is negative, in which case we must
4405 allow for one fewer sign bit copy. */
4406 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4407 known_x, known_mode, known_ret);
4408 if (result > 1
4409 && (bitwidth > HOST_BITS_PER_WIDE_INT
4410 || (nonzero_bits (XEXP (x, 1), mode)
4411 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4412 result--;
4414 return result;
4416 case MOD:
4417 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4418 known_x, known_mode, known_ret);
4419 if (result > 1
4420 && (bitwidth > HOST_BITS_PER_WIDE_INT
4421 || (nonzero_bits (XEXP (x, 1), mode)
4422 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4423 result--;
4425 return result;
4427 case ASHIFTRT:
4428 /* An arithmetic right shift by a constant adds that many bits that
4429 are copies of the sign bit, capped at the mode's bitwidth. */
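/* E.g., (ashiftrt:SI x (const_int 4)) with x having 5 known copies
   yields MIN (32, 5 + 4) = 9 copies; conversely, ASHIFT below loses
   copies, so a left shift of the same x by 3 guarantees only 2.  */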
4430 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4431 known_x, known_mode, known_ret);
4432 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4433 && INTVAL (XEXP (x, 1)) > 0)
4434 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4436 return num0;
4438 case ASHIFT:
4439 /* Left shifts destroy copies. */
4440 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4441 || INTVAL (XEXP (x, 1)) < 0
4442 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4443 return 1;
4445 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4446 known_x, known_mode, known_ret);
4447 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4449 case IF_THEN_ELSE:
4450 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4451 known_x, known_mode, known_ret);
4452 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4453 known_x, known_mode, known_ret);
4454 return MIN (num0, num1);
4456 case EQ: case NE: case GE: case GT: case LE: case LT:
4457 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4458 case GEU: case GTU: case LEU: case LTU:
4459 case UNORDERED: case ORDERED:
4460 /* The result is either zero or STORE_FLAG_VALUE. If STORE_FLAG_VALUE
4461 is negative, take its 1's complement and remask, then count the zero bits. */
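/* E.g., when STORE_FLAG_VALUE is 1 the result is 0 or 1, giving
   bitwidth - 1 copies; when STORE_FLAG_VALUE is -1 the result is 0 or
   -1 and every bit is a sign bit copy.  */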
4462 nonzero = STORE_FLAG_VALUE;
4463 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4464 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4465 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4467 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4469 default:
4470 break;
4473 /* If we haven't been able to figure it out by one of the above rules,
4474 see if some of the high-order bits are known to be zero. If so,
4475 they are all copies of the sign bit, so return their count. If we
4476 can't safely compute the mask for this mode, give up and return 1. */
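/* E.g., if nonzero_bits reports that only the low 8 bits of a 32-bit
   value can be set, the 24 high-order bits are known zero and 24 sign
   bit copies are returned; if the sign bit itself may be set, only
   the minimum of 1 is returned.  */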
4478 bitwidth = GET_MODE_BITSIZE (mode);
4479 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4480 return 1;
4482 nonzero = nonzero_bits (x, mode);
4483 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4484 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4487 /* Calculate the rtx_cost of a single instruction. A return value of
4488 zero indicates an instruction pattern without a known cost. */
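/* For example, an illustrative pattern
   (set (reg:SI 0) (plus:SI (reg:SI 1) (reg:SI 2))) yields the rtx_cost
   of its PLUS source (or COSTS_N_INSNS (1) if that cost is zero),
   while a PARALLEL containing more than one SET yields 0.  */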
4490 int
4491 insn_rtx_cost (rtx pat)
4493 int i, cost;
4494 rtx set;
4496 /* Extract the single set rtx from the instruction pattern.
4497 We can't use single_set since we only have the pattern. */
4498 if (GET_CODE (pat) == SET)
4499 set = pat;
4500 else if (GET_CODE (pat) == PARALLEL)
4502 set = NULL_RTX;
4503 for (i = 0; i < XVECLEN (pat, 0); i++)
4505 rtx x = XVECEXP (pat, 0, i);
4506 if (GET_CODE (x) == SET)
4508 if (set)
4509 return 0;
4510 set = x;
4513 if (!set)
4514 return 0;
4516 else
4517 return 0;
4519 cost = rtx_cost (SET_SRC (set), SET);
4520 return cost > 0 ? cost : COSTS_N_INSNS (1);
4523 /* Given an insn INSN and condition COND, return the condition in a
4524 canonical form to simplify testing by callers. Specifically:
4526 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4527 (2) Both operands will be machine operands; (cc0) will have been replaced.
4528 (3) If an operand is a constant, it will be the second operand.
4529 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4530 for GE, GEU, and LEU.
4532 If the condition cannot be understood, or is an inequality floating-point
4533 comparison which needs to be reversed, 0 will be returned.
4535 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4537 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4538 insn used in locating the condition was found. If a replacement test
4539 of the condition is desired, it should be placed in front of that
4540 insn and we will be sure that the inputs are still valid.
4542 If WANT_REG is nonzero, we wish the condition to be relative to that
4543 register, if possible. Therefore, do not canonicalize the condition
4544 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4545 to be a compare to a CC mode register.
4547 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4548 and at INSN. */
4550 rtx
4551 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4552 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4554 enum rtx_code code;
4555 rtx prev = insn;
4556 const_rtx set;
4557 rtx tem;
4558 rtx op0, op1;
4559 int reverse_code = 0;
4560 enum machine_mode mode;
4561 basic_block bb = BLOCK_FOR_INSN (insn);
4563 code = GET_CODE (cond);
4564 mode = GET_MODE (cond);
4565 op0 = XEXP (cond, 0);
4566 op1 = XEXP (cond, 1);
4568 if (reverse)
4569 code = reversed_comparison_code (cond, insn);
4570 if (code == UNKNOWN)
4571 return 0;
4573 if (earliest)
4574 *earliest = insn;
4576 /* If we are comparing a register with zero, see if the register is set
4577 in the previous insn to a COMPARE or a comparison operation. Perform
4578 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4579 in cse.c */
4581 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4582 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4583 && op1 == CONST0_RTX (GET_MODE (op0))
4584 && op0 != want_reg)
4586 /* Set nonzero when we find something of interest. */
4587 rtx x = 0;
4589 #ifdef HAVE_cc0
4590 /* If comparison with cc0, import actual comparison from compare
4591 insn. */
4592 if (op0 == cc0_rtx)
4594 if ((prev = prev_nonnote_insn (prev)) == 0
4595 || !NONJUMP_INSN_P (prev)
4596 || (set = single_set (prev)) == 0
4597 || SET_DEST (set) != cc0_rtx)
4598 return 0;
4600 op0 = SET_SRC (set);
4601 op1 = CONST0_RTX (GET_MODE (op0));
4602 if (earliest)
4603 *earliest = prev;
4605 #endif
4607 /* If this is a COMPARE, pick up the two things being compared. */
4608 if (GET_CODE (op0) == COMPARE)
4610 op1 = XEXP (op0, 1);
4611 op0 = XEXP (op0, 0);
4612 continue;
4614 else if (!REG_P (op0))
4615 break;
4617 /* Go back to the previous insn. Stop if it is not an INSN. We also
4618 stop if it isn't a single set or if it has a REG_INC note because
4619 we don't want to bother dealing with it. */
4621 if ((prev = prev_nonnote_insn (prev)) == 0
4622 || !NONJUMP_INSN_P (prev)
4623 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4624 /* In cfglayout mode, there do not have to be labels at the
4625 beginning of a block, or jumps at the end, so the previous
4626 conditions would not stop us when we reach bb boundary. */
4627 || BLOCK_FOR_INSN (prev) != bb)
4628 break;
4630 set = set_of (op0, prev);
4632 if (set
4633 && (GET_CODE (set) != SET
4634 || !rtx_equal_p (SET_DEST (set), op0)))
4635 break;
4637 /* If this is setting OP0, get what it sets it to if it looks
4638 relevant. */
4639 if (set)
4641 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4642 #ifdef FLOAT_STORE_FLAG_VALUE
4643 REAL_VALUE_TYPE fsfv;
4644 #endif
4646 /* ??? We may not combine comparisons done in a CCmode with
4647 comparisons not done in a CCmode. This is to aid targets
4648 like Alpha that have an IEEE compliant EQ instruction, and
4649 a non-IEEE compliant BEQ instruction. The use of CCmode is
4650 actually artificial, simply to prevent the combination, but
4651 should not affect other platforms.
4653 However, we must allow VOIDmode comparisons to match either
4654 CCmode or non-CCmode comparison, because some ports have
4655 modeless comparisons inside branch patterns.
4657 ??? This mode check should perhaps look more like the mode check
4658 in simplify_comparison in combine. */
4660 if ((GET_CODE (SET_SRC (set)) == COMPARE
4661 || (((code == NE
4662 || (code == LT
4663 && GET_MODE_CLASS (inner_mode) == MODE_INT
4664 && (GET_MODE_BITSIZE (inner_mode)
4665 <= HOST_BITS_PER_WIDE_INT)
4666 && (STORE_FLAG_VALUE
4667 & ((HOST_WIDE_INT) 1
4668 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4669 #ifdef FLOAT_STORE_FLAG_VALUE
4670 || (code == LT
4671 && SCALAR_FLOAT_MODE_P (inner_mode)
4672 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4673 REAL_VALUE_NEGATIVE (fsfv)))
4674 #endif
4676 && COMPARISON_P (SET_SRC (set))))
4677 && (((GET_MODE_CLASS (mode) == MODE_CC)
4678 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4679 || mode == VOIDmode || inner_mode == VOIDmode))
4680 x = SET_SRC (set);
4681 else if (((code == EQ
4682 || (code == GE
4683 && (GET_MODE_BITSIZE (inner_mode)
4684 <= HOST_BITS_PER_WIDE_INT)
4685 && GET_MODE_CLASS (inner_mode) == MODE_INT
4686 && (STORE_FLAG_VALUE
4687 & ((HOST_WIDE_INT) 1
4688 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4689 #ifdef FLOAT_STORE_FLAG_VALUE
4690 || (code == GE
4691 && SCALAR_FLOAT_MODE_P (inner_mode)
4692 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4693 REAL_VALUE_NEGATIVE (fsfv)))
4694 #endif
4696 && COMPARISON_P (SET_SRC (set))
4697 && (((GET_MODE_CLASS (mode) == MODE_CC)
4698 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4699 || mode == VOIDmode || inner_mode == VOIDmode))
4702 reverse_code = 1;
4703 x = SET_SRC (set);
4705 else
4706 break;
4709 else if (reg_set_p (op0, prev))
4710 /* If this sets OP0, but not directly, we have to give up. */
4711 break;
4713 if (x)
4715 /* If the caller is expecting the condition to be valid at INSN,
4716 make sure X doesn't change before INSN. */
4717 if (valid_at_insn_p)
4718 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4719 break;
4720 if (COMPARISON_P (x))
4721 code = GET_CODE (x);
4722 if (reverse_code)
4724 code = reversed_comparison_code (x, prev);
4725 if (code == UNKNOWN)
4726 return 0;
4727 reverse_code = 0;
4730 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4731 if (earliest)
4732 *earliest = prev;
4736 /* If constant is first, put it last. */
4737 if (CONSTANT_P (op0))
4738 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4740 /* If OP0 is the result of a comparison, we weren't able to find what
4741 was really being compared, so fail. */
4742 if (!allow_cc_mode
4743 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4744 return 0;
4746 /* Canonicalize any ordered comparison with integers involving equality
4747 if we can do computations in the relevant mode and we do not
4748 overflow. */
4750 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4751 && GET_CODE (op1) == CONST_INT
4752 && GET_MODE (op0) != VOIDmode
4753 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4755 HOST_WIDE_INT const_val = INTVAL (op1);
4756 unsigned HOST_WIDE_INT uconst_val = const_val;
4757 unsigned HOST_WIDE_INT max_val
4758 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
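/* For illustration: (le x 5) becomes (lt x 6), (ge x 5) becomes
   (gt x 4), (leu x 5) becomes (ltu x 6) and (geu x 5) becomes
   (gtu x 4); the guards below skip the rewrite when the constant is
   already at the extreme value of its mode and adjusting it by one
   would wrap around.  */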
4760 switch (code)
4762 case LE:
4763 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4764 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4765 break;
4767 /* When cross-compiling, const_val might be sign-extended from
4768 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
4769 case GE:
4770 if ((HOST_WIDE_INT) (const_val & max_val)
4771 != (((HOST_WIDE_INT) 1
4772 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4773 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4774 break;
4776 case LEU:
4777 if (uconst_val < max_val)
4778 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4779 break;
4781 case GEU:
4782 if (uconst_val != 0)
4783 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4784 break;
4786 default:
4787 break;
4791 /* Never return CC0; return zero instead. */
4792 if (CC0_P (op0))
4793 return 0;
4795 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4798 /* Given a jump insn JUMP, return the condition that will cause it to branch
4799 to its JUMP_LABEL. If the condition cannot be understood, or is an
4800 inequality floating-point comparison which needs to be reversed, 0 will
4801 be returned.
4803 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4804 insn used in locating the condition was found. If a replacement test
4805 of the condition is desired, it should be placed in front of that
4806 insn and we will be sure that the inputs are still valid. If EARLIEST
4807 is null, the returned condition will be valid at INSN.
4809 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4810 compare to a CC mode register.
4812 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4814 rtx
4815 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4817 rtx cond;
4818 int reverse;
4819 rtx set;
4821 /* If this is not a standard conditional jump, we can't parse it. */
4822 if (!JUMP_P (jump)
4823 || ! any_condjump_p (jump))
4824 return 0;
4825 set = pc_set (jump);
4827 cond = XEXP (SET_SRC (set), 0);
4829 /* If this branches to JUMP_LABEL when the condition is false, reverse
4830 the condition. */
4831 reverse
4832 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4833 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4835 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4836 allow_cc_mode, valid_at_insn_p);
4839 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4840 TARGET_MODE_REP_EXTENDED.
4842 Note that we assume that the property of
4843 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4844 narrower than mode B. I.e., if A is a mode narrower than B then in
4845 order to be able to operate on it in mode B, mode A needs to
4846 satisfy the requirements set by the representation of mode B. */
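/* As an illustration, on a hypothetical target whose
   TARGET_MODE_REP_EXTENDED hook reports SIGN_EXTEND for QImode values
   held in HImode and for HImode values held in SImode, the table ends
   up with [HImode][QImode] = 8, [SImode][HImode] = 16 and, by the
   stickiness assumption above, [SImode][QImode] = 24.  Note that the
   outer loop below steps IN_MODE via MODE, which the inner loop
   leaves equal to IN_MODE, so it simply advances to the next wider
   mode.  */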
4848 static void
4849 init_num_sign_bit_copies_in_rep (void)
4851 enum machine_mode mode, in_mode;
4853 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4854 in_mode = GET_MODE_WIDER_MODE (mode))
4855 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4856 mode = GET_MODE_WIDER_MODE (mode))
4858 enum machine_mode i;
4860 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4861 extends to the next widest mode. */
4862 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4863 || GET_MODE_WIDER_MODE (mode) == in_mode);
4865 /* We are in in_mode. Count how many bits outside of mode
4866 have to be copies of the sign-bit. */
4867 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4869 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4871 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4872 /* We can only check sign-bit copies starting from the
4873 top-bit. In order to be able to check the bits we
4874 have already seen we pretend that subsequent bits
4875 have to be sign-bit copies too. */
4876 || num_sign_bit_copies_in_rep [in_mode][mode])
4877 num_sign_bit_copies_in_rep [in_mode][mode]
4878 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4883 /* Suppose that truncation from the machine mode of X to MODE is not a
4884 no-op. See if there is anything special about X so that we can
4885 assume it already contains a truncated value of MODE. */
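/* For example, if num_sign_bit_copies_in_rep[SImode][QImode] is 24
   (as on the hypothetical target sketched above), an SImode value
   known to have at least 25 sign bit copies already satisfies the
   QImode representation, so the truncation is a no-op for it.  */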
4887 bool
4888 truncated_to_mode (enum machine_mode mode, const_rtx x)
4890 /* This register has already been used in MODE without explicit
4891 truncation. */
4892 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4893 return true;
4895 /* See if we already satisfy the requirements of MODE. If yes we
4896 can just switch to MODE. */
4897 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4898 && (num_sign_bit_copies (x, GET_MODE (x))
4899 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4900 return true;
4902 return false;
4905 /* Initialize non_rtx_starting_operands, which is used to speed up
4906 for_each_rtx. */
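/* E.g., PLUS has format "ee", so its first rtx operand is at index 0,
   while CONST_INT has format "w" and gets -1 because it has no rtx
   operands at all.  */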
4907 void
4908 init_rtlanal (void)
4910 int i;
4911 for (i = 0; i < NUM_RTX_CODE; i++)
4913 const char *format = GET_RTX_FORMAT (i);
4914 const char *first = strpbrk (format, "eEV");
4915 non_rtx_starting_operands[i] = first ? first - format : -1;
4918 init_num_sign_bit_copies_in_rep ();
4921 /* Check whether this is a constant pool constant. */
4922 bool
4923 constant_pool_constant_p (rtx x)
4925 x = avoid_constant_pool_reference (x);
4926 return GET_CODE (x) == CONST_DOUBLE;