/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);
/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
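/* The GET_RTX_FORMAT loop above is the standard idiom this file uses to
   walk an rtx: an 'e' in the format string marks a subexpression operand
   and an 'E' marks a vector of subexpressions.  As an illustrative sketch
   (not part of GCC proper), here is the same traversal pattern used to
   count REG nodes in an expression; the helper name count_regs_example
   is hypothetical.  */

static int
count_regs_example (rtx x)
{
  const char *fmt;
  int i, count = 0;

  if (x == 0)
    return 0;
  if (REG_P (x))
    return 1;

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      count += count_regs_example (XEXP (x, i));
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          count += count_regs_example (XVECEXP (x, i, j));
      }
  return count;
}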
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

static int
rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT || !unaligned_mems)
            return 0;

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;
#endif

          return offset % GET_MODE_SIZE (mode) != 0;
        }

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
}
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))
            return true;

          return nonzero_address_p (XEXP (x, 0));
        }
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}
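/* For example (an illustrative sketch, not part of GCC proper): for the
   rtx (const (plus (symbol_ref "x") (const_int 4))), get_integer_term
   returns 4, and for (const (minus (symbol_ref "x") (const_int 4))) it
   returns -4; get_related_value below returns (symbol_ref "x") in both
   cases.  */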
/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }

  return 0;
}
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}
/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                         REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
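/* Illustrative usage sketch (not part of GCC proper): passes commonly
   combine the predicates above to ask whether a register is free over a
   range of insns.  The helper name below is hypothetical.  */

static bool
reg_available_between_example (rtx reg, rtx from_insn, rtx to_insn)
{
  /* REG may be reused across the range if it is neither read nor set
     or clobbered between FROM_INSN and TO_INSN (exclusive).  */
  return !reg_used_between_p (reg, from_insn, to_insn)
         && !reg_set_between_p (reg, from_insn, to_insn);
}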
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common
                 case only a single set is present in the pattern, so we
                 want to avoid checking for REG_UNUSED notes unless
                 necessary.

                 When we reach a set for the first time, we just expect
                 it is the single set we are looking for, and only when
                 more sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
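/* Illustrative usage sketch (not part of GCC proper): the usual way to
   reach this function is through the single_set macro from rtl.h, which
   returns the lone effective SET of an insn or NULL_RTX.  A pass that
   wants the source and destination of a simple move might do something
   like the following; the helper name get_move_operands_example is
   hypothetical.  */

static bool
get_move_operands_example (rtx insn, rtx *src, rtx *dest)
{
  rtx set = single_set (insn);

  if (set == NULL_RTX)
    return false;               /* Multiple live SETs, or not an insn.  */

  *src = SET_SRC (set);
  *dest = SET_DEST (set);
  return true;
}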
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
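/* Illustrative usage sketch (not part of GCC proper): a cleanup pass can
   walk the insn chain and delete no-op moves.  This assumes the usual
   insn-chain and delete_insn interfaces; delete_noop_moves_example is a
   hypothetical helper name.  */

static void
delete_noop_moves_example (void)
{
  rtx insn, next;

  for (insn = get_insns (); insn != 0; insn = next)
    {
      next = NEXT_INSN (insn);  /* Fetch before possibly deleting INSN.  */
      if (INSN_P (insn) && noop_move_p (insn))
        delete_insn (insn);
    }
}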
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
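/* Illustrative usage sketch (not part of GCC proper): note_stores drives
   a callback over every store destination in a pattern, much as set_of_1
   above is driven by set_of.  Here is a minimal callback that records
   whether any hard register is stored into; the names in this sketch are
   hypothetical.  */

static void
note_hard_reg_store_example (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                             void *data)
{
  bool *found = (bool *) data;

  if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    *found = true;
}

/* Called as, e.g.:
     bool found = false;
     note_stores (PATTERN (insn), note_hard_reg_store_example, &found);  */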
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}
/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
              : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}
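/* Illustrative usage sketch (not part of GCC proper): a typical consumer
   asks for a note and, if one is present, reads its datum with
   XEXP (note, 0).  For example, to substitute a known-equal constant for
   the source of a single-set insn, a pass might do:

     rtx note = find_reg_equal_equiv_note (insn);
     if (note != 0 && CONSTANT_P (XEXP (note, 0)))
       ... use XEXP (note, 0) in place of SET_SRC (single_set (insn)) ...  */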
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}
/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  gcc_unreachable ();
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no instructions
   should be moved or combined across them.  This includes only volatile
   asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Return nonzero if evaluating rtx X might cause a trap.  UNALIGNED_MEMS
   controls whether nonzero is returned for unaligned memory accesses on
   strict alignment machines.  */

static int
may_trap_p_1 (rtx x, bool unaligned_mems)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (MEM_NOTRAP_P (x)
          && (!STRICT_ALIGNMENT || !unaligned_mems))
        return 0;
      return
        rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
        return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
        break;
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
        return 1;
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case EQ:
    case NE:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
        return 1;
      break;

    case NEG:
    case ABS:
    case SUBREG:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
          && flag_trapping_math)
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (may_trap_p_1 (XEXP (x, i), unaligned_mems))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), unaligned_mems))
              return 1;
        }
    }
  return 0;
}
/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  return may_trap_p_1 (x, false);
}
/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)
      {
        if (s->d == 1)
          return *s->ip;
        else
          return *s->cp;
      }

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   verify may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      if ((reg:SI) == 1)
        (set (reg:SI) (mem:SI (%fp - 7)))
      else
        (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

int
may_trap_or_fault_p (rtx x)
{
  return may_trap_p_1 (x, true);
}
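/* Illustrative usage sketch (not part of GCC proper): code motion passes
   use these predicates as a gate before evaluating an expression in a
   context where the original program would not have.  The helper name
   below is hypothetical.  */

static bool
safe_to_speculate_example (rtx x)
{
  /* X must not trap or fault, and must not have side effects such as
     volatile references or auto-increments.  */
  return !may_trap_or_fault_p (x) && !side_effects_p (x);
}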
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 0;

    case LT:
    case LTU:
    case GT:
    case GTU:
    case LE:
    case LEU:
    case GE:
    case GEU:
      return 1;

    default:
      break;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          if (inequality_comparisons_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}
2351 /* Replace any occurrence of FROM in X with TO. The function does
2352 not descend into CONST_DOUBLEs when replacing.
2354 Note that copying is not done so X must not be shared unless all copies
2355 are to be modified. */
2358 replace_rtx (rtx x, rtx from, rtx to)
2360 int i, j;
2361 const char *fmt;
2363 /* The following prevents infinite loops when we replace a MEM inside a
2364 CONST_DOUBLE with the same CONST_DOUBLE. */
2365 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2366 return x;
2368 if (x == from)
2369 return to;
2371 /* Allow this function to make replacements in EXPR_LISTs. */
2372 if (x == 0)
2373 return 0;
2375 if (GET_CODE (x) == SUBREG)
2377 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2379 if (GET_CODE (new) == CONST_INT)
2381 x = simplify_subreg (GET_MODE (x), new,
2382 GET_MODE (SUBREG_REG (x)),
2383 SUBREG_BYTE (x));
2384 gcc_assert (x);
2386 else
2387 SUBREG_REG (x) = new;
2389 return x;
2391 else if (GET_CODE (x) == ZERO_EXTEND)
2393 rtx new = replace_rtx (XEXP (x, 0), from, to);
2395 if (GET_CODE (new) == CONST_INT)
2397 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2398 new, GET_MODE (XEXP (x, 0)));
2399 gcc_assert (x);
2401 else
2402 XEXP (x, 0) = new;
2404 return x;
2407 fmt = GET_RTX_FORMAT (GET_CODE (x));
2408 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2410 if (fmt[i] == 'e')
2411 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2412 else if (fmt[i] == 'E')
2413 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2414 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2417 return x;
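/* An illustrative usage sketch (the insn and registers are hypothetical):

     rtx pat = copy_rtx (PATTERN (insn));
     pat = replace_rtx (pat, old_reg, new_reg);

   The copy_rtx call matters because replace_rtx rewrites X in place,
   as the comment above warns.  */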
2420 /* Throughout the rtx X, replace many registers according to REG_MAP.
2421 Return the replacement for X (which may be X with altered contents).
2422 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2423 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2425 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2426 should not be mapped to pseudos or vice versa since validate_change
2427 is not called.
2429 If REPLACE_DEST is 1, replacements are also done in destinations;
2430 otherwise, only sources are replaced. */
2433 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2435 enum rtx_code code;
2436 int i;
2437 const char *fmt;
2439 if (x == 0)
2440 return x;
2442 code = GET_CODE (x);
2443 switch (code)
2445 case SCRATCH:
2446 case PC:
2447 case CC0:
2448 case CONST_INT:
2449 case CONST_DOUBLE:
2450 case CONST_VECTOR:
2451 case CONST:
2452 case SYMBOL_REF:
2453 case LABEL_REF:
2454 return x;
2456 case REG:
2457 /* Verify that the register has an entry before trying to access it. */
2458 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2460 /* SUBREGs can't be shared. Always return a copy to ensure that if
2461 this replacement occurs more than once then each instance will
2462 get a distinct rtx. */
2463 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2464 return copy_rtx (reg_map[REGNO (x)]);
2465 return reg_map[REGNO (x)];
2467 return x;
2469 case SUBREG:
2470 /* Prevent making nested SUBREGs. */
2471 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2472 && reg_map[REGNO (SUBREG_REG (x))] != 0
2473 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2475 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2476 return simplify_gen_subreg (GET_MODE (x), map_val,
2477 GET_MODE (SUBREG_REG (x)),
2478 SUBREG_BYTE (x));
2480 break;
2482 case SET:
2483 if (replace_dest)
2484 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2486 else if (MEM_P (SET_DEST (x))
2487 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2488 /* Even if we are not replacing destinations, replace a register if it
2489 is CONTAINED in the destination (i.e., the destination is memory or a
2490 STRICT_LOW_PART).
2491 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2492 reg_map, nregs, 0);
2493 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2494 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2495 break;
2497 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2498 return x;
2500 default:
2501 break;
2504 fmt = GET_RTX_FORMAT (code);
2505 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2507 if (fmt[i] == 'e')
2508 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2509 else if (fmt[i] == 'E')
2511 int j;
2512 for (j = 0; j < XVECLEN (x, i); j++)
2513 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2514 nregs, replace_dest);
2517 return x;
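/* Illustrative sketch (hypothetical map): with NREGS == 6 and
   reg_map[5] set to a SUBREG, every source occurrence of (reg 5) in X
   is rewritten into a fresh copy of that SUBREG, while destinations
   are left alone unless REPLACE_DEST is nonzero.  */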
2520 /* Replace occurrences of the old label in *X with the new one.
2521 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2524 replace_label (rtx *x, void *data)
2526 rtx l = *x;
2527 rtx old_label = ((replace_label_data *) data)->r1;
2528 rtx new_label = ((replace_label_data *) data)->r2;
2529 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2531 if (l == NULL_RTX)
2532 return 0;
2534 if (GET_CODE (l) == SYMBOL_REF
2535 && CONSTANT_POOL_ADDRESS_P (l))
2537 rtx c = get_pool_constant (l);
2538 if (rtx_referenced_p (old_label, c))
2540 rtx new_c, new_l;
2541 replace_label_data *d = (replace_label_data *) data;
2543 /* Create a copy of constant C; replace the label inside
2544 but do not update LABEL_NUSES because uses in constant pool
2545 are not counted. */
2546 new_c = copy_rtx (c);
2547 d->update_label_nuses = false;
2548 for_each_rtx (&new_c, replace_label, data);
2549 d->update_label_nuses = update_label_nuses;
2551 /* Add the new constant NEW_C to the constant pool and replace
2552 the old reference to the constant with the new reference. */
2553 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2554 *x = replace_rtx (l, l, new_l);
2556 return 0;
2559 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2560 field. This is not handled by for_each_rtx because it doesn't
2561 handle unprinted ('0') fields. */
2562 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2563 JUMP_LABEL (l) = new_label;
2565 if ((GET_CODE (l) == LABEL_REF
2566 || GET_CODE (l) == INSN_LIST)
2567 && XEXP (l, 0) == old_label)
2569 XEXP (l, 0) = new_label;
2570 if (update_label_nuses)
2572 ++LABEL_NUSES (new_label);
2573 --LABEL_NUSES (old_label);
2575 return 0;
2578 return 0;
2581 /* Return nonzero when *BODY is equal to X or when X is directly
2582 referenced by *BODY, so that FOR_EACH_RTX stops traversing and returns
2583 nonzero too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2585 static int
2586 rtx_referenced_p_1 (rtx *body, void *x)
2588 rtx y = (rtx) x;
2590 if (*body == NULL_RTX)
2591 return y == NULL_RTX;
2593 /* Return true if a label_ref *BODY refers to label Y. */
2594 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2595 return XEXP (*body, 0) == y;
2597 /* If *BODY is a reference to a pool constant, traverse the constant. */
2598 if (GET_CODE (*body) == SYMBOL_REF
2599 && CONSTANT_POOL_ADDRESS_P (*body))
2600 return rtx_referenced_p (y, get_pool_constant (*body));
2602 /* By default, compare the RTL expressions. */
2603 return rtx_equal_p (*body, y);
2606 /* Return true if X is referenced in BODY. */
2609 rtx_referenced_p (rtx x, rtx body)
2611 return for_each_rtx (&body, rtx_referenced_p_1, x);
2614 /* If INSN is a tablejump, return true and store the label preceding the jump
2615 table in *LABELP and the jump table itself in *TABLEP. LABELP and TABLEP may be NULL. */
2617 bool
2618 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2620 rtx label, table;
2622 if (JUMP_P (insn)
2623 && (label = JUMP_LABEL (insn)) != NULL_RTX
2624 && (table = next_active_insn (label)) != NULL_RTX
2625 && JUMP_P (table)
2626 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2627 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2629 if (labelp)
2630 *labelp = label;
2631 if (tablep)
2632 *tablep = table;
2633 return true;
2635 return false;
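/* For illustration, the insn stream recognized above has the shape

     (jump_insn ...)                  with JUMP_LABEL pointing to L
     (code_label L)
     (jump_insn (addr_vec [...]))     or (addr_diff_vec [...])

   i.e. the dispatch jump, the label, and the jump table itself.  */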
2638 /* A subroutine of computed_jump_p. Return 1 if X contains a REG or MEM or
2639 constant that is not in the constant pool and not in the condition
2640 of an IF_THEN_ELSE. */
2642 static int
2643 computed_jump_p_1 (rtx x)
2645 enum rtx_code code = GET_CODE (x);
2646 int i, j;
2647 const char *fmt;
2649 switch (code)
2651 case LABEL_REF:
2652 case PC:
2653 return 0;
2655 case CONST:
2656 case CONST_INT:
2657 case CONST_DOUBLE:
2658 case CONST_VECTOR:
2659 case SYMBOL_REF:
2660 case REG:
2661 return 1;
2663 case MEM:
2664 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2665 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2667 case IF_THEN_ELSE:
2668 return (computed_jump_p_1 (XEXP (x, 1))
2669 || computed_jump_p_1 (XEXP (x, 2)));
2671 default:
2672 break;
2675 fmt = GET_RTX_FORMAT (code);
2676 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2678 if (fmt[i] == 'e'
2679 && computed_jump_p_1 (XEXP (x, i)))
2680 return 1;
2682 else if (fmt[i] == 'E')
2683 for (j = 0; j < XVECLEN (x, i); j++)
2684 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2685 return 1;
2688 return 0;
2691 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2693 Tablejumps and casesi insns are not considered indirect jumps;
2694 we can recognize them by a (use (label_ref)). */
2697 computed_jump_p (rtx insn)
2699 int i;
2700 if (JUMP_P (insn))
2702 rtx pat = PATTERN (insn);
2704 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2705 return 0;
2706 else if (GET_CODE (pat) == PARALLEL)
2708 int len = XVECLEN (pat, 0);
2709 int has_use_labelref = 0;
2711 for (i = len - 1; i >= 0; i--)
2712 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2713 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2714 == LABEL_REF))
2715 has_use_labelref = 1;
2717 if (! has_use_labelref)
2718 for (i = len - 1; i >= 0; i--)
2719 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2720 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2721 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2722 return 1;
2724 else if (GET_CODE (pat) == SET
2725 && SET_DEST (pat) == pc_rtx
2726 && computed_jump_p_1 (SET_SRC (pat)))
2727 return 1;
2729 return 0;
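/* For example, (set (pc) (reg:SI 30)) is a computed jump, whereas a
   tablejump's PARALLEL contains a (use (label_ref ...)) and is
   therefore rejected by the loop above.  */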
2732 /* Optimized inner loop of for_each_rtx, trying to avoid useless recursive
2733 calls. Processes the sub-expressions of EXP and passes them to F. */
2734 static int
2735 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2737 int result, i, j;
2738 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2739 rtx *x;
2741 for (; format[n] != '\0'; n++)
2743 switch (format[n])
2745 case 'e':
2746 /* Call F on X. */
2747 x = &XEXP (exp, n);
2748 result = (*f) (x, data);
2749 if (result == -1)
2750 /* Do not traverse sub-expressions. */
2751 continue;
2752 else if (result != 0)
2753 /* Stop the traversal. */
2754 return result;
2756 if (*x == NULL_RTX)
2757 /* There are no sub-expressions. */
2758 continue;
2760 i = non_rtx_starting_operands[GET_CODE (*x)];
2761 if (i >= 0)
2763 result = for_each_rtx_1 (*x, i, f, data);
2764 if (result != 0)
2765 return result;
2767 break;
2769 case 'V':
2770 case 'E':
2771 if (XVEC (exp, n) == 0)
2772 continue;
2773 for (j = 0; j < XVECLEN (exp, n); ++j)
2775 /* Call F on X. */
2776 x = &XVECEXP (exp, n, j);
2777 result = (*f) (x, data);
2778 if (result == -1)
2779 /* Do not traverse sub-expressions. */
2780 continue;
2781 else if (result != 0)
2782 /* Stop the traversal. */
2783 return result;
2785 if (*x == NULL_RTX)
2786 /* There are no sub-expressions. */
2787 continue;
2789 i = non_rtx_starting_operands[GET_CODE (*x)];
2790 if (i >= 0)
2792 result = for_each_rtx_1 (*x, i, f, data);
2793 if (result != 0)
2794 return result;
2797 break;
2799 default:
2800 /* Nothing to do. */
2801 break;
2805 return 0;
2808 /* Traverse X via depth-first search, calling F for each
2809 sub-expression (including X itself). F is also passed the DATA.
2810 If F returns -1, do not traverse sub-expressions, but continue
2811 traversing the rest of the tree. If F ever returns any other
2812 nonzero value, stop the traversal, and return the value returned
2813 by F. Otherwise, return 0. This function does not traverse inside
2814 tree structures that contain RTX_EXPRs, or into sub-expressions
2815 whose format code is `0', since it is not known whether or not those
2816 codes are actually RTL.
2818 This routine is very general, and could (should?) be used to
2819 implement many of the other routines in this file. */
2822 for_each_rtx (rtx *x, rtx_function f, void *data)
2824 int result;
2825 int i;
2827 /* Call F on X. */
2828 result = (*f) (x, data);
2829 if (result == -1)
2830 /* Do not traverse sub-expressions. */
2831 return 0;
2832 else if (result != 0)
2833 /* Stop the traversal. */
2834 return result;
2836 if (*x == NULL_RTX)
2837 /* There are no sub-expressions. */
2838 return 0;
2840 i = non_rtx_starting_operands[GET_CODE (*x)];
2841 if (i < 0)
2842 return 0;
2844 return for_each_rtx_1 (*x, i, f, data);
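/* A minimal usage sketch; the callback below is hypothetical and not
   part of this file.  It counts the REG expressions contained in an
   rtx X (including X itself):

     static int
     count_regs_1 (rtx *loc, void *data)
     {
       if (*loc && REG_P (*loc))
         ++*(int *) data;
       return 0;
     }

     int n = 0;
     for_each_rtx (&x, count_regs_1, &n);

   Returning 0 continues the traversal; returning -1 would skip the
   sub-expressions of *LOC, and any other nonzero value would stop the
   walk and be returned from for_each_rtx.  */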
2848 /* Searches X for any reference to REGNO, returning the rtx of the
2849 reference found if any. Otherwise, returns NULL_RTX. */
2852 regno_use_in (unsigned int regno, rtx x)
2854 const char *fmt;
2855 int i, j;
2856 rtx tem;
2858 if (REG_P (x) && REGNO (x) == regno)
2859 return x;
2861 fmt = GET_RTX_FORMAT (GET_CODE (x));
2862 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2864 if (fmt[i] == 'e')
2866 if ((tem = regno_use_in (regno, XEXP (x, i))))
2867 return tem;
2869 else if (fmt[i] == 'E')
2870 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2871 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2872 return tem;
2875 return NULL_RTX;
2878 /* Return a value indicating whether OP, an operand of a commutative
2879 operation, is preferred as the first or second operand. The higher
2880 the value, the stronger the preference for being the first operand.
2881 We use negative values to indicate a preference for the first operand
2882 and positive values for the second operand. */
2885 commutative_operand_precedence (rtx op)
2887 enum rtx_code code = GET_CODE (op);
2889 /* Constants always become the second operand. Prefer "nice" constants. */
2890 if (code == CONST_INT)
2891 return -7;
2892 if (code == CONST_DOUBLE)
2893 return -6;
2894 op = avoid_constant_pool_reference (op);
2895 code = GET_CODE (op);
2897 switch (GET_RTX_CLASS (code))
2899 case RTX_CONST_OBJ:
2900 if (code == CONST_INT)
2901 return -5;
2902 if (code == CONST_DOUBLE)
2903 return -4;
2904 return -3;
2906 case RTX_EXTRA:
2907 /* SUBREGs of objects should come second. */
2908 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2909 return -2;
2911 if (!CONSTANT_P (op))
2912 return 0;
2913 else
2914 /* As for RTX_CONST_OBJ. */
2915 return -3;
2917 case RTX_OBJ:
2918 /* Complex expressions should come first, so decrease the priority
2919 of objects. */
2920 return -1;
2922 case RTX_COMM_ARITH:
2923 /* Prefer operands that are themselves commutative to be first.
2924 This helps to make things linear. In particular,
2925 (and (and (reg) (reg)) (not (reg))) is canonical. */
2926 return 4;
2928 case RTX_BIN_ARITH:
2929 /* If only one operand is a binary expression, it will be the first
2930 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2931 is canonical, although it will usually be further simplified. */
2932 return 2;
2934 case RTX_UNARY:
2935 /* Then prefer NEG and NOT. */
2936 if (code == NEG || code == NOT)
2937 return 1;
2939 default:
2940 return 0;
2944 /* Return 1 iff it is necessary to swap operands of commutative operation
2945 in order to canonicalize expression. */
2948 swap_commutative_operands_p (rtx x, rtx y)
2950 return (commutative_operand_precedence (x)
2951 < commutative_operand_precedence (y));
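/* Illustrative example: in (plus (const_int 4) (reg)), the CONST_INT
   has precedence -7 and the REG (an object) has precedence -1, so
   swap_commutative_operands_p returns 1 and canonicalization yields
   (plus (reg) (const_int 4)).  */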
2954 /* Return 1 if X is an autoincrement side effect and the register is
2955 not the stack pointer. */
2957 auto_inc_p (rtx x)
2959 switch (GET_CODE (x))
2961 case PRE_INC:
2962 case POST_INC:
2963 case PRE_DEC:
2964 case POST_DEC:
2965 case PRE_MODIFY:
2966 case POST_MODIFY:
2967 /* There are no REG_INC notes for SP. */
2968 if (XEXP (x, 0) != stack_pointer_rtx)
2969 return 1;
2970 default:
2971 break;
2973 return 0;
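/* For example, auto_inc_p returns 1 for (post_inc:SI (reg R)) when R is
   not the stack pointer, and 0 both for (pre_dec:SI (reg sp)) and for
   any expression that is not an autoincrement at all.  */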
2976 /* Return 1 if the sequence of instructions beginning with FROM and up
2977 to and including TO is safe to move. If NEW_TO is non-NULL, and
2978 the sequence is not already safe to move, but can be easily
2979 extended to a sequence which is safe, then NEW_TO will point to the
2980 end of the extended sequence.
2982 For now, this function only checks that the region contains whole
2983 exception regions, but it could be extended to check additional
2984 conditions as well. */
2987 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
2989 int eh_region_count = 0;
2990 int past_to_p = 0;
2991 rtx r = from;
2993 /* By default, assume the end of the region will be what was
2994 suggested. */
2995 if (new_to)
2996 *new_to = to;
2998 while (r)
3000 if (NOTE_P (r))
3002 switch (NOTE_LINE_NUMBER (r))
3004 case NOTE_INSN_EH_REGION_BEG:
3005 ++eh_region_count;
3006 break;
3008 case NOTE_INSN_EH_REGION_END:
3009 if (eh_region_count == 0)
3010 /* This sequence of instructions contains the end of
3011 an exception region, but not the beginning. Moving
3012 it will cause chaos. */
3013 return 0;
3015 --eh_region_count;
3016 break;
3018 default:
3019 break;
3022 else if (past_to_p)
3023 /* If we've passed TO, and we see a non-note instruction, we
3024 can't extend the sequence to a movable sequence. */
3025 return 0;
3027 if (r == to)
3029 if (!new_to)
3030 /* It's OK to move the sequence if there were matched sets of
3031 exception region notes. */
3032 return eh_region_count == 0;
3034 past_to_p = 1;
3037 /* It's OK to move the sequence if there were matched sets of
3038 exception region notes. */
3039 if (past_to_p && eh_region_count == 0)
3041 *new_to = r;
3042 return 1;
3045 /* Go to the next instruction. */
3046 r = NEXT_INSN (r);
3049 return 0;
3052 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3054 loc_mentioned_in_p (rtx *loc, rtx in)
3056 enum rtx_code code = GET_CODE (in);
3057 const char *fmt = GET_RTX_FORMAT (code);
3058 int i, j;
3060 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3062 if (loc == &in->u.fld[i].rt_rtx)
3063 return 1;
3064 if (fmt[i] == 'e')
3066 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3067 return 1;
3069 else if (fmt[i] == 'E')
3070 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3071 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3072 return 1;
3074 return 0;
3077 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3078 and SUBREG_BYTE, return the bit offset where the subreg begins
3079 (counting from the least significant bit of the operand). */
3081 unsigned int
3082 subreg_lsb_1 (enum machine_mode outer_mode,
3083 enum machine_mode inner_mode,
3084 unsigned int subreg_byte)
3086 unsigned int bitpos;
3087 unsigned int byte;
3088 unsigned int word;
3090 /* A paradoxical subreg begins at bit position 0. */
3091 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3092 return 0;
3094 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3095 /* If the subreg crosses a word boundary ensure that
3096 it also begins and ends on a word boundary. */
3097 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3098 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3099 && (subreg_byte % UNITS_PER_WORD
3100 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3102 if (WORDS_BIG_ENDIAN)
3103 word = (GET_MODE_SIZE (inner_mode)
3104 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3105 else
3106 word = subreg_byte / UNITS_PER_WORD;
3107 bitpos = word * BITS_PER_WORD;
3109 if (BYTES_BIG_ENDIAN)
3110 byte = (GET_MODE_SIZE (inner_mode)
3111 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3112 else
3113 byte = subreg_byte % UNITS_PER_WORD;
3114 bitpos += byte * BITS_PER_UNIT;
3116 return bitpos;
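/* A worked example, assuming 32-bit little-endian words and bytes:
   for (subreg:SI (reg:DI R) 4), SUBREG_BYTE selects word 1, so the
   subreg begins at bit 32.  On a fully big-endian target the same
   byte offset selects bit 0 instead.  */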
3119 /* Given a subreg X, return the bit offset where the subreg begins
3120 (counting from the least significant bit of the reg). */
3122 unsigned int
3123 subreg_lsb (rtx x)
3125 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3126 SUBREG_BYTE (x));
3129 /* This function returns the regno offset of a subreg expression.
3130 xregno - A regno of an inner hard subreg_reg (or what will become one).
3131 xmode - The mode of xregno.
3132 offset - The byte offset.
3133 ymode - The mode of a top level SUBREG (or what may become one).
3134 RETURN - The regno offset which would be used. */
3135 unsigned int
3136 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3137 unsigned int offset, enum machine_mode ymode)
3139 int nregs_xmode, nregs_ymode, nregs_xmode_unit_int;
3140 int mode_multiple, nregs_multiple;
3141 int y_offset;
3142 enum machine_mode xmode_unit, xmode_unit_int;
3144 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3146 if (GET_MODE_INNER (xmode) == VOIDmode)
3147 xmode_unit = xmode;
3148 else
3149 xmode_unit = GET_MODE_INNER (xmode);
3151 if (FLOAT_MODE_P (xmode_unit))
3153 xmode_unit_int = int_mode_for_mode (xmode_unit);
3154 if (xmode_unit_int == BLKmode)
3155 /* It's probably bad to be here; a port should have an integer mode
3156 that's the same size as anything of which it takes a SUBREG. */
3157 xmode_unit_int = xmode_unit;
3159 else
3160 xmode_unit_int = xmode_unit;
3162 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
3164 /* Adjust nregs_xmode to allow for 'holes'. */
3165 if (nregs_xmode_unit_int != hard_regno_nregs[xregno][xmode_unit])
3166 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
3167 else
3168 nregs_xmode = hard_regno_nregs[xregno][xmode];
3170 nregs_ymode = hard_regno_nregs[xregno][ymode];
3172 /* If this is a big endian paradoxical subreg, which uses more actual
3173 hard registers than the original register, we must return a negative
3174 offset so that we find the proper highpart of the register. */
3175 if (offset == 0
3176 && nregs_ymode > nregs_xmode
3177 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3178 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3179 return nregs_xmode - nregs_ymode;
3181 if (offset == 0 || nregs_xmode == nregs_ymode)
3182 return 0;
3184 /* Size of ymode must not be greater than the size of xmode. */
3185 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3186 gcc_assert (mode_multiple != 0);
3188 y_offset = offset / GET_MODE_SIZE (ymode);
3189 nregs_multiple = nregs_xmode / nregs_ymode;
3190 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
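/* A worked example, assuming 32-bit hard registers: for
   (subreg:SI (reg:DI 10) 4), nregs_xmode == 2, nregs_ymode == 1,
   mode_multiple == 8 / 4 == 2, nregs_multiple == 2 and
   y_offset == 4 / 4 == 1, so the offset returned is
   (1 / (2 / 2)) * 1 == 1 and the subreg lives in hard register 11.  */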
3193 /* This function returns true when the offset is representable via
3194 subreg_offset in the given regno.
3195 xregno - A regno of an inner hard subreg_reg (or what will become one).
3196 xmode - The mode of xregno.
3197 offset - The byte offset.
3198 ymode - The mode of a top level SUBREG (or what may become one).
3199 RETURN - Whether the offset is representable. */
3200 bool
3201 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3202 unsigned int offset, enum machine_mode ymode)
3204 int nregs_xmode, nregs_ymode, nregs_xmode_unit, nregs_xmode_unit_int;
3205 int mode_multiple, nregs_multiple;
3206 int y_offset;
3207 enum machine_mode xmode_unit, xmode_unit_int;
3209 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3211 if (GET_MODE_INNER (xmode) == VOIDmode)
3212 xmode_unit = xmode;
3213 else
3214 xmode_unit = GET_MODE_INNER (xmode);
3216 if (FLOAT_MODE_P (xmode_unit))
3218 xmode_unit_int = int_mode_for_mode (xmode_unit);
3219 if (xmode_unit_int == BLKmode)
3220 /* It's probably bad to be here; a port should have an integer mode
3221 that's the same size as anything of which it takes a SUBREG. */
3222 xmode_unit_int = xmode_unit;
3224 else
3225 xmode_unit_int = xmode_unit;
3227 nregs_xmode_unit = hard_regno_nregs[xregno][xmode_unit];
3228 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
3230 /* If a non-scalar mode has holes in registers, we expect that it is
3231 made up of its units concatenated together. */
3232 if (nregs_xmode_unit != nregs_xmode_unit_int)
3234 gcc_assert (nregs_xmode_unit * GET_MODE_NUNITS (xmode)
3235 == hard_regno_nregs[xregno][xmode]);
3237 /* You can only ask for a SUBREG of a value with holes in the middle
3238 if you don't cross the holes. (Such a SUBREG should be done by
3239 picking a different register class, or doing it in memory if
3240 necessary.) An example of a value with holes is XCmode on 32-bit
3241 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3242 3 for each part, but in memory it's two 128-bit parts.
3243 Padding is assumed to be at the end (not necessarily the 'high part')
3244 of each unit. */
3245 if (nregs_xmode_unit != nregs_xmode_unit_int
3246 && (offset / GET_MODE_SIZE (xmode_unit_int) + 1
3247 < GET_MODE_NUNITS (xmode))
3248 && (offset / GET_MODE_SIZE (xmode_unit_int)
3249 != ((offset + GET_MODE_SIZE (ymode) - 1)
3250 / GET_MODE_SIZE (xmode_unit_int))))
3251 return false;
3253 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
3255 else
3256 nregs_xmode = hard_regno_nregs[xregno][xmode];
3258 nregs_ymode = hard_regno_nregs[xregno][ymode];
3260 /* Paradoxical subregs are otherwise valid. */
3261 if (offset == 0
3262 && nregs_ymode > nregs_xmode
3263 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3264 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3265 return true;
3267 /* Lowpart subregs are otherwise valid. */
3268 if (offset == subreg_lowpart_offset (ymode, xmode))
3269 return true;
3271 /* This should always pass, otherwise we don't know how to verify
3272 the constraint. These conditions may be relaxed but
3273 subreg_regno_offset would need to be redesigned. */
3274 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3275 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3277 /* The XMODE value can be seen as a vector of NREGS_XMODE
3278 values. The subreg must represent a lowpart of the given field.
3279 Compute what field it is. */
3280 offset -= subreg_lowpart_offset (ymode,
3281 mode_for_size (GET_MODE_BITSIZE (xmode)
3282 / nregs_xmode,
3283 MODE_INT, 0));
3285 /* Size of ymode must not be greater than the size of xmode. */
3286 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3287 gcc_assert (mode_multiple != 0);
3289 y_offset = offset / GET_MODE_SIZE (ymode);
3290 nregs_multiple = nregs_xmode / nregs_ymode;
3292 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3293 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3295 return (!(y_offset % (mode_multiple / nregs_multiple)));
3298 /* Return the final regno that a subreg expression refers to. */
3299 unsigned int
3300 subreg_regno (rtx x)
3302 unsigned int ret;
3303 rtx subreg = SUBREG_REG (x);
3304 int regno = REGNO (subreg);
3306 ret = regno + subreg_regno_offset (regno,
3307 GET_MODE (subreg),
3308 SUBREG_BYTE (x),
3309 GET_MODE (x));
3310 return ret;
3313 struct parms_set_data
3315 int nregs;
3316 HARD_REG_SET regs;
3319 /* Helper function for noticing stores to parameter registers. */
3320 static void
3321 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3323 struct parms_set_data *d = data;
3324 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3325 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3327 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3328 d->nregs--;
3332 /* Look backward for the first parameter to be loaded.
3333 Note that loads of all parameters will not necessarily be
3334 found if CSE has eliminated some of them (e.g., an argument
3335 to the outer function is passed down as a parameter).
3336 Do not skip BOUNDARY. */
3338 find_first_parameter_load (rtx call_insn, rtx boundary)
3340 struct parms_set_data parm;
3341 rtx p, before, first_set;
3343 /* Since different machines initialize their parameter registers
3344 in different orders, assume nothing. Collect the set of all
3345 parameter registers. */
3346 CLEAR_HARD_REG_SET (parm.regs);
3347 parm.nregs = 0;
3348 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3349 if (GET_CODE (XEXP (p, 0)) == USE
3350 && REG_P (XEXP (XEXP (p, 0), 0)))
3352 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3354 /* We only care about registers which can hold function
3355 arguments. */
3356 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3357 continue;
3359 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3360 parm.nregs++;
3362 before = call_insn;
3363 first_set = call_insn;
3365 /* Search backward for the first set of a register in this set. */
3366 while (parm.nregs && before != boundary)
3368 before = PREV_INSN (before);
3370 /* It is possible that some loads got CSEed from one call to
3371 another. Stop in that case. */
3372 if (CALL_P (before))
3373 break;
3375 /* Our caller must either ensure that we will find all sets
3376 (in case the code has not been optimized yet), or guard against
3377 intervening labels by setting BOUNDARY to the preceding
3378 CODE_LABEL. */
3379 if (LABEL_P (before))
3381 gcc_assert (before == boundary);
3382 break;
3385 if (INSN_P (before))
3387 int nregs_old = parm.nregs;
3388 note_stores (PATTERN (before), parms_set, &parm);
3389 /* If we found something that did not set a parameter reg,
3390 we're done. Do not keep going, as that might result
3391 in hoisting an insn before the setting of a pseudo
3392 that is used by the hoisted insn. */
3393 if (nregs_old != parm.nregs)
3394 first_set = before;
3395 else
3396 break;
3399 return first_set;
3402 /* Return true if we should avoid inserting code between INSN and the
3403 preceding call instruction. */
3405 bool
3406 keep_with_call_p (rtx insn)
3408 rtx set;
3410 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3412 if (REG_P (SET_DEST (set))
3413 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3414 && fixed_regs[REGNO (SET_DEST (set))]
3415 && general_operand (SET_SRC (set), VOIDmode))
3416 return true;
3417 if (REG_P (SET_SRC (set))
3418 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3419 && REG_P (SET_DEST (set))
3420 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3421 return true;
3422 /* There may be a stack pop just after the call and before the store
3423 of the return register. Search for the actual store when deciding
3424 if we can break or not. */
3425 if (SET_DEST (set) == stack_pointer_rtx)
3427 rtx i2 = next_nonnote_insn (insn);
3428 if (i2 && keep_with_call_p (i2))
3429 return true;
3432 return false;
3435 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3436 to non-complex jumps. That is, direct unconditional, conditional,
3437 and tablejumps, but not computed jumps or returns. It also does
3438 not apply to the fallthru case of a conditional jump. */
3440 bool
3441 label_is_jump_target_p (rtx label, rtx jump_insn)
3443 rtx tmp = JUMP_LABEL (jump_insn);
3445 if (label == tmp)
3446 return true;
3448 if (tablejump_p (jump_insn, NULL, &tmp))
3450 rtvec vec = XVEC (PATTERN (tmp),
3451 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3452 int i, veclen = GET_NUM_ELEM (vec);
3454 for (i = 0; i < veclen; ++i)
3455 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3456 return true;
3459 return false;
3463 /* Return an estimate of the cost of computing rtx X.
3464 One use is in cse, to decide which expression to keep in the hash table.
3465 Another is in rtl generation, to pick the cheapest way to multiply.
3466 Other uses like the latter are expected in the future. */
3469 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3471 int i, j;
3472 enum rtx_code code;
3473 const char *fmt;
3474 int total;
3476 if (x == 0)
3477 return 0;
3479 /* Compute the default costs of certain things.
3480 Note that targetm.rtx_costs can override the defaults. */
3482 code = GET_CODE (x);
3483 switch (code)
3485 case MULT:
3486 total = COSTS_N_INSNS (5);
3487 break;
3488 case DIV:
3489 case UDIV:
3490 case MOD:
3491 case UMOD:
3492 total = COSTS_N_INSNS (7);
3493 break;
3494 case USE:
3495 /* Used in loop.c and combine.c as a marker. */
3496 total = 0;
3497 break;
3498 default:
3499 total = COSTS_N_INSNS (1);
3502 switch (code)
3504 case REG:
3505 return 0;
3507 case SUBREG:
3508 total = 0;
3509 /* If we can't tie these modes, make this expensive. The larger
3510 the mode, the more expensive it is. */
3511 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3512 return COSTS_N_INSNS (2
3513 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3514 break;
3516 default:
3517 if (targetm.rtx_costs (x, code, outer_code, &total))
3518 return total;
3519 break;
3522 /* Sum the costs of the sub-rtx's, plus the cost of this operation,
3523 which is already in total. */
3525 fmt = GET_RTX_FORMAT (code);
3526 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3527 if (fmt[i] == 'e')
3528 total += rtx_cost (XEXP (x, i), code);
3529 else if (fmt[i] == 'E')
3530 for (j = 0; j < XVECLEN (x, i); j++)
3531 total += rtx_cost (XVECEXP (x, i, j), code);
3533 return total;
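/* An illustrative example using only the default costs (i.e. assuming
   the target hook declines to override them): (mult:SI (reg:SI 1)
   (reg:SI 2)) costs COSTS_N_INSNS (5), since each REG operand
   contributes 0.  */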
3536 /* Return cost of address expression X.
3537 Expect that X is a properly formed address reference. */
3540 address_cost (rtx x, enum machine_mode mode)
3542 /* We may be asked for the cost of various unusual addresses, such as the
3543 operands of a push instruction. It is not worthwhile to complicate the
3544 target hook with such cases. */
3546 if (!memory_address_p (mode, x))
3547 return 1000;
3549 return targetm.address_cost (x);
3552 /* If the target doesn't override, compute the cost as with arithmetic. */
3555 default_address_cost (rtx x)
3557 return rtx_cost (x, MEM);
3561 unsigned HOST_WIDE_INT
3562 nonzero_bits (rtx x, enum machine_mode mode)
3564 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
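/* For example, nonzero_bits of (and:SI (reg:SI R) (const_int 0xff)) in
   SImode is at most 0xff, because the AND case below intersects the
   nonzero bits of the two operands.  */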
3567 unsigned int
3568 num_sign_bit_copies (rtx x, enum machine_mode mode)
3570 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3573 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3574 It avoids exponential behavior in nonzero_bits1 when X has
3575 identical subexpressions on the first or the second level. */
3577 static unsigned HOST_WIDE_INT
3578 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3579 enum machine_mode known_mode,
3580 unsigned HOST_WIDE_INT known_ret)
3582 if (x == known_x && mode == known_mode)
3583 return known_ret;
3585 /* Try to find identical subexpressions. If found, call
3586 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3587 precomputed value for the subexpression as KNOWN_RET. */
3589 if (ARITHMETIC_P (x))
3591 rtx x0 = XEXP (x, 0);
3592 rtx x1 = XEXP (x, 1);
3594 /* Check the first level. */
3595 if (x0 == x1)
3596 return nonzero_bits1 (x, mode, x0, mode,
3597 cached_nonzero_bits (x0, mode, known_x,
3598 known_mode, known_ret));
3600 /* Check the second level. */
3601 if (ARITHMETIC_P (x0)
3602 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3603 return nonzero_bits1 (x, mode, x1, mode,
3604 cached_nonzero_bits (x1, mode, known_x,
3605 known_mode, known_ret));
3607 if (ARITHMETIC_P (x1)
3608 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3609 return nonzero_bits1 (x, mode, x0, mode,
3610 cached_nonzero_bits (x0, mode, known_x,
3611 known_mode, known_ret));
3614 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3617 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3618 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3619 is less useful. We can't allow both, because that results in exponential
3620 run time recursion. There is a nullstone testcase that triggered
3621 this. This macro avoids accidental uses of num_sign_bit_copies. */
3622 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3624 /* Given an expression, X, compute which bits in X can be nonzero.
3625 We don't care about bits outside of those defined in MODE.
3627 For most X this is simply GET_MODE_MASK (MODE), but if X is
3628 an arithmetic operation, we can do better. */
3630 static unsigned HOST_WIDE_INT
3631 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3632 enum machine_mode known_mode,
3633 unsigned HOST_WIDE_INT known_ret)
3635 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3636 unsigned HOST_WIDE_INT inner_nz;
3637 enum rtx_code code;
3638 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3640 /* For floating-point values, assume all bits are needed. */
3641 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3642 return nonzero;
3644 /* If X is wider than MODE, use its mode instead. */
3645 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3647 mode = GET_MODE (x);
3648 nonzero = GET_MODE_MASK (mode);
3649 mode_width = GET_MODE_BITSIZE (mode);
3652 if (mode_width > HOST_BITS_PER_WIDE_INT)
3653 /* Our only callers in this case look for single bit values. So
3654 just return the mode mask. Those tests will then be false. */
3655 return nonzero;
3657 #ifndef WORD_REGISTER_OPERATIONS
3658 /* If MODE is wider than X, but both are a single word for both the host
3659 and target machines, we can compute this from which bits of the
3660 object might be nonzero in its own mode, taking into account the fact
3661 that on many CISC machines, accessing an object in a wider mode
3662 causes the high-order bits to become undefined. So they are
3663 not known to be zero. */
3665 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3666 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3667 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3668 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3670 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3671 known_x, known_mode, known_ret);
3672 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3673 return nonzero;
3675 #endif
3677 code = GET_CODE (x);
3678 switch (code)
3680 case REG:
3681 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3682 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3683 all the bits above ptr_mode are known to be zero. */
3684 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3685 && REG_POINTER (x))
3686 nonzero &= GET_MODE_MASK (ptr_mode);
3687 #endif
3689 /* Include declared information about alignment of pointers. */
3690 /* ??? We don't properly preserve REG_POINTER changes across
3691 pointer-to-integer casts, so we can't trust it except for
3692 things that we know must be pointers. See execute/960116-1.c. */
3693 if ((x == stack_pointer_rtx
3694 || x == frame_pointer_rtx
3695 || x == arg_pointer_rtx)
3696 && REGNO_POINTER_ALIGN (REGNO (x)))
3698 unsigned HOST_WIDE_INT alignment
3699 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3701 #ifdef PUSH_ROUNDING
3702 /* If PUSH_ROUNDING is defined, it is possible for the
3703 stack to be momentarily aligned only to that amount,
3704 so we pick the least alignment. */
3705 if (x == stack_pointer_rtx && PUSH_ARGS)
3706 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3707 alignment);
3708 #endif
3710 nonzero &= ~(alignment - 1);
3714 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3715 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3716 known_mode, known_ret,
3717 &nonzero_for_hook);
3719 if (new)
3720 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3721 known_mode, known_ret);
3723 return nonzero_for_hook;
3726 case CONST_INT:
3727 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3728 /* If X is negative in MODE, sign-extend the value. */
3729 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3730 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3731 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3732 #endif
3734 return INTVAL (x);
3736 case MEM:
3737 #ifdef LOAD_EXTEND_OP
3738 /* On many, if not most, RISC machines, reading a byte from memory
3739 zeros the rest of the register. Noticing that fact saves a lot
3740 of extra zero-extends. */
3741 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3742 nonzero &= GET_MODE_MASK (GET_MODE (x));
3743 #endif
3744 break;
3746 case EQ: case NE:
3747 case UNEQ: case LTGT:
3748 case GT: case GTU: case UNGT:
3749 case LT: case LTU: case UNLT:
3750 case GE: case GEU: case UNGE:
3751 case LE: case LEU: case UNLE:
3752 case UNORDERED: case ORDERED:
3753 /* If this produces an integer result, we know which bits are set.
3754 Code here used to clear bits outside the mode of X, but that is
3755 now done above. */
3756 /* Mind that MODE is the mode the caller wants to look at this
3757 operation in, and not the actual operation mode. We can wind
3758 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3759 that describes the results of a vector compare. */
3760 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3761 && mode_width <= HOST_BITS_PER_WIDE_INT)
3762 nonzero = STORE_FLAG_VALUE;
3763 break;
3765 case NEG:
3766 #if 0
3767 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3768 and num_sign_bit_copies. */
3769 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3770 == GET_MODE_BITSIZE (GET_MODE (x)))
3771 nonzero = 1;
3772 #endif
3774 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3775 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3776 break;
3778 case ABS:
3779 #if 0
3780 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3781 and num_sign_bit_copies. */
3782 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3783 == GET_MODE_BITSIZE (GET_MODE (x)))
3784 nonzero = 1;
3785 #endif
3786 break;
3788 case TRUNCATE:
3789 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3790 known_x, known_mode, known_ret)
3791 & GET_MODE_MASK (mode));
3792 break;
3794 case ZERO_EXTEND:
3795 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3796 known_x, known_mode, known_ret);
3797 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3798 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3799 break;
3801 case SIGN_EXTEND:
3802 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3803 Otherwise, show that all the bits in the outer mode but not in the
3804 inner mode may be nonzero.
3805 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3806 known_x, known_mode, known_ret);
3807 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3809 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3810 if (inner_nz
3811 & (((HOST_WIDE_INT) 1
3812 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3813 inner_nz |= (GET_MODE_MASK (mode)
3814 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3817 nonzero &= inner_nz;
3818 break;
3820 case AND:
3821 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3822 known_x, known_mode, known_ret)
3823 & cached_nonzero_bits (XEXP (x, 1), mode,
3824 known_x, known_mode, known_ret);
3825 break;
3827 case XOR: case IOR:
3828 case UMIN: case UMAX: case SMIN: case SMAX:
3830 unsigned HOST_WIDE_INT nonzero0 =
3831 cached_nonzero_bits (XEXP (x, 0), mode,
3832 known_x, known_mode, known_ret);
3834 /* Don't call nonzero_bits for the second time if it cannot change
3835 anything. */
3836 if ((nonzero & nonzero0) != nonzero)
3837 nonzero &= nonzero0
3838 | cached_nonzero_bits (XEXP (x, 1), mode,
3839 known_x, known_mode, known_ret);
3841 break;
3843 case PLUS: case MINUS:
3844 case MULT:
3845 case DIV: case UDIV:
3846 case MOD: case UMOD:
3847 /* We can apply the rules of arithmetic to compute the number of
3848 high- and low-order zero bits of these operations. We start by
3849 computing the width (position of the highest-order nonzero bit)
3850 and the number of low-order zero bits for each value. */
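/* A worked example: if nz0 == 0x0c (width 4, two low zeros) and
   nz1 == 0x30 (width 6, four low zeros), then for PLUS we get
   result_width == MAX (4, 6) + 1 == 7 and result_low == MIN (2, 4)
   == 2, so the nonzero bits are confined to the mask 0x7c.  */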
3852 unsigned HOST_WIDE_INT nz0 =
3853 cached_nonzero_bits (XEXP (x, 0), mode,
3854 known_x, known_mode, known_ret);
3855 unsigned HOST_WIDE_INT nz1 =
3856 cached_nonzero_bits (XEXP (x, 1), mode,
3857 known_x, known_mode, known_ret);
3858 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3859 int width0 = floor_log2 (nz0) + 1;
3860 int width1 = floor_log2 (nz1) + 1;
3861 int low0 = floor_log2 (nz0 & -nz0);
3862 int low1 = floor_log2 (nz1 & -nz1);
3863 HOST_WIDE_INT op0_maybe_minusp
3864 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3865 HOST_WIDE_INT op1_maybe_minusp
3866 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3867 unsigned int result_width = mode_width;
3868 int result_low = 0;
3870 switch (code)
3872 case PLUS:
3873 result_width = MAX (width0, width1) + 1;
3874 result_low = MIN (low0, low1);
3875 break;
3876 case MINUS:
3877 result_low = MIN (low0, low1);
3878 break;
3879 case MULT:
3880 result_width = width0 + width1;
3881 result_low = low0 + low1;
3882 break;
3883 case DIV:
3884 if (width1 == 0)
3885 break;
3886 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3887 result_width = width0;
3888 break;
3889 case UDIV:
3890 if (width1 == 0)
3891 break;
3892 result_width = width0;
3893 break;
3894 case MOD:
3895 if (width1 == 0)
3896 break;
3897 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3898 result_width = MIN (width0, width1);
3899 result_low = MIN (low0, low1);
3900 break;
3901 case UMOD:
3902 if (width1 == 0)
3903 break;
3904 result_width = MIN (width0, width1);
3905 result_low = MIN (low0, low1);
3906 break;
3907 default:
3908 gcc_unreachable ();
3911 if (result_width < mode_width)
3912 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3914 if (result_low > 0)
3915 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3917 #ifdef POINTERS_EXTEND_UNSIGNED
3918 /* If pointers extend unsigned and this is an addition or subtraction
3919 to a pointer in Pmode, all the bits above ptr_mode are known to be
3920 zero. */
3921 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3922 && (code == PLUS || code == MINUS)
3923 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3924 nonzero &= GET_MODE_MASK (ptr_mode);
3925 #endif
3927 break;
3929 case ZERO_EXTRACT:
3930 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3931 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3932 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3933 break;
3935 case SUBREG:
3936 /* If this is a SUBREG formed for a promoted variable that has
3937 been zero-extended, we know that at least the high-order bits
3938 are zero, though others might be too. */
3940 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3941 nonzero = GET_MODE_MASK (GET_MODE (x))
3942 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3943 known_x, known_mode, known_ret);
3945 /* If the inner mode is a single word for both the host and target
3946 machines, we can compute this from which bits of the inner
3947 object might be nonzero. */
3948 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3949 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3950 <= HOST_BITS_PER_WIDE_INT))
3952 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3953 known_x, known_mode, known_ret);
3955 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3956 /* If this is a typical RISC machine, we only have to worry
3957 about the way loads are extended. */
3958 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3959 ? (((nonzero
3960 & (((unsigned HOST_WIDE_INT) 1
3961 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3962 != 0))
3963 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3964 || !MEM_P (SUBREG_REG (x)))
3965 #endif
3967 /* On many CISC machines, accessing an object in a wider mode
3968 causes the high-order bits to become undefined. So they are
3969 not known to be zero. */
3970 if (GET_MODE_SIZE (GET_MODE (x))
3971 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3972 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3973 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3976 break;
3978 case ASHIFTRT:
3979 case LSHIFTRT:
3980 case ASHIFT:
3981 case ROTATE:
3982 /* The nonzero bits are in two classes: any bits within MODE
3983 that aren't in GET_MODE (x) are always significant. The rest of the
3984 nonzero bits are those that are significant in the operand of
3985 the shift when shifted the appropriate number of bits. This
3986 shows that high-order bits are cleared by the right shift and
3987 low-order bits by left shifts. */
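/* Worked example, assuming a 32-bit mode: after
   (lshiftrt:SI X (const_int 8)) the top 8 bits are known zero, and
   after (ashift:SI X (const_int 8)) the low 8 bits are.  */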
3988 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3989 && INTVAL (XEXP (x, 1)) >= 0
3990 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3992 enum machine_mode inner_mode = GET_MODE (x);
3993 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3994 int count = INTVAL (XEXP (x, 1));
3995 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3996 unsigned HOST_WIDE_INT op_nonzero =
3997 cached_nonzero_bits (XEXP (x, 0), mode,
3998 known_x, known_mode, known_ret);
3999 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4000 unsigned HOST_WIDE_INT outer = 0;
4002 if (mode_width > width)
4003 outer = (op_nonzero & nonzero & ~mode_mask);
4005 if (code == LSHIFTRT)
4006 inner >>= count;
4007 else if (code == ASHIFTRT)
4009 inner >>= count;
4011 /* If the sign bit may have been nonzero before the shift, we
4012 need to mark all the places it could have been copied to
4013 by the shift as possibly nonzero. */
4014 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4015 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4017 else if (code == ASHIFT)
4018 inner <<= count;
4019 else
4020 inner = ((inner << (count % width)
4021 | (inner >> (width - (count % width)))) & mode_mask);
4023 nonzero &= (outer | inner);
4025 break;
4027 case FFS:
4028 case POPCOUNT:
4029 /* This is at most the number of bits in the mode. */
4030 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4031 break;
4033 case CLZ:
4034 /* If CLZ has a known value at zero, then the nonzero bits are
4035 that value, plus the number of bits in the mode minus one. */
4036 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4037 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4038 else
4039 nonzero = -1;
4040 break;
4042 case CTZ:
4043 /* If CTZ has a known value at zero, then the nonzero bits are
4044 that value, plus the number of bits in the mode minus one. */
4045 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4046 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4047 else
4048 nonzero = -1;
4049 break;
4051 case PARITY:
4052 nonzero = 1;
4053 break;
4055 case IF_THEN_ELSE:
4057 unsigned HOST_WIDE_INT nonzero_true =
4058 cached_nonzero_bits (XEXP (x, 1), mode,
4059 known_x, known_mode, known_ret);
4061 /* Don't call nonzero_bits for the second time if it cannot change
4062 anything. */
4063 if ((nonzero & nonzero_true) != nonzero)
4064 nonzero &= nonzero_true
4065 | cached_nonzero_bits (XEXP (x, 2), mode,
4066 known_x, known_mode, known_ret);
4068 break;
4070 default:
4071 break;
4074 return nonzero;
4077 /* See the macro definition above. */
4078 #undef cached_num_sign_bit_copies
4081 /* The function cached_num_sign_bit_copies is a wrapper around
4082 num_sign_bit_copies1. It avoids exponential behavior in
4083 num_sign_bit_copies1 when X has identical subexpressions on the
4084 first or the second level. */
4086 static unsigned int
4087 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
4088 enum machine_mode known_mode,
4089 unsigned int known_ret)
4091 if (x == known_x && mode == known_mode)
4092 return known_ret;
4094 /* Try to find identical subexpressions. If found, call
4095 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4096 the precomputed value for the subexpression as KNOWN_RET. */
4098 if (ARITHMETIC_P (x))
4100 rtx x0 = XEXP (x, 0);
4101 rtx x1 = XEXP (x, 1);
4103 /* Check the first level. */
4104 if (x0 == x1)
4105 return
4106 num_sign_bit_copies1 (x, mode, x0, mode,
4107 cached_num_sign_bit_copies (x0, mode, known_x,
4108 known_mode,
4109 known_ret));
4111 /* Check the second level. */
4112 if (ARITHMETIC_P (x0)
4113 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4114 return
4115 num_sign_bit_copies1 (x, mode, x1, mode,
4116 cached_num_sign_bit_copies (x1, mode, known_x,
4117 known_mode,
4118 known_ret));
4120 if (ARITHMETIC_P (x1)
4121 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4122 return
4123 num_sign_bit_copies1 (x, mode, x0, mode,
4124 cached_num_sign_bit_copies (x0, mode, known_x,
4125 known_mode,
4126 known_ret));
4129 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4132 /* Return the number of bits at the high-order end of X that are known to
4133 be equal to the sign bit. X will be used in mode MODE; if MODE is
4134 VOIDmode, X will be used in its own mode. The returned value will always
4135 be between 1 and the number of bits in MODE. */
4137 static unsigned int
4138 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
4139 enum machine_mode known_mode,
4140 unsigned int known_ret)
4142 enum rtx_code code = GET_CODE (x);
4143 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4144 int num0, num1, result;
4145 unsigned HOST_WIDE_INT nonzero;
4147 /* If we weren't given a mode, use the mode of X. If the mode is still
4148 VOIDmode, we don't know anything. Likewise if one of the modes is
4149 floating-point. */
4151 if (mode == VOIDmode)
4152 mode = GET_MODE (x);
4154 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4155 return 1;
4157 /* For a smaller object, just ignore the high bits. */
4158 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4160 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4161 known_x, known_mode, known_ret);
4162 return MAX (1,
4163 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4166 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4168 #ifndef WORD_REGISTER_OPERATIONS
4169 /* If this machine does not do all register operations on the entire
4170 register and MODE is wider than the mode of X, we can say nothing
4171 at all about the high-order bits. */
4172 return 1;
4173 #else
4174 /* Likewise on machines that do, if the mode of the object is smaller
4175 than a word and loads of that size don't sign extend, we can say
4176 nothing about the high order bits. */
4177 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4178 #ifdef LOAD_EXTEND_OP
4179 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4180 #endif
4182 return 1;
4183 #endif
4186 switch (code)
4188 case REG:
4190 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4191 /* If pointers extend signed and this is a pointer in Pmode, say that
4192 all the bits above ptr_mode are known to be sign bit copies. */
4193 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4194 && REG_POINTER (x))
4195 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4196 #endif
4199 unsigned int copies_for_hook = 1, copies = 1;
4200 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4201 known_mode, known_ret,
4202 &copies_for_hook);
4204 if (new)
4205 copies = cached_num_sign_bit_copies (new, mode, known_x,
4206 known_mode, known_ret);
4208 if (copies > 1 || copies_for_hook > 1)
4209 return MAX (copies, copies_for_hook);
4211 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4213 break;
4215 case MEM:
4216 #ifdef LOAD_EXTEND_OP
4217 /* Some RISC machines sign-extend all loads smaller than a word. */
4218 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4219 return MAX (1, ((int) bitwidth
4220 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4221 #endif
4222 break;
4224 case CONST_INT:
4225 /* If the constant is negative, take its 1's complement and remask.
4226 Then see how many zero bits we have. */
4227 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4228 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4229 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4230 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4232 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
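/* Worked example: in SImode, (const_int -4) is 0xfffffffc; its
   complement under the mode mask is 3, floor_log2 (3) == 1, so this
   returns 32 - 1 - 1 == 30 sign bit copies (bits 31 down to 2).  */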
4234 case SUBREG:
4235 /* If this is a SUBREG for a promoted object that is sign-extended
4236 and we are looking at it in a wider mode, we know that at least the
4237 high-order bits are sign bit copies. */
4239 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4241 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4242 known_x, known_mode, known_ret);
4243 return MAX ((int) bitwidth
4244 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4245 num0);
4248 /* For a smaller object, just ignore the high bits. */
4249 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4251 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4252 known_x, known_mode, known_ret);
4253 return MAX (1, (num0
4254 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4255 - bitwidth)));
4258 #ifdef WORD_REGISTER_OPERATIONS
4259 #ifdef LOAD_EXTEND_OP
4260 /* For paradoxical SUBREGs on machines where all register operations
4261 affect the entire register, just look inside. Note that we are
4262 passing MODE to the recursive call, so the number of sign bit copies
4263 will remain relative to that mode, not the inner mode. */
4265 /* This works only if loads sign extend. Otherwise, if we get a
4266 reload for the inner part, it may be loaded from the stack, and
4267 then we lose all sign bit copies that existed before the store
4268 to the stack. */
4270 if ((GET_MODE_SIZE (GET_MODE (x))
4271 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4272 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4273 && MEM_P (SUBREG_REG (x)))
4274 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4275 known_x, known_mode, known_ret);
4276 #endif
4277 #endif
4278 break;
4280 case SIGN_EXTRACT:
4281 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4282 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4283 break;
4285 case SIGN_EXTEND:
4286 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4287 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4288 known_x, known_mode, known_ret));
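/* Example for the SIGN_EXTEND case above: (sign_extend:SI (reg:QI x))
   has at least 32 - 8 + 1 = 25 sign bit copies even when nothing is
   known about the inner register, whose own count is at least 1. */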
4290 case TRUNCATE:
4291 /* For a smaller object, just ignore the high bits. */
4292 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4293 known_x, known_mode, known_ret);
4294 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4295 - bitwidth)));
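/* Example for the TRUNCATE case above: truncating a DImode value with
   40 known sign bit copies to a 32-bit width leaves
   MAX (1, 40 - (64 - 32)) = 8 copies in the narrower mode. */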
4297 case NOT:
4298 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4299 known_x, known_mode, known_ret);
4301 case ROTATE: case ROTATERT:
4302 /* If we are rotating left by a number of bits less than the number
4303 of sign bit copies, we can just subtract that amount from the
4304 number. */
4305 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4306 && INTVAL (XEXP (x, 1)) >= 0
4307 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4308 {
4309 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4310 known_x, known_mode, known_ret);
4311 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4312 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4313 }
4314 break;
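/* Example for the rotate case above: a value with 10 sign bit copies
   rotated left by 3 keeps MAX (1, 10 - 3) = 7 of them; rotating right
   by 3 counts as a left rotate by bitwidth - 3, which for 32 bits
   leaves only the trivial answer of 1. */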
4316 case NEG:
4317 /* In general, this subtracts one sign bit copy. But if the value
4318 is known to be positive, the number of sign bit copies is the
4319 same as that of the input. Finally, if the input has just one bit
4320 that might be nonzero, all the bits are copies of the sign bit. */
4321 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4322 known_x, known_mode, known_ret);
4323 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4324 return num0 > 1 ? num0 - 1 : 1;
4326 nonzero = nonzero_bits (XEXP (x, 0), mode);
4327 if (nonzero == 1)
4328 return bitwidth;
4330 if (num0 > 1
4331 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4332 num0--;
4334 return num0;
4336 case IOR: case AND: case XOR:
4337 case SMIN: case SMAX: case UMIN: case UMAX:
4338 /* Logical operations preserve at least the smaller of the operands'
4339 sign-bit copy counts. MIN and MAX operations always return one of the operands. */
4340 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4341 known_x, known_mode, known_ret);
4342 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4343 known_x, known_mode, known_ret);
4344 return MIN (num0, num1);
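/* Example for the rule above: the AND of two SImode values with 10
   and 7 sign bit copies has at least MIN (10, 7) = 7, because the top
   7 bits of each operand, and hence of the result, are all equal. */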
4346 case PLUS: case MINUS:
4347 /* For addition and subtraction, we can have a 1-bit carry. However,
4348 if we are subtracting 1 from a positive number, there will not
4349 be such a carry. Furthermore, if the positive number is known to
4350 be 0 or 1, we know the result is either -1 or 0. */
4352 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4353 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4354 {
4355 nonzero = nonzero_bits (XEXP (x, 0), mode);
4356 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4357 return (nonzero == 1 || nonzero == 0 ? bitwidth
4358 : bitwidth - floor_log2 (nonzero) - 1);
4359 }
4361 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4362 known_x, known_mode, known_ret);
4363 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4364 known_x, known_mode, known_ret);
4365 result = MAX (1, MIN (num0, num1) - 1);
4367 #ifdef POINTERS_EXTEND_UNSIGNED
4368 /* If pointers extend signed and this is an addition or subtraction
4369 to a pointer in Pmode, all the bits above ptr_mode are known to be
4370 sign bit copies. */
4371 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4372 && (code == PLUS || code == MINUS)
4373 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4374 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4375 - GET_MODE_BITSIZE (ptr_mode) + 1),
4376 result);
4377 #endif
4378 return result;
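/* Worked example of the carry rule in HImode: 0x00ff has 8 sign bit
   copies and 0x0001 has 15, yet their sum 0x0100 has only
   MIN (8, 15) - 1 = 7, the carry having rippled one bit into the
   copies. */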
4380 case MULT:
4381 /* The number of bits of the product is the sum of the number of
4382 bits of both terms. However, unless one of the terms is known
4383 to be positive, we must allow for an additional bit since negating
4384 a negative number can remove one sign bit copy. */
4386 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4387 known_x, known_mode, known_ret);
4388 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4389 known_x, known_mode, known_ret);
4391 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4392 if (result > 0
4393 && (bitwidth > HOST_BITS_PER_WIDE_INT
4394 || (((nonzero_bits (XEXP (x, 0), mode)
4395 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4396 && ((nonzero_bits (XEXP (x, 1), mode)
4397 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4398 result--;
4400 return MAX (1, result);
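/* Example for the MULT case above: two HImode operands with 12 copies
   each (values in [-16, 15]) give 16 - 4 - 4 = 8, and since both
   operands might be negative one more bit is allowed for, leaving a
   guaranteed 7 copies. */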
4402 case UDIV:
4403 /* The result must be <= the first operand. If the first operand
4404 has the high bit set, we know nothing about the number of sign
4405 bit copies. */
4406 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4407 return 1;
4408 else if ((nonzero_bits (XEXP (x, 0), mode)
4409 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4410 return 1;
4411 else
4412 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4413 known_x, known_mode, known_ret);
4415 case UMOD:
4416 /* The result must be <= the second operand. */
4417 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4418 known_x, known_mode, known_ret);
4420 case DIV:
4421 /* Similar to unsigned division, except that we have to worry about
4422 the case where the divisor is negative, in which case the result
4423 can gain a bit, so we drop one sign bit copy. */
4424 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4425 known_x, known_mode, known_ret);
4426 if (result > 1
4427 && (bitwidth > HOST_BITS_PER_WIDE_INT
4428 || (nonzero_bits (XEXP (x, 1), mode)
4429 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4430 result--;
4432 return result;
4434 case MOD:
4435 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4436 known_x, known_mode, known_ret);
4437 if (result > 1
4438 && (bitwidth > HOST_BITS_PER_WIDE_INT
4439 || (nonzero_bits (XEXP (x, 1), mode)
4440 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4441 result--;
4443 return result;
4445 case ASHIFTRT:
4446 /* An arithmetic right shift by a constant adds that many bits equal
4447 to the sign bit. */
4448 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4449 known_x, known_mode, known_ret);
4450 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4451 && INTVAL (XEXP (x, 1)) > 0)
4452 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4454 return num0;
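/* Example for the ASHIFTRT case above: (ashiftrt:SI x (const_int 8))
   has at least MIN (32, 1 + 8) = 9 sign bit copies whatever X is,
   because each shifted-in bit duplicates the sign bit. */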
4456 case ASHIFT:
4457 /* Left shifts destroy copies. */
4458 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4459 || INTVAL (XEXP (x, 1)) < 0
4460 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4461 return 1;
4463 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4464 known_x, known_mode, known_ret);
4465 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4467 case IF_THEN_ELSE:
4468 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4469 known_x, known_mode, known_ret);
4470 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4471 known_x, known_mode, known_ret);
4472 return MIN (num0, num1);
4474 case EQ: case NE: case GE: case GT: case LE: case LT:
4475 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4476 case GEU: case GTU: case LEU: case LTU:
4477 case UNORDERED: case ORDERED:
4478 /* STORE_FLAG_VALUE is a constant; if it is negative, take its 1's
4479 complement and remask. Then see how many zero bits we have. */
4480 nonzero = STORE_FLAG_VALUE;
4481 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4482 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4483 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4485 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4487 default:
4488 break;
4489 }
4491 /* If we haven't been able to figure it out by one of the above rules,
4492 see if some of the high-order bits are known to be zero. If so,
4493 that count of zero bits is the number of sign bit copies. If we can't
4494 safely compute the mask for this mode, just return 1. */
4496 bitwidth = GET_MODE_BITSIZE (mode);
4497 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4498 return 1;
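/* Example of this fallback: if nonzero_bits proves only the low 12
   bits can be set (nonzero == 0x0fff at a bitwidth of 32), the sign
   bit is zero and 32 - floor_log2 (0x0fff) - 1 = 20 high bits are
   copies of it. */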
4500 nonzero = nonzero_bits (x, mode);
4501 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4502 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4503 }
4505 /* Calculate the rtx_cost of a single instruction. A return value of
4506 zero indicates an instruction pattern without a known cost. */
4508 int
4509 insn_rtx_cost (rtx pat)
4510 {
4511 int i, cost;
4512 rtx set;
4514 /* Extract the single set rtx from the instruction pattern.
4515 We can't use single_set since we only have the pattern. */
4516 if (GET_CODE (pat) == SET)
4517 set = pat;
4518 else if (GET_CODE (pat) == PARALLEL)
4519 {
4520 set = NULL_RTX;
4521 for (i = 0; i < XVECLEN (pat, 0); i++)
4522 {
4523 rtx x = XVECEXP (pat, 0, i);
4524 if (GET_CODE (x) == SET)
4525 {
4526 if (set)
4527 return 0;
4528 set = x;
4529 }
4530 }
4531 if (!set)
4532 return 0;
4533 }
4534 else
4535 return 0;
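/* rtx_cost can return 0 for sources it has no estimate for, such as a
   plain register reference, so a nonpositive answer is normalized
   below to the cost of one generic instruction instead of "free". */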
4537 cost = rtx_cost (SET_SRC (set), SET);
4538 return cost > 0 ? cost : COSTS_N_INSNS (1);
4539 }
4541 /* Given an insn INSN and condition COND, return the condition in a
4542 canonical form to simplify testing by callers. Specifically:
4544 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4545 (2) Both operands will be machine operands; (cc0) will have been replaced.
4546 (3) If an operand is a constant, it will be the second operand.
4547 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4548 for GE, GEU, and LEU.
4550 If the condition cannot be understood, or is an inequality floating-point
4551 comparison which needs to be reversed, 0 will be returned.
4553 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4555 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4556 insn used in locating the condition was found. If a replacement test
4557 of the condition is desired, it should be placed in front of that
4558 insn and we will be sure that the inputs are still valid.
4560 If WANT_REG is nonzero, we wish the condition to be relative to that
4561 register, if possible. Therefore, do not canonicalize the condition
4562 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4563 to be a compare to a CC mode register.
4565 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4566 and at INSN. */
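/* Illustration of rule (4): (le x 4) becomes (lt x 5), (ge x 4)
   becomes (gt x 3), (leu x 4) becomes (ltu x 5) and (geu x 4) becomes
   (gtu x 3); each rewrite is skipped when the adjusted constant would
   wrap around in the mode of the first operand. */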
4568 rtx
4569 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4570 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4571 {
4572 enum rtx_code code;
4573 rtx prev = insn;
4574 rtx set;
4575 rtx tem;
4576 rtx op0, op1;
4577 int reverse_code = 0;
4578 enum machine_mode mode;
4580 code = GET_CODE (cond);
4581 mode = GET_MODE (cond);
4582 op0 = XEXP (cond, 0);
4583 op1 = XEXP (cond, 1);
4585 if (reverse)
4586 code = reversed_comparison_code (cond, insn);
4587 if (code == UNKNOWN)
4588 return 0;
4590 if (earliest)
4591 *earliest = insn;
4593 /* If we are comparing a register with zero, see if the register is set
4594 in the previous insn to a COMPARE or a comparison operation. Perform
4595 the same tests, as a function of STORE_FLAG_VALUE, that
4596 find_comparison_args does in cse.c. */
4598 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4599 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4600 && op1 == CONST0_RTX (GET_MODE (op0))
4601 && op0 != want_reg)
4602 {
4603 /* Set nonzero when we find something of interest. */
4604 rtx x = 0;
4606 #ifdef HAVE_cc0
4607 /* If comparison with cc0, import actual comparison from compare
4608 insn. */
4609 if (op0 == cc0_rtx)
4610 {
4611 if ((prev = prev_nonnote_insn (prev)) == 0
4612 || !NONJUMP_INSN_P (prev)
4613 || (set = single_set (prev)) == 0
4614 || SET_DEST (set) != cc0_rtx)
4615 return 0;
4617 op0 = SET_SRC (set);
4618 op1 = CONST0_RTX (GET_MODE (op0));
4619 if (earliest)
4620 *earliest = prev;
4621 }
4622 #endif
4624 /* If this is a COMPARE, pick up the two things being compared. */
4625 if (GET_CODE (op0) == COMPARE)
4626 {
4627 op1 = XEXP (op0, 1);
4628 op0 = XEXP (op0, 0);
4629 continue;
4630 }
4631 else if (!REG_P (op0))
4632 break;
4634 /* Go back to the previous insn. Stop if it is not an INSN. We also
4635 stop if it isn't a single set or if it has a REG_INC note because
4636 we don't want to bother dealing with it. */
4638 if ((prev = prev_nonnote_insn (prev)) == 0
4639 || !NONJUMP_INSN_P (prev)
4640 || FIND_REG_INC_NOTE (prev, NULL_RTX))
4641 break;
4643 set = set_of (op0, prev);
4645 if (set
4646 && (GET_CODE (set) != SET
4647 || !rtx_equal_p (SET_DEST (set), op0)))
4648 break;
4650 /* If this is setting OP0, get what it sets it to if it looks
4651 relevant. */
4652 if (set)
4653 {
4654 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4655 #ifdef FLOAT_STORE_FLAG_VALUE
4656 REAL_VALUE_TYPE fsfv;
4657 #endif
4659 /* ??? We may not combine comparisons done in a CCmode with
4660 comparisons not done in a CCmode. This is to aid targets
4661 like Alpha that have an IEEE compliant EQ instruction, and
4662 a non-IEEE compliant BEQ instruction. The use of CCmode is
4663 actually artificial, simply to prevent the combination, but
4664 should not affect other platforms.
4666 However, we must allow VOIDmode comparisons to match either
4667 CCmode or non-CCmode comparison, because some ports have
4668 modeless comparisons inside branch patterns.
4670 ??? This mode check should perhaps look more like the mode check
4671 in simplify_comparison in combine. */
4673 if ((GET_CODE (SET_SRC (set)) == COMPARE
4674 || (((code == NE
4675 || (code == LT
4676 && GET_MODE_CLASS (inner_mode) == MODE_INT
4677 && (GET_MODE_BITSIZE (inner_mode)
4678 <= HOST_BITS_PER_WIDE_INT)
4679 && (STORE_FLAG_VALUE
4680 & ((HOST_WIDE_INT) 1
4681 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4682 #ifdef FLOAT_STORE_FLAG_VALUE
4683 || (code == LT
4684 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
4685 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4686 REAL_VALUE_NEGATIVE (fsfv)))
4687 #endif
4688 )
4689 && COMPARISON_P (SET_SRC (set))))
4690 && (((GET_MODE_CLASS (mode) == MODE_CC)
4691 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4692 || mode == VOIDmode || inner_mode == VOIDmode))
4693 x = SET_SRC (set);
4694 else if (((code == EQ
4695 || (code == GE
4696 && (GET_MODE_BITSIZE (inner_mode)
4697 <= HOST_BITS_PER_WIDE_INT)
4698 && GET_MODE_CLASS (inner_mode) == MODE_INT
4699 && (STORE_FLAG_VALUE
4700 & ((HOST_WIDE_INT) 1
4701 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4702 #ifdef FLOAT_STORE_FLAG_VALUE
4703 || (code == GE
4704 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
4705 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4706 REAL_VALUE_NEGATIVE (fsfv)))
4707 #endif
4708 )
4709 && COMPARISON_P (SET_SRC (set))
4710 && (((GET_MODE_CLASS (mode) == MODE_CC)
4711 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4712 || mode == VOIDmode || inner_mode == VOIDmode))
4714 {
4715 reverse_code = 1;
4716 x = SET_SRC (set);
4717 }
4718 else
4719 break;
4720 }
4722 else if (reg_set_p (op0, prev))
4723 /* If this sets OP0, but not directly, we have to give up. */
4724 break;
4726 if (x)
4727 {
4728 /* If the caller is expecting the condition to be valid at INSN,
4729 make sure X doesn't change before INSN. */
4730 if (valid_at_insn_p)
4731 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4732 break;
4733 if (COMPARISON_P (x))
4734 code = GET_CODE (x);
4735 if (reverse_code)
4736 {
4737 code = reversed_comparison_code (x, prev);
4738 if (code == UNKNOWN)
4739 return 0;
4740 reverse_code = 0;
4741 }
4743 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4744 if (earliest)
4745 *earliest = prev;
4746 }
4747 }
4749 /* If constant is first, put it last. */
4750 if (CONSTANT_P (op0))
4751 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4753 /* If OP0 is the result of a comparison, we weren't able to find what
4754 was really being compared, so fail. */
4755 if (!allow_cc_mode
4756 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4757 return 0;
4759 /* Canonicalize any ordered comparison with integers involving equality
4760 if we can do computations in the relevant mode and we do not
4761 overflow. */
4763 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4764 && GET_CODE (op1) == CONST_INT
4765 && GET_MODE (op0) != VOIDmode
4766 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4767 {
4768 HOST_WIDE_INT const_val = INTVAL (op1);
4769 unsigned HOST_WIDE_INT uconst_val = const_val;
4770 unsigned HOST_WIDE_INT max_val
4771 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4773 switch (code)
4774 {
4775 case LE:
4776 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4777 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4778 break;
4780 /* When cross-compiling, const_val might be sign-extended from
4781 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
4782 case GE:
4783 if ((HOST_WIDE_INT) (const_val & max_val)
4784 != (((HOST_WIDE_INT) 1
4785 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4786 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4787 break;
4789 case LEU:
4790 if (uconst_val < max_val)
4791 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4792 break;
4794 case GEU:
4795 if (uconst_val != 0)
4796 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4797 break;
4799 default:
4800 break;
4801 }
4802 }
4804 /* Never return CC0; return zero instead. */
4805 if (CC0_P (op0))
4806 return 0;
4808 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4809 }
4811 /* Given a jump insn JUMP, return the condition that will cause it to branch
4812 to its JUMP_LABEL. If the condition cannot be understood, or is an
4813 inequality floating-point comparison which needs to be reversed, 0 will
4814 be returned.
4816 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4817 insn used in locating the condition was found. If a replacement test
4818 of the condition is desired, it should be placed in front of that
4819 insn and we will be sure that the inputs are still valid. If EARLIEST
4820 is null, the returned condition will be valid at INSN.
4822 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4823 compare to a CC mode register.
4825 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4827 rtx
4828 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4829 {
4830 rtx cond;
4831 int reverse;
4832 rtx set;
4834 /* If this is not a standard conditional jump, we can't parse it. */
4835 if (!JUMP_P (jump)
4836 || ! any_condjump_p (jump))
4837 return 0;
4838 set = pc_set (jump);
4840 cond = XEXP (SET_SRC (set), 0);
4842 /* If this branches to JUMP_LABEL when the condition is false, reverse
4843 the condition. */
4844 reverse
4845 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4846 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4848 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4849 allow_cc_mode, valid_at_insn_p);
4850 }
4853 /* Initialize non_rtx_starting_operands, which is used to speed up
4854 for_each_rtx. */
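/* For each code the table holds the operand index of the first 'e',
   'E' or 'V' operand, or -1 when the format string has none: PLUS has
   format "ee" and gets 0, while CONST_INT has format "w" and gets -1,
   letting for_each_rtx skip codes with no rtx operands entirely. */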
4855 void
4856 init_rtlanal (void)
4857 {
4858 int i;
4859 for (i = 0; i < NUM_RTX_CODE; i++)
4860 {
4861 const char *format = GET_RTX_FORMAT (i);
4862 const char *first = strpbrk (format, "eEV");
4863 non_rtx_starting_operands[i] = first ? first - format : -1;