/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"

/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

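/* Usage sketch (editor's illustration, not part of the original source;
   MEM_X stands for some MEM rtx under analysis):

     rtx addr = XEXP (mem_x, 0);
     if (! rtx_varies_p (addr, 1))
       ...ADDR can be compared reliably against constant addresses,
          e.g. during alias analysis...

   rtx_unstable_p answers the narrower question of stability within one
   function, while rtx_varies_p with FOR_ALIAS == 0 is slightly more
   conservative than with FOR_ALIAS == 1.  */
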
/* Return 0 if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    case PLUS:
      /* An address is assumed not to trap if it is an address that can't
         trap plus a constant integer, or if it is the pic register plus a
         constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
                 && GET_CODE (XEXP (x, 1)) == CONST_INT)
                || (XEXP (x, 0) == pic_offset_table_rtx
                    && CONSTANT_P (XEXP (x, 1))));

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 1));

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))
            return true;

          return nonzero_address_p (XEXP (x, 0));
        }
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

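/* Usage sketch (editor's illustration, not part of the original source).
   A typical caller asks both questions about a candidate address before
   hoisting a load out of a conditional; MEM_X is hypothetical:

     rtx addr = XEXP (mem_x, 0);
     if (! rtx_addr_can_trap_p (addr))
       ...the load may be executed speculatively...
     if (nonzero_address_p (addr))
       ...a comparison of ADDR against zero folds to false...  */
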
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}

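/* Worked example (editor's illustration, not part of the original source).
   For the RTL constant (const (plus (symbol_ref "x") (const_int 12))):

     get_integer_term  returns 12, and
     get_related_value returns (symbol_ref "x").

   cse.c uses this pair to link constants that differ only by an integer
   offset through the `related_value' field.  */
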
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}

/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}

/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
    /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

    /* These codes have no constituent expressions
       and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }

  return 0;
}

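/* Usage sketch (editor's illustration, not part of the original source;
   INSN and REG_X are hypothetical):

     if (! reg_mentioned_p (reg_x, PATTERN (insn)))
       ...INSN neither reads nor writes REG_X, so REG_X may be renamed
          or the insn moved across its definition...

   Note the "Lisp equal" semantics: REG_X need not be pointer-equal to
   the occurrence inside INSN; registers are compared by number.  */
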
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn)
                && (find_reg_fusage (insn, USE, reg)
                    || find_reg_fusage (insn, CLOBBER, reg)))))
      return 1;
  return 0;
}

/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                         REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}

/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */

rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

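/* Usage sketch (editor's illustration, not part of the original source;
   INSN and REG_X are hypothetical):

     rtx setter = set_of (reg_x, insn);
     if (setter == NULL_RTX)
       ...INSN does not modify REG_X...
     else if (GET_CODE (setter) == CLOBBER)
       ...REG_X is clobbered and its new value is unusable...
     else
       ...SET_SRC (setter) computes the new value of REG_X...  */
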
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, all of which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect it is
                 the single set we are looking for, and only when more
                 sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}

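/* Usage sketch (editor's illustration, not part of the original source).
   Callers normally go through the single_set macro from rtl.h, which
   handles the plain-SET case inline and falls back to single_set_2 for
   PARALLELs:

     rtx set = single_set (insn);
     if (set && REG_P (SET_DEST (set)))
       ...INSN is effectively a single assignment to a register...  */
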
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}

/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}

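/* Usage sketch (editor's illustration, not part of the original source).
   A cleanup pass might use noop_move_p to remove moves that data-flow
   analysis has turned into self-assignments; the deletion primitive is
   whatever the pass normally uses, delete_insn here being one example:

     if (INSN_P (insn) && noop_move_p (insn))
       delete_insn (insn);  */
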
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note are always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }

  return 0;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we test IN; the CONSTANT_P (X) check is folded
     into the default case of the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
            return 1;

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}

/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }
  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}

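/* Usage sketch (editor's illustration, not part of the original source).
   A callback that records single hard registers stored by an insn; it is
   deliberately simplified (a real caller would also account for
   multi-register modes via hard_regno_nregs):

     static void
     record_hard_reg_stores (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                             void *data)
     {
       HARD_REG_SET *pset = (HARD_REG_SET *) data;
       if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
         SET_HARD_REG_BIT (*pset, REGNO (dest));
     }

     ...
     note_stores (PATTERN (insn), record_hard_reg_stores, &stores);  */
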
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each complete expression, not for interior subexpressions.  FUN
   receives a pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we visit everything in the source, plus the registers
           in the memory expression of the store, and the operands of a
           ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}

/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}

/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
              : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}

/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}

/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}

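/* Usage sketch (editor's illustration, not part of the original source).
   Fetching the known-equal value of an insn, if any:

     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     if (note)
       ...XEXP (note, 0) is an expression known to equal the value
          computed by the insn's single set...  */
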
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}

/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}

/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}

/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}

/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  gcc_unreachable ();
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}

/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no other
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (MEM_NOTRAP_P (x))
        return 0;
      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
        return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
        break;
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
        return 1;
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case EQ:
    case NE:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
        return 1;
      break;

    case NEG:
    case ABS:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
          && flag_trapping_math)
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (may_trap_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p (XVECEXP (x, i, j)))
              return 1;
        }
    }
  return 0;
}

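/* Usage sketch (editor's illustration, not part of the original source).
   A code motion pass must not hoist a potentially trapping expression
   above its guard; SET is assumed to come from single_set:

     if (! may_trap_p (SET_SRC (set))
         && ! side_effects_p (SET_SRC (set)))
       ...SET_SRC (set) may be evaluated speculatively...  */
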
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 0;

    case LT:
    case LTU:
    case GT:
    case GTU:
    case LE:
    case LEU:
    case GE:
    case GEU:
      return 1;

    default:
      break;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          if (inequality_comparisons_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}

/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

rtx
replace_rtx (rtx x, rtx from, rtx to)
{
  int i, j;
  const char *fmt;

  /* The following prevents loops from occurring when we change a MEM in
     a CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
    return x;

  if (x == from)
    return to;

  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (x == 0)
    return 0;

  if (GET_CODE (x) == SUBREG)
    {
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));
          gcc_assert (x);
        }
      else
        SUBREG_REG (x) = new;

      return x;
    }
  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));
          gcc_assert (x);
        }
      else
        XEXP (x, 0) = new;

      return x;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
    }

  return x;
}

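/* Usage sketch (editor's illustration, not part of the original source).
   Because replace_rtx modifies X in place (no copying), a caller that
   must preserve the original works on a copy:

     rtx pat = copy_rtx (PATTERN (insn));
     pat = replace_rtx (pat, old_reg, new_reg);

   OLD_REG and NEW_REG are hypothetical; the rewritten pattern should be
   revalidated (e.g. via the validate_change machinery in recog.c)
   before being installed in an insn.  */
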
2317 /* Throughout the rtx X, replace many registers according to REG_MAP.
2318 Return the replacement for X (which may be X with altered contents).
2319 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2320 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2322 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2323 should not be mapped to pseudos or vice versa since validate_change
2324 is not called.
2326 If REPLACE_DEST is 1, replacements are also done in destinations;
2327 otherwise, only sources are replaced. */
2330 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2332 enum rtx_code code;
2333 int i;
2334 const char *fmt;
2336 if (x == 0)
2337 return x;
2339 code = GET_CODE (x);
2340 switch (code)
2342 case SCRATCH:
2343 case PC:
2344 case CC0:
2345 case CONST_INT:
2346 case CONST_DOUBLE:
2347 case CONST_VECTOR:
2348 case CONST:
2349 case SYMBOL_REF:
2350 case LABEL_REF:
2351 return x;
2353 case REG:
2354 /* Verify that the register has an entry before trying to access it. */
2355 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2357 /* SUBREGs can't be shared. Always return a copy to ensure that if
2358 this replacement occurs more than once then each instance will
2359 get distinct rtx. */
2360 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2361 return copy_rtx (reg_map[REGNO (x)]);
2362 return reg_map[REGNO (x)];
2364 return x;
2366 case SUBREG:
2367 /* Prevent making nested SUBREGs. */
2368 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2369 && reg_map[REGNO (SUBREG_REG (x))] != 0
2370 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2372 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2373 return simplify_gen_subreg (GET_MODE (x), map_val,
2374 GET_MODE (SUBREG_REG (x)),
2375 SUBREG_BYTE (x));
2377 break;
2379 case SET:
2380 if (replace_dest)
2381 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2383 else if (MEM_P (SET_DEST (x))
2384 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2385 /* Even if we are not to replace destinations, replace a register if
2386 it is CONTAINED in the destination (i.e. the destination is a MEM
2387 or a STRICT_LOW_PART). */
2388 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2389 reg_map, nregs, 0);
2390 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2391 /* Similarly, for ZERO_EXTRACT we break out and let the loop below replace all operands. */
2392 break;
2394 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2395 return x;
2397 default:
2398 break;
2401 fmt = GET_RTX_FORMAT (code);
2402 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2404 if (fmt[i] == 'e')
2405 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2406 else if (fmt[i] == 'E')
2408 int j;
2409 for (j = 0; j < XVECLEN (x, i); j++)
2410 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2411 nregs, replace_dest);
2414 return x;
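/* A minimal usage sketch (an addition): remapping a single pseudo via
   a REG_MAP indexed by register number.  PAT, REGNO_OLD and NEW_REG
   are hypothetical.  */
#if 0
  rtx *reg_map = xcalloc (max_reg_num (), sizeof (rtx));
  reg_map[regno_old] = new_reg;
  pat = replace_regs (pat, reg_map, max_reg_num (), 1);
  free (reg_map);
#endif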
2417 /* Replace occurrences of the old label in *X with the new one.
2418 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2420 int
2421 replace_label (rtx *x, void *data)
2423 rtx l = *x;
2424 rtx old_label = ((replace_label_data *) data)->r1;
2425 rtx new_label = ((replace_label_data *) data)->r2;
2426 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2428 if (l == NULL_RTX)
2429 return 0;
2431 if (GET_CODE (l) == SYMBOL_REF
2432 && CONSTANT_POOL_ADDRESS_P (l))
2434 rtx c = get_pool_constant (l);
2435 if (rtx_referenced_p (old_label, c))
2437 rtx new_c, new_l;
2438 replace_label_data *d = (replace_label_data *) data;
2440 /* Create a copy of constant C; replace the label inside
2441 but do not update LABEL_NUSES because uses in constant pool
2442 are not counted. */
2443 new_c = copy_rtx (c);
2444 d->update_label_nuses = false;
2445 for_each_rtx (&new_c, replace_label, data);
2446 d->update_label_nuses = update_label_nuses;
2448 /* Add the new constant NEW_C to the constant pool and replace
2449 the old constant reference with the new one. */
2450 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2451 *x = replace_rtx (l, l, new_l);
2453 return 0;
2456 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2457 field. This is not handled by for_each_rtx because it doesn't
2458 handle unprinted ('0') fields. */
2459 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2460 JUMP_LABEL (l) = new_label;
2462 if ((GET_CODE (l) == LABEL_REF
2463 || GET_CODE (l) == INSN_LIST)
2464 && XEXP (l, 0) == old_label)
2466 XEXP (l, 0) = new_label;
2467 if (update_label_nuses)
2469 ++LABEL_NUSES (new_label);
2470 --LABEL_NUSES (old_label);
2472 return 0;
2475 return 0;
2478 /* If *BODY is equal to X, or X is directly referenced by *BODY,
2479 return nonzero so that FOR_EACH_RTX stops traversing and returns
2480 nonzero too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2482 static int
2483 rtx_referenced_p_1 (rtx *body, void *x)
2485 rtx y = (rtx) x;
2487 if (*body == NULL_RTX)
2488 return y == NULL_RTX;
2490 /* Return true if a label_ref *BODY refers to label Y. */
2491 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2492 return XEXP (*body, 0) == y;
2494 /* If *BODY is a reference to pool constant traverse the constant. */
2495 if (GET_CODE (*body) == SYMBOL_REF
2496 && CONSTANT_POOL_ADDRESS_P (*body))
2497 return rtx_referenced_p (y, get_pool_constant (*body));
2499 /* By default, compare the RTL expressions. */
2500 return rtx_equal_p (*body, y);
2503 /* Return true if X is referenced in BODY. */
2505 int
2506 rtx_referenced_p (rtx x, rtx body)
2508 return for_each_rtx (&body, rtx_referenced_p_1, x);
2511 /* If INSN is a tablejump, return true and store the label preceding the
2512 jump table in *LABELP and the jump table in *TABLEP; either may be NULL. */
2514 bool
2515 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2517 rtx label, table;
2519 if (JUMP_P (insn)
2520 && (label = JUMP_LABEL (insn)) != NULL_RTX
2521 && (table = next_active_insn (label)) != NULL_RTX
2522 && JUMP_P (table)
2523 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2524 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2526 if (labelp)
2527 *labelp = label;
2528 if (tablep)
2529 *tablep = table;
2530 return true;
2532 return false;
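/* A usage sketch (an addition): visiting every case label of a
   tablejump, mirroring the loop in label_is_jump_target_p below.
   INSN is hypothetical and HANDLE_LABEL is a hypothetical callback.  */
#if 0
  rtx table;
  if (tablejump_p (insn, NULL, &table))
    {
      rtvec vec = XVEC (PATTERN (table),
			GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
      int i;
      for (i = 0; i < GET_NUM_ELEM (vec); i++)
	handle_label (XEXP (RTVEC_ELT (vec, i), 0));
    }
#endif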
2535 /* A subroutine of computed_jump_p. Return 1 if X contains a REG or MEM or
2536 constant that is not in the constant pool and not in the condition
2537 of an IF_THEN_ELSE. */
2539 static int
2540 computed_jump_p_1 (rtx x)
2542 enum rtx_code code = GET_CODE (x);
2543 int i, j;
2544 const char *fmt;
2546 switch (code)
2548 case LABEL_REF:
2549 case PC:
2550 return 0;
2552 case CONST:
2553 case CONST_INT:
2554 case CONST_DOUBLE:
2555 case CONST_VECTOR:
2556 case SYMBOL_REF:
2557 case REG:
2558 return 1;
2560 case MEM:
2561 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2562 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2564 case IF_THEN_ELSE:
2565 return (computed_jump_p_1 (XEXP (x, 1))
2566 || computed_jump_p_1 (XEXP (x, 2)));
2568 default:
2569 break;
2572 fmt = GET_RTX_FORMAT (code);
2573 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2575 if (fmt[i] == 'e'
2576 && computed_jump_p_1 (XEXP (x, i)))
2577 return 1;
2579 else if (fmt[i] == 'E')
2580 for (j = 0; j < XVECLEN (x, i); j++)
2581 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2582 return 1;
2585 return 0;
2588 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2590 Tablejumps and casesi insns are not considered indirect jumps;
2591 we can recognize them by a (use (label_ref)). */
2593 int
2594 computed_jump_p (rtx insn)
2596 int i;
2597 if (JUMP_P (insn))
2599 rtx pat = PATTERN (insn);
2601 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2602 return 0;
2603 else if (GET_CODE (pat) == PARALLEL)
2605 int len = XVECLEN (pat, 0);
2606 int has_use_labelref = 0;
2608 for (i = len - 1; i >= 0; i--)
2609 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2610 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2611 == LABEL_REF))
2612 has_use_labelref = 1;
2614 if (! has_use_labelref)
2615 for (i = len - 1; i >= 0; i--)
2616 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2617 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2618 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2619 return 1;
2621 else if (GET_CODE (pat) == SET
2622 && SET_DEST (pat) == pc_rtx
2623 && computed_jump_p_1 (SET_SRC (pat)))
2624 return 1;
2626 return 0;
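/* An illustrative note (an addition): a computed jump typically looks
   like (set (pc) (reg:SI 64)) or (set (pc) (mem:SI ...)), while a
   casesi/tablejump PARALLEL carries a (use (label_ref ...)) and is
   therefore rejected by the loop above.  */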
2629 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2630 calls. Processes the subexpressions of EXP and passes them to F. */
2631 static int
2632 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2634 int result, i, j;
2635 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2636 rtx *x;
2638 for (; format[n] != '\0'; n++)
2640 switch (format[n])
2642 case 'e':
2643 /* Call F on X. */
2644 x = &XEXP (exp, n);
2645 result = (*f) (x, data);
2646 if (result == -1)
2647 /* Do not traverse sub-expressions. */
2648 continue;
2649 else if (result != 0)
2650 /* Stop the traversal. */
2651 return result;
2653 if (*x == NULL_RTX)
2654 /* There are no sub-expressions. */
2655 continue;
2657 i = non_rtx_starting_operands[GET_CODE (*x)];
2658 if (i >= 0)
2660 result = for_each_rtx_1 (*x, i, f, data);
2661 if (result != 0)
2662 return result;
2664 break;
2666 case 'V':
2667 case 'E':
2668 if (XVEC (exp, n) == 0)
2669 continue;
2670 for (j = 0; j < XVECLEN (exp, n); ++j)
2672 /* Call F on X. */
2673 x = &XVECEXP (exp, n, j);
2674 result = (*f) (x, data);
2675 if (result == -1)
2676 /* Do not traverse sub-expressions. */
2677 continue;
2678 else if (result != 0)
2679 /* Stop the traversal. */
2680 return result;
2682 if (*x == NULL_RTX)
2683 /* There are no sub-expressions. */
2684 continue;
2686 i = non_rtx_starting_operands[GET_CODE (*x)];
2687 if (i >= 0)
2689 result = for_each_rtx_1 (*x, i, f, data);
2690 if (result != 0)
2691 return result;
2694 break;
2696 default:
2697 /* Nothing to do. */
2698 break;
2702 return 0;
2705 /* Traverse X via depth-first search, calling F for each
2706 sub-expression (including X itself). F is also passed the DATA.
2707 If F returns -1, do not traverse sub-expressions, but continue
2708 traversing the rest of the tree. If F ever returns any other
2709 nonzero value, stop the traversal, and return the value returned
2710 by F. Otherwise, return 0. This function does not traverse inside
2711 tree structures that contain RTX_EXPRs, or into sub-expressions
2712 whose format code is `0', since it is not known whether those
2713 codes are actually RTL.
2715 This routine is very general, and could (should?) be used to
2716 implement many of the other routines in this file. */
2718 int
2719 for_each_rtx (rtx *x, rtx_function f, void *data)
2721 int result;
2722 int i;
2724 /* Call F on X. */
2725 result = (*f) (x, data);
2726 if (result == -1)
2727 /* Do not traverse sub-expressions. */
2728 return 0;
2729 else if (result != 0)
2730 /* Stop the traversal. */
2731 return result;
2733 if (*x == NULL_RTX)
2734 /* There are no sub-expressions. */
2735 return 0;
2737 i = non_rtx_starting_operands[GET_CODE (*x)];
2738 if (i < 0)
2739 return 0;
2741 return for_each_rtx_1 (*x, i, f, data);
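/* A minimal callback sketch (an addition): counting MEM
   sub-expressions with for_each_rtx.  Returning 0 continues the
   traversal; -1 would skip sub-expressions; any other nonzero value
   would stop it.  */
#if 0
static int
count_mems_1 (rtx *x, void *data)
{
  if (*x != NULL_RTX && MEM_P (*x))
    ++*(int *) data;
  return 0;
}

static int
count_mems (rtx x)
{
  int count = 0;
  for_each_rtx (&x, count_mems_1, &count);
  return count;
}
#endif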
2745 /* Searches X for any reference to REGNO, returning the rtx of the
2746 reference found if any. Otherwise, returns NULL_RTX. */
2748 rtx
2749 regno_use_in (unsigned int regno, rtx x)
2751 const char *fmt;
2752 int i, j;
2753 rtx tem;
2755 if (REG_P (x) && REGNO (x) == regno)
2756 return x;
2758 fmt = GET_RTX_FORMAT (GET_CODE (x));
2759 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2761 if (fmt[i] == 'e')
2763 if ((tem = regno_use_in (regno, XEXP (x, i))))
2764 return tem;
2766 else if (fmt[i] == 'E')
2767 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2768 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2769 return tem;
2772 return NULL_RTX;
2775 /* Return a value indicating whether OP, an operand of a commutative
2776 operation, is preferred as the first or second operand. The higher
2777 the value, the stronger the preference for being the first operand.
2778 We use negative values to indicate a preference for the second operand
2779 and positive values for the first operand. */
2781 int
2782 commutative_operand_precedence (rtx op)
2784 enum rtx_code code = GET_CODE (op);
2786 /* Constants always become the second operand. Prefer "nice" constants. */
2787 if (code == CONST_INT)
2788 return -7;
2789 if (code == CONST_DOUBLE)
2790 return -6;
2791 op = avoid_constant_pool_reference (op);
2792 code = GET_CODE (op);
2794 switch (GET_RTX_CLASS (code))
2796 case RTX_CONST_OBJ:
2797 if (code == CONST_INT)
2798 return -5;
2799 if (code == CONST_DOUBLE)
2800 return -4;
2801 return -3;
2803 case RTX_EXTRA:
2804 /* SUBREGs of objects should come second. */
2805 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2806 return -2;
2808 if (!CONSTANT_P (op))
2809 return 0;
2810 else
2811 /* As for RTX_CONST_OBJ. */
2812 return -3;
2814 case RTX_OBJ:
2815 /* Complex expressions should come first, so decrease the priority
2816 of objects. */
2817 return -1;
2819 case RTX_COMM_ARITH:
2820 /* Prefer operands that are themselves commutative to be first.
2821 This helps to make things linear. In particular,
2822 (and (and (reg) (reg)) (not (reg))) is canonical. */
2823 return 4;
2825 case RTX_BIN_ARITH:
2826 /* If only one operand is a binary expression, it will be the first
2827 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2828 is canonical, although it will usually be further simplified. */
2829 return 2;
2831 case RTX_UNARY:
2832 /* Then prefer NEG and NOT. */
2833 if (code == NEG || code == NOT)
2834 return 1;
2836 default:
2837 return 0;
2841 /* Return 1 iff it is necessary to swap operands of commutative operation
2842 in order to canonicalize expression. */
2844 int
2845 swap_commutative_operands_p (rtx x, rtx y)
2847 return (commutative_operand_precedence (x)
2848 < commutative_operand_precedence (y));
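/* A usage sketch (an addition): callers canonicalize operand order
   before constructing a commutative expression, so that e.g. a
   constant always ends up second.  OP0, OP1 and MODE are
   hypothetical.  */
#if 0
  if (swap_commutative_operands_p (op0, op1))
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
    }
  x = gen_rtx_PLUS (mode, op0, op1);
#endif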
2851 /* Return 1 if X is an autoincrement side effect and the register is
2852 not the stack pointer. */
2853 int
2854 auto_inc_p (rtx x)
2856 switch (GET_CODE (x))
2858 case PRE_INC:
2859 case POST_INC:
2860 case PRE_DEC:
2861 case POST_DEC:
2862 case PRE_MODIFY:
2863 case POST_MODIFY:
2864 /* There are no REG_INC notes for SP. */
2865 if (XEXP (x, 0) != stack_pointer_rtx)
2866 return 1;
2867 default:
2868 break;
2870 return 0;
2873 /* Return 1 if the sequence of instructions beginning with FROM and up
2874 to and including TO is safe to move. If NEW_TO is non-NULL, and
2875 the sequence is not already safe to move, but can be easily
2876 extended to a sequence which is safe, then NEW_TO will point to the
2877 end of the extended sequence.
2879 For now, this function only checks that the region contains whole
2880 exception regions, but it could be extended to check additional
2881 conditions as well. */
2883 int
2884 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
2886 int eh_region_count = 0;
2887 int past_to_p = 0;
2888 rtx r = from;
2890 /* By default, assume the end of the region will be what was
2891 suggested. */
2892 if (new_to)
2893 *new_to = to;
2895 while (r)
2897 if (NOTE_P (r))
2899 switch (NOTE_LINE_NUMBER (r))
2901 case NOTE_INSN_EH_REGION_BEG:
2902 ++eh_region_count;
2903 break;
2905 case NOTE_INSN_EH_REGION_END:
2906 if (eh_region_count == 0)
2907 /* This sequence of instructions contains the end of
2908 an exception region, but not the beginning. Moving
2909 it will cause chaos. */
2910 return 0;
2912 --eh_region_count;
2913 break;
2915 default:
2916 break;
2919 else if (past_to_p)
2920 /* If we've passed TO, and we see a non-note instruction, we
2921 can't extend the sequence to a movable sequence. */
2922 return 0;
2924 if (r == to)
2926 if (!new_to)
2927 /* It's OK to move the sequence if there were matched sets of
2928 exception region notes. */
2929 return eh_region_count == 0;
2931 past_to_p = 1;
2934 /* It's OK to move the sequence if there were matched sets of
2935 exception region notes. */
2936 if (past_to_p && eh_region_count == 0)
2938 *new_to = r;
2939 return 1;
2942 /* Go to the next instruction. */
2943 r = NEXT_INSN (r);
2946 return 0;
2949 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2950 int
2951 loc_mentioned_in_p (rtx *loc, rtx in)
2953 enum rtx_code code = GET_CODE (in);
2954 const char *fmt = GET_RTX_FORMAT (code);
2955 int i, j;
2957 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2959 if (loc == &in->u.fld[i].rt_rtx)
2960 return 1;
2961 if (fmt[i] == 'e')
2963 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2964 return 1;
2966 else if (fmt[i] == 'E')
2967 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2968 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2969 return 1;
2971 return 0;
2974 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2975 and SUBREG_BYTE, return the bit offset where the subreg begins
2976 (counting from the least significant bit of the operand). */
2978 unsigned int
2979 subreg_lsb_1 (enum machine_mode outer_mode,
2980 enum machine_mode inner_mode,
2981 unsigned int subreg_byte)
2983 unsigned int bitpos;
2984 unsigned int byte;
2985 unsigned int word;
2987 /* A paradoxical subreg begins at bit position 0. */
2988 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2989 return 0;
2991 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2992 /* If the subreg crosses a word boundary ensure that
2993 it also begins and ends on a word boundary. */
2994 gcc_assert (!((subreg_byte % UNITS_PER_WORD
2995 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2996 && (subreg_byte % UNITS_PER_WORD
2997 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
2999 if (WORDS_BIG_ENDIAN)
3000 word = (GET_MODE_SIZE (inner_mode)
3001 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3002 else
3003 word = subreg_byte / UNITS_PER_WORD;
3004 bitpos = word * BITS_PER_WORD;
3006 if (BYTES_BIG_ENDIAN)
3007 byte = (GET_MODE_SIZE (inner_mode)
3008 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3009 else
3010 byte = subreg_byte % UNITS_PER_WORD;
3011 bitpos += byte * BITS_PER_UNIT;
3013 return bitpos;
3016 /* Given a subreg X, return the bit offset where the subreg begins
3017 (counting from the least significant bit of the reg). */
3019 unsigned int
3020 subreg_lsb (rtx x)
3022 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3023 SUBREG_BYTE (x));
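/* A worked example (an addition), assuming 32-bit words: for
   (subreg:SI (reg:DI r) 4), a little-endian target yields bit 32
   (the high word), while a big-endian target yields bit 0, since
   SUBREG_BYTE 4 then names the low-order word.  */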
3026 /* This function returns the regno offset of a subreg expression.
3027 xregno - A regno of an inner hard subreg_reg (or what will become one).
3028 xmode - The mode of xregno.
3029 offset - The byte offset.
3030 ymode - The mode of a top level SUBREG (or what may become one).
3031 RETURN - The regno offset which would be used. */
3032 unsigned int
3033 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3034 unsigned int offset, enum machine_mode ymode)
3036 int nregs_xmode, nregs_ymode, nregs_xmode_unit_int;
3037 int mode_multiple, nregs_multiple;
3038 int y_offset;
3039 enum machine_mode xmode_unit, xmode_unit_int;
3041 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3043 if (GET_MODE_INNER (xmode) == VOIDmode)
3044 xmode_unit = xmode;
3045 else
3046 xmode_unit = GET_MODE_INNER (xmode);
3048 if (FLOAT_MODE_P (xmode_unit))
3050 xmode_unit_int = int_mode_for_mode (xmode_unit);
3051 if (xmode_unit_int == BLKmode)
3052 /* It's probably bad to be here; a port should have an integer mode
3053 that's the same size as anything of which it takes a SUBREG. */
3054 xmode_unit_int = xmode_unit;
3056 else
3057 xmode_unit_int = xmode_unit;
3059 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
3061 /* Adjust nregs_xmode to allow for 'holes'. */
3062 if (nregs_xmode_unit_int != hard_regno_nregs[xregno][xmode_unit])
3063 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
3064 else
3065 nregs_xmode = hard_regno_nregs[xregno][xmode];
3067 nregs_ymode = hard_regno_nregs[xregno][ymode];
3069 /* If this is a big endian paradoxical subreg, which uses more actual
3070 hard registers than the original register, we must return a negative
3071 offset so that we find the proper highpart of the register. */
3072 if (offset == 0
3073 && nregs_ymode > nregs_xmode
3074 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3075 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3076 return nregs_xmode - nregs_ymode;
3078 if (offset == 0 || nregs_xmode == nregs_ymode)
3079 return 0;
3081 /* Size of ymode must not be greater than the size of xmode. */
3082 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3083 gcc_assert (mode_multiple != 0);
3085 y_offset = offset / GET_MODE_SIZE (ymode);
3086 nregs_multiple = nregs_xmode / nregs_ymode;
3087 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
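/* A worked example (an addition), assuming 32-bit hard registers:
   for (subreg:SI (reg:DI k) 4), xmode = DImode (2 regs), ymode =
   SImode (1 reg): mode_multiple = 2, nregs_multiple = 2, y_offset =
   1, giving (1 / (2 / 2)) * 1 = 1, i.e. hard register k + 1.  */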
3090 /* This function returns true when the offset is representable via
3091 subreg_offset in the given regno.
3092 xregno - A regno of an inner hard subreg_reg (or what will become one).
3093 xmode - The mode of xregno.
3094 offset - The byte offset.
3095 ymode - The mode of a top level SUBREG (or what may become one).
3096 RETURN - Whether the offset is representable. */
3097 bool
3098 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3099 unsigned int offset, enum machine_mode ymode)
3101 int nregs_xmode, nregs_ymode, nregs_xmode_unit, nregs_xmode_unit_int;
3102 int mode_multiple, nregs_multiple;
3103 int y_offset;
3104 enum machine_mode xmode_unit, xmode_unit_int;
3106 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3108 if (GET_MODE_INNER (xmode) == VOIDmode)
3109 xmode_unit = xmode;
3110 else
3111 xmode_unit = GET_MODE_INNER (xmode);
3113 if (FLOAT_MODE_P (xmode_unit))
3115 xmode_unit_int = int_mode_for_mode (xmode_unit);
3116 if (xmode_unit_int == BLKmode)
3117 /* It's probably bad to be here; a port should have an integer mode
3118 that's the same size as anything of which it takes a SUBREG. */
3119 xmode_unit_int = xmode_unit;
3121 else
3122 xmode_unit_int = xmode_unit;
3124 nregs_xmode_unit = hard_regno_nregs[xregno][xmode_unit];
3125 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
3127 /* If there are holes in a non-scalar mode in registers, we expect
3128 that it is made up of its units concatenated together. */
3129 if (nregs_xmode_unit != nregs_xmode_unit_int)
3131 gcc_assert (nregs_xmode_unit * GET_MODE_NUNITS (xmode)
3132 == hard_regno_nregs[xregno][xmode]);
3134 /* You can only ask for a SUBREG of a value with holes in the middle
3135 if you don't cross the holes. (Such a SUBREG should be done by
3136 picking a different register class, or doing it in memory if
3137 necessary.) An example of a value with holes is XCmode on 32-bit
3138 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3139 3 for each part, but in memory it's two 128-bit parts.
3140 Padding is assumed to be at the end (not necessarily the 'high part')
3141 of each unit. */
3142 if (nregs_xmode_unit != nregs_xmode_unit_int
3143 && (offset / GET_MODE_SIZE (xmode_unit_int) + 1
3144 < GET_MODE_NUNITS (xmode))
3145 && (offset / GET_MODE_SIZE (xmode_unit_int)
3146 != ((offset + GET_MODE_SIZE (ymode) - 1)
3147 / GET_MODE_SIZE (xmode_unit_int))))
3148 return false;
3150 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
3152 else
3153 nregs_xmode = hard_regno_nregs[xregno][xmode];
3155 nregs_ymode = hard_regno_nregs[xregno][ymode];
3157 /* Paradoxical subregs are otherwise valid. */
3158 if (offset == 0
3159 && nregs_ymode > nregs_xmode
3160 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3161 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3162 return true;
3164 /* Lowpart subregs are otherwise valid. */
3165 if (offset == subreg_lowpart_offset (ymode, xmode))
3166 return true;
3168 /* This should always pass, otherwise we don't know how to verify
3169 the constraint. These conditions may be relaxed but
3170 subreg_regno_offset would need to be redesigned. */
3171 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3172 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3174 /* The XMODE value can be seen as a vector of NREGS_XMODE
3175 values. The subreg must represent a lowpart of given field.
3176 Compute what field it is. */
3177 offset -= subreg_lowpart_offset (ymode,
3178 mode_for_size (GET_MODE_BITSIZE (xmode)
3179 / nregs_xmode,
3180 MODE_INT, 0));
3182 /* Size of ymode must not be greater than the size of xmode. */
3183 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3184 gcc_assert (mode_multiple != 0);
3186 y_offset = offset / GET_MODE_SIZE (ymode);
3187 nregs_multiple = nregs_xmode / nregs_ymode;
3189 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3190 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3192 return (!(y_offset % (mode_multiple / nregs_multiple)));
3195 /* Return the final regno that a subreg expression refers to. */
3196 unsigned int
3197 subreg_regno (rtx x)
3199 unsigned int ret;
3200 rtx subreg = SUBREG_REG (x);
3201 int regno = REGNO (subreg);
3203 ret = regno + subreg_regno_offset (regno,
3204 GET_MODE (subreg),
3205 SUBREG_BYTE (x),
3206 GET_MODE (x));
3207 return ret;
3210 struct parms_set_data
3212 int nregs;
3213 HARD_REG_SET regs;
3216 /* Helper function for noticing stores to parameter registers. */
3217 static void
3218 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3220 struct parms_set_data *d = data;
3221 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3222 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3224 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3225 d->nregs--;
3229 /* Look backward for the first parameter to be loaded.
3230 Note that loads of all parameters will not necessarily be
3231 found if CSE has eliminated some of them (e.g., an argument
3232 to the outer function is passed down as a parameter).
3233 Do not skip BOUNDARY. */
3234 rtx
3235 find_first_parameter_load (rtx call_insn, rtx boundary)
3237 struct parms_set_data parm;
3238 rtx p, before, first_set;
3240 /* Since different machines initialize their parameter registers
3241 in different orders, assume nothing. Collect the set of all
3242 parameter registers. */
3243 CLEAR_HARD_REG_SET (parm.regs);
3244 parm.nregs = 0;
3245 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3246 if (GET_CODE (XEXP (p, 0)) == USE
3247 && REG_P (XEXP (XEXP (p, 0), 0)))
3249 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3251 /* We only care about registers which can hold function
3252 arguments. */
3253 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3254 continue;
3256 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3257 parm.nregs++;
3259 before = call_insn;
3260 first_set = call_insn;
3262 /* Search backward for the first set of a register in this set. */
3263 while (parm.nregs && before != boundary)
3265 before = PREV_INSN (before);
3267 /* It is possible that some loads got CSEed from one call to
3268 another. Stop in that case. */
3269 if (CALL_P (before))
3270 break;
3272 /* Our caller must either ensure that we will find all sets
3273 (in case the code has not been optimized yet), or guard
3274 against possible labels by setting BOUNDARY to the preceding
3275 CODE_LABEL. */
3276 if (LABEL_P (before))
3278 gcc_assert (before == boundary);
3279 break;
3282 if (INSN_P (before))
3284 int nregs_old = parm.nregs;
3285 note_stores (PATTERN (before), parms_set, &parm);
3286 /* If we found something that did not set a parameter reg,
3287 we're done. Do not keep going, as that might result
3288 in hoisting an insn before the setting of a pseudo
3289 that is used by the hoisted insn. */
3290 if (nregs_old != parm.nregs)
3291 first_set = before;
3292 else
3293 break;
3296 return first_set;
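/* A usage sketch (an addition): code that wants to emit something
   "before the call" should place it before the argument setup too.
   SEQ, BB and CALL_INSN are hypothetical; BB_HEAD (bb) serves as the
   boundary the backward walk must not skip.  */
#if 0
  rtx first = find_first_parameter_load (call_insn, BB_HEAD (bb));
  emit_insn_before (seq, first);
#endif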
3299 /* Return true if we should avoid inserting code between INSN and preceding
3300 call instruction. */
3302 bool
3303 keep_with_call_p (rtx insn)
3305 rtx set;
3307 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3309 if (REG_P (SET_DEST (set))
3310 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3311 && fixed_regs[REGNO (SET_DEST (set))]
3312 && general_operand (SET_SRC (set), VOIDmode))
3313 return true;
3314 if (REG_P (SET_SRC (set))
3315 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3316 && REG_P (SET_DEST (set))
3317 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3318 return true;
3319 /* There may be a stack pop just after the call and before the store
3320 of the return register. Search for the actual store when deciding
3321 if we can break or not. */
3322 if (SET_DEST (set) == stack_pointer_rtx)
3324 rtx i2 = next_nonnote_insn (insn);
3325 if (i2 && keep_with_call_p (i2))
3326 return true;
3329 return false;
3332 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3333 to non-complex jumps. That is, direct unconditional, conditional,
3334 and tablejumps, but not computed jumps or returns. It also does
3335 not apply to the fallthru case of a conditional jump. */
3337 bool
3338 label_is_jump_target_p (rtx label, rtx jump_insn)
3340 rtx tmp = JUMP_LABEL (jump_insn);
3342 if (label == tmp)
3343 return true;
3345 if (tablejump_p (jump_insn, NULL, &tmp))
3347 rtvec vec = XVEC (PATTERN (tmp),
3348 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3349 int i, veclen = GET_NUM_ELEM (vec);
3351 for (i = 0; i < veclen; ++i)
3352 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3353 return true;
3356 return false;
3360 /* Return an estimate of the cost of computing rtx X.
3361 One use is in cse, to decide which expression to keep in the hash table.
3362 Another is in rtl generation, to pick the cheapest way to multiply.
3363 Other uses like the latter are expected in the future. */
3365 int
3366 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3368 int i, j;
3369 enum rtx_code code;
3370 const char *fmt;
3371 int total;
3373 if (x == 0)
3374 return 0;
3376 /* Compute the default costs of certain things.
3377 Note that targetm.rtx_costs can override the defaults. */
3379 code = GET_CODE (x);
3380 switch (code)
3382 case MULT:
3383 total = COSTS_N_INSNS (5);
3384 break;
3385 case DIV:
3386 case UDIV:
3387 case MOD:
3388 case UMOD:
3389 total = COSTS_N_INSNS (7);
3390 break;
3391 case USE:
3392 /* Used in loop.c and combine.c as a marker. */
3393 total = 0;
3394 break;
3395 default:
3396 total = COSTS_N_INSNS (1);
3399 switch (code)
3401 case REG:
3402 return 0;
3404 case SUBREG:
3405 total = 0;
3406 /* If we can't tie these modes, make this expensive. The larger
3407 the mode, the more expensive it is. */
3408 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3409 return COSTS_N_INSNS (2
3410 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3411 break;
3413 default:
3414 if (targetm.rtx_costs (x, code, outer_code, &total))
3415 return total;
3416 break;
3419 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3420 which is already in total. */
3422 fmt = GET_RTX_FORMAT (code);
3423 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3424 if (fmt[i] == 'e')
3425 total += rtx_cost (XEXP (x, i), code);
3426 else if (fmt[i] == 'E')
3427 for (j = 0; j < XVECLEN (x, i); j++)
3428 total += rtx_cost (XVECEXP (x, i, j), code);
3430 return total;
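/* A usage sketch (an addition): comparing the cost of a multiply by
   a constant against a fixed budget, the way expansion code decides
   between alternatives.  OP and MODE are hypothetical.  */
#if 0
  if (rtx_cost (gen_rtx_MULT (mode, op, GEN_INT (9)), SET)
      > COSTS_N_INSNS (2))
    /* ... use a shift-and-add sequence instead ... */;
#endif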
3433 /* Return the cost of address expression X.
3434 Expect that X is a properly formed address reference. */
3436 int
3437 address_cost (rtx x, enum machine_mode mode)
3439 /* We may be asked for the cost of various unusual addresses, such as
3440 operands of a push instruction. It is not worthwhile to complicate
3441 the target hook with such cases. */
3443 if (!memory_address_p (mode, x))
3444 return 1000;
3446 return targetm.address_cost (x);
3449 /* If the target doesn't override, compute the cost as with arithmetic. */
3451 int
3452 default_address_cost (rtx x)
3454 return rtx_cost (x, MEM);
3458 unsigned HOST_WIDE_INT
3459 nonzero_bits (rtx x, enum machine_mode mode)
3461 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3464 unsigned int
3465 num_sign_bit_copies (rtx x, enum machine_mode mode)
3467 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
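/* Usage sketches (an addition): two common queries.  If the low bits
   of X cannot be nonzero, X is a multiple of their power of two; if
   more than 32 of a DImode value's bits copy the sign bit, the value
   is representable in SImode.  X is hypothetical.  */
#if 0
  if ((nonzero_bits (x, SImode) & 3) == 0)
    /* ... x is a multiple of 4 ... */;
  if (num_sign_bit_copies (x, DImode) > 32)
    /* ... x fits in SImode ... */;
#endif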
3470 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3471 It avoids exponential behavior in nonzero_bits1 when X has
3472 identical subexpressions on the first or the second level. */
3474 static unsigned HOST_WIDE_INT
3475 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3476 enum machine_mode known_mode,
3477 unsigned HOST_WIDE_INT known_ret)
3479 if (x == known_x && mode == known_mode)
3480 return known_ret;
3482 /* Try to find identical subexpressions. If found call
3483 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3484 precomputed value for the subexpression as KNOWN_RET. */
3486 if (ARITHMETIC_P (x))
3488 rtx x0 = XEXP (x, 0);
3489 rtx x1 = XEXP (x, 1);
3491 /* Check the first level. */
3492 if (x0 == x1)
3493 return nonzero_bits1 (x, mode, x0, mode,
3494 cached_nonzero_bits (x0, mode, known_x,
3495 known_mode, known_ret));
3497 /* Check the second level. */
3498 if (ARITHMETIC_P (x0)
3499 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3500 return nonzero_bits1 (x, mode, x1, mode,
3501 cached_nonzero_bits (x1, mode, known_x,
3502 known_mode, known_ret));
3504 if (ARITHMETIC_P (x1)
3505 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3506 return nonzero_bits1 (x, mode, x0, mode,
3507 cached_nonzero_bits (x0, mode, known_x,
3508 known_mode, known_ret));
3511 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3514 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3515 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3516 is less useful. We can't allow both, because that results in exponential
3517 run time recursion. There is a nullstone testcase that triggered
3518 this. This macro avoids accidental uses of num_sign_bit_copies. */
3519 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3521 /* Given an expression, X, compute which bits in X can be nonzero.
3522 We don't care about bits outside of those defined in MODE.
3524 For most X this is simply GET_MODE_MASK (MODE), but if X is
3525 an arithmetic operation, we can do better. */
3527 static unsigned HOST_WIDE_INT
3528 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3529 enum machine_mode known_mode,
3530 unsigned HOST_WIDE_INT known_ret)
3532 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3533 unsigned HOST_WIDE_INT inner_nz;
3534 enum rtx_code code;
3535 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3537 /* For floating-point values, assume all bits are needed. */
3538 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3539 return nonzero;
3541 /* If X is wider than MODE, use its mode instead. */
3542 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3544 mode = GET_MODE (x);
3545 nonzero = GET_MODE_MASK (mode);
3546 mode_width = GET_MODE_BITSIZE (mode);
3549 if (mode_width > HOST_BITS_PER_WIDE_INT)
3550 /* Our only callers in this case look for single bit values. So
3551 just return the mode mask. Those tests will then be false. */
3552 return nonzero;
3554 #ifndef WORD_REGISTER_OPERATIONS
3555 /* If MODE is wider than X, but both are a single word for both the host
3556 and target machines, we can compute this from which bits of the
3557 object might be nonzero in its own mode, taking into account the fact
3558 that on many CISC machines, accessing an object in a wider mode
3559 causes the high-order bits to become undefined. So they are
3560 not known to be zero. */
3562 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3563 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3564 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3565 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3567 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3568 known_x, known_mode, known_ret);
3569 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3570 return nonzero;
3572 #endif
3574 code = GET_CODE (x);
3575 switch (code)
3577 case REG:
3578 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3579 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3580 all the bits above ptr_mode are known to be zero. */
3581 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3582 && REG_POINTER (x))
3583 nonzero &= GET_MODE_MASK (ptr_mode);
3584 #endif
3586 /* Include declared information about alignment of pointers. */
3587 /* ??? We don't properly preserve REG_POINTER changes across
3588 pointer-to-integer casts, so we can't trust it except for
3589 things that we know must be pointers. See execute/960116-1.c. */
3590 if ((x == stack_pointer_rtx
3591 || x == frame_pointer_rtx
3592 || x == arg_pointer_rtx)
3593 && REGNO_POINTER_ALIGN (REGNO (x)))
3595 unsigned HOST_WIDE_INT alignment
3596 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3598 #ifdef PUSH_ROUNDING
3599 /* If PUSH_ROUNDING is defined, it is possible for the
3600 stack to be momentarily aligned only to that amount,
3601 so we pick the least alignment. */
3602 if (x == stack_pointer_rtx && PUSH_ARGS)
3603 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3604 alignment);
3605 #endif
3607 nonzero &= ~(alignment - 1);
3611 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3612 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3613 known_mode, known_ret,
3614 &nonzero_for_hook);
3616 if (new)
3617 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3618 known_mode, known_ret);
3620 return nonzero_for_hook;
3623 case CONST_INT:
3624 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3625 /* If X is negative in MODE, sign-extend the value. */
3626 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3627 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3628 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3629 #endif
3631 return INTVAL (x);
3633 case MEM:
3634 #ifdef LOAD_EXTEND_OP
3635 /* On many, if not most, RISC machines, reading a byte from memory
3636 zeros the rest of the register. Noticing that fact saves a lot
3637 of extra zero-extends. */
3638 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3639 nonzero &= GET_MODE_MASK (GET_MODE (x));
3640 #endif
3641 break;
3643 case EQ: case NE:
3644 case UNEQ: case LTGT:
3645 case GT: case GTU: case UNGT:
3646 case LT: case LTU: case UNLT:
3647 case GE: case GEU: case UNGE:
3648 case LE: case LEU: case UNLE:
3649 case UNORDERED: case ORDERED:
3651 /* If this produces an integer result, we know which bits are set.
3652 Code here used to clear bits outside the mode of X, but that is
3653 now done above. */
3655 if (GET_MODE_CLASS (mode) == MODE_INT
3656 && mode_width <= HOST_BITS_PER_WIDE_INT)
3657 nonzero = STORE_FLAG_VALUE;
3658 break;
3660 case NEG:
3661 #if 0
3662 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3663 and num_sign_bit_copies. */
3664 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3665 == GET_MODE_BITSIZE (GET_MODE (x)))
3666 nonzero = 1;
3667 #endif
3669 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3670 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3671 break;
3673 case ABS:
3674 #if 0
3675 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3676 and num_sign_bit_copies. */
3677 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3678 == GET_MODE_BITSIZE (GET_MODE (x)))
3679 nonzero = 1;
3680 #endif
3681 break;
3683 case TRUNCATE:
3684 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3685 known_x, known_mode, known_ret)
3686 & GET_MODE_MASK (mode));
3687 break;
3689 case ZERO_EXTEND:
3690 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3691 known_x, known_mode, known_ret);
3692 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3693 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3694 break;
3696 case SIGN_EXTEND:
3697 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3698 Otherwise, show that all the bits in the outer mode but not in
3699 the inner mode may be nonzero. */
3700 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3701 known_x, known_mode, known_ret);
3702 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3704 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3705 if (inner_nz
3706 & (((HOST_WIDE_INT) 1
3707 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3708 inner_nz |= (GET_MODE_MASK (mode)
3709 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3712 nonzero &= inner_nz;
3713 break;
3715 case AND:
3716 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3717 known_x, known_mode, known_ret)
3718 & cached_nonzero_bits (XEXP (x, 1), mode,
3719 known_x, known_mode, known_ret);
3720 break;
3722 case XOR: case IOR:
3723 case UMIN: case UMAX: case SMIN: case SMAX:
3725 unsigned HOST_WIDE_INT nonzero0 =
3726 cached_nonzero_bits (XEXP (x, 0), mode,
3727 known_x, known_mode, known_ret);
3729 /* Don't call nonzero_bits for the second time if it cannot change
3730 anything. */
3731 if ((nonzero & nonzero0) != nonzero)
3732 nonzero &= nonzero0
3733 | cached_nonzero_bits (XEXP (x, 1), mode,
3734 known_x, known_mode, known_ret);
3736 break;
3738 case PLUS: case MINUS:
3739 case MULT:
3740 case DIV: case UDIV:
3741 case MOD: case UMOD:
3742 /* We can apply the rules of arithmetic to compute the number of
3743 high- and low-order zero bits of these operations. We start by
3744 computing the width (position of the highest-order nonzero bit)
3745 and the number of low-order zero bits for each value. */
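/* A worked example (an addition): for (plus X Y) with
   nonzero_bits (X) == 0xf0 (width 8, 4 low zeros) and
   nonzero_bits (Y) == 0x30 (width 6, 4 low zeros), the sum has
   width MAX (8, 6) + 1 = 9 and MIN (4, 4) = 4 low zero bits, so
   nonzero is narrowed to 0x1f0.  */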
3747 unsigned HOST_WIDE_INT nz0 =
3748 cached_nonzero_bits (XEXP (x, 0), mode,
3749 known_x, known_mode, known_ret);
3750 unsigned HOST_WIDE_INT nz1 =
3751 cached_nonzero_bits (XEXP (x, 1), mode,
3752 known_x, known_mode, known_ret);
3753 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3754 int width0 = floor_log2 (nz0) + 1;
3755 int width1 = floor_log2 (nz1) + 1;
3756 int low0 = floor_log2 (nz0 & -nz0);
3757 int low1 = floor_log2 (nz1 & -nz1);
3758 HOST_WIDE_INT op0_maybe_minusp
3759 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3760 HOST_WIDE_INT op1_maybe_minusp
3761 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3762 unsigned int result_width = mode_width;
3763 int result_low = 0;
3765 switch (code)
3767 case PLUS:
3768 result_width = MAX (width0, width1) + 1;
3769 result_low = MIN (low0, low1);
3770 break;
3771 case MINUS:
3772 result_low = MIN (low0, low1);
3773 break;
3774 case MULT:
3775 result_width = width0 + width1;
3776 result_low = low0 + low1;
3777 break;
3778 case DIV:
3779 if (width1 == 0)
3780 break;
3781 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3782 result_width = width0;
3783 break;
3784 case UDIV:
3785 if (width1 == 0)
3786 break;
3787 result_width = width0;
3788 break;
3789 case MOD:
3790 if (width1 == 0)
3791 break;
3792 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3793 result_width = MIN (width0, width1);
3794 result_low = MIN (low0, low1);
3795 break;
3796 case UMOD:
3797 if (width1 == 0)
3798 break;
3799 result_width = MIN (width0, width1);
3800 result_low = MIN (low0, low1);
3801 break;
3802 default:
3803 gcc_unreachable ();
3806 if (result_width < mode_width)
3807 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3809 if (result_low > 0)
3810 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3812 #ifdef POINTERS_EXTEND_UNSIGNED
3813 /* If pointers extend unsigned and this is an addition or subtraction
3814 to a pointer in Pmode, all the bits above ptr_mode are known to be
3815 zero. */
3816 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3817 && (code == PLUS || code == MINUS)
3818 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3819 nonzero &= GET_MODE_MASK (ptr_mode);
3820 #endif
3822 break;
3824 case ZERO_EXTRACT:
3825 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3826 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3827 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3828 break;
3830 case SUBREG:
3831 /* If this is a SUBREG formed for a promoted variable that has
3832 been zero-extended, we know that at least the high-order bits
3833 are zero, though others might be too. */
3835 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3836 nonzero = GET_MODE_MASK (GET_MODE (x))
3837 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3838 known_x, known_mode, known_ret);
3840 /* If the inner mode is a single word for both the host and target
3841 machines, we can compute this from which bits of the inner
3842 object might be nonzero. */
3843 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3844 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3845 <= HOST_BITS_PER_WIDE_INT))
3847 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3848 known_x, known_mode, known_ret);
3850 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3851 /* If this is a typical RISC machine, we only have to worry
3852 about the way loads are extended. */
3853 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3854 ? (((nonzero
3855 & (((unsigned HOST_WIDE_INT) 1
3856 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3857 != 0))
3858 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3859 || !MEM_P (SUBREG_REG (x)))
3860 #endif
3862 /* On many CISC machines, accessing an object in a wider mode
3863 causes the high-order bits to become undefined. So they are
3864 not known to be zero. */
3865 if (GET_MODE_SIZE (GET_MODE (x))
3866 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3867 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3868 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3871 break;
3873 case ASHIFTRT:
3874 case LSHIFTRT:
3875 case ASHIFT:
3876 case ROTATE:
3877 /* The nonzero bits are in two classes: any bits within MODE
3878 that aren't in GET_MODE (x) are always significant. The rest of the
3879 nonzero bits are those that are significant in the operand of
3880 the shift when shifted the appropriate number of bits. This
3881 shows that high-order bits are cleared by the right shift and
3882 low-order bits by left shifts. */
3883 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3884 && INTVAL (XEXP (x, 1)) >= 0
3885 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3887 enum machine_mode inner_mode = GET_MODE (x);
3888 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3889 int count = INTVAL (XEXP (x, 1));
3890 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3891 unsigned HOST_WIDE_INT op_nonzero =
3892 cached_nonzero_bits (XEXP (x, 0), mode,
3893 known_x, known_mode, known_ret);
3894 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3895 unsigned HOST_WIDE_INT outer = 0;
3897 if (mode_width > width)
3898 outer = (op_nonzero & nonzero & ~mode_mask);
3900 if (code == LSHIFTRT)
3901 inner >>= count;
3902 else if (code == ASHIFTRT)
3904 inner >>= count;
3906 /* If the sign bit may have been nonzero before the shift, we
3907 need to mark all the places it could have been copied to
3908 by the shift as possibly nonzero. */
3909 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3910 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3912 else if (code == ASHIFT)
3913 inner <<= count;
3914 else
3915 inner = ((inner << (count % width)
3916 | (inner >> (width - (count % width)))) & mode_mask);
3918 nonzero &= (outer | inner);
3920 break;
3922 case FFS:
3923 case POPCOUNT:
3924 /* This is at most the number of bits in the mode. */
3925 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3926 break;
3928 case CLZ:
3929 /* If CLZ has a known value at zero, then the nonzero bits are
3930 that value, plus the number of bits in the mode minus one. */
3931 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3932 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3933 else
3934 nonzero = -1;
3935 break;
3937 case CTZ:
3938 /* If CTZ has a known value at zero, then the nonzero bits are
3939 that value, plus the number of bits in the mode minus one. */
3940 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3941 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3942 else
3943 nonzero = -1;
3944 break;
3946 case PARITY:
3947 nonzero = 1;
3948 break;
3950 case IF_THEN_ELSE:
3952 unsigned HOST_WIDE_INT nonzero_true =
3953 cached_nonzero_bits (XEXP (x, 1), mode,
3954 known_x, known_mode, known_ret);
3956 /* Don't call nonzero_bits for the second time if it cannot change
3957 anything. */
3958 if ((nonzero & nonzero_true) != nonzero)
3959 nonzero &= nonzero_true
3960 | cached_nonzero_bits (XEXP (x, 2), mode,
3961 known_x, known_mode, known_ret);
3963 break;
3965 default:
3966 break;
3969 return nonzero;
3972 /* See the macro definition above. */
3973 #undef cached_num_sign_bit_copies
3976 /* The function cached_num_sign_bit_copies is a wrapper around
3977 num_sign_bit_copies1. It avoids exponential behavior in
3978 num_sign_bit_copies1 when X has identical subexpressions on the
3979 first or the second level. */
3981 static unsigned int
3982 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3983 enum machine_mode known_mode,
3984 unsigned int known_ret)
3986 if (x == known_x && mode == known_mode)
3987 return known_ret;
3989 /* Try to find identical subexpressions. If found call
3990 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3991 the precomputed value for the subexpression as KNOWN_RET. */
3993 if (ARITHMETIC_P (x))
3995 rtx x0 = XEXP (x, 0);
3996 rtx x1 = XEXP (x, 1);
3998 /* Check the first level. */
3999 if (x0 == x1)
4000 return
4001 num_sign_bit_copies1 (x, mode, x0, mode,
4002 cached_num_sign_bit_copies (x0, mode, known_x,
4003 known_mode,
4004 known_ret));
4006 /* Check the second level. */
4007 if (ARITHMETIC_P (x0)
4008 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4009 return
4010 num_sign_bit_copies1 (x, mode, x1, mode,
4011 cached_num_sign_bit_copies (x1, mode, known_x,
4012 known_mode,
4013 known_ret));
4015 if (ARITHMETIC_P (x1)
4016 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4017 return
4018 num_sign_bit_copies1 (x, mode, x0, mode,
4019 cached_num_sign_bit_copies (x0, mode, known_x,
4020 known_mode,
4021 known_ret));
4024 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4027 /* Return the number of bits at the high-order end of X that are known to
4028 be equal to the sign bit. X will be used in mode MODE; if MODE is
4029 VOIDmode, X will be used in its own mode. The returned value will always
4030 be between 1 and the number of bits in MODE. */
4032 static unsigned int
4033 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
4034 enum machine_mode known_mode,
4035 unsigned int known_ret)
4037 enum rtx_code code = GET_CODE (x);
4038 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4039 int num0, num1, result;
4040 unsigned HOST_WIDE_INT nonzero;
4042 /* If we weren't given a mode, use the mode of X. If the mode is still
4043 VOIDmode, we don't know anything. Likewise if one of the modes is
4044 floating-point. */
4046 if (mode == VOIDmode)
4047 mode = GET_MODE (x);
4049 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4050 return 1;
4052 /* For a smaller object, just ignore the high bits. */
4053 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4055 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4056 known_x, known_mode, known_ret);
4057 return MAX (1,
4058 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4061 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4063 #ifndef WORD_REGISTER_OPERATIONS
4064 /* If this machine does not do all register operations on the entire
4065 register and MODE is wider than the mode of X, we can say nothing
4066 at all about the high-order bits. */
4067 return 1;
4068 #else
4069 /* Likewise on machines that do, if the mode of the object is smaller
4070 than a word and loads of that size don't sign extend, we can say
4071 nothing about the high order bits. */
4072 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4073 #ifdef LOAD_EXTEND_OP
4074 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4075 #endif
4077 return 1;
4078 #endif
4081 switch (code)
4083 case REG:
4085 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4086 /* If pointers extend signed and this is a pointer in Pmode, say that
4087 all the bits above ptr_mode are known to be sign bit copies. */
4088 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4089 && REG_POINTER (x))
4090 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4091 #endif
4094 unsigned int copies_for_hook = 1, copies = 1;
4095 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4096 known_mode, known_ret,
4097 &copies_for_hook);
4099 if (new)
4100 copies = cached_num_sign_bit_copies (new, mode, known_x,
4101 known_mode, known_ret);
4103 if (copies > 1 || copies_for_hook > 1)
4104 return MAX (copies, copies_for_hook);
4106 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4108 break;
4110 case MEM:
4111 #ifdef LOAD_EXTEND_OP
4112 /* Some RISC machines sign-extend all loads of smaller than a word. */
4113 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4114 return MAX (1, ((int) bitwidth
4115 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4116 #endif
4117 break;
4119 case CONST_INT:
4120 /* If the constant is negative, take its 1's complement and remask.
4121 Then see how many zero bits we have. */
4122 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4123 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4124 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4125 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4127 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
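/* A worked example (an addition): in SImode, (const_int -4) is
   0xfffffffc; its complement remasked is 0x3, floor_log2 (0x3)
   is 1, so the result is 32 - 1 - 1 = 30 sign-bit copies.  */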
4129 case SUBREG:
4130 /* If this is a SUBREG for a promoted object that is sign-extended
4131 and we are looking at it in a wider mode, we know that at least the
4132 high-order bits are sign bit copies. */
4134 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4136 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4137 known_x, known_mode, known_ret);
4138 return MAX ((int) bitwidth
4139 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4140 num0);
4143 /* For a smaller object, just ignore the high bits. */
4144 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4146 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4147 known_x, known_mode, known_ret);
4148 return MAX (1, (num0
4149 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4150 - bitwidth)));
4153 #ifdef WORD_REGISTER_OPERATIONS
4154 #ifdef LOAD_EXTEND_OP
4155 /* For paradoxical SUBREGs on machines where all register operations
4156 affect the entire register, just look inside. Note that we are
4157 passing MODE to the recursive call, so the number of sign bit copies
4158 will remain relative to that mode, not the inner mode. */
4160 /* This works only if loads sign extend. Otherwise, if we get a
4161 reload for the inner part, it may be loaded from the stack, and
4162 then we lose all sign bit copies that existed before the store
4163 to the stack. */
4165 if ((GET_MODE_SIZE (GET_MODE (x))
4166 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4167 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4168 && MEM_P (SUBREG_REG (x)))
4169 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4170 known_x, known_mode, known_ret);
4171 #endif
4172 #endif
4173 break;
4175 case SIGN_EXTRACT:
4176 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4177 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4178 break;
4180 case SIGN_EXTEND:
4181 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4182 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4183 known_x, known_mode, known_ret));
4185 case TRUNCATE:
4186 /* For a smaller object, just ignore the high bits. */
4187 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4188 known_x, known_mode, known_ret);
4189 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4190 - bitwidth)));
4192 case NOT:
4193 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4194 known_x, known_mode, known_ret);
4196 case ROTATE: case ROTATERT:
4197 /* If we are rotating left by a number of bits less than the number
4198 of sign bit copies, we can just subtract that amount from the
4199 number. */
4200 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4201 && INTVAL (XEXP (x, 1)) >= 0
4202 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4204 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4205 known_x, known_mode, known_ret);
4206 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4207 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4209 break;
4211 case NEG:
4212 /* In general, this subtracts one sign bit copy. But if the value
4213 is known to be positive, the number of sign bit copies is the
4214 same as that of the input. Finally, if the input has just one bit
4215 that might be nonzero, all the bits are copies of the sign bit. */
4216 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4217 known_x, known_mode, known_ret);
4218 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4219 return num0 > 1 ? num0 - 1 : 1;
4221 nonzero = nonzero_bits (XEXP (x, 0), mode);
4222 if (nonzero == 1)
4223 return bitwidth;
4225 if (num0 > 1
4226 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4227 num0--;
4229 return num0;
4231 case IOR: case AND: case XOR:
4232 case SMIN: case SMAX: case UMIN: case UMAX:
4233 /* Logical operations will preserve the number of sign-bit copies.
4234 MIN and MAX operations always return one of the operands. */
4235 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4236 known_x, known_mode, known_ret);
4237 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4238 known_x, known_mode, known_ret);
4239 return MIN (num0, num1);
    case PLUS: case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
         if we are subtracting 1 from a positive number, there will not
         be such a carry.  Furthermore, if the positive number is known to
         be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
          && bitwidth <= HOST_BITS_PER_WIDE_INT)
        {
          nonzero = nonzero_bits (XEXP (x, 0), mode);
          if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
            return (nonzero == 1 || nonzero == 0 ? bitwidth
                    : bitwidth - floor_log2 (nonzero) - 1);
        }

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      result = MAX (1, MIN (num0, num1) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is an addition or subtraction
         to a pointer in Pmode, all the bits above ptr_mode are known to be
         sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && (code == PLUS || code == MINUS)
          && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
        result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
                             - GET_MODE_BITSIZE (ptr_mode) + 1),
                      result);
#endif
      return result;
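      /* For example, adding two SImode values that each have at least
         8 sign bit copies (25 significant bits each) can carry into at
         most one extra bit, leaving at least 8 - 1 = 7 copies; and for
         (plus x (const_int -1)) with X known to be 0 or 1, the result
         is -1 or 0, so all 32 bits are sign bit copies.  */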
    case MULT:
      /* The number of bits of the product is the sum of the number of
         bits of both terms.  However, unless one of the terms is known
         to be positive, we must allow for an additional bit since negating
         a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (((nonzero_bits (XEXP (x, 0), mode)
                    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
                  && ((nonzero_bits (XEXP (x, 1), mode)
                       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
        result--;

      return MAX (1, result);
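      /* For example, multiplying two SImode terms that have 20 sign bit
         copies each gives 20 + 20 - 32 = 8 copies, less one more for a
         possibly negated result, so at least 7 are guaranteed.  */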
    case UDIV:
      /* The result must be <= the first operand.  If the first operand
         has the high bit set, we know nothing about the number of sign
         bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
                & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        return 1;
      else
        return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  */
      return cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
         the case where the divisor is negative, in which case we have
         to add 1.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;
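      /* For example, an SImode dividend with 8 sign bit copies keeps at
         least 7 of them, since a negative divisor can negate the
         quotient and, like NEG, cost one copy.  */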
    case MOD:
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;
    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
         sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;
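      /* For example, (ashiftrt:SI x (const_int 24)) has at least
         1 + 24 = 25 sign bit copies no matter what X is.  */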
    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) < 0
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
        return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
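      /* For example, (ashift:SI x (const_int 8)), where X has 20 sign
         bit copies, keeps 20 - 8 = 12 of them; with a variable or
         out-of-range shift count only the minimum of 1 is guaranteed.  */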
    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);
    case EQ: case NE: case GE: case GT: case LE: case LT:
    case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* If the constant is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
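      /* For example, with the common STORE_FLAG_VALUE of 1, an SImode
         comparison result is 0 or 1, giving
         32 - floor_log2 (1) - 1 = 31 sign bit copies.  */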
    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, we can only guarantee the
     sign bit itself and return 1.  */
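  /* For example, if nonzero_bits shows only the low eight bits may be
     set (0xff) in SImode, bits 31..8 are known to be zero and there are
     32 - floor_log2 (0xff) - 1 = 24 sign bit copies.  */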
  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}
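/* For example, a pass that wants the expected cost of an existing insn
   INSN would call

     int cost = insn_rtx_cost (PATTERN (insn));

   and treat a return value of zero as "no known cost".  */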
/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands; (cc0) will have been replaced.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to canonicalizing
   it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P is nonzero, the condition must be valid at both
   *EARLIEST and at INSN.  */
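/* For example, by rules (3) and (4) a condition such as
   (ge (const_int 4) (reg:SI 60)) is first swapped to
   (le (reg:SI 60) (const_int 4)) and then returned as
   (lt (reg:SI 60) (const_int 5)).  */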
rtx
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
                        rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx prev = insn;
  rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  enum machine_mode mode;

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.c.  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
         && op1 == CONST0_RTX (GET_MODE (op0))
         && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

#ifdef HAVE_cc0
      /* If comparison with cc0, import actual comparison from compare
         insn.  */
      if (op0 == cc0_rtx)
        {
          if ((prev = prev_nonnote_insn (prev)) == 0
              || !NONJUMP_INSN_P (prev)
              || (set = single_set (prev)) == 0
              || SET_DEST (set) != cc0_rtx)
            return 0;

          op0 = SET_SRC (set);
          op1 = CONST0_RTX (GET_MODE (op0));
          if (earliest)
            *earliest = prev;
        }
#endif
      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
        {
          op1 = XEXP (op0, 1);
          op0 = XEXP (op0, 0);
          continue;
        }
      else if (!REG_P (op0))
        break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
         stop if it isn't a single set or if it has a REG_INC note because
         we don't want to bother dealing with it.  */

      if ((prev = prev_nonnote_insn (prev)) == 0
          || !NONJUMP_INSN_P (prev)
          || FIND_REG_INC_NOTE (prev, NULL_RTX))
        break;

      set = set_of (op0, prev);

      if (set
          && (GET_CODE (set) != SET
              || !rtx_equal_p (SET_DEST (set), op0)))
        break;

      /* If this is setting OP0, get what it sets it to if it looks
         relevant.  */
      if (set)
        {
          enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* ??? We may not combine comparisons done in a CCmode with
             comparisons not done in a CCmode.  This is to aid targets
             like Alpha that have an IEEE compliant EQ instruction, and
             a non-IEEE compliant BEQ instruction.  The use of CCmode is
             actually artificial, simply to prevent the combination, but
             should not affect other platforms.

             However, we must allow VOIDmode comparisons to match either
             CCmode or non-CCmode comparison, because some ports have
             modeless comparisons inside branch patterns.

             ??? This mode check should perhaps look more like the mode check
             in simplify_comparison in combine.  */
          if ((GET_CODE (SET_SRC (set)) == COMPARE
               || (((code == NE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     )
                    && COMPARISON_P (SET_SRC (set))))
              && (((GET_MODE_CLASS (mode) == MODE_CC)
                   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                  || mode == VOIDmode || inner_mode == VOIDmode))
            x = SET_SRC (set);
          else if (((code == EQ
                     || (code == GE
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == GE
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     )
                    && COMPARISON_P (SET_SRC (set))
                    && (((GET_MODE_CLASS (mode) == MODE_CC)
                         == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                        || mode == VOIDmode || inner_mode == VOIDmode))
            {
              reverse_code = 1;
              x = SET_SRC (set);
            }
          else
            break;
        }

      else if (reg_set_p (op0, prev))
        /* If this sets OP0, but not directly, we have to give up.  */
        break;
      if (x)
        {
          /* If the caller is expecting the condition to be valid at INSN,
             make sure X doesn't change before INSN.  */
          if (valid_at_insn_p)
            if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
              break;
          if (COMPARISON_P (x))
            code = GET_CODE (x);
          if (reverse_code)
            {
              code = reversed_comparison_code (x, prev);
              if (code == UNKNOWN)
                return 0;
              reverse_code = 0;
            }

          op0 = XEXP (x, 0), op1 = XEXP (x, 1);
          if (earliest)
            *earliest = prev;
        }
    }
  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && GET_CODE (op1) == CONST_INT
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
          break;

        default:
          break;
        }
    }

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare to a CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */
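/* For example, given a conditional jump whose pattern is

     (set (pc) (if_then_else (ne (reg:SI 60) (const_int 0))
                             (label_ref 23) (pc)))

   the returned condition is (ne (reg:SI 60) (const_int 0)); had the
   LABEL_REF appeared in the else arm instead, the condition would be
   reversed to EQ before being canonicalized.  */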
rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}
/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }
}
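/* For example, PLUS has format "ee", so non_rtx_starting_operands[PLUS]
   is 0, while CONST_INT has format "w" (no rtx operands) and gets -1.  */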