/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"

/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}

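/* Illustrative usage sketch (an editor's addition, not part of GCC): a
   pass that wants to compute an address once and reuse it could guard the
   transformation like so:

       rtx addr = gen_rtx_PLUS (Pmode, stack_pointer_rtx, GEN_INT (4));
       if (!rtx_unstable_p (addr))
         ... safe to hoist the address computation ...

   An address involving a non-fixed, call-clobbered register would return
   1 instead.  */
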
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}

/* Return 0 if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    case PLUS:
      /* An address is assumed not to trap if it is an address that can't
         trap plus a constant integer or it is the pic register plus a
         constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
                 && GET_CODE (XEXP (x, 1)) == CONST_INT)
                || (XEXP (x, 0) == pic_offset_table_rtx
                    && CONSTANT_P (XEXP (x, 1))));

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 1));

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

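/* Illustrative sketch (an editor's addition, not part of GCC): may_trap_p
   below defers to this predicate for MEMs.  For example, for

       rtx mem = gen_rtx_MEM (SImode, stack_pointer_rtx);

   rtx_addr_can_trap_p (XEXP (mem, 0)) is 0, since stack slots are assumed
   valid, while a MEM whose address is an arbitrary pseudo yields 1.  */
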
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))
            return true;

          return nonzero_address_p (XEXP (x, 0));
        }
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}

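/* Illustrative note (an editor's addition, not part of GCC): this
   predicate is what lets the simplifiers fold address comparisons such as

       (ne (symbol_ref "f") (const_int 0))

   to true when "f" is not a weak symbol, since a non-weak symbol's
   address cannot be zero.  */
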
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}

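/* Illustrative sketch (an editor's addition, not part of GCC): for the
   constant

       (const (plus (symbol_ref "x") (const_int 8)))

   get_integer_term returns 8 and get_related_value returns the
   (symbol_ref "x") operand; cse.c uses the pair to link constants that
   differ only by an offset.  */
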
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}

/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}

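/* Illustrative sketch (an editor's addition, not part of GCC): for a
   pattern PAT of the form (set (reg 100) (plus (reg 101) (reg 101))),

       count_occurrences (PAT, r101, 0)   returns 2, and
       count_occurrences (PAT, r100, 0)   returns 0,

   because with COUNT_DEST zero the SET_DEST occurrence is not counted.
   Matching is by pointer equality, which suffices for shared REG rtxes.  */
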
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }

  return 0;
}

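/* Illustrative sketch (an editor's addition, not part of GCC): REG need
   not actually be a register, so both of these calls are meaningful:

       reg_mentioned_p (frame_pointer_rtx, PATTERN (insn));
       reg_mentioned_p (const0_rtx, PATTERN (insn));

   the second matches any (const_int 0), since CONST_INTs are shared and
   compare by pointer.  */
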
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn)
                && (find_reg_fusage (insn, USE, reg)
                    || find_reg_fusage (insn, CLOBBER, reg)))))
      return 1;
  return 0;
}

/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                         REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (MEM_READONLY_P (x))
        return 0;
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (MEM_READONLY_P (x))
        return 0;
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}

/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead as single set insns.  In common case
                 only single set is present in the pattern so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach set first time, we just expect this is
                 the single set we are looking for and only when more
                 sets are found in the insn, we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}

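/* Illustrative sketch (an editor's addition, not part of GCC): callers
   normally reach this function through the single_set macro from rtl.h:

       rtx set = single_set (insn);
       if (set != NULL_RTX)
         ... SET_SRC (set) and SET_DEST (set) describe the whole insn ...

   The macro handles a bare SET inline and falls back here for PARALLELs.  */
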
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}

/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}

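/* Illustrative sketch (an editor's addition, not part of GCC): a
   self-move such as (set (reg 100) (reg 100)) satisfies set_noop_p, as
   does (set (mem (reg 100)) (mem (reg 100))) when the MEM is not
   volatile; noop_move_p below lifts the test to whole insn patterns.  */
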
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a special insn
     which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}

/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
            return 1;

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}

/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }
  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}

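/* Illustrative sketch (an editor's addition, not part of GCC; the helper
   name is hypothetical): a typical note_stores callback collects
   clobbered hard registers.

       static void
       record_hard_reg_stores (rtx dest, rtx setter ATTRIBUTE_UNUSED,
                               void *data)
       {
         HARD_REG_SET *set = (HARD_REG_SET *) data;
         if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
           SET_HARD_REG_BIT (*set, REGNO (dest));
       }

   and would be invoked as
   note_stores (PATTERN (insn), record_hard_reg_stores, &set).  */
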
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}

/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}

/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
              : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}

/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}

/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}

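/* Illustrative sketch (an editor's addition, not part of GCC): fetching
   the expression recorded by a REG_EQUAL note, if the insn has one:

       rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
       if (note)
         ... XEXP (note, 0) is the equivalent expression ...  */
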
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}

/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}

/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}

/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}

/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  gcc_unreachable ();
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}

/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no other
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (MEM_NOTRAP_P (x))
        return 0;
      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      if (! CONSTANT_P (XEXP (x, 1))
          || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
              && flag_trapping_math))
        return 1;
      if (XEXP (x, 1) == const0_rtx)
        return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
        break;
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
        return 1;
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case EQ:
    case NE:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
        return 1;
      break;

    case NEG:
    case ABS:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
          && flag_trapping_math)
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (may_trap_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p (XVECEXP (x, i, j)))
              return 1;
        }
    }
  return 0;
}

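/* Illustrative sketch (an editor's addition, not part of GCC): code
   motion passes use may_trap_p to avoid speculating expressions, e.g.

       (udiv (reg 100) (reg 101))        may trap (divisor might be zero)
       (plus (reg 100) (const_int 1))    cannot trap in an integer mode

   while floating-point PLUS is treated as trapping whenever
   flag_trapping_math is set.  */
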
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 0;

    case LT:
    case LTU:
    case GT:
    case GTU:
    case LE:
    case LEU:
    case GE:
    case GEU:
      return 1;

    default:
      break;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          if (inequality_comparisons_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}

/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

rtx
replace_rtx (rtx x, rtx from, rtx to)
{
  int i, j;
  const char *fmt;

  /* The following prevents loops from occurring when we change MEM in
     CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
    return x;

  if (x == from)
    return to;

  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (x == 0)
    return 0;

  if (GET_CODE (x) == SUBREG)
    {
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));
          gcc_assert (x);
        }
      else
        SUBREG_REG (x) = new;

      return x;
    }
  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));
          gcc_assert (x);
        }
      else
        XEXP (x, 0) = new;

      return x;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
    }

  return x;
}

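/* Illustrative sketch (an editor's addition, not part of GCC):
   substituting one register for another throughout a pattern:

       rtx pat = copy_rtx (PATTERN (insn));
       pat = replace_rtx (pat, old_reg, new_reg);

   The copy_rtx call matters because replace_rtx rewrites X in place and
   RTL may be shared.  */
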
2319 /* Throughout the rtx X, replace many registers according to REG_MAP.
2320 Return the replacement for X (which may be X with altered contents).
2321 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2322 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2324 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2325 should not be mapped to pseudos or vice versa since validate_change
2326 is not called.
2328 If REPLACE_DEST is 1, replacements are also done in destinations;
2329 otherwise, only sources are replaced. */
2332 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2334 enum rtx_code code;
2335 int i;
2336 const char *fmt;
2338 if (x == 0)
2339 return x;
2341 code = GET_CODE (x);
2342 switch (code)
2344 case SCRATCH:
2345 case PC:
2346 case CC0:
2347 case CONST_INT:
2348 case CONST_DOUBLE:
2349 case CONST_VECTOR:
2350 case CONST:
2351 case SYMBOL_REF:
2352 case LABEL_REF:
2353 return x;
2355 case REG:
2356 /* Verify that the register has an entry before trying to access it. */
2357 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2359 /* SUBREGs can't be shared. Always return a copy to ensure that if
2360 this replacement occurs more than once then each instance will
2361 get distinct rtx. */
2362 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2363 return copy_rtx (reg_map[REGNO (x)]);
2364 return reg_map[REGNO (x)];
2366 return x;
2368 case SUBREG:
2369 /* Prevent making nested SUBREGs. */
2370 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2371 && reg_map[REGNO (SUBREG_REG (x))] != 0
2372 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2374 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2375 return simplify_gen_subreg (GET_MODE (x), map_val,
2376 GET_MODE (SUBREG_REG (x)),
2377 SUBREG_BYTE (x));
2379 break;
2381 case SET:
2382 if (replace_dest)
2383 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2385 else if (MEM_P (SET_DEST (x))
2386 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2387 /* Even if we are not to replace destinations, replace a register if
2388 it is CONTAINED in the destination (the destination is memory or a
2389 STRICT_LOW_PART).  */
2390 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2391 reg_map, nregs, 0);
2392 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2393 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2394 break;
2396 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2397 return x;
2399 default:
2400 break;
2403 fmt = GET_RTX_FORMAT (code);
2404 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2406 if (fmt[i] == 'e')
2407 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2408 else if (fmt[i] == 'E')
2410 int j;
2411 for (j = 0; j < XVECLEN (x, i); j++)
2412 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2413 nregs, replace_dest);
2416 return x;
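/* Illustrative sketch (not part of this file): building a REG_MAP for
   replace_regs.  Entries left null are not replaced:

     rtx *reg_map = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
     reg_map[REGNO (old_pseudo)] = new_pseudo;
     PATTERN (insn) = replace_regs (PATTERN (insn), reg_map,
                                    max_reg_num (), 1);
     free (reg_map);

   old_pseudo and new_pseudo are hypothetical pseudo registers; per the
   comment above, hard registers and pseudos must not be mixed.  */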
2419 /* Replace occurrences of the old label in *X with the new one.
2420 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2423 replace_label (rtx *x, void *data)
2425 rtx l = *x;
2426 rtx old_label = ((replace_label_data *) data)->r1;
2427 rtx new_label = ((replace_label_data *) data)->r2;
2428 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2430 if (l == NULL_RTX)
2431 return 0;
2433 if (GET_CODE (l) == SYMBOL_REF
2434 && CONSTANT_POOL_ADDRESS_P (l))
2436 rtx c = get_pool_constant (l);
2437 if (rtx_referenced_p (old_label, c))
2439 rtx new_c, new_l;
2440 replace_label_data *d = (replace_label_data *) data;
2442 /* Create a copy of constant C; replace the label inside
2443 but do not update LABEL_NUSES because uses in the constant
2444 pool are not counted.  */
2445 new_c = copy_rtx (c);
2446 d->update_label_nuses = false;
2447 for_each_rtx (&new_c, replace_label, data);
2448 d->update_label_nuses = update_label_nuses;
2450 /* Add the new constant NEW_C to the constant pool and replace
2451 the old reference to the constant with the new reference.  */
2452 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2453 *x = replace_rtx (l, l, new_l);
2455 return 0;
2458 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2459 field. This is not handled by for_each_rtx because it doesn't
2460 handle unprinted ('0') fields. */
2461 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2462 JUMP_LABEL (l) = new_label;
2464 if ((GET_CODE (l) == LABEL_REF
2465 || GET_CODE (l) == INSN_LIST)
2466 && XEXP (l, 0) == old_label)
2468 XEXP (l, 0) = new_label;
2469 if (update_label_nuses)
2471 ++LABEL_NUSES (new_label);
2472 --LABEL_NUSES (old_label);
2474 return 0;
2477 return 0;
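/* Illustrative sketch (not part of this file): replace_label is meant
   to be driven by for_each_rtx.  Assuming OLD_LABEL and NEW_LABEL are
   existing CODE_LABELs:

     replace_label_data data;
     data.r1 = old_label;
     data.r2 = new_label;
     data.update_label_nuses = true;
     for_each_rtx (&PATTERN (insn), replace_label, &data);

   Passing &insn instead would also let replace_label update the insn's
   JUMP_LABEL field, as handled above.  replace_label_data is the
   structure declared in rtl.h whose fields are read at the top of
   replace_label.  */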
2480 /* When *BODY is equal to X or X is directly referenced by *BODY
2481 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2482 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2484 static int
2485 rtx_referenced_p_1 (rtx *body, void *x)
2487 rtx y = (rtx) x;
2489 if (*body == NULL_RTX)
2490 return y == NULL_RTX;
2492 /* Return true if a label_ref *BODY refers to label Y. */
2493 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2494 return XEXP (*body, 0) == y;
2496 /* If *BODY is a reference to a pool constant, traverse the constant.  */
2497 if (GET_CODE (*body) == SYMBOL_REF
2498 && CONSTANT_POOL_ADDRESS_P (*body))
2499 return rtx_referenced_p (y, get_pool_constant (*body));
2501 /* By default, compare the RTL expressions. */
2502 return rtx_equal_p (*body, y);
2505 /* Return true if X is referenced in BODY. */
2508 rtx_referenced_p (rtx x, rtx body)
2510 return for_each_rtx (&body, rtx_referenced_p_1, x);
2513 /* If INSN is a tablejump, return true and store the label (which precedes
2514 the jump table) in *LABELP and the table in *TABLEP.  Either may be NULL.  */
2516 bool
2517 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2519 rtx label, table;
2521 if (JUMP_P (insn)
2522 && (label = JUMP_LABEL (insn)) != NULL_RTX
2523 && (table = next_active_insn (label)) != NULL_RTX
2524 && JUMP_P (table)
2525 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2526 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2528 if (labelp)
2529 *labelp = label;
2530 if (tablep)
2531 *tablep = table;
2532 return true;
2534 return false;
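/* Illustrative sketch (not part of this file): iterating over the
   targets of a jump table located by tablejump_p:

     rtx label, table;
     if (tablejump_p (insn, &label, &table))
       {
         rtvec vec = XVEC (PATTERN (table),
                           GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
         int i;
         for (i = 0; i < GET_NUM_ELEM (vec); i++)
           handle_target (XEXP (RTVEC_ELT (vec, i), 0));
       }

   handle_target is a hypothetical callback; the indexing idiom is the
   same one label_is_jump_target_p uses later in this file.  */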
2537 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2538 constant that is not in the constant pool and not in the condition
2539 of an IF_THEN_ELSE. */
2541 static int
2542 computed_jump_p_1 (rtx x)
2544 enum rtx_code code = GET_CODE (x);
2545 int i, j;
2546 const char *fmt;
2548 switch (code)
2550 case LABEL_REF:
2551 case PC:
2552 return 0;
2554 case CONST:
2555 case CONST_INT:
2556 case CONST_DOUBLE:
2557 case CONST_VECTOR:
2558 case SYMBOL_REF:
2559 case REG:
2560 return 1;
2562 case MEM:
2563 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2564 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2566 case IF_THEN_ELSE:
2567 return (computed_jump_p_1 (XEXP (x, 1))
2568 || computed_jump_p_1 (XEXP (x, 2)));
2570 default:
2571 break;
2574 fmt = GET_RTX_FORMAT (code);
2575 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2577 if (fmt[i] == 'e'
2578 && computed_jump_p_1 (XEXP (x, i)))
2579 return 1;
2581 else if (fmt[i] == 'E')
2582 for (j = 0; j < XVECLEN (x, i); j++)
2583 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2584 return 1;
2587 return 0;
2590 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2592 Tablejumps and casesi insns are not considered indirect jumps;
2593 we can recognize them by a (use (label_ref)). */
2596 computed_jump_p (rtx insn)
2598 int i;
2599 if (JUMP_P (insn))
2601 rtx pat = PATTERN (insn);
2603 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2604 return 0;
2605 else if (GET_CODE (pat) == PARALLEL)
2607 int len = XVECLEN (pat, 0);
2608 int has_use_labelref = 0;
2610 for (i = len - 1; i >= 0; i--)
2611 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2612 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2613 == LABEL_REF))
2614 has_use_labelref = 1;
2616 if (! has_use_labelref)
2617 for (i = len - 1; i >= 0; i--)
2618 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2619 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2620 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2621 return 1;
2623 else if (GET_CODE (pat) == SET
2624 && SET_DEST (pat) == pc_rtx
2625 && computed_jump_p_1 (SET_SRC (pat)))
2626 return 1;
2628 return 0;
2631 /* Optimized loop for for_each_rtx, trying to avoid useless recursive
2632 calls.  Processes the subexpressions of EXP and passes them to F.  */
2633 static int
2634 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2636 int result, i, j;
2637 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2638 rtx *x;
2640 for (; format[n] != '\0'; n++)
2642 switch (format[n])
2644 case 'e':
2645 /* Call F on X. */
2646 x = &XEXP (exp, n);
2647 result = (*f) (x, data);
2648 if (result == -1)
2649 /* Do not traverse sub-expressions. */
2650 continue;
2651 else if (result != 0)
2652 /* Stop the traversal. */
2653 return result;
2655 if (*x == NULL_RTX)
2656 /* There are no sub-expressions. */
2657 continue;
2659 i = non_rtx_starting_operands[GET_CODE (*x)];
2660 if (i >= 0)
2662 result = for_each_rtx_1 (*x, i, f, data);
2663 if (result != 0)
2664 return result;
2666 break;
2668 case 'V':
2669 case 'E':
2670 if (XVEC (exp, n) == 0)
2671 continue;
2672 for (j = 0; j < XVECLEN (exp, n); ++j)
2674 /* Call F on X. */
2675 x = &XVECEXP (exp, n, j);
2676 result = (*f) (x, data);
2677 if (result == -1)
2678 /* Do not traverse sub-expressions. */
2679 continue;
2680 else if (result != 0)
2681 /* Stop the traversal. */
2682 return result;
2684 if (*x == NULL_RTX)
2685 /* There are no sub-expressions. */
2686 continue;
2688 i = non_rtx_starting_operands[GET_CODE (*x)];
2689 if (i >= 0)
2691 result = for_each_rtx_1 (*x, i, f, data);
2692 if (result != 0)
2693 return result;
2696 break;
2698 default:
2699 /* Nothing to do. */
2700 break;
2704 return 0;
2707 /* Traverse X via depth-first search, calling F for each
2708 sub-expression (including X itself). F is also passed the DATA.
2709 If F returns -1, do not traverse sub-expressions, but continue
2710 traversing the rest of the tree. If F ever returns any other
2711 nonzero value, stop the traversal, and return the value returned
2712 by F. Otherwise, return 0. This function does not traverse inside
2713 tree structure that contains RTX_EXPRs, or into sub-expressions
2714 whose format code is `0' since it is not known whether or not those
2715 codes are actually RTL.
2717 This routine is very general, and could (should?) be used to
2718 implement many of the other routines in this file. */
2721 for_each_rtx (rtx *x, rtx_function f, void *data)
2723 int result;
2724 int i;
2726 /* Call F on X. */
2727 result = (*f) (x, data);
2728 if (result == -1)
2729 /* Do not traverse sub-expressions. */
2730 return 0;
2731 else if (result != 0)
2732 /* Stop the traversal. */
2733 return result;
2735 if (*x == NULL_RTX)
2736 /* There are no sub-expressions. */
2737 return 0;
2739 i = non_rtx_starting_operands[GET_CODE (*x)];
2740 if (i < 0)
2741 return 0;
2743 return for_each_rtx_1 (*x, i, f, data);
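/* Illustrative sketch (not part of this file): a for_each_rtx callback
   that counts MEM subexpressions.  Returning -1 after a match skips
   the MEM's own operands, per the protocol documented above:

     static int
     count_mems_1 (rtx *x, void *data)
     {
       if (*x != NULL_RTX && MEM_P (*x))
         {
           (*(int *) data)++;
           return -1;
         }
       return 0;
     }

     int count = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &count);

   count_mems_1 is hypothetical, shown only to illustrate the
   -1/0/nonzero return convention.  */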
2747 /* Searches X for any reference to REGNO, returning the rtx of the
2748 reference found if any. Otherwise, returns NULL_RTX. */
2751 regno_use_in (unsigned int regno, rtx x)
2753 const char *fmt;
2754 int i, j;
2755 rtx tem;
2757 if (REG_P (x) && REGNO (x) == regno)
2758 return x;
2760 fmt = GET_RTX_FORMAT (GET_CODE (x));
2761 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2763 if (fmt[i] == 'e')
2765 if ((tem = regno_use_in (regno, XEXP (x, i))))
2766 return tem;
2768 else if (fmt[i] == 'E')
2769 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2770 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2771 return tem;
2774 return NULL_RTX;
2777 /* Return a value indicating whether OP, an operand of a commutative
2778 operation, is preferred as the first or second operand. The higher
2779 the value, the stronger the preference for being the first operand.
2780 Negative values indicate a preference for the second operand and
2781 positive values for the first operand.  */
2784 commutative_operand_precedence (rtx op)
2786 enum rtx_code code = GET_CODE (op);
2788 /* Constants always come second.  Prefer "nice" constants.  */
2789 if (code == CONST_INT)
2790 return -7;
2791 if (code == CONST_DOUBLE)
2792 return -6;
2793 op = avoid_constant_pool_reference (op);
2794 code = GET_CODE (op);
2796 switch (GET_RTX_CLASS (code))
2798 case RTX_CONST_OBJ:
2799 if (code == CONST_INT)
2800 return -5;
2801 if (code == CONST_DOUBLE)
2802 return -4;
2803 return -3;
2805 case RTX_EXTRA:
2806 /* SUBREGs of objects should come second. */
2807 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2808 return -2;
2810 if (!CONSTANT_P (op))
2811 return 0;
2812 else
2813 /* As for RTX_CONST_OBJ. */
2814 return -3;
2816 case RTX_OBJ:
2817 /* Complex expressions should come first, so decrease the priority
2818 of objects.  */
2819 return -1;
2821 case RTX_COMM_ARITH:
2822 /* Prefer operands that are themselves commutative to be first.
2823 This helps to make things linear. In particular,
2824 (and (and (reg) (reg)) (not (reg))) is canonical. */
2825 return 4;
2827 case RTX_BIN_ARITH:
2828 /* If only one operand is a binary expression, it will be the first
2829 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2830 is canonical, although it will usually be further simplified. */
2831 return 2;
2833 case RTX_UNARY:
2834 /* Then prefer NEG and NOT. */
2835 if (code == NEG || code == NOT)
2836 return 1;
2838 default:
2839 return 0;
2843 /* Return 1 iff it is necessary to swap the operands of a commutative
2844 operation in order to canonicalize the expression.  */
2847 swap_commutative_operands_p (rtx x, rtx y)
2849 return (commutative_operand_precedence (x)
2850 < commutative_operand_precedence (y));
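/* Illustrative sketch (not part of this file): the usual caller
   pattern for canonicalizing a commutative operation:

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   Afterward the higher-precedence operand is first, so constants and
   other low-precedence operands end up second.  */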
2853 /* Return 1 if X is an autoincrement side effect and the register is
2854 not the stack pointer. */
2856 auto_inc_p (rtx x)
2858 switch (GET_CODE (x))
2860 case PRE_INC:
2861 case POST_INC:
2862 case PRE_DEC:
2863 case POST_DEC:
2864 case PRE_MODIFY:
2865 case POST_MODIFY:
2866 /* There are no REG_INC notes for SP. */
2867 if (XEXP (x, 0) != stack_pointer_rtx)
2868 return 1;
2869 default:
2870 break;
2872 return 0;
2875 /* Return 1 if the sequence of instructions beginning with FROM and up
2876 to and including TO is safe to move. If NEW_TO is non-NULL, and
2877 the sequence is not already safe to move, but can be easily
2878 extended to a sequence which is safe, then NEW_TO will point to the
2879 end of the extended sequence.
2881 For now, this function only checks that the region contains whole
2882 exception regions, but it could be extended to check additional
2883 conditions as well. */
2886 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
2888 int eh_region_count = 0;
2889 int past_to_p = 0;
2890 rtx r = from;
2892 /* By default, assume the end of the region will be what was
2893 suggested. */
2894 if (new_to)
2895 *new_to = to;
2897 while (r)
2899 if (NOTE_P (r))
2901 switch (NOTE_LINE_NUMBER (r))
2903 case NOTE_INSN_EH_REGION_BEG:
2904 ++eh_region_count;
2905 break;
2907 case NOTE_INSN_EH_REGION_END:
2908 if (eh_region_count == 0)
2909 /* This sequence of instructions contains the end of
2910 an exception region, but not the beginning.  Moving
2911 it will cause chaos. */
2912 return 0;
2914 --eh_region_count;
2915 break;
2917 default:
2918 break;
2921 else if (past_to_p)
2922 /* If we've passed TO, and we see a non-note instruction, we
2923 can't extend the sequence to a movable sequence. */
2924 return 0;
2926 if (r == to)
2928 if (!new_to)
2929 /* It's OK to move the sequence if there were matched sets of
2930 exception region notes. */
2931 return eh_region_count == 0;
2933 past_to_p = 1;
2936 /* It's OK to move the sequence if there were matched sets of
2937 exception region notes. */
2938 if (past_to_p && eh_region_count == 0)
2940 *new_to = r;
2941 return 1;
2944 /* Go to the next instruction. */
2945 r = NEXT_INSN (r);
2948 return 0;
2951 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2953 loc_mentioned_in_p (rtx *loc, rtx in)
2955 enum rtx_code code = GET_CODE (in);
2956 const char *fmt = GET_RTX_FORMAT (code);
2957 int i, j;
2959 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2961 if (loc == &in->u.fld[i].rt_rtx)
2962 return 1;
2963 if (fmt[i] == 'e')
2965 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2966 return 1;
2968 else if (fmt[i] == 'E')
2969 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2970 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2971 return 1;
2973 return 0;
2976 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2977 and SUBREG_BYTE, return the bit offset where the subreg begins
2978 (counting from the least significant bit of the operand). */
2980 unsigned int
2981 subreg_lsb_1 (enum machine_mode outer_mode,
2982 enum machine_mode inner_mode,
2983 unsigned int subreg_byte)
2985 unsigned int bitpos;
2986 unsigned int byte;
2987 unsigned int word;
2989 /* A paradoxical subreg begins at bit position 0. */
2990 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2991 return 0;
2993 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2994 /* If the subreg crosses a word boundary ensure that
2995 it also begins and ends on a word boundary. */
2996 gcc_assert (!((subreg_byte % UNITS_PER_WORD
2997 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2998 && (subreg_byte % UNITS_PER_WORD
2999 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3001 if (WORDS_BIG_ENDIAN)
3002 word = (GET_MODE_SIZE (inner_mode)
3003 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3004 else
3005 word = subreg_byte / UNITS_PER_WORD;
3006 bitpos = word * BITS_PER_WORD;
3008 if (BYTES_BIG_ENDIAN)
3009 byte = (GET_MODE_SIZE (inner_mode)
3010 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3011 else
3012 byte = subreg_byte % UNITS_PER_WORD;
3013 bitpos += byte * BITS_PER_UNIT;
3015 return bitpos;
3018 /* Given a subreg X, return the bit offset where the subreg begins
3019 (counting from the least significant bit of the reg). */
3021 unsigned int
3022 subreg_lsb (rtx x)
3024 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3025 SUBREG_BYTE (x));
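/* Worked example (assuming 32-bit words, UNITS_PER_WORD == 4 and
   8-byte DImode): for (subreg:SI (reg:DI) 4) on a little-endian
   target, word = 4 / 4 = 1 and byte = 4 % 4 = 0, so the subreg begins
   at bit 32 of the DImode value.  On a fully big-endian target the
   same SUBREG_BYTE selects the other half:
   word = (8 - (4 + 4)) / 4 = 0, giving bit offset 0.  */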
3028 /* This function returns the regno offset of a subreg expression.
3029 xregno - A regno of an inner hard subreg_reg (or what will become one).
3030 xmode - The mode of xregno.
3031 offset - The byte offset.
3032 ymode - The mode of a top level SUBREG (or what may become one).
3033 RETURN - The regno offset which would be used. */
3034 unsigned int
3035 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3036 unsigned int offset, enum machine_mode ymode)
3038 int nregs_xmode, nregs_ymode;
3039 int mode_multiple, nregs_multiple;
3040 int y_offset;
3042 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3044 nregs_xmode = hard_regno_nregs[xregno][xmode];
3045 nregs_ymode = hard_regno_nregs[xregno][ymode];
3047 /* If this is a big endian paradoxical subreg, which uses more actual
3048 hard registers than the original register, we must return a negative
3049 offset so that we find the proper highpart of the register. */
3050 if (offset == 0
3051 && nregs_ymode > nregs_xmode
3052 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3053 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3054 return nregs_xmode - nregs_ymode;
3056 if (offset == 0 || nregs_xmode == nregs_ymode)
3057 return 0;
3059 /* The size of YMODE must not be greater than the size of XMODE.  */
3060 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3061 gcc_assert (mode_multiple != 0);
3063 y_offset = offset / GET_MODE_SIZE (ymode);
3064 nregs_multiple = nregs_xmode / nregs_ymode;
3065 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
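/* Worked example (assuming 4-byte SImode, 8-byte DImode, and one hard
   register per SImode value): for xmode == DImode, ymode == SImode
   and offset == 4, nregs_xmode == 2 and nregs_ymode == 1, so
   mode_multiple == 2, y_offset == 1, nregs_multiple == 2, and the
   result is (1 / (2 / 2)) * 1 == 1, i.e. the second register of the
   pair.  */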
3068 /* This function returns true when the offset is representable via
3069 subreg_offset in the given regno.
3070 xregno - A regno of an inner hard subreg_reg (or what will become one).
3071 xmode - The mode of xregno.
3072 offset - The byte offset.
3073 ymode - The mode of a top level SUBREG (or what may become one).
3074 RETURN - Whether the offset is representable.  */
3075 bool
3076 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3077 unsigned int offset, enum machine_mode ymode)
3079 int nregs_xmode, nregs_ymode;
3080 int mode_multiple, nregs_multiple;
3081 int y_offset;
3083 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3085 nregs_xmode = hard_regno_nregs[xregno][xmode];
3086 nregs_ymode = hard_regno_nregs[xregno][ymode];
3088 /* Paradoxical subregs are always valid. */
3089 if (offset == 0
3090 && nregs_ymode > nregs_xmode
3091 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3092 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3093 return true;
3095 /* Lowpart subregs are always valid. */
3096 if (offset == subreg_lowpart_offset (ymode, xmode))
3097 return true;
3099 /* This should always pass, otherwise we don't know how to verify the
3100 constraint. These conditions may be relaxed but subreg_offset would
3101 need to be redesigned. */
3102 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3103 gcc_assert ((GET_MODE_SIZE (ymode) % nregs_ymode) == 0);
3104 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3106 /* The XMODE value can be seen as a vector of NREGS_XMODE
3107 values.  The subreg must represent a lowpart of a given field.
3108 Compute which field it is.  */
3109 offset -= subreg_lowpart_offset (ymode,
3110 mode_for_size (GET_MODE_BITSIZE (xmode)
3111 / nregs_xmode,
3112 MODE_INT, 0));
3114 /* The size of YMODE must not be greater than the size of XMODE.  */
3115 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3116 gcc_assert (mode_multiple != 0);
3118 y_offset = offset / GET_MODE_SIZE (ymode);
3119 nregs_multiple = nregs_xmode / nregs_ymode;
3121 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3122 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3124 return (!(y_offset % (mode_multiple / nregs_multiple)));
3127 /* Return the final regno that a subreg expression refers to. */
3128 unsigned int
3129 subreg_regno (rtx x)
3131 unsigned int ret;
3132 rtx subreg = SUBREG_REG (x);
3133 int regno = REGNO (subreg);
3135 ret = regno + subreg_regno_offset (regno,
3136 GET_MODE (subreg),
3137 SUBREG_BYTE (x),
3138 GET_MODE (x));
3139 return ret;
3142 struct parms_set_data
3144 int nregs;
3145 HARD_REG_SET regs;
3148 /* Helper function for noticing stores to parameter registers. */
3149 static void
3150 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3152 struct parms_set_data *d = data;
3153 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3154 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3156 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3157 d->nregs--;
3161 /* Look backward for the first parameter to be loaded.
3162 Do not skip BOUNDARY.  */
3164 find_first_parameter_load (rtx call_insn, rtx boundary)
3166 struct parms_set_data parm;
3167 rtx p, before;
3169 /* Since different machines initialize their parameter registers
3170 in different orders, assume nothing. Collect the set of all
3171 parameter registers. */
3172 CLEAR_HARD_REG_SET (parm.regs);
3173 parm.nregs = 0;
3174 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3175 if (GET_CODE (XEXP (p, 0)) == USE
3176 && REG_P (XEXP (XEXP (p, 0), 0)))
3178 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3180 /* We only care about registers which can hold function
3181 arguments. */
3182 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3183 continue;
3185 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3186 parm.nregs++;
3188 before = call_insn;
3190 /* Search backward for the first set of a register in this set. */
3191 while (parm.nregs && before != boundary)
3193 before = PREV_INSN (before);
3195 /* It is possible that some loads got CSEed from one call to
3196 another. Stop in that case. */
3197 if (CALL_P (before))
3198 break;
3200 /* Our caller must either ensure that we will find all sets
3201 (in case the code has not been optimized yet), or take care
3202 of possible labels by setting BOUNDARY to the preceding
3203 CODE_LABEL.  */
3204 if (LABEL_P (before))
3206 gcc_assert (before == boundary);
3207 break;
3210 if (INSN_P (before))
3211 note_stores (PATTERN (before), parms_set, &parm);
3213 return before;
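/* Illustrative sketch (not part of this file): a caller that wants to
   place new code ahead of a call's argument setup might do

     rtx first = find_first_parameter_load (call_insn, BB_HEAD (bb));
     emit_insn_before (new_insns, first);

   emit_insn_before and BB_HEAD are existing GCC interfaces; new_insns
   is a hypothetical insn sequence.  */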
3216 /* Return true if we should avoid inserting code between INSN and the
3217 preceding call instruction.  */
3219 bool
3220 keep_with_call_p (rtx insn)
3222 rtx set;
3224 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3226 if (REG_P (SET_DEST (set))
3227 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3228 && fixed_regs[REGNO (SET_DEST (set))]
3229 && general_operand (SET_SRC (set), VOIDmode))
3230 return true;
3231 if (REG_P (SET_SRC (set))
3232 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3233 && REG_P (SET_DEST (set))
3234 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3235 return true;
3236 /* There may be a stack pop just after the call and before the store
3237 of the return register. Search for the actual store when deciding
3238 if we can break or not. */
3239 if (SET_DEST (set) == stack_pointer_rtx)
3241 rtx i2 = next_nonnote_insn (insn);
3242 if (i2 && keep_with_call_p (i2))
3243 return true;
3246 return false;
3249 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3250 to non-complex jumps. That is, direct unconditional, conditional,
3251 and tablejumps, but not computed jumps or returns. It also does
3252 not apply to the fallthru case of a conditional jump. */
3254 bool
3255 label_is_jump_target_p (rtx label, rtx jump_insn)
3257 rtx tmp = JUMP_LABEL (jump_insn);
3259 if (label == tmp)
3260 return true;
3262 if (tablejump_p (jump_insn, NULL, &tmp))
3264 rtvec vec = XVEC (PATTERN (tmp),
3265 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3266 int i, veclen = GET_NUM_ELEM (vec);
3268 for (i = 0; i < veclen; ++i)
3269 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3270 return true;
3273 return false;
3277 /* Return an estimate of the cost of computing rtx X.
3278 One use is in cse, to decide which expression to keep in the hash table.
3279 Another is in rtl generation, to pick the cheapest way to multiply.
3280 Other uses like the latter are expected in the future. */
3283 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3285 int i, j;
3286 enum rtx_code code;
3287 const char *fmt;
3288 int total;
3290 if (x == 0)
3291 return 0;
3293 /* Compute the default costs of certain things.
3294 Note that targetm.rtx_costs can override the defaults. */
3296 code = GET_CODE (x);
3297 switch (code)
3299 case MULT:
3300 total = COSTS_N_INSNS (5);
3301 break;
3302 case DIV:
3303 case UDIV:
3304 case MOD:
3305 case UMOD:
3306 total = COSTS_N_INSNS (7);
3307 break;
3308 case USE:
3309 /* Used in loop.c and combine.c as a marker. */
3310 total = 0;
3311 break;
3312 default:
3313 total = COSTS_N_INSNS (1);
3316 switch (code)
3318 case REG:
3319 return 0;
3321 case SUBREG:
3322 total = 0;
3323 /* If we can't tie these modes, make this expensive. The larger
3324 the mode, the more expensive it is. */
3325 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3326 return COSTS_N_INSNS (2
3327 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3328 break;
3330 default:
3331 if (targetm.rtx_costs (x, code, outer_code, &total))
3332 return total;
3333 break;
3336 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3337 which is already in total. */
3339 fmt = GET_RTX_FORMAT (code);
3340 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3341 if (fmt[i] == 'e')
3342 total += rtx_cost (XEXP (x, i), code);
3343 else if (fmt[i] == 'E')
3344 for (j = 0; j < XVECLEN (x, i); j++)
3345 total += rtx_cost (XVECEXP (x, i, j), code);
3347 return total;
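/* Illustrative sketch (not part of this file): choosing between two
   equivalent expressions by cost, in the style of cse:

     if (rtx_cost (candidate, SET) < rtx_cost (current, SET))
       current = candidate;

   The outer code argument (here SET) lets targetm.rtx_costs charge
   differently depending on the context of the expression.  */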
3350 /* Return cost of address expression X.
3351 Expect that X is properly formed address reference. */
3354 address_cost (rtx x, enum machine_mode mode)
3356 /* We may be asked for the cost of various unusual addresses, such as
3357 operands of a push instruction.  It is not worthwhile to complicate
3358 the target hook with such cases.  */
3360 if (!memory_address_p (mode, x))
3361 return 1000;
3363 return targetm.address_cost (x);
3366 /* If the target doesn't override, compute the cost as with arithmetic. */
3369 default_address_cost (rtx x)
3371 return rtx_cost (x, MEM);
3375 unsigned HOST_WIDE_INT
3376 nonzero_bits (rtx x, enum machine_mode mode)
3378 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3381 unsigned int
3382 num_sign_bit_copies (rtx x, enum machine_mode mode)
3384 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
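/* Worked example: for x == (and:SI (reg:SI i) (const_int 15)),
   nonzero_bits (x, SImode) intersects the nonzero bits of the two
   operands in the AND case below, giving 0xffffffff & 15 == 15 when
   nothing further is known about the register; callers may then treat
   the upper 28 bits as known zero.  */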
3387 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3388 It avoids exponential behavior in nonzero_bits1 when X has
3389 identical subexpressions on the first or the second level. */
3391 static unsigned HOST_WIDE_INT
3392 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3393 enum machine_mode known_mode,
3394 unsigned HOST_WIDE_INT known_ret)
3396 if (x == known_x && mode == known_mode)
3397 return known_ret;
3399 /* Try to find identical subexpressions.  If found, call
3400 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3401 precomputed value for the subexpression as KNOWN_RET. */
3403 if (ARITHMETIC_P (x))
3405 rtx x0 = XEXP (x, 0);
3406 rtx x1 = XEXP (x, 1);
3408 /* Check the first level. */
3409 if (x0 == x1)
3410 return nonzero_bits1 (x, mode, x0, mode,
3411 cached_nonzero_bits (x0, mode, known_x,
3412 known_mode, known_ret));
3414 /* Check the second level. */
3415 if (ARITHMETIC_P (x0)
3416 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3417 return nonzero_bits1 (x, mode, x1, mode,
3418 cached_nonzero_bits (x1, mode, known_x,
3419 known_mode, known_ret));
3421 if (ARITHMETIC_P (x1)
3422 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3423 return nonzero_bits1 (x, mode, x0, mode,
3424 cached_nonzero_bits (x0, mode, known_x,
3425 known_mode, known_ret));
3428 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3431 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3432 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3433 is less useful. We can't allow both, because that results in exponential
3434 run time recursion. There is a nullstone testcase that triggered
3435 this. This macro avoids accidental uses of num_sign_bit_copies. */
3436 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3438 /* Given an expression, X, compute which bits in X can be nonzero.
3439 We don't care about bits outside of those defined in MODE.
3441 For most X this is simply GET_MODE_MASK (MODE), but if X is
3442 an arithmetic operation, we can do better. */
3444 static unsigned HOST_WIDE_INT
3445 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3446 enum machine_mode known_mode,
3447 unsigned HOST_WIDE_INT known_ret)
3449 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3450 unsigned HOST_WIDE_INT inner_nz;
3451 enum rtx_code code;
3452 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3454 /* For floating-point values, assume all bits are needed. */
3455 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3456 return nonzero;
3458 /* If X is wider than MODE, use its mode instead. */
3459 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3461 mode = GET_MODE (x);
3462 nonzero = GET_MODE_MASK (mode);
3463 mode_width = GET_MODE_BITSIZE (mode);
3466 if (mode_width > HOST_BITS_PER_WIDE_INT)
3467 /* Our only callers in this case look for single bit values. So
3468 just return the mode mask. Those tests will then be false. */
3469 return nonzero;
3471 #ifndef WORD_REGISTER_OPERATIONS
3472 /* If MODE is wider than X, but both are a single word for both the host
3473 and target machines, we can compute this from which bits of the
3474 object might be nonzero in its own mode, taking into account the fact
3475 that on many CISC machines, accessing an object in a wider mode
3476 causes the high-order bits to become undefined. So they are
3477 not known to be zero. */
3479 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3480 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3481 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3482 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3484 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3485 known_x, known_mode, known_ret);
3486 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3487 return nonzero;
3489 #endif
3491 code = GET_CODE (x);
3492 switch (code)
3494 case REG:
3495 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3496 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3497 all the bits above ptr_mode are known to be zero. */
3498 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3499 && REG_POINTER (x))
3500 nonzero &= GET_MODE_MASK (ptr_mode);
3501 #endif
3503 /* Include declared information about alignment of pointers. */
3504 /* ??? We don't properly preserve REG_POINTER changes across
3505 pointer-to-integer casts, so we can't trust it except for
3506 things that we know must be pointers. See execute/960116-1.c. */
3507 if ((x == stack_pointer_rtx
3508 || x == frame_pointer_rtx
3509 || x == arg_pointer_rtx)
3510 && REGNO_POINTER_ALIGN (REGNO (x)))
3512 unsigned HOST_WIDE_INT alignment
3513 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3515 #ifdef PUSH_ROUNDING
3516 /* If PUSH_ROUNDING is defined, it is possible for the
3517 stack to be momentarily aligned only to that amount,
3518 so we pick the least alignment. */
3519 if (x == stack_pointer_rtx && PUSH_ARGS)
3520 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3521 alignment);
3522 #endif
3524 nonzero &= ~(alignment - 1);
3528 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3529 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3530 known_mode, known_ret,
3531 &nonzero_for_hook);
3533 if (new)
3534 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3535 known_mode, known_ret);
3537 return nonzero_for_hook;
3540 case CONST_INT:
3541 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3542 /* If X is negative in MODE, sign-extend the value. */
3543 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3544 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3545 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3546 #endif
3548 return INTVAL (x);
3550 case MEM:
3551 #ifdef LOAD_EXTEND_OP
3552 /* In many, if not most, RISC machines, reading a byte from memory
3553 zeros the rest of the register. Noticing that fact saves a lot
3554 of extra zero-extends. */
3555 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3556 nonzero &= GET_MODE_MASK (GET_MODE (x));
3557 #endif
3558 break;
3560 case EQ: case NE:
3561 case UNEQ: case LTGT:
3562 case GT: case GTU: case UNGT:
3563 case LT: case LTU: case UNLT:
3564 case GE: case GEU: case UNGE:
3565 case LE: case LEU: case UNLE:
3566 case UNORDERED: case ORDERED:
3568 /* If this produces an integer result, we know which bits are set.
3569 Code here used to clear bits outside the mode of X, but that is
3570 now done above. */
3572 if (GET_MODE_CLASS (mode) == MODE_INT
3573 && mode_width <= HOST_BITS_PER_WIDE_INT)
3574 nonzero = STORE_FLAG_VALUE;
3575 break;
3577 case NEG:
3578 #if 0
3579 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3580 and num_sign_bit_copies. */
3581 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3582 == GET_MODE_BITSIZE (GET_MODE (x)))
3583 nonzero = 1;
3584 #endif
3586 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3587 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3588 break;
3590 case ABS:
3591 #if 0
3592 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3593 and num_sign_bit_copies. */
3594 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3595 == GET_MODE_BITSIZE (GET_MODE (x)))
3596 nonzero = 1;
3597 #endif
3598 break;
3600 case TRUNCATE:
3601 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3602 known_x, known_mode, known_ret)
3603 & GET_MODE_MASK (mode));
3604 break;
3606 case ZERO_EXTEND:
3607 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3608 known_x, known_mode, known_ret);
3609 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3610 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3611 break;
3613 case SIGN_EXTEND:
3614 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3615 Otherwise, show that all the bits in the outer mode that are not in
3616 the inner mode may be nonzero.  */
3617 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3618 known_x, known_mode, known_ret);
3619 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3621 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3622 if (inner_nz
3623 & (((HOST_WIDE_INT) 1
3624 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3625 inner_nz |= (GET_MODE_MASK (mode)
3626 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3629 nonzero &= inner_nz;
3630 break;
3632 case AND:
3633 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3634 known_x, known_mode, known_ret)
3635 & cached_nonzero_bits (XEXP (x, 1), mode,
3636 known_x, known_mode, known_ret);
3637 break;
3639 case XOR: case IOR:
3640 case UMIN: case UMAX: case SMIN: case SMAX:
3642 unsigned HOST_WIDE_INT nonzero0 =
3643 cached_nonzero_bits (XEXP (x, 0), mode,
3644 known_x, known_mode, known_ret);
3646 /* Don't call nonzero_bits a second time if it cannot change
3647 anything.  */
3648 if ((nonzero & nonzero0) != nonzero)
3649 nonzero &= nonzero0
3650 | cached_nonzero_bits (XEXP (x, 1), mode,
3651 known_x, known_mode, known_ret);
3653 break;
3655 case PLUS: case MINUS:
3656 case MULT:
3657 case DIV: case UDIV:
3658 case MOD: case UMOD:
3659 /* We can apply the rules of arithmetic to compute the number of
3660 high- and low-order zero bits of these operations. We start by
3661 computing the width (position of the highest-order nonzero bit)
3662 and the number of low-order zero bits for each value. */
3664 unsigned HOST_WIDE_INT nz0 =
3665 cached_nonzero_bits (XEXP (x, 0), mode,
3666 known_x, known_mode, known_ret);
3667 unsigned HOST_WIDE_INT nz1 =
3668 cached_nonzero_bits (XEXP (x, 1), mode,
3669 known_x, known_mode, known_ret);
3670 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3671 int width0 = floor_log2 (nz0) + 1;
3672 int width1 = floor_log2 (nz1) + 1;
3673 int low0 = floor_log2 (nz0 & -nz0);
3674 int low1 = floor_log2 (nz1 & -nz1);
3675 HOST_WIDE_INT op0_maybe_minusp
3676 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3677 HOST_WIDE_INT op1_maybe_minusp
3678 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3679 unsigned int result_width = mode_width;
3680 int result_low = 0;
3682 switch (code)
3684 case PLUS:
3685 result_width = MAX (width0, width1) + 1;
3686 result_low = MIN (low0, low1);
3687 break;
3688 case MINUS:
3689 result_low = MIN (low0, low1);
3690 break;
3691 case MULT:
3692 result_width = width0 + width1;
3693 result_low = low0 + low1;
3694 break;
3695 case DIV:
3696 if (width1 == 0)
3697 break;
3698 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3699 result_width = width0;
3700 break;
3701 case UDIV:
3702 if (width1 == 0)
3703 break;
3704 result_width = width0;
3705 break;
3706 case MOD:
3707 if (width1 == 0)
3708 break;
3709 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3710 result_width = MIN (width0, width1);
3711 result_low = MIN (low0, low1);
3712 break;
3713 case UMOD:
3714 if (width1 == 0)
3715 break;
3716 result_width = MIN (width0, width1);
3717 result_low = MIN (low0, low1);
3718 break;
3719 default:
3720 gcc_unreachable ();
3723 if (result_width < mode_width)
3724 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3726 if (result_low > 0)
3727 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3729 #ifdef POINTERS_EXTEND_UNSIGNED
3730 /* If pointers extend unsigned and this is an addition or subtraction
3731 to a pointer in Pmode, all the bits above ptr_mode are known to be
3732 zero. */
3733 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3734 && (code == PLUS || code == MINUS)
3735 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3736 nonzero &= GET_MODE_MASK (ptr_mode);
3737 #endif
3739 break;
3741 case ZERO_EXTRACT:
3742 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3743 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3744 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3745 break;
3747 case SUBREG:
3748 /* If this is a SUBREG formed for a promoted variable that has
3749 been zero-extended, we know that at least the high-order bits
3750 are zero, though others might be too. */
3752 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3753 nonzero = GET_MODE_MASK (GET_MODE (x))
3754 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3755 known_x, known_mode, known_ret);
3757 /* If the inner mode is a single word for both the host and target
3758 machines, we can compute this from which bits of the inner
3759 object might be nonzero. */
3760 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3761 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3762 <= HOST_BITS_PER_WIDE_INT))
3764 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3765 known_x, known_mode, known_ret);
3767 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3768 /* If this is a typical RISC machine, we only have to worry
3769 about the way loads are extended. */
3770 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3771 ? (((nonzero
3772 & (((unsigned HOST_WIDE_INT) 1
3773 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3774 != 0))
3775 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3776 || !MEM_P (SUBREG_REG (x)))
3777 #endif
3779 /* On many CISC machines, accessing an object in a wider mode
3780 causes the high-order bits to become undefined. So they are
3781 not known to be zero. */
3782 if (GET_MODE_SIZE (GET_MODE (x))
3783 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3784 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3785 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3788 break;
3790 case ASHIFTRT:
3791 case LSHIFTRT:
3792 case ASHIFT:
3793 case ROTATE:
3794 /* The nonzero bits are in two classes: any bits within MODE
3795 that aren't in GET_MODE (x) are always significant. The rest of the
3796 nonzero bits are those that are significant in the operand of
3797 the shift when shifted the appropriate number of bits. This
3798 shows that high-order bits are cleared by the right shift and
3799 low-order bits by left shifts. */
3800 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3801 && INTVAL (XEXP (x, 1)) >= 0
3802 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3804 enum machine_mode inner_mode = GET_MODE (x);
3805 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3806 int count = INTVAL (XEXP (x, 1));
3807 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3808 unsigned HOST_WIDE_INT op_nonzero =
3809 cached_nonzero_bits (XEXP (x, 0), mode,
3810 known_x, known_mode, known_ret);
3811 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3812 unsigned HOST_WIDE_INT outer = 0;
3814 if (mode_width > width)
3815 outer = (op_nonzero & nonzero & ~mode_mask);
3817 if (code == LSHIFTRT)
3818 inner >>= count;
3819 else if (code == ASHIFTRT)
3821 inner >>= count;
3823 /* If the sign bit may have been nonzero before the shift, we
3824 need to mark all the places it could have been copied to
3825 by the shift as possibly nonzero. */
3826 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3827 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3829 else if (code == ASHIFT)
3830 inner <<= count;
3831 else
3832 inner = ((inner << (count % width)
3833 | (inner >> (width - (count % width)))) & mode_mask);
3835 nonzero &= (outer | inner);
3837 break;
3839 case FFS:
3840 case POPCOUNT:
3841 /* This is at most the number of bits in the mode. */
3842 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3843 break;
3845 case CLZ:
3846 /* If CLZ has a known value at zero, then the nonzero bits are
3847 that value, plus the number of bits in the mode minus one. */
3848 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3849 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3850 else
3851 nonzero = -1;
3852 break;
3854 case CTZ:
3855 /* If CTZ has a known value at zero, then the nonzero bits are
3856 that value, plus the number of bits in the mode minus one. */
3857 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3858 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3859 else
3860 nonzero = -1;
3861 break;
3863 case PARITY:
3864 nonzero = 1;
3865 break;
3867 case IF_THEN_ELSE:
3869 unsigned HOST_WIDE_INT nonzero_true =
3870 cached_nonzero_bits (XEXP (x, 1), mode,
3871 known_x, known_mode, known_ret);
3873 /* Don't call nonzero_bits a second time if it cannot change
3874 anything.  */
3875 if ((nonzero & nonzero_true) != nonzero)
3876 nonzero &= nonzero_true
3877 | cached_nonzero_bits (XEXP (x, 2), mode,
3878 known_x, known_mode, known_ret);
3880 break;
3882 default:
3883 break;
3886 return nonzero;
3889 /* See the macro definition above. */
3890 #undef cached_num_sign_bit_copies
3893 /* The function cached_num_sign_bit_copies is a wrapper around
3894 num_sign_bit_copies1. It avoids exponential behavior in
3895 num_sign_bit_copies1 when X has identical subexpressions on the
3896 first or the second level. */
3898 static unsigned int
3899 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3900 enum machine_mode known_mode,
3901 unsigned int known_ret)
3903 if (x == known_x && mode == known_mode)
3904 return known_ret;
3906 /* Try to find identical subexpressions.  If found, call
3907 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3908 the precomputed value for the subexpression as KNOWN_RET. */
3910 if (ARITHMETIC_P (x))
3912 rtx x0 = XEXP (x, 0);
3913 rtx x1 = XEXP (x, 1);
3915 /* Check the first level. */
3916 if (x0 == x1)
3917 return
3918 num_sign_bit_copies1 (x, mode, x0, mode,
3919 cached_num_sign_bit_copies (x0, mode, known_x,
3920 known_mode,
3921 known_ret));
3923 /* Check the second level. */
3924 if (ARITHMETIC_P (x0)
3925 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3926 return
3927 num_sign_bit_copies1 (x, mode, x1, mode,
3928 cached_num_sign_bit_copies (x1, mode, known_x,
3929 known_mode,
3930 known_ret));
3932 if (ARITHMETIC_P (x1)
3933 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3934 return
3935 num_sign_bit_copies1 (x, mode, x0, mode,
3936 cached_num_sign_bit_copies (x0, mode, known_x,
3937 known_mode,
3938 known_ret));
3941 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
3944 /* Return the number of bits at the high-order end of X that are known to
3945 be equal to the sign bit. X will be used in mode MODE; if MODE is
3946 VOIDmode, X will be used in its own mode. The returned value will always
3947 be between 1 and the number of bits in MODE. */
3949 static unsigned int
3950 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
3951 enum machine_mode known_mode,
3952 unsigned int known_ret)
3954 enum rtx_code code = GET_CODE (x);
3955 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
3956 int num0, num1, result;
3957 unsigned HOST_WIDE_INT nonzero;
3959 /* If we weren't given a mode, use the mode of X. If the mode is still
3960 VOIDmode, we don't know anything. Likewise if one of the modes is
3961 floating-point. */
3963 if (mode == VOIDmode)
3964 mode = GET_MODE (x);
3966 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
3967 return 1;
3969 /* For a smaller object, just ignore the high bits. */
3970 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
3972 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
3973 known_x, known_mode, known_ret);
3974 return MAX (1,
3975 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
3978 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
3980 #ifndef WORD_REGISTER_OPERATIONS
3981 /* If this machine does not do all register operations on the entire
3982 register and MODE is wider than the mode of X, we can say nothing
3983 at all about the high-order bits. */
3984 return 1;
3985 #else
3986 /* Likewise on machines that do, if the mode of the object is smaller
3987 than a word and loads of that size don't sign extend, we can say
3988 nothing about the high order bits. */
3989 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
3990 #ifdef LOAD_EXTEND_OP
3991 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
3992 #endif
3994 return 1;
3995 #endif
3998 switch (code)
4000 case REG:
4002 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4003 /* If pointers extend signed and this is a pointer in Pmode, say that
4004 all the bits above ptr_mode are known to be sign bit copies. */
4005 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4006 && REG_POINTER (x))
4007 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4008 #endif
4011 unsigned int copies_for_hook = 1, copies = 1;
4012 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4013 known_mode, known_ret,
4014 &copies_for_hook);
4016 if (new)
4017 copies = cached_num_sign_bit_copies (new, mode, known_x,
4018 known_mode, known_ret);
4020 if (copies > 1 || copies_for_hook > 1)
4021 return MAX (copies, copies_for_hook);
4023 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4025 break;
4027 case MEM:
4028 #ifdef LOAD_EXTEND_OP
4029 /* Some RISC machines sign-extend all loads of smaller than a word. */
4030 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4031 return MAX (1, ((int) bitwidth
4032 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4033 #endif
4034 break;
4036 case CONST_INT:
4037 /* If the constant is negative, take its 1's complement and remask.
4038 Then see how many zero bits we have. */
4039 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4040 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4041 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4042 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4044 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4046 case SUBREG:
4047 /* If this is a SUBREG for a promoted object that is sign-extended
4048 and we are looking at it in a wider mode, we know that at least the
4049 high-order bits are known to be sign bit copies. */
4051 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4053 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4054 known_x, known_mode, known_ret);
4055 return MAX ((int) bitwidth
4056 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4057 num0);
4060 /* For a smaller object, just ignore the high bits. */
4061 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4063 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4064 known_x, known_mode, known_ret);
4065 return MAX (1, (num0
4066 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4067 - bitwidth)));
4070 #ifdef WORD_REGISTER_OPERATIONS
4071 #ifdef LOAD_EXTEND_OP
4072 /* For paradoxical SUBREGs on machines where all register operations
4073 affect the entire register, just look inside. Note that we are
4074 passing MODE to the recursive call, so the number of sign bit copies
4075 will remain relative to that mode, not the inner mode. */
4077 /* This works only if loads sign extend. Otherwise, if we get a
4078 reload for the inner part, it may be loaded from the stack, and
4079 then we lose all sign bit copies that existed before the store
4080 to the stack. */
4082 if ((GET_MODE_SIZE (GET_MODE (x))
4083 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4084 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4085 && MEM_P (SUBREG_REG (x)))
4086 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4087 known_x, known_mode, known_ret);
4088 #endif
4089 #endif
4090 break;
4092 case SIGN_EXTRACT:
4093 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4094 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4095 break;
4097 case SIGN_EXTEND:
4098 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4099 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4100 known_x, known_mode, known_ret));
4102 case TRUNCATE:
4103 /* For a smaller object, just ignore the high bits. */
4104 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4105 known_x, known_mode, known_ret);
4106 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4107 - bitwidth)));
4109 case NOT:
4110 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4111 known_x, known_mode, known_ret);
4113 case ROTATE: case ROTATERT:
4114 /* If we are rotating left by a number of bits less than the number
4115 of sign bit copies, we can just subtract that amount from the
4116 number. */
4117 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4118 && INTVAL (XEXP (x, 1)) >= 0
4119 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4121 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4122 known_x, known_mode, known_ret);
4123 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4124 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4126 break;
4128 case NEG:
4129 /* In general, this subtracts one sign bit copy. But if the value
4130 is known to be positive, the number of sign bit copies is the
4131 same as that of the input. Finally, if the input has just one bit
4132 that might be nonzero, all the bits are copies of the sign bit. */
4133 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4134 known_x, known_mode, known_ret);
4135 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4136 return num0 > 1 ? num0 - 1 : 1;
4138 nonzero = nonzero_bits (XEXP (x, 0), mode);
4139 if (nonzero == 1)
4140 return bitwidth;
4142 if (num0 > 1
4143 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4144 num0--;
4146 return num0;
4148 case IOR: case AND: case XOR:
4149 case SMIN: case SMAX: case UMIN: case UMAX:
4150 /* Logical operations will preserve the number of sign-bit copies.
4151 MIN and MAX operations always return one of the operands. */
4152 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4153 known_x, known_mode, known_ret);
4154 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4155 known_x, known_mode, known_ret);
4156 return MIN (num0, num1);
4158 case PLUS: case MINUS:
4159 /* For addition and subtraction, we can have a 1-bit carry. However,
4160 if we are subtracting 1 from a positive number, there will not
4161 be such a carry. Furthermore, if the positive number is known to
4162 be 0 or 1, we know the result is either -1 or 0. */
4164 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4165 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4167 nonzero = nonzero_bits (XEXP (x, 0), mode);
4168 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4169 return (nonzero == 1 || nonzero == 0 ? bitwidth
4170 : bitwidth - floor_log2 (nonzero) - 1);
4173 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4174 known_x, known_mode, known_ret);
4175 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4176 known_x, known_mode, known_ret);
4177 result = MAX (1, MIN (num0, num1) - 1);
4179 #ifdef POINTERS_EXTEND_UNSIGNED
4180 /* If pointers extend signed and this is an addition or subtraction
4181 to a pointer in Pmode, all the bits above ptr_mode are known to be
4182 sign bit copies. */
4183 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4184 && (code == PLUS || code == MINUS)
4185 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4186 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4187 - GET_MODE_BITSIZE (ptr_mode) + 1),
4188 result);
4189 #endif
4190 return result;
4192 case MULT:
4193 /* The number of bits of the product is the sum of the number of
4194 bits of both terms.  However, unless one of the terms is known
4195 to be positive, we must allow for an additional bit since negating
4196 a negative number can remove one sign bit copy. */
4198 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4199 known_x, known_mode, known_ret);
4200 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4201 known_x, known_mode, known_ret);
4203 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4204 if (result > 0
4205 && (bitwidth > HOST_BITS_PER_WIDE_INT
4206 || (((nonzero_bits (XEXP (x, 0), mode)
4207 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4208 && ((nonzero_bits (XEXP (x, 1), mode)
4209 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4210 result--;
4212 return MAX (1, result);
4214 case UDIV:
4215 /* The result must be <= the first operand. If the first operand
4216 has the high bit set, we know nothing about the number of sign
4217 bit copies. */
4218 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4219 return 1;
4220 else if ((nonzero_bits (XEXP (x, 0), mode)
4221 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4222 return 1;
4223 else
4224 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4225 known_x, known_mode, known_ret);
4227 case UMOD:
4228 /* The result must be <= the second operand. */
4229 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4230 known_x, known_mode, known_ret);
4232 case DIV:
4233 /* Similar to unsigned division, except that we have to worry about
4234 the case where the divisor is negative, in which case we have
4235 to add 1. */
4236 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4237 known_x, known_mode, known_ret);
4238 if (result > 1
4239 && (bitwidth > HOST_BITS_PER_WIDE_INT
4240 || (nonzero_bits (XEXP (x, 1), mode)
4241 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4242 result--;
4244 return result;
4246 case MOD:
4247 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4248 known_x, known_mode, known_ret);
4249 if (result > 1
4250 && (bitwidth > HOST_BITS_PER_WIDE_INT
4251 || (nonzero_bits (XEXP (x, 1), mode)
4252 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4253 result--;
4255 return result;
4257 case ASHIFTRT:
4258 /* Shifts by a constant add to the number of bits equal to the
4259 sign bit. */
4260 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4261 known_x, known_mode, known_ret);
4262 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4263 && INTVAL (XEXP (x, 1)) > 0)
4264 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4266 return num0;
4268 case ASHIFT:
4269 /* Left shifts destroy copies. */
4270 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4271 || INTVAL (XEXP (x, 1)) < 0
4272 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4273 return 1;
4275 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4276 known_x, known_mode, known_ret);
4277 return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ: case NE: case GE: case GT: case LE: case LT:
    case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* If the constant is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
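
      /* Worked example (editor's illustration, not in the original
         source): with the common STORE_FLAG_VALUE of 1, nonzero stays 1
         and a comparison result in a 32-bit mode has
         32 - floor_log2 (1) - 1 = 31 sign bit copies; with a
         STORE_FLAG_VALUE of -1, the complement masked to the mode is 0
         and all 32 bits are copies of the sign bit.  */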

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
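
/* Worked example for the fallback above (editor's illustration, not in
   the original source): if nonzero_bits reports 0xff for a 32-bit
   value, the sign bit is clear and floor_log2 (0xff) == 7, so the
   value has 32 - 7 - 1 = 24 known sign bit copies (all zeros).  */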

/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}
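
/* Editor's sketch (not part of the original file): one way a pass
   might use insn_rtx_cost to compare two candidate instructions.
   The helper name is hypothetical, and the block is guarded out so
   that it does not affect the build.  */
#if 0
static bool
insn_cheaper_p (rtx a, rtx b)
{
  int cost_a = insn_rtx_cost (PATTERN (a));
  int cost_b = insn_rtx_cost (PATTERN (b));

  /* Zero means "no known cost"; treat such insns conservatively,
     as never cheaper.  */
  return cost_a != 0 && cost_b != 0 && cost_a < cost_b;
}
#endif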

/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands; (cc0) will have been replaced.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to canonicalizing
   it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P is nonzero, the condition must be valid at both
   *EARLIEST and at INSN.  */

rtx
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
                        rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx prev = insn;
  rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  enum machine_mode mode;

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.c.  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
         && op1 == CONST0_RTX (GET_MODE (op0))
         && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

#ifdef HAVE_cc0
      /* If comparison with cc0, import actual comparison from compare
         insn.  */
      if (op0 == cc0_rtx)
        {
          if ((prev = prev_nonnote_insn (prev)) == 0
              || !NONJUMP_INSN_P (prev)
              || (set = single_set (prev)) == 0
              || SET_DEST (set) != cc0_rtx)
            return 0;

          op0 = SET_SRC (set);
          op1 = CONST0_RTX (GET_MODE (op0));
          if (earliest)
            *earliest = prev;
        }
#endif

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
        {
          op1 = XEXP (op0, 1);
          op0 = XEXP (op0, 0);
          continue;
        }
      else if (!REG_P (op0))
        break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
         stop if it isn't a single set or if it has a REG_INC note because
         we don't want to bother dealing with it.  */

      if ((prev = prev_nonnote_insn (prev)) == 0
          || !NONJUMP_INSN_P (prev)
          || FIND_REG_INC_NOTE (prev, NULL_RTX))
        break;

      set = set_of (op0, prev);

      if (set
          && (GET_CODE (set) != SET
              || !rtx_equal_p (SET_DEST (set), op0)))
        break;

      /* If this is setting OP0, get what it sets it to if it looks
         relevant.  */
      if (set)
        {
          enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* ??? We may not combine comparisons done in a CCmode with
             comparisons not done in a CCmode.  This is to aid targets
             like Alpha that have an IEEE compliant EQ instruction, and
             a non-IEEE compliant BEQ instruction.  The use of CCmode is
             actually artificial, simply to prevent the combination, but
             should not affect other platforms.

             However, we must allow VOIDmode comparisons to match either
             CCmode or non-CCmode comparison, because some ports have
             modeless comparisons inside branch patterns.

             ??? This mode check should perhaps look more like the mode check
             in simplify_comparison in combine.  */

          if ((GET_CODE (SET_SRC (set)) == COMPARE
               || (((code == NE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                    && COMPARISON_P (SET_SRC (set))))
              && (((GET_MODE_CLASS (mode) == MODE_CC)
                   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                  || mode == VOIDmode || inner_mode == VOIDmode))
            x = SET_SRC (set);
          else if (((code == EQ
                     || (code == GE
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == GE
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))
                   && (((GET_MODE_CLASS (mode) == MODE_CC)
                        == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                       || mode == VOIDmode || inner_mode == VOIDmode))
            {
              reverse_code = 1;
              x = SET_SRC (set);
            }
          else
            break;
        }

      else if (reg_set_p (op0, prev))
        /* If this sets OP0, but not directly, we have to give up.  */
        break;

      if (x)
        {
          /* If the caller is expecting the condition to be valid at INSN,
             make sure X doesn't change before INSN.  */
          if (valid_at_insn_p)
            if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
              break;
          if (COMPARISON_P (x))
            code = GET_CODE (x);
          if (reverse_code)
            {
              code = reversed_comparison_code (x, prev);
              if (code == UNKNOWN)
                return 0;
              reverse_code = 0;
            }

          op0 = XEXP (x, 0), op1 = XEXP (x, 1);
          if (earliest)
            *earliest = prev;
        }
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && GET_CODE (op1) == CONST_INT
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
          break;

        default:
          break;
        }
    }

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
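
/* Worked example of rule (4) above (editor's illustration, not in the
   original source): given (le (reg:SI 60) (const_int 4)), the LE case
   rewrites the condition as (lt (reg:SI 60) (const_int 5)).  The guard
   skips the rewrite when const_val + 1 would overflow: in SImode,
   (le x 0x7fffffff) is left alone because const_val equals
   max_val >> 1.  */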

/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare to a CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}
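
/* Editor's sketch (not part of the original file): typical use of
   get_condition when scanning a function's jumps.  The helper name is
   hypothetical, and the block is guarded out so that it does not
   affect the build.  */
#if 0
static int
count_analyzable_jumps (void)
{
  rtx insn, cond, earliest;
  int count = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (JUMP_P (insn)
        && (cond = get_condition (insn, &earliest, 0, 0)) != 0)
      /* COND is now in the canonical form described above, e.g.
         (lt (reg) (const_int 5)), and is valid at EARLIEST.  */
      count++;

  return count;
}
#endif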

/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }
}
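
/* Illustrative note (editor's addition, not in the original source):
   GET_RTX_FORMAT (PLUS) is "ee", so non_rtx_starting_operands[PLUS]
   is 0, while GET_RTX_FORMAT (CONST_INT) is "w", which contains no
   'e', 'E' or 'V', so its entry is -1 and for_each_rtx knows there
   are no sub-rtxes to descend into.  */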