1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "target.h"
33 #include "output.h"
34 #include "tm_p.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "regs.h"
38 #include "function.h"
40 /* Forward declarations. */
41 static int global_reg_mentioned_p_1 (rtx *, void *);
42 static void set_of_1 (rtx, rtx, void *);
43 static bool covers_regno_p (rtx, unsigned int);
44 static bool covers_regno_no_parallel_p (rtx, unsigned int);
45 static int rtx_referenced_p_1 (rtx *, void *);
46 static int computed_jump_p_1 (rtx);
47 static void parms_set (rtx, rtx, void *);
49 static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
50 rtx, enum machine_mode,
51 unsigned HOST_WIDE_INT);
52 static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
53 enum machine_mode,
54 unsigned HOST_WIDE_INT);
55 static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
56 enum machine_mode,
57 unsigned int);
58 static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
59 enum machine_mode, unsigned int);
61 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
62 -1 if a code has no such operand. */
63 static int non_rtx_starting_operands[NUM_RTX_CODE];
65 /* Bit flags that specify the machine subtype we are compiling for.
66 Bits are tested using macros TARGET_... defined in the tm.h file
67 and set by `-m...' switches. Must be defined in rtlanal.c. */
69 int target_flags;
71 /* Return 1 if the value of X is unstable
72 (would be different at a different point in the program).
73 The frame pointer, arg pointer, etc. are considered stable
74 (within one function) and so is anything marked `unchanging'. */
76 int
77 rtx_unstable_p (rtx x)
79 RTX_CODE code = GET_CODE (x);
80 int i;
81 const char *fmt;
83 switch (code)
85 case MEM:
86 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
88 case CONST:
89 case CONST_INT:
90 case CONST_DOUBLE:
91 case CONST_VECTOR:
92 case SYMBOL_REF:
93 case LABEL_REF:
94 return 0;
96 case REG:
97 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
98 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
99 /* The arg pointer varies if it is not a fixed register. */
100 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
101 return 0;
102 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
103 /* ??? When call-clobbered, the value is stable modulo the restore
104 that must happen after a call. This currently screws up local-alloc
105 into believing that the restore is not needed. */
106 if (x == pic_offset_table_rtx)
107 return 0;
108 #endif
109 return 1;
111 case ASM_OPERANDS:
112 if (MEM_VOLATILE_P (x))
113 return 1;
115 /* Fall through. */
117 default:
118 break;
121 fmt = GET_RTX_FORMAT (code);
122 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
123 if (fmt[i] == 'e')
125 if (rtx_unstable_p (XEXP (x, i)))
126 return 1;
128 else if (fmt[i] == 'E')
130 int j;
131 for (j = 0; j < XVECLEN (x, i); j++)
132 if (rtx_unstable_p (XVECEXP (x, i, j)))
133 return 1;
136 return 0;
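
/* Editorial example, not part of the original file: a minimal sketch of
   how a caller might use rtx_unstable_p to decide whether the address of
   a MEM can be compared across program points.  The helper name is
   hypothetical.  */
static int stable_mem_address_p (rtx) ATTRIBUTE_UNUSED;
static int
stable_mem_address_p (rtx mem)
{
  /* An address is comparable across points only if no part of it can
     change within the function.  */
  return MEM_P (mem) && ! rtx_unstable_p (XEXP (mem, 0));
}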
139 /* Return 1 if X has a value that can vary even between two
140 executions of the program. 0 means X can be compared reliably
141 against certain constants or near-constants.
142 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
143 zero, we are slightly more conservative.
144 The frame pointer and the arg pointer are considered constant. */
147 rtx_varies_p (rtx x, int for_alias)
149 RTX_CODE code;
150 int i;
151 const char *fmt;
153 if (!x)
154 return 0;
156 code = GET_CODE (x);
157 switch (code)
159 case MEM:
160 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
162 case CONST:
163 case CONST_INT:
164 case CONST_DOUBLE:
165 case CONST_VECTOR:
166 case SYMBOL_REF:
167 case LABEL_REF:
168 return 0;
170 case REG:
171 /* Note that we have to test for the actual rtx used for the frame
172 and arg pointers and not just the register number in case we have
173 eliminated the frame and/or arg pointer and are using it
174 for pseudos. */
175 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
176 /* The arg pointer varies if it is not a fixed register. */
177 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
178 return 0;
179 if (x == pic_offset_table_rtx
180 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
181 /* ??? When call-clobbered, the value is stable modulo the restore
182 that must happen after a call. This currently screws up
183 local-alloc into believing that the restore is not needed, so we
184 must return 0 only if we are called from alias analysis. */
185 && for_alias
186 #endif
188 return 0;
189 return 1;
191 case LO_SUM:
192 /* The operand 0 of a LO_SUM is considered constant
193 (in fact it is related specifically to operand 1)
194 during alias analysis. */
195 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
196 || rtx_varies_p (XEXP (x, 1), for_alias);
198 case ASM_OPERANDS:
199 if (MEM_VOLATILE_P (x))
200 return 1;
202 /* Fall through. */
204 default:
205 break;
208 fmt = GET_RTX_FORMAT (code);
209 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
210 if (fmt[i] == 'e')
212 if (rtx_varies_p (XEXP (x, i), for_alias))
213 return 1;
215 else if (fmt[i] == 'E')
217 int j;
218 for (j = 0; j < XVECLEN (x, i); j++)
219 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
220 return 1;
223 return 0;
226 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
229 rtx_addr_can_trap_p (rtx x)
231 enum rtx_code code = GET_CODE (x);
233 switch (code)
235 case SYMBOL_REF:
236 return SYMBOL_REF_WEAK (x);
238 case LABEL_REF:
239 return 0;
241 case REG:
242 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
243 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
244 || x == stack_pointer_rtx
245 /* The arg pointer varies if it is not a fixed register. */
246 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
247 return 0;
248 /* All of the virtual frame registers are stack references. */
249 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
250 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
251 return 0;
252 return 1;
254 case CONST:
255 return rtx_addr_can_trap_p (XEXP (x, 0));
257 case PLUS:
258 /* An address is assumed not to trap if it is an address that can't
259 trap plus a constant integer or it is the pic register plus a
260 constant. */
261 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
262 && GET_CODE (XEXP (x, 1)) == CONST_INT)
263 || (XEXP (x, 0) == pic_offset_table_rtx
264 && CONSTANT_P (XEXP (x, 1))));
266 case LO_SUM:
267 case PRE_MODIFY:
268 return rtx_addr_can_trap_p (XEXP (x, 1));
270 case PRE_DEC:
271 case PRE_INC:
272 case POST_DEC:
273 case POST_INC:
274 case POST_MODIFY:
275 return rtx_addr_can_trap_p (XEXP (x, 0));
277 default:
278 break;
281 /* If it isn't one of the cases above, it can cause a trap. */
282 return 1;
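
/* Editorial sketch (hypothetical helper, not in the original source):
   a pass deciding whether a load may be hoisted above a branch could
   combine the MEM_NOTRAP_P flag with rtx_addr_can_trap_p like this,
   mirroring the MEM case of may_trap_p below.  */
static int load_may_fault_p (rtx) ATTRIBUTE_UNUSED;
static int
load_may_fault_p (rtx mem)
{
  if (MEM_NOTRAP_P (mem))
    return 0;
  return rtx_addr_can_trap_p (XEXP (mem, 0));
}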
285 /* Return true if X is an address that is known to not be zero. */
287 bool
288 nonzero_address_p (rtx x)
290 enum rtx_code code = GET_CODE (x);
292 switch (code)
294 case SYMBOL_REF:
295 return !SYMBOL_REF_WEAK (x);
297 case LABEL_REF:
298 return true;
300 case REG:
301 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
302 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
303 || x == stack_pointer_rtx
304 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
305 return true;
306 /* All of the virtual frame registers are stack references. */
307 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
308 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
309 return true;
310 return false;
312 case CONST:
313 return nonzero_address_p (XEXP (x, 0));
315 case PLUS:
316 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
318 /* Pointers aren't allowed to wrap. If we've got a register
319 that is known to be a pointer, and a positive offset, then
320 the composite can't be zero. */
321 if (INTVAL (XEXP (x, 1)) > 0
322 && REG_P (XEXP (x, 0))
323 && REG_POINTER (XEXP (x, 0)))
324 return true;
326 return nonzero_address_p (XEXP (x, 0));
328 /* Handle PIC references. */
329 else if (XEXP (x, 0) == pic_offset_table_rtx
330 && CONSTANT_P (XEXP (x, 1)))
331 return true;
332 return false;
334 case PRE_MODIFY:
335 /* Similar to the above; allow positive offsets. Further, since
336 auto-inc is only allowed in memories, the register must be a
337 pointer. */
338 if (GET_CODE (XEXP (x, 1)) == CONST_INT
339 && INTVAL (XEXP (x, 1)) > 0)
340 return true;
341 return nonzero_address_p (XEXP (x, 0));
343 case PRE_INC:
344 /* Similarly. Further, the offset is always positive. */
345 return true;
347 case PRE_DEC:
348 case POST_DEC:
349 case POST_INC:
350 case POST_MODIFY:
351 return nonzero_address_p (XEXP (x, 0));
353 case LO_SUM:
354 return nonzero_address_p (XEXP (x, 1));
356 default:
357 break;
360 /* If it isn't one of the cases above, it might be zero. */
361 return false;
364 /* Return 1 if X refers to a memory location whose address
365 cannot be compared reliably with constant addresses,
366 or if X refers to a BLKmode memory object.
367 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
368 zero, we are slightly more conservative. */
371 rtx_addr_varies_p (rtx x, int for_alias)
373 enum rtx_code code;
374 int i;
375 const char *fmt;
377 if (x == 0)
378 return 0;
380 code = GET_CODE (x);
381 if (code == MEM)
382 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
384 fmt = GET_RTX_FORMAT (code);
385 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
386 if (fmt[i] == 'e')
388 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
389 return 1;
391 else if (fmt[i] == 'E')
393 int j;
394 for (j = 0; j < XVECLEN (x, i); j++)
395 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
396 return 1;
398 return 0;
401 /* Return the value of the integer term in X, if one is apparent;
402 otherwise return 0.
403 Only obvious integer terms are detected.
404 This is used in cse.c with the `related_value' field. */
406 HOST_WIDE_INT
407 get_integer_term (rtx x)
409 if (GET_CODE (x) == CONST)
410 x = XEXP (x, 0);
412 if (GET_CODE (x) == MINUS
413 && GET_CODE (XEXP (x, 1)) == CONST_INT)
414 return - INTVAL (XEXP (x, 1));
415 if (GET_CODE (x) == PLUS
416 && GET_CODE (XEXP (x, 1)) == CONST_INT)
417 return INTVAL (XEXP (x, 1));
418 return 0;
421 /* If X is a constant, return the value sans apparent integer term;
422 otherwise return 0.
423 Only obvious integer terms are detected. */
426 get_related_value (rtx x)
428 if (GET_CODE (x) != CONST)
429 return 0;
430 x = XEXP (x, 0);
431 if (GET_CODE (x) == PLUS
432 && GET_CODE (XEXP (x, 1)) == CONST_INT)
433 return XEXP (x, 0);
434 else if (GET_CODE (x) == MINUS
435 && GET_CODE (XEXP (x, 1)) == CONST_INT)
436 return XEXP (x, 0);
437 return 0;
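
/* Editorial example, not part of the original file: for the rtx
   (const (plus (symbol_ref "x") (const_int 12))), get_integer_term
   returns 12 and get_related_value returns (symbol_ref "x").  A
   hypothetical decomposition helper, shown only as a sketch: */
static void split_const_term (rtx, rtx *, HOST_WIDE_INT *) ATTRIBUTE_UNUSED;
static void
split_const_term (rtx x, rtx *base, HOST_WIDE_INT *offset)
{
  *base = get_related_value (x);	/* 0 if X has no obvious base.  */
  *offset = get_integer_term (x);	/* 0 if X has no obvious term.  */
}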
440 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
441 a global register. */
443 static int
444 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
446 int regno;
447 rtx x = *loc;
449 if (! x)
450 return 0;
452 switch (GET_CODE (x))
454 case SUBREG:
455 if (REG_P (SUBREG_REG (x)))
457 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
458 && global_regs[subreg_regno (x)])
459 return 1;
460 return 0;
462 break;
464 case REG:
465 regno = REGNO (x);
466 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
467 return 1;
468 return 0;
470 case SCRATCH:
471 case PC:
472 case CC0:
473 case CONST_INT:
474 case CONST_DOUBLE:
475 case CONST:
476 case LABEL_REF:
477 return 0;
479 case CALL:
480 /* A non-constant call might use a global register. */
481 return 1;
483 default:
484 break;
487 return 0;
490 /* Returns nonzero if X mentions a global register. */
493 global_reg_mentioned_p (rtx x)
495 if (INSN_P (x))
497 if (CALL_P (x))
499 if (! CONST_OR_PURE_CALL_P (x))
500 return 1;
501 x = CALL_INSN_FUNCTION_USAGE (x);
502 if (x == 0)
503 return 0;
505 else
506 x = PATTERN (x);
509 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
512 /* Return the number of places FIND appears within X. If COUNT_DEST is
513 zero, we do not count occurrences inside the destination of a SET. */
516 count_occurrences (rtx x, rtx find, int count_dest)
518 int i, j;
519 enum rtx_code code;
520 const char *format_ptr;
521 int count;
523 if (x == find)
524 return 1;
526 code = GET_CODE (x);
528 switch (code)
530 case REG:
531 case CONST_INT:
532 case CONST_DOUBLE:
533 case CONST_VECTOR:
534 case SYMBOL_REF:
535 case CODE_LABEL:
536 case PC:
537 case CC0:
538 return 0;
540 case MEM:
541 if (MEM_P (find) && rtx_equal_p (x, find))
542 return 1;
543 break;
545 case SET:
546 if (SET_DEST (x) == find && ! count_dest)
547 return count_occurrences (SET_SRC (x), find, count_dest);
548 break;
550 default:
551 break;
554 format_ptr = GET_RTX_FORMAT (code);
555 count = 0;
557 for (i = 0; i < GET_RTX_LENGTH (code); i++)
559 switch (*format_ptr++)
561 case 'e':
562 count += count_occurrences (XEXP (x, i), find, count_dest);
563 break;
565 case 'E':
566 for (j = 0; j < XVECLEN (x, i); j++)
567 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
568 break;
571 return count;
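
/* Editorial sketch (hypothetical helper): counting how often REG is
   read by INSN, ignoring its occurrence as a store destination, is a
   direct application of count_occurrences with COUNT_DEST == 0.  */
static int count_reg_reads (rtx, rtx) ATTRIBUTE_UNUSED;
static int
count_reg_reads (rtx insn, rtx reg)
{
  return INSN_P (insn) ? count_occurrences (PATTERN (insn), reg, 0) : 0;
}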
574 /* Nonzero if register REG appears somewhere within IN.
575 Also works if REG is not a register; in this case it checks
576 for a subexpression of IN that is Lisp "equal" to REG. */
579 reg_mentioned_p (rtx reg, rtx in)
581 const char *fmt;
582 int i;
583 enum rtx_code code;
585 if (in == 0)
586 return 0;
588 if (reg == in)
589 return 1;
591 if (GET_CODE (in) == LABEL_REF)
592 return reg == XEXP (in, 0);
594 code = GET_CODE (in);
596 switch (code)
598 /* Compare registers by number. */
599 case REG:
600 return REG_P (reg) && REGNO (in) == REGNO (reg);
602 /* These codes have no constituent expressions
603 and are unique. */
604 case SCRATCH:
605 case CC0:
606 case PC:
607 return 0;
609 case CONST_INT:
610 case CONST_VECTOR:
611 case CONST_DOUBLE:
612 /* These are kept unique for a given value. */
613 return 0;
615 default:
616 break;
619 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
620 return 1;
622 fmt = GET_RTX_FORMAT (code);
624 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
626 if (fmt[i] == 'E')
628 int j;
629 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
630 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
631 return 1;
633 else if (fmt[i] == 'e'
634 && reg_mentioned_p (reg, XEXP (in, i)))
635 return 1;
637 return 0;
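
/* Editorial sketch, not part of the original file: the usual idiom is
   to apply reg_mentioned_p to an insn's pattern; this wrapper is
   hypothetical.  */
static int insn_mentions_p (rtx, rtx) ATTRIBUTE_UNUSED;
static int
insn_mentions_p (rtx reg, rtx insn)
{
  return INSN_P (insn) && reg_mentioned_p (reg, PATTERN (insn));
}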
640 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
641 no CODE_LABEL insn. */
644 no_labels_between_p (rtx beg, rtx end)
646 rtx p;
647 if (beg == end)
648 return 0;
649 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
650 if (LABEL_P (p))
651 return 0;
652 return 1;
655 /* Nonzero if register REG is used in an insn between
656 FROM_INSN and TO_INSN (exclusive of those two). */
659 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
661 rtx insn;
663 if (from_insn == to_insn)
664 return 0;
666 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
667 if (INSN_P (insn)
668 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
669 || (CALL_P (insn)
670 && (find_reg_fusage (insn, USE, reg)
671 || find_reg_fusage (insn, CLOBBER, reg)))))
672 return 1;
673 return 0;
676 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
677 is entirely replaced by a new value and the only use is as a SET_DEST,
678 we do not consider it a reference. */
681 reg_referenced_p (rtx x, rtx body)
683 int i;
685 switch (GET_CODE (body))
687 case SET:
688 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
689 return 1;
691 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
692 of a REG that occupies all of the REG, the insn references X if
693 it is mentioned in the destination. */
694 if (GET_CODE (SET_DEST (body)) != CC0
695 && GET_CODE (SET_DEST (body)) != PC
696 && !REG_P (SET_DEST (body))
697 && ! (GET_CODE (SET_DEST (body)) == SUBREG
698 && REG_P (SUBREG_REG (SET_DEST (body)))
699 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
700 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
701 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
702 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
703 && reg_overlap_mentioned_p (x, SET_DEST (body)))
704 return 1;
705 return 0;
707 case ASM_OPERANDS:
708 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
709 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
710 return 1;
711 return 0;
713 case CALL:
714 case USE:
715 case IF_THEN_ELSE:
716 return reg_overlap_mentioned_p (x, body);
718 case TRAP_IF:
719 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
721 case PREFETCH:
722 return reg_overlap_mentioned_p (x, XEXP (body, 0));
724 case UNSPEC:
725 case UNSPEC_VOLATILE:
726 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
727 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
728 return 1;
729 return 0;
731 case PARALLEL:
732 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
733 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
734 return 1;
735 return 0;
737 case CLOBBER:
738 if (MEM_P (XEXP (body, 0)))
739 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
740 return 1;
741 return 0;
743 case COND_EXEC:
744 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
745 return 1;
746 return reg_referenced_p (x, COND_EXEC_CODE (body));
748 default:
749 return 0;
753 /* Nonzero if register REG is set or clobbered in an insn between
754 FROM_INSN and TO_INSN (exclusive of those two). */
757 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
759 rtx insn;
761 if (from_insn == to_insn)
762 return 0;
764 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
765 if (INSN_P (insn) && reg_set_p (reg, insn))
766 return 1;
767 return 0;
770 /* Internals of reg_set_between_p. */
772 reg_set_p (rtx reg, rtx insn)
774 /* We can be passed an insn or part of one. If we are passed an insn,
775 check if a side-effect of the insn clobbers REG. */
776 if (INSN_P (insn)
777 && (FIND_REG_INC_NOTE (insn, reg)
778 || (CALL_P (insn)
779 && ((REG_P (reg)
780 && REGNO (reg) < FIRST_PSEUDO_REGISTER
781 && TEST_HARD_REG_BIT (regs_invalidated_by_call,
782 REGNO (reg)))
783 || MEM_P (reg)
784 || find_reg_fusage (insn, CLOBBER, reg)))))
785 return 1;
787 return set_of (reg, insn) != NULL_RTX;
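
/* Editorial sketch: a pass that wants to move a definition of REG from
   FROM to TO must know that REG is neither redefined nor read in
   between.  Both tests come from this file; the combined helper is
   hypothetical.  */
static int reg_quiescent_between_p (rtx, rtx, rtx) ATTRIBUTE_UNUSED;
static int
reg_quiescent_between_p (rtx reg, rtx from, rtx to)
{
  return ! reg_set_between_p (reg, from, to)
	 && ! reg_used_between_p (reg, from, to);
}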
790 /* Similar to reg_set_between_p, but check all registers in X. Return 0
791 only if none of them are modified between START and END. Return 1 if
792 X contains a MEM; this routine does use memory aliasing. */
795 modified_between_p (rtx x, rtx start, rtx end)
797 enum rtx_code code = GET_CODE (x);
798 const char *fmt;
799 int i, j;
800 rtx insn;
802 if (start == end)
803 return 0;
805 switch (code)
807 case CONST_INT:
808 case CONST_DOUBLE:
809 case CONST_VECTOR:
810 case CONST:
811 case SYMBOL_REF:
812 case LABEL_REF:
813 return 0;
815 case PC:
816 case CC0:
817 return 1;
819 case MEM:
820 if (modified_between_p (XEXP (x, 0), start, end))
821 return 1;
822 if (MEM_READONLY_P (x))
823 return 0;
824 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
825 if (memory_modified_in_insn_p (x, insn))
826 return 1;
827 return 0;
828 break;
830 case REG:
831 return reg_set_between_p (x, start, end);
833 default:
834 break;
837 fmt = GET_RTX_FORMAT (code);
838 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
840 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
841 return 1;
843 else if (fmt[i] == 'E')
844 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
845 if (modified_between_p (XVECEXP (x, i, j), start, end))
846 return 1;
849 return 0;
852 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
853 of them are modified in INSN. Return 1 if X contains a MEM; this routine
854 does use memory aliasing. */
857 modified_in_p (rtx x, rtx insn)
859 enum rtx_code code = GET_CODE (x);
860 const char *fmt;
861 int i, j;
863 switch (code)
865 case CONST_INT:
866 case CONST_DOUBLE:
867 case CONST_VECTOR:
868 case CONST:
869 case SYMBOL_REF:
870 case LABEL_REF:
871 return 0;
873 case PC:
874 case CC0:
875 return 1;
877 case MEM:
878 if (modified_in_p (XEXP (x, 0), insn))
879 return 1;
880 if (MEM_READONLY_P (x))
881 return 0;
882 if (memory_modified_in_insn_p (x, insn))
883 return 1;
884 return 0;
885 break;
887 case REG:
888 return reg_set_p (x, insn);
890 default:
891 break;
894 fmt = GET_RTX_FORMAT (code);
895 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
897 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
898 return 1;
900 else if (fmt[i] == 'E')
901 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
902 if (modified_in_p (XVECEXP (x, i, j), insn))
903 return 1;
906 return 0;
909 /* Helper function for set_of. */
910 struct set_of_data
912 rtx found;
913 rtx pat;
916 static void
917 set_of_1 (rtx x, rtx pat, void *data1)
919 struct set_of_data *data = (struct set_of_data *) (data1);
920 if (rtx_equal_p (x, data->pat)
921 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
922 data->found = pat;
925 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
926 (either directly or via STRICT_LOW_PART and similar modifiers). */
928 set_of (rtx pat, rtx insn)
930 struct set_of_data data;
931 data.found = NULL_RTX;
932 data.pat = pat;
933 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
934 return data.found;
937 /* Given an INSN, return a SET expression if this insn has only a single SET.
938 It may also have CLOBBERs, USEs, or SETs whose output
939 will not be used, which we ignore. */
942 single_set_2 (rtx insn, rtx pat)
944 rtx set = NULL;
945 int set_verified = 1;
946 int i;
948 if (GET_CODE (pat) == PARALLEL)
950 for (i = 0; i < XVECLEN (pat, 0); i++)
952 rtx sub = XVECEXP (pat, 0, i);
953 switch (GET_CODE (sub))
955 case USE:
956 case CLOBBER:
957 break;
959 case SET:
960 /* We can consider insns having multiple sets, where all
961 but one are dead, as single set insns. In the common case
962 only a single set is present in the pattern, so we want
963 to avoid checking for REG_UNUSED notes unless necessary.
965 When we reach a set for the first time, we just expect it to be
966 the single set we are looking for; only when more
967 sets are found in the insn do we check them. */
968 if (!set_verified)
970 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
971 && !side_effects_p (set))
972 set = NULL;
973 else
974 set_verified = 1;
976 if (!set)
977 set = sub, set_verified = 0;
978 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
979 || side_effects_p (sub))
980 return NULL_RTX;
981 break;
983 default:
984 return NULL_RTX;
988 return set;
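
/* Editorial sketch: the common consumer of single_set (the wrapper
   around single_set_2 declared in rtl.h) looks like this hypothetical
   helper, which extracts the source of a simple register-to-register
   move and returns NULL_RTX otherwise.  */
static rtx simple_move_src (rtx) ATTRIBUTE_UNUSED;
static rtx
simple_move_src (rtx insn)
{
  rtx set = single_set (insn);
  if (set != 0 && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
    return SET_SRC (set);
  return NULL_RTX;
}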
991 /* Given an INSN, return nonzero if it has more than one SET, else return
992 zero. */
995 multiple_sets (rtx insn)
997 int found;
998 int i;
1000 /* INSN must be an insn. */
1001 if (! INSN_P (insn))
1002 return 0;
1004 /* Only a PARALLEL can have multiple SETs. */
1005 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1007 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1008 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1010 /* If we have already found a SET, then return now. */
1011 if (found)
1012 return 1;
1013 else
1014 found = 1;
1018 /* Either zero or one SET. */
1019 return 0;
1022 /* Return nonzero if the destination of SET equals the source
1023 and there are no side effects. */
1026 set_noop_p (rtx set)
1028 rtx src = SET_SRC (set);
1029 rtx dst = SET_DEST (set);
1031 if (dst == pc_rtx && src == pc_rtx)
1032 return 1;
1034 if (MEM_P (dst) && MEM_P (src))
1035 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1037 if (GET_CODE (dst) == ZERO_EXTRACT)
1038 return rtx_equal_p (XEXP (dst, 0), src)
1039 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1040 && !side_effects_p (src);
1042 if (GET_CODE (dst) == STRICT_LOW_PART)
1043 dst = XEXP (dst, 0);
1045 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1047 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1048 return 0;
1049 src = SUBREG_REG (src);
1050 dst = SUBREG_REG (dst);
1053 return (REG_P (src) && REG_P (dst)
1054 && REGNO (src) == REGNO (dst));
1057 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1058 value to itself. */
1061 noop_move_p (rtx insn)
1063 rtx pat = PATTERN (insn);
1065 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1066 return 1;
1068 /* Insns carrying these notes are useful later on. */
1069 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1070 return 0;
1072 /* For now treat an insn with a REG_RETVAL note as a
1073 special insn which should not be considered a no-op. */
1074 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1075 return 0;
1077 if (GET_CODE (pat) == SET && set_noop_p (pat))
1078 return 1;
1080 if (GET_CODE (pat) == PARALLEL)
1082 int i;
1083 /* If nothing but SETs of registers to themselves,
1084 this insn can also be deleted. */
1085 for (i = 0; i < XVECLEN (pat, 0); i++)
1087 rtx tem = XVECEXP (pat, 0, i);
1089 if (GET_CODE (tem) == USE
1090 || GET_CODE (tem) == CLOBBER)
1091 continue;
1093 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1094 return 0;
1097 return 1;
1099 return 0;
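
/* Editorial sketch (hypothetical helper, not in the original file): a
   cleanup pass might walk an insn chain and count the moves that
   noop_move_p proves removable.  */
static int count_noop_moves (rtx) ATTRIBUTE_UNUSED;
static int
count_noop_moves (rtx first)
{
  rtx insn;
  int n = 0;
  for (insn = first; insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && noop_move_p (insn))
      n++;
  return n;
}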
1103 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1104 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1105 If the object was modified, if we hit a partial assignment to X, or hit a
1106 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1107 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1108 be the src. */
1111 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1113 rtx p;
1115 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1116 p = PREV_INSN (p))
1117 if (INSN_P (p))
1119 rtx set = single_set (p);
1120 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1122 if (set && rtx_equal_p (x, SET_DEST (set)))
1124 rtx src = SET_SRC (set);
1126 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1127 src = XEXP (note, 0);
1129 if ((valid_to == NULL_RTX
1130 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1131 /* Reject hard registers because we don't usually want
1132 to use them; we'd rather use a pseudo. */
1133 && (! (REG_P (src)
1134 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1136 *pinsn = p;
1137 return src;
1141 /* If set in non-simple way, we don't have a value. */
1142 if (reg_set_p (x, p))
1143 break;
1146 return x;
1149 /* Return nonzero if register in range [REGNO, ENDREGNO)
1150 appears either explicitly or implicitly in X
1151 other than being stored into.
1153 References contained within the substructure at LOC do not count.
1154 LOC may be zero, meaning don't ignore anything. */
1157 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1158 rtx *loc)
1160 int i;
1161 unsigned int x_regno;
1162 RTX_CODE code;
1163 const char *fmt;
1165 repeat:
1166 /* The contents of a REG_NONNEG note are always zero, so we must come here
1167 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1168 if (x == 0)
1169 return 0;
1171 code = GET_CODE (x);
1173 switch (code)
1175 case REG:
1176 x_regno = REGNO (x);
1178 /* If we are modifying the stack, frame, or argument pointer, it will
1179 clobber a virtual register. In fact, we could be more precise,
1180 but it isn't worth it. */
1181 if ((x_regno == STACK_POINTER_REGNUM
1182 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1183 || x_regno == ARG_POINTER_REGNUM
1184 #endif
1185 || x_regno == FRAME_POINTER_REGNUM)
1186 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1187 return 1;
1189 return (endregno > x_regno
1190 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1191 ? hard_regno_nregs[x_regno][GET_MODE (x)]
1192 : 1));
1194 case SUBREG:
1195 /* If this is a SUBREG of a hard reg, we can see exactly which
1196 registers are being modified. Otherwise, handle normally. */
1197 if (REG_P (SUBREG_REG (x))
1198 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1200 unsigned int inner_regno = subreg_regno (x);
1201 unsigned int inner_endregno
1202 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1203 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
1205 return endregno > inner_regno && regno < inner_endregno;
1207 break;
1209 case CLOBBER:
1210 case SET:
1211 if (&SET_DEST (x) != loc
1212 /* Note setting a SUBREG counts as referring to the REG it is in for
1213 a pseudo but not for hard registers since we can
1214 treat each word individually. */
1215 && ((GET_CODE (SET_DEST (x)) == SUBREG
1216 && loc != &SUBREG_REG (SET_DEST (x))
1217 && REG_P (SUBREG_REG (SET_DEST (x)))
1218 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1219 && refers_to_regno_p (regno, endregno,
1220 SUBREG_REG (SET_DEST (x)), loc))
1221 || (!REG_P (SET_DEST (x))
1222 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1223 return 1;
1225 if (code == CLOBBER || loc == &SET_SRC (x))
1226 return 0;
1227 x = SET_SRC (x);
1228 goto repeat;
1230 default:
1231 break;
1234 /* X does not match, so try its subexpressions. */
1236 fmt = GET_RTX_FORMAT (code);
1237 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1239 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1241 if (i == 0)
1243 x = XEXP (x, 0);
1244 goto repeat;
1246 else
1247 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1248 return 1;
1250 else if (fmt[i] == 'E')
1252 int j;
1253 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1254 if (loc != &XVECEXP (x, i, j)
1255 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1256 return 1;
1259 return 0;
1262 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1263 we check if any register number in X conflicts with the relevant register
1264 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1265 contains a MEM (we don't bother checking for memory addresses that can't
1266 conflict because we expect this to be a rare case). */
1269 reg_overlap_mentioned_p (rtx x, rtx in)
1271 unsigned int regno, endregno;
1273 /* If either argument is a constant, then modifying X can not
1274 affect IN. Here we look at IN; we can profitably combine
1275 CONSTANT_P (x) with the switch statement below. */
1276 if (CONSTANT_P (in))
1277 return 0;
1279 recurse:
1280 switch (GET_CODE (x))
1282 case STRICT_LOW_PART:
1283 case ZERO_EXTRACT:
1284 case SIGN_EXTRACT:
1285 /* Overly conservative. */
1286 x = XEXP (x, 0);
1287 goto recurse;
1289 case SUBREG:
1290 regno = REGNO (SUBREG_REG (x));
1291 if (regno < FIRST_PSEUDO_REGISTER)
1292 regno = subreg_regno (x);
1293 goto do_reg;
1295 case REG:
1296 regno = REGNO (x);
1297 do_reg:
1298 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1299 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1300 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1302 case MEM:
1304 const char *fmt;
1305 int i;
1307 if (MEM_P (in))
1308 return 1;
1310 fmt = GET_RTX_FORMAT (GET_CODE (in));
1311 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1312 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1313 return 1;
1315 return 0;
1318 case SCRATCH:
1319 case PC:
1320 case CC0:
1321 return reg_mentioned_p (x, in);
1323 case PARALLEL:
1325 int i;
1327 /* If any register in here refers to it we return true. */
1328 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1329 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1330 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1331 return 1;
1332 return 0;
1335 default:
1336 gcc_assert (CONSTANT_P (x));
1337 return 0;
1341 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1342 (X would be the pattern of an insn).
1343 FUN receives three arguments: the REG, MEM, CC0 or PC being
1344 stored in or clobbered, the SET or CLOBBER rtx that does the store,
1345 and the DATA pointer passed to note_stores.
1347 If the item being stored in or clobbered is a SUBREG of a hard register,
1348 the SUBREG will be passed. */
1350 void
1351 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1353 int i;
1355 if (GET_CODE (x) == COND_EXEC)
1356 x = COND_EXEC_CODE (x);
1358 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1360 rtx dest = SET_DEST (x);
1362 while ((GET_CODE (dest) == SUBREG
1363 && (!REG_P (SUBREG_REG (dest))
1364 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1365 || GET_CODE (dest) == ZERO_EXTRACT
1366 || GET_CODE (dest) == STRICT_LOW_PART)
1367 dest = XEXP (dest, 0);
1369 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1370 each of whose first operand is a register. */
1371 if (GET_CODE (dest) == PARALLEL)
1373 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1374 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1375 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1377 else
1378 (*fun) (dest, x, data);
1381 else if (GET_CODE (x) == PARALLEL)
1382 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1383 note_stores (XVECEXP (x, 0, i), fun, data);
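
/* Editorial example, not in the original file: note_stores is normally
   used with a small callback.  This hypothetical pair counts the
   registers stored into or clobbered by one insn pattern.  */
static void count_stores_1 (rtx, rtx, void *);
static void
count_stores_1 (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  if (REG_P (dest))
    ++*(int *) data;
}

static int count_reg_stores (rtx) ATTRIBUTE_UNUSED;
static int
count_reg_stores (rtx insn)
{
  int n = 0;
  note_stores (PATTERN (insn), count_stores_1, &n);
  return n;
}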
1386 /* Like note_stores, but call FUN for each expression that is being
1387 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1388 FUN for each expression, not any interior subexpressions. FUN receives a
1389 pointer to the expression and the DATA passed to this function.
1391 Note that this is not quite the same test as that done in reg_referenced_p
1392 since that considers something as being referenced if it is being
1393 partially set, while we do not. */
1395 void
1396 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1398 rtx body = *pbody;
1399 int i;
1401 switch (GET_CODE (body))
1403 case COND_EXEC:
1404 (*fun) (&COND_EXEC_TEST (body), data);
1405 note_uses (&COND_EXEC_CODE (body), fun, data);
1406 return;
1408 case PARALLEL:
1409 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1410 note_uses (&XVECEXP (body, 0, i), fun, data);
1411 return;
1413 case USE:
1414 (*fun) (&XEXP (body, 0), data);
1415 return;
1417 case ASM_OPERANDS:
1418 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1419 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1420 return;
1422 case TRAP_IF:
1423 (*fun) (&TRAP_CONDITION (body), data);
1424 return;
1426 case PREFETCH:
1427 (*fun) (&XEXP (body, 0), data);
1428 return;
1430 case UNSPEC:
1431 case UNSPEC_VOLATILE:
1432 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1433 (*fun) (&XVECEXP (body, 0, i), data);
1434 return;
1436 case CLOBBER:
1437 if (MEM_P (XEXP (body, 0)))
1438 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1439 return;
1441 case SET:
1443 rtx dest = SET_DEST (body);
1445 /* For a SET we call FUN on the source, on any registers mentioned in a
1446 memory destination, and on the size and position operands of a ZERO_EXTRACT. */
1447 (*fun) (&SET_SRC (body), data);
1449 if (GET_CODE (dest) == ZERO_EXTRACT)
1451 (*fun) (&XEXP (dest, 1), data);
1452 (*fun) (&XEXP (dest, 2), data);
1455 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1456 dest = XEXP (dest, 0);
1458 if (MEM_P (dest))
1459 (*fun) (&XEXP (dest, 0), data);
1461 return;
1463 default:
1464 /* All the other possibilities never store. */
1465 (*fun) (pbody, data);
1466 return;
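
/* Editorial sketch: note_uses hands the callback a pointer to each used
   expression, which also lets a callback replace uses in place.  The
   counting callback below is hypothetical.  */
static void count_uses_1 (rtx *, void *);
static void
count_uses_1 (rtx *expr ATTRIBUTE_UNUSED, void *data)
{
  ++*(int *) data;
}

static int count_uses (rtx) ATTRIBUTE_UNUSED;
static int
count_uses (rtx insn)
{
  int n = 0;
  note_uses (&PATTERN (insn), count_uses_1, &n);
  return n;
}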
1470 /* Return nonzero if X's old contents don't survive after INSN.
1471 This will be true if X is (cc0) or if X is a register and
1472 X dies in INSN or because INSN entirely sets X.
1474 "Entirely set" means set directly and not through a SUBREG, or
1475 ZERO_EXTRACT, so no trace of the old contents remains.
1476 Likewise, REG_INC does not count.
1478 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1479 but for this use that makes no difference, since regs don't overlap
1480 during their lifetimes. Therefore, this function may be used
1481 at any time after deaths have been computed (in flow.c).
1483 If REG is a hard reg that occupies multiple machine registers, this
1484 function will only return 1 if each of those registers will be replaced
1485 by INSN. */
1488 dead_or_set_p (rtx insn, rtx x)
1490 unsigned int regno, last_regno;
1491 unsigned int i;
1493 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1494 if (GET_CODE (x) == CC0)
1495 return 1;
1497 gcc_assert (REG_P (x));
1499 regno = REGNO (x);
1500 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1501 : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
1503 for (i = regno; i <= last_regno; i++)
1504 if (! dead_or_set_regno_p (insn, i))
1505 return 0;
1507 return 1;
1510 /* Return TRUE iff DEST is a register or subreg of a register and
1511 doesn't change the number of words of the inner register, and any
1512 part of the register is TEST_REGNO. */
1514 static bool
1515 covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
1517 unsigned int regno, endregno;
1519 if (GET_CODE (dest) == SUBREG
1520 && (((GET_MODE_SIZE (GET_MODE (dest))
1521 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1522 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1523 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1524 dest = SUBREG_REG (dest);
1526 if (!REG_P (dest))
1527 return false;
1529 regno = REGNO (dest);
1530 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1531 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1532 return (test_regno >= regno && test_regno < endregno);
1535 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1536 any member matches the covers_regno_no_parallel_p criteria. */
1538 static bool
1539 covers_regno_p (rtx dest, unsigned int test_regno)
1541 if (GET_CODE (dest) == PARALLEL)
1543 /* Some targets place small structures in registers for return
1544 values of functions, and those registers are wrapped in
1545 PARALLELs that we may see as the destination of a SET. */
1546 int i;
1548 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1550 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1551 if (inner != NULL_RTX
1552 && covers_regno_no_parallel_p (inner, test_regno))
1553 return true;
1556 return false;
1558 else
1559 return covers_regno_no_parallel_p (dest, test_regno);
1562 /* Utility function for dead_or_set_p to check an individual register. Also
1563 called from flow.c. */
1566 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1568 rtx pattern;
1570 /* See if there is a death note for something that includes TEST_REGNO. */
1571 if (find_regno_note (insn, REG_DEAD, test_regno))
1572 return 1;
1574 if (CALL_P (insn)
1575 && find_regno_fusage (insn, CLOBBER, test_regno))
1576 return 1;
1578 pattern = PATTERN (insn);
1580 if (GET_CODE (pattern) == COND_EXEC)
1581 pattern = COND_EXEC_CODE (pattern);
1583 if (GET_CODE (pattern) == SET)
1584 return covers_regno_p (SET_DEST (pattern), test_regno);
1585 else if (GET_CODE (pattern) == PARALLEL)
1587 int i;
1589 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1591 rtx body = XVECEXP (pattern, 0, i);
1593 if (GET_CODE (body) == COND_EXEC)
1594 body = COND_EXEC_CODE (body);
1596 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1597 && covers_regno_p (SET_DEST (body), test_regno))
1598 return 1;
1602 return 0;
1605 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1606 If DATUM is nonzero, look for one whose datum is DATUM. */
1609 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1611 rtx link;
1613 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1614 if (! INSN_P (insn))
1615 return 0;
1616 if (datum == 0)
1618 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1619 if (REG_NOTE_KIND (link) == kind)
1620 return link;
1621 return 0;
1624 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1625 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1626 return link;
1627 return 0;
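
/* Editorial sketch (hypothetical helper): the typical use of
   find_reg_note is to fetch the value attached to a note, e.g. the
   known-equal value from a REG_EQUAL note.  */
static rtx insn_reg_equal_value (rtx) ATTRIBUTE_UNUSED;
static rtx
insn_reg_equal_value (rtx insn)
{
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  return note ? XEXP (note, 0) : NULL_RTX;
}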
1630 /* Return the reg-note of kind KIND in insn INSN which applies to register
1631 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1632 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1633 it might be the case that the note overlaps REGNO. */
1636 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1638 rtx link;
1640 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1641 if (! INSN_P (insn))
1642 return 0;
1644 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1645 if (REG_NOTE_KIND (link) == kind
1646 /* Verify that it is a register, so that scratch and MEM won't cause a
1647 problem here. */
1648 && REG_P (XEXP (link, 0))
1649 && REGNO (XEXP (link, 0)) <= regno
1650 && ((REGNO (XEXP (link, 0))
1651 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1652 : hard_regno_nregs[REGNO (XEXP (link, 0))]
1653 [GET_MODE (XEXP (link, 0))]))
1654 > regno))
1655 return link;
1656 return 0;
1659 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1660 has such a note. */
1663 find_reg_equal_equiv_note (rtx insn)
1665 rtx link;
1667 if (!INSN_P (insn))
1668 return 0;
1669 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1670 if (REG_NOTE_KIND (link) == REG_EQUAL
1671 || REG_NOTE_KIND (link) == REG_EQUIV)
1673 if (single_set (insn) == 0)
1674 return 0;
1675 return link;
1677 return NULL;
1680 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1681 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1684 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1686 /* If it's not a CALL_INSN, it can't possibly have a
1687 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1688 if (!CALL_P (insn))
1689 return 0;
1691 gcc_assert (datum);
1693 if (!REG_P (datum))
1695 rtx link;
1697 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1698 link;
1699 link = XEXP (link, 1))
1700 if (GET_CODE (XEXP (link, 0)) == code
1701 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1702 return 1;
1704 else
1706 unsigned int regno = REGNO (datum);
1708 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1709 to pseudo registers, so don't bother checking. */
1711 if (regno < FIRST_PSEUDO_REGISTER)
1713 unsigned int end_regno
1714 = regno + hard_regno_nregs[regno][GET_MODE (datum)];
1715 unsigned int i;
1717 for (i = regno; i < end_regno; i++)
1718 if (find_regno_fusage (insn, code, i))
1719 return 1;
1723 return 0;
1726 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1727 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1730 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
1732 rtx link;
1734 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1735 to pseudo registers, so don't bother checking. */
1737 if (regno >= FIRST_PSEUDO_REGISTER
1738 || !CALL_P (insn) )
1739 return 0;
1741 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1743 unsigned int regnote;
1744 rtx op, reg;
1746 if (GET_CODE (op = XEXP (link, 0)) == code
1747 && REG_P (reg = XEXP (op, 0))
1748 && (regnote = REGNO (reg)) <= regno
1749 && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
1750 return 1;
1753 return 0;
1756 /* Return true if INSN is a call to a pure function. */
1759 pure_call_p (rtx insn)
1761 rtx link;
1763 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1764 return 0;
1766 /* Look for the note that differentiates const and pure functions. */
1767 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1769 rtx u, m;
1771 if (GET_CODE (u = XEXP (link, 0)) == USE
1772 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1773 && GET_CODE (XEXP (m, 0)) == SCRATCH)
1774 return 1;
1777 return 0;
1780 /* Remove register note NOTE from the REG_NOTES of INSN. */
1782 void
1783 remove_note (rtx insn, rtx note)
1785 rtx link;
1787 if (note == NULL_RTX)
1788 return;
1790 if (REG_NOTES (insn) == note)
1792 REG_NOTES (insn) = XEXP (note, 1);
1793 return;
1796 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1797 if (XEXP (link, 1) == note)
1799 XEXP (link, 1) = XEXP (note, 1);
1800 return;
1803 gcc_unreachable ();
1806 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1807 return 1 if it is found. A simple equality test is used to determine if
1808 NODE matches. */
1811 in_expr_list_p (rtx listp, rtx node)
1813 rtx x;
1815 for (x = listp; x; x = XEXP (x, 1))
1816 if (node == XEXP (x, 0))
1817 return 1;
1819 return 0;
1822 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1823 remove that entry from the list if it is found.
1825 A simple equality test is used to determine if NODE matches. */
1827 void
1828 remove_node_from_expr_list (rtx node, rtx *listp)
1830 rtx temp = *listp;
1831 rtx prev = NULL_RTX;
1833 while (temp)
1835 if (node == XEXP (temp, 0))
1837 /* Splice the node out of the list. */
1838 if (prev)
1839 XEXP (prev, 1) = XEXP (temp, 1);
1840 else
1841 *listp = XEXP (temp, 1);
1843 return;
1846 prev = temp;
1847 temp = XEXP (temp, 1);
1851 /* Nonzero if X contains any volatile instructions. These are instructions
1852 which may cause unpredictable machine state, and thus no
1853 instructions should be moved or combined across them. This includes
1854 only volatile asms and UNSPEC_VOLATILE instructions. */
1857 volatile_insn_p (rtx x)
1859 RTX_CODE code;
1861 code = GET_CODE (x);
1862 switch (code)
1864 case LABEL_REF:
1865 case SYMBOL_REF:
1866 case CONST_INT:
1867 case CONST:
1868 case CONST_DOUBLE:
1869 case CONST_VECTOR:
1870 case CC0:
1871 case PC:
1872 case REG:
1873 case SCRATCH:
1874 case CLOBBER:
1875 case ADDR_VEC:
1876 case ADDR_DIFF_VEC:
1877 case CALL:
1878 case MEM:
1879 return 0;
1881 case UNSPEC_VOLATILE:
1882 /* case TRAP_IF: This isn't clear yet. */
1883 return 1;
1885 case ASM_INPUT:
1886 case ASM_OPERANDS:
1887 if (MEM_VOLATILE_P (x))
1888 return 1;
1890 default:
1891 break;
1894 /* Recursively scan the operands of this expression. */
1897 const char *fmt = GET_RTX_FORMAT (code);
1898 int i;
1900 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1902 if (fmt[i] == 'e')
1904 if (volatile_insn_p (XEXP (x, i)))
1905 return 1;
1907 else if (fmt[i] == 'E')
1909 int j;
1910 for (j = 0; j < XVECLEN (x, i); j++)
1911 if (volatile_insn_p (XVECEXP (x, i, j)))
1912 return 1;
1916 return 0;
1919 /* Nonzero if X contains any volatile memory references,
1920 UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions. */
1923 volatile_refs_p (rtx x)
1925 RTX_CODE code;
1927 code = GET_CODE (x);
1928 switch (code)
1930 case LABEL_REF:
1931 case SYMBOL_REF:
1932 case CONST_INT:
1933 case CONST:
1934 case CONST_DOUBLE:
1935 case CONST_VECTOR:
1936 case CC0:
1937 case PC:
1938 case REG:
1939 case SCRATCH:
1940 case CLOBBER:
1941 case ADDR_VEC:
1942 case ADDR_DIFF_VEC:
1943 return 0;
1945 case UNSPEC_VOLATILE:
1946 return 1;
1948 case MEM:
1949 case ASM_INPUT:
1950 case ASM_OPERANDS:
1951 if (MEM_VOLATILE_P (x))
1952 return 1;
1954 default:
1955 break;
1958 /* Recursively scan the operands of this expression. */
1961 const char *fmt = GET_RTX_FORMAT (code);
1962 int i;
1964 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1966 if (fmt[i] == 'e')
1968 if (volatile_refs_p (XEXP (x, i)))
1969 return 1;
1971 else if (fmt[i] == 'E')
1973 int j;
1974 for (j = 0; j < XVECLEN (x, i); j++)
1975 if (volatile_refs_p (XVECEXP (x, i, j)))
1976 return 1;
1980 return 0;
1983 /* Similar to above, except that it also rejects register pre- and post-
1984 incrementing. */
1987 side_effects_p (rtx x)
1989 RTX_CODE code;
1991 code = GET_CODE (x);
1992 switch (code)
1994 case LABEL_REF:
1995 case SYMBOL_REF:
1996 case CONST_INT:
1997 case CONST:
1998 case CONST_DOUBLE:
1999 case CONST_VECTOR:
2000 case CC0:
2001 case PC:
2002 case REG:
2003 case SCRATCH:
2004 case ADDR_VEC:
2005 case ADDR_DIFF_VEC:
2006 return 0;
2008 case CLOBBER:
2009 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2010 when some combination can't be done. If we see one, don't think
2011 that we can simplify the expression. */
2012 return (GET_MODE (x) != VOIDmode);
2014 case PRE_INC:
2015 case PRE_DEC:
2016 case POST_INC:
2017 case POST_DEC:
2018 case PRE_MODIFY:
2019 case POST_MODIFY:
2020 case CALL:
2021 case UNSPEC_VOLATILE:
2022 /* case TRAP_IF: This isn't clear yet. */
2023 return 1;
2025 case MEM:
2026 case ASM_INPUT:
2027 case ASM_OPERANDS:
2028 if (MEM_VOLATILE_P (x))
2029 return 1;
2031 default:
2032 break;
2035 /* Recursively scan the operands of this expression. */
2038 const char *fmt = GET_RTX_FORMAT (code);
2039 int i;
2041 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2043 if (fmt[i] == 'e')
2045 if (side_effects_p (XEXP (x, i)))
2046 return 1;
2048 else if (fmt[i] == 'E')
2050 int j;
2051 for (j = 0; j < XVECLEN (x, i); j++)
2052 if (side_effects_p (XVECEXP (x, i, j)))
2053 return 1;
2057 return 0;
2060 /* Return nonzero if evaluating rtx X might cause a trap. */
2063 may_trap_p (rtx x)
2065 int i;
2066 enum rtx_code code;
2067 const char *fmt;
2069 if (x == 0)
2070 return 0;
2071 code = GET_CODE (x);
2072 switch (code)
2074 /* Handle these cases quickly. */
2075 case CONST_INT:
2076 case CONST_DOUBLE:
2077 case CONST_VECTOR:
2078 case SYMBOL_REF:
2079 case LABEL_REF:
2080 case CONST:
2081 case PC:
2082 case CC0:
2083 case REG:
2084 case SCRATCH:
2085 return 0;
2087 case ASM_INPUT:
2088 case UNSPEC_VOLATILE:
2089 case TRAP_IF:
2090 return 1;
2092 case ASM_OPERANDS:
2093 return MEM_VOLATILE_P (x);
2095 /* Memory ref can trap unless it's a static var or a stack slot. */
2096 case MEM:
2097 if (MEM_NOTRAP_P (x))
2098 return 0;
2099 return rtx_addr_can_trap_p (XEXP (x, 0));
2101 /* Division by a non-constant might trap. */
2102 case DIV:
2103 case MOD:
2104 case UDIV:
2105 case UMOD:
2106 if (HONOR_SNANS (GET_MODE (x)))
2107 return 1;
2108 if (! CONSTANT_P (XEXP (x, 1))
2109 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2110 && flag_trapping_math))
2111 return 1;
2112 if (XEXP (x, 1) == const0_rtx)
2113 return 1;
2114 break;
2116 case EXPR_LIST:
2117 /* An EXPR_LIST is used to represent a function call. This
2118 certainly may trap. */
2119 return 1;
2121 case GE:
2122 case GT:
2123 case LE:
2124 case LT:
2125 case LTGT:
2126 case COMPARE:
2127 /* Some floating point comparisons may trap. */
2128 if (!flag_trapping_math)
2129 break;
2130 /* ??? There is no machine independent way to check for tests that trap
2131 when COMPARE is used, though many targets do make this distinction.
2132 For instance, sparc uses CCFPE for compares which generate exceptions
2133 and CCFP for compares which do not generate exceptions. */
2134 if (HONOR_NANS (GET_MODE (x)))
2135 return 1;
2136 /* But often the compare has some CC mode, so check operand
2137 modes as well. */
2138 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2139 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2140 return 1;
2141 break;
2143 case EQ:
2144 case NE:
2145 if (HONOR_SNANS (GET_MODE (x)))
2146 return 1;
2147 /* Often comparison is CC mode, so check operand modes. */
2148 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2149 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2150 return 1;
2151 break;
2153 case FIX:
2154 /* Conversion of floating point might trap. */
2155 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2156 return 1;
2157 break;
2159 case NEG:
2160 case ABS:
2161 /* These operations don't trap even with floating point. */
2162 break;
2164 default:
2165 /* Any floating arithmetic may trap. */
2166 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2167 && flag_trapping_math)
2168 return 1;
2171 fmt = GET_RTX_FORMAT (code);
2172 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2174 if (fmt[i] == 'e')
2176 if (may_trap_p (XEXP (x, i)))
2177 return 1;
2179 else if (fmt[i] == 'E')
2181 int j;
2182 for (j = 0; j < XVECLEN (x, i); j++)
2183 if (may_trap_p (XVECEXP (x, i, j)))
2184 return 1;
2187 return 0;
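
/* Editorial sketch: speculation is the classic client of may_trap_p.
   An expression can be evaluated before its guarding branch only when
   it can neither trap nor carry side effects; the helper name is
   hypothetical.  */
static int may_speculate_p (rtx) ATTRIBUTE_UNUSED;
static int
may_speculate_p (rtx x)
{
  return ! may_trap_p (x) && ! side_effects_p (x);
}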
2190 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2191 i.e., an inequality. */
2194 inequality_comparisons_p (rtx x)
2196 const char *fmt;
2197 int len, i;
2198 enum rtx_code code = GET_CODE (x);
2200 switch (code)
2202 case REG:
2203 case SCRATCH:
2204 case PC:
2205 case CC0:
2206 case CONST_INT:
2207 case CONST_DOUBLE:
2208 case CONST_VECTOR:
2209 case CONST:
2210 case LABEL_REF:
2211 case SYMBOL_REF:
2212 return 0;
2214 case LT:
2215 case LTU:
2216 case GT:
2217 case GTU:
2218 case LE:
2219 case LEU:
2220 case GE:
2221 case GEU:
2222 return 1;
2224 default:
2225 break;
2228 len = GET_RTX_LENGTH (code);
2229 fmt = GET_RTX_FORMAT (code);
2231 for (i = 0; i < len; i++)
2233 if (fmt[i] == 'e')
2235 if (inequality_comparisons_p (XEXP (x, i)))
2236 return 1;
2238 else if (fmt[i] == 'E')
2240 int j;
2241 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2242 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2243 return 1;
2247 return 0;
2250 /* Replace any occurrence of FROM in X with TO. The function does
2251 not descend into CONST_DOUBLEs when replacing.
2253 Note that copying is not done so X must not be shared unless all copies
2254 are to be modified. */
2257 replace_rtx (rtx x, rtx from, rtx to)
2259 int i, j;
2260 const char *fmt;
2262 /* The following prevents loops from occurring when we change a MEM in
2263 a CONST_DOUBLE onto the same CONST_DOUBLE. */
2264 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2265 return x;
2267 if (x == from)
2268 return to;
2270 /* Allow this function to make replacements in EXPR_LISTs. */
2271 if (x == 0)
2272 return 0;
2274 if (GET_CODE (x) == SUBREG)
2276 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2278 if (GET_CODE (new) == CONST_INT)
2280 x = simplify_subreg (GET_MODE (x), new,
2281 GET_MODE (SUBREG_REG (x)),
2282 SUBREG_BYTE (x));
2283 gcc_assert (x);
2285 else
2286 SUBREG_REG (x) = new;
2288 return x;
2290 else if (GET_CODE (x) == ZERO_EXTEND)
2292 rtx new = replace_rtx (XEXP (x, 0), from, to);
2294 if (GET_CODE (new) == CONST_INT)
2296 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2297 new, GET_MODE (XEXP (x, 0)));
2298 gcc_assert (x);
2300 else
2301 XEXP (x, 0) = new;
2303 return x;
2306 fmt = GET_RTX_FORMAT (GET_CODE (x));
2307 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2309 if (fmt[i] == 'e')
2310 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2311 else if (fmt[i] == 'E')
2312 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2313 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2316 return x;
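
/* Editorial sketch: because replace_rtx modifies X destructively, a
   caller that must preserve the original typically works on a copy, as
   in this hypothetical wrapper.  */
static rtx replace_rtx_in_copy (rtx, rtx, rtx) ATTRIBUTE_UNUSED;
static rtx
replace_rtx_in_copy (rtx x, rtx from, rtx to)
{
  return replace_rtx (copy_rtx (x), from, to);
}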
2319 /* Throughout the rtx X, replace many registers according to REG_MAP.
2320 Return the replacement for X (which may be X with altered contents).
2321 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2322 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2324 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2325 should not be mapped to pseudos or vice versa since validate_change
2326 is not called.
2328 If REPLACE_DEST is 1, replacements are also done in destinations;
2329 otherwise, only sources are replaced. */
2332 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2334 enum rtx_code code;
2335 int i;
2336 const char *fmt;
2338 if (x == 0)
2339 return x;
2341 code = GET_CODE (x);
2342 switch (code)
2344 case SCRATCH:
2345 case PC:
2346 case CC0:
2347 case CONST_INT:
2348 case CONST_DOUBLE:
2349 case CONST_VECTOR:
2350 case CONST:
2351 case SYMBOL_REF:
2352 case LABEL_REF:
2353 return x;
2355 case REG:
2356 /* Verify that the register has an entry before trying to access it. */
2357 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2359 /* SUBREGs can't be shared. Always return a copy to ensure that if
2360 this replacement occurs more than once then each instance will
2361 get distinct rtx. */
2362 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2363 return copy_rtx (reg_map[REGNO (x)]);
2364 return reg_map[REGNO (x)];
2366 return x;
2368 case SUBREG:
2369 /* Prevent making nested SUBREGs. */
2370 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2371 && reg_map[REGNO (SUBREG_REG (x))] != 0
2372 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2374 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2375 return simplify_gen_subreg (GET_MODE (x), map_val,
2376 GET_MODE (SUBREG_REG (x)),
2377 SUBREG_BYTE (x));
2379 break;
2381 case SET:
2382 if (replace_dest)
2383 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2385 else if (MEM_P (SET_DEST (x))
2386 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2387 /* Even if we are not replacing destinations, replace a register if it
2388 is CONTAINED in the destination (i.e., the destination is memory or a
2389 STRICT_LOW_PART). */
2390 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2391 reg_map, nregs, 0);
2392 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2393 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2394 break;
2396 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2397 return x;
2399 default:
2400 break;
2403 fmt = GET_RTX_FORMAT (code);
2404 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2406 if (fmt[i] == 'e')
2407 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2408 else if (fmt[i] == 'E')
2410 int j;
2411 for (j = 0; j < XVECLEN (x, i); j++)
2412 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2413 nregs, replace_dest);
2416 return x;
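/* Usage sketch (hypothetical): remap one pseudo to another throughout
   the sources of X.  max_reg_num and xcalloc are the usual
   GCC/libiberty helpers; OLD_REG and NEW_REG are assumed locals:

     unsigned int nregs = max_reg_num ();
     rtx *map = xcalloc (nregs, sizeof (rtx));
     map[REGNO (old_reg)] = new_reg;
     x = replace_regs (x, map, nregs, 0);
     free (map);  */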
2419 /* Replace occurrences of the old label in *X with the new one.
2420 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2422 int
2423 replace_label (rtx *x, void *data)
2425 rtx l = *x;
2426 rtx old_label = ((replace_label_data *) data)->r1;
2427 rtx new_label = ((replace_label_data *) data)->r2;
2428 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2430 if (l == NULL_RTX)
2431 return 0;
2433 if (GET_CODE (l) == SYMBOL_REF
2434 && CONSTANT_POOL_ADDRESS_P (l))
2436 rtx c = get_pool_constant (l);
2437 if (rtx_referenced_p (old_label, c))
2439 rtx new_c, new_l;
2440 replace_label_data *d = (replace_label_data *) data;
2442 /* Create a copy of constant C; replace the label inside
2443 but do not update LABEL_NUSES because uses in constant pool
2444 are not counted. */
2445 new_c = copy_rtx (c);
2446 d->update_label_nuses = false;
2447 for_each_rtx (&new_c, replace_label, data);
2448 d->update_label_nuses = update_label_nuses;
2450 /* Add the new constant NEW_C to the constant pool and replace
2451 the old reference to the constant with the new one. */
2452 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2453 *x = replace_rtx (l, l, new_l);
2455 return 0;
2458 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2459 field. This is not handled by for_each_rtx because it doesn't
2460 handle unprinted ('0') fields. */
2461 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2462 JUMP_LABEL (l) = new_label;
2464 if ((GET_CODE (l) == LABEL_REF
2465 || GET_CODE (l) == INSN_LIST)
2466 && XEXP (l, 0) == old_label)
2468 XEXP (l, 0) = new_label;
2469 if (update_label_nuses)
2471 ++LABEL_NUSES (new_label);
2472 --LABEL_NUSES (old_label);
2474 return 0;
2477 return 0;
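/* Usage sketch: replace_label is written as a for_each_rtx callback.
   To rewrite every use of OLD_LABEL in INSN (both hypothetical):

     replace_label_data rld;
     rld.r1 = old_label;
     rld.r2 = new_label;
     rld.update_label_nuses = true;
     for_each_rtx (&insn, replace_label, &rld);  */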
2480 /* If *BODY equals X, or X is directly referenced by *BODY, return
2481 nonzero so that FOR_EACH_RTX stops traversing and returns nonzero
2482 too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2484 static int
2485 rtx_referenced_p_1 (rtx *body, void *x)
2487 rtx y = (rtx) x;
2489 if (*body == NULL_RTX)
2490 return y == NULL_RTX;
2492 /* Return true if a label_ref *BODY refers to label Y. */
2493 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2494 return XEXP (*body, 0) == y;
2496 /* If *BODY is a reference to a pool constant, traverse the constant. */
2497 if (GET_CODE (*body) == SYMBOL_REF
2498 && CONSTANT_POOL_ADDRESS_P (*body))
2499 return rtx_referenced_p (y, get_pool_constant (*body));
2501 /* By default, compare the RTL expressions. */
2502 return rtx_equal_p (*body, y);
2505 /* Return true if X is referenced in BODY. */
2507 int
2508 rtx_referenced_p (rtx x, rtx body)
2510 return for_each_rtx (&body, rtx_referenced_p_1, x);
2513 /* If INSN is a tablejump, return true and store the label (which precedes
2514 the jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2516 bool
2517 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2519 rtx label, table;
2521 if (JUMP_P (insn)
2522 && (label = JUMP_LABEL (insn)) != NULL_RTX
2523 && (table = next_active_insn (label)) != NULL_RTX
2524 && JUMP_P (table)
2525 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2526 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2528 if (labelp)
2529 *labelp = label;
2530 if (tablep)
2531 *tablep = table;
2532 return true;
2534 return false;
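/* Usage sketch: the two out parameters give direct access to the
   dispatch table, mirroring label_is_jump_target_p below.  INSN is a
   hypothetical local:

     rtx label, table;
     if (tablejump_p (insn, &label, &table))
       {
         rtvec vec = XVEC (PATTERN (table),
                           GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
         ... iterate over the GET_NUM_ELEM (vec) entries ...
       }
*/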
2537 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2538 constant that is not in the constant pool and not in the condition
2539 of an IF_THEN_ELSE. */
2541 static int
2542 computed_jump_p_1 (rtx x)
2544 enum rtx_code code = GET_CODE (x);
2545 int i, j;
2546 const char *fmt;
2548 switch (code)
2550 case LABEL_REF:
2551 case PC:
2552 return 0;
2554 case CONST:
2555 case CONST_INT:
2556 case CONST_DOUBLE:
2557 case CONST_VECTOR:
2558 case SYMBOL_REF:
2559 case REG:
2560 return 1;
2562 case MEM:
2563 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2564 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2566 case IF_THEN_ELSE:
2567 return (computed_jump_p_1 (XEXP (x, 1))
2568 || computed_jump_p_1 (XEXP (x, 2)));
2570 default:
2571 break;
2574 fmt = GET_RTX_FORMAT (code);
2575 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2577 if (fmt[i] == 'e'
2578 && computed_jump_p_1 (XEXP (x, i)))
2579 return 1;
2581 else if (fmt[i] == 'E')
2582 for (j = 0; j < XVECLEN (x, i); j++)
2583 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2584 return 1;
2587 return 0;
2590 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2592 Tablejumps and casesi insns are not considered indirect jumps;
2593 we can recognize them by a (use (label_ref)). */
2595 int
2596 computed_jump_p (rtx insn)
2598 int i;
2599 if (JUMP_P (insn))
2601 rtx pat = PATTERN (insn);
2603 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2604 return 0;
2605 else if (GET_CODE (pat) == PARALLEL)
2607 int len = XVECLEN (pat, 0);
2608 int has_use_labelref = 0;
2610 for (i = len - 1; i >= 0; i--)
2611 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2612 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2613 == LABEL_REF))
2614 has_use_labelref = 1;
2616 if (! has_use_labelref)
2617 for (i = len - 1; i >= 0; i--)
2618 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2619 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2620 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2621 return 1;
2623 else if (GET_CODE (pat) == SET
2624 && SET_DEST (pat) == pc_rtx
2625 && computed_jump_p_1 (SET_SRC (pat)))
2626 return 1;
2628 return 0;
2631 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2632 calls. Processes the subexpressions of EXP and passes them to F. */
2633 static int
2634 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2636 int result, i, j;
2637 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2638 rtx *x;
2640 for (; format[n] != '\0'; n++)
2642 switch (format[n])
2644 case 'e':
2645 /* Call F on X. */
2646 x = &XEXP (exp, n);
2647 result = (*f) (x, data);
2648 if (result == -1)
2649 /* Do not traverse sub-expressions. */
2650 continue;
2651 else if (result != 0)
2652 /* Stop the traversal. */
2653 return result;
2655 if (*x == NULL_RTX)
2656 /* There are no sub-expressions. */
2657 continue;
2659 i = non_rtx_starting_operands[GET_CODE (*x)];
2660 if (i >= 0)
2662 result = for_each_rtx_1 (*x, i, f, data);
2663 if (result != 0)
2664 return result;
2666 break;
2668 case 'V':
2669 case 'E':
2670 if (XVEC (exp, n) == 0)
2671 continue;
2672 for (j = 0; j < XVECLEN (exp, n); ++j)
2674 /* Call F on X. */
2675 x = &XVECEXP (exp, n, j);
2676 result = (*f) (x, data);
2677 if (result == -1)
2678 /* Do not traverse sub-expressions. */
2679 continue;
2680 else if (result != 0)
2681 /* Stop the traversal. */
2682 return result;
2684 if (*x == NULL_RTX)
2685 /* There are no sub-expressions. */
2686 continue;
2688 i = non_rtx_starting_operands[GET_CODE (*x)];
2689 if (i >= 0)
2691 result = for_each_rtx_1 (*x, i, f, data);
2692 if (result != 0)
2693 return result;
2696 break;
2698 default:
2699 /* Nothing to do. */
2700 break;
2704 return 0;
2707 /* Traverse X via depth-first search, calling F for each
2708 sub-expression (including X itself). F is also passed the DATA.
2709 If F returns -1, do not traverse sub-expressions, but continue
2710 traversing the rest of the tree. If F ever returns any other
2711 nonzero value, stop the traversal, and return the value returned
2712 by F. Otherwise, return 0. This function does not traverse inside
2713 tree structures that contain RTX_EXPRs, or into sub-expressions
2714 whose format code is `0' since it is not known whether or not those
2715 codes are actually RTL.
2717 This routine is very general, and could (should?) be used to
2718 implement many of the other routines in this file. */
2720 int
2721 for_each_rtx (rtx *x, rtx_function f, void *data)
2723 int result;
2724 int i;
2726 /* Call F on X. */
2727 result = (*f) (x, data);
2728 if (result == -1)
2729 /* Do not traverse sub-expressions. */
2730 return 0;
2731 else if (result != 0)
2732 /* Stop the traversal. */
2733 return result;
2735 if (*x == NULL_RTX)
2736 /* There are no sub-expressions. */
2737 return 0;
2739 i = non_rtx_starting_operands[GET_CODE (*x)];
2740 if (i < 0)
2741 return 0;
2743 return for_each_rtx_1 (*x, i, f, data);
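/* Usage sketch: a minimal callback that stops the walk at the first
   MEM found anywhere in PAT (find_mem is hypothetical):

     static int
     find_mem (rtx *x, void *data ATTRIBUTE_UNUSED)
     {
       return *x != NULL_RTX && MEM_P (*x);
     }

   and then:  if (for_each_rtx (&pat, find_mem, NULL)) ...  */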
2747 /* Searches X for any reference to REGNO, returning the rtx of the
2748 reference found if any. Otherwise, returns NULL_RTX. */
2750 rtx
2751 regno_use_in (unsigned int regno, rtx x)
2753 const char *fmt;
2754 int i, j;
2755 rtx tem;
2757 if (REG_P (x) && REGNO (x) == regno)
2758 return x;
2760 fmt = GET_RTX_FORMAT (GET_CODE (x));
2761 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2763 if (fmt[i] == 'e')
2765 if ((tem = regno_use_in (regno, XEXP (x, i))))
2766 return tem;
2768 else if (fmt[i] == 'E')
2769 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2770 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2771 return tem;
2774 return NULL_RTX;
2777 /* Return a value indicating whether OP, an operand of a commutative
2778 operation, is preferred as the first or second operand. The higher
2779 the value, the stronger the preference for being the first operand.
2780 We use negative values to indicate a preference for being the second
2781 operand and positive values for the first operand. */
2783 int
2784 commutative_operand_precedence (rtx op)
2786 enum rtx_code code = GET_CODE (op);
2788 /* Constants always come second. Prefer "nice" constants. */
2789 if (code == CONST_INT)
2790 return -7;
2791 if (code == CONST_DOUBLE)
2792 return -6;
2793 op = avoid_constant_pool_reference (op);
2794 code = GET_CODE (op);
2796 switch (GET_RTX_CLASS (code))
2798 case RTX_CONST_OBJ:
2799 if (code == CONST_INT)
2800 return -5;
2801 if (code == CONST_DOUBLE)
2802 return -4;
2803 return -3;
2805 case RTX_EXTRA:
2806 /* SUBREGs of objects should come second. */
2807 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2808 return -2;
2810 if (!CONSTANT_P (op))
2811 return 0;
2812 else
2813 /* As for RTX_CONST_OBJ. */
2814 return -3;
2816 case RTX_OBJ:
2817 /* Complex expressions should come first, so decrease the priority
2818 of objects. */
2819 return -1;
2821 case RTX_COMM_ARITH:
2822 /* Prefer operands that are themselves commutative to be first.
2823 This helps to make things linear. In particular,
2824 (and (and (reg) (reg)) (not (reg))) is canonical. */
2825 return 4;
2827 case RTX_BIN_ARITH:
2828 /* If only one operand is a binary expression, it will be the first
2829 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2830 is canonical, although it will usually be further simplified. */
2831 return 2;
2833 case RTX_UNARY:
2834 /* Then prefer NEG and NOT. */
2835 if (code == NEG || code == NOT)
2836 return 1;
2838 default:
2839 return 0;
2843 /* Return 1 iff it is necessary to swap operands of commutative operation
2844 in order to canonicalize expression. */
2846 int
2847 swap_commutative_operands_p (rtx x, rtx y)
2849 return (commutative_operand_precedence (x)
2850 < commutative_operand_precedence (y));
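/* Usage sketch: the canonicalization idiom used by callers such as
   simplify-rtx.c; swap so the higher-precedence operand comes first:

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   so e.g. (plus (const_int 4) (reg)) becomes (plus (reg) (const_int 4)).  */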
2853 /* Return 1 if X is an autoincrement side effect and the register is
2854 not the stack pointer. */
2855 int
2856 auto_inc_p (rtx x)
2858 switch (GET_CODE (x))
2860 case PRE_INC:
2861 case POST_INC:
2862 case PRE_DEC:
2863 case POST_DEC:
2864 case PRE_MODIFY:
2865 case POST_MODIFY:
2866 /* There are no REG_INC notes for SP. */
2867 if (XEXP (x, 0) != stack_pointer_rtx)
2868 return 1;
2869 default:
2870 break;
2872 return 0;
2875 /* Return 1 if the sequence of instructions beginning with FROM and up
2876 to and including TO is safe to move. If NEW_TO is non-NULL, and
2877 the sequence is not already safe to move, but can be easily
2878 extended to a sequence which is safe, then NEW_TO will point to the
2879 end of the extended sequence.
2881 For now, this function only checks that the region contains whole
2882 exception regions, but it could be extended to check additional
2883 conditions as well. */
2885 int
2886 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
2888 int eh_region_count = 0;
2889 int past_to_p = 0;
2890 rtx r = from;
2892 /* By default, assume the end of the region will be what was
2893 suggested. */
2894 if (new_to)
2895 *new_to = to;
2897 while (r)
2899 if (NOTE_P (r))
2901 switch (NOTE_LINE_NUMBER (r))
2903 case NOTE_INSN_EH_REGION_BEG:
2904 ++eh_region_count;
2905 break;
2907 case NOTE_INSN_EH_REGION_END:
2908 if (eh_region_count == 0)
2909 /* This sequence of instructions contains the end of
2910 an exception region, but not the beginning. Moving
2911 it will cause chaos. */
2912 return 0;
2914 --eh_region_count;
2915 break;
2917 default:
2918 break;
2921 else if (past_to_p)
2922 /* If we've passed TO, and we see a non-note instruction, we
2923 can't extend the sequence to a movable sequence. */
2924 return 0;
2926 if (r == to)
2928 if (!new_to)
2929 /* It's OK to move the sequence if there were matched sets of
2930 exception region notes. */
2931 return eh_region_count == 0;
2933 past_to_p = 1;
2936 /* It's OK to move the sequence if there were matched sets of
2937 exception region notes. */
2938 if (past_to_p && eh_region_count == 0)
2940 *new_to = r;
2941 return 1;
2944 /* Go to the next instruction. */
2945 r = NEXT_INSN (r);
2948 return 0;
2951 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2952 int
2953 loc_mentioned_in_p (rtx *loc, rtx in)
2955 enum rtx_code code = GET_CODE (in);
2956 const char *fmt = GET_RTX_FORMAT (code);
2957 int i, j;
2959 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2961 if (loc == &in->u.fld[i].rt_rtx)
2962 return 1;
2963 if (fmt[i] == 'e')
2965 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2966 return 1;
2968 else if (fmt[i] == 'E')
2969 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2970 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2971 return 1;
2973 return 0;
2976 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2977 and SUBREG_BYTE, return the bit offset where the subreg begins
2978 (counting from the least significant bit of the operand). */
2980 unsigned int
2981 subreg_lsb_1 (enum machine_mode outer_mode,
2982 enum machine_mode inner_mode,
2983 unsigned int subreg_byte)
2985 unsigned int bitpos;
2986 unsigned int byte;
2987 unsigned int word;
2989 /* A paradoxical subreg begins at bit position 0. */
2990 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2991 return 0;
2993 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2994 /* If the subreg crosses a word boundary, ensure that
2995 it also begins and ends on a word boundary. */
2996 gcc_assert (!((subreg_byte % UNITS_PER_WORD
2997 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2998 && (subreg_byte % UNITS_PER_WORD
2999 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3001 if (WORDS_BIG_ENDIAN)
3002 word = (GET_MODE_SIZE (inner_mode)
3003 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3004 else
3005 word = subreg_byte / UNITS_PER_WORD;
3006 bitpos = word * BITS_PER_WORD;
3008 if (BYTES_BIG_ENDIAN)
3009 byte = (GET_MODE_SIZE (inner_mode)
3010 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3011 else
3012 byte = subreg_byte % UNITS_PER_WORD;
3013 bitpos += byte * BITS_PER_UNIT;
3015 return bitpos;
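/* Worked example (assuming a 32-bit little-endian target where
   UNITS_PER_WORD is 4): for (subreg:SI (reg:DI) 4) we get
   word = 4 / 4 = 1 and byte = 4 % 4 = 0, so the subreg starts at
   bit 32, i.e. the high half of the DImode value.  */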
3018 /* Given a subreg X, return the bit offset where the subreg begins
3019 (counting from the least significant bit of the reg). */
3021 unsigned int
3022 subreg_lsb (rtx x)
3024 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3025 SUBREG_BYTE (x));
3028 /* This function returns the regno offset of a subreg expression.
3029 xregno - A regno of an inner hard subreg_reg (or what will become one).
3030 xmode - The mode of xregno.
3031 offset - The byte offset.
3032 ymode - The mode of a top level SUBREG (or what may become one).
3033 RETURN - The regno offset which would be used. */
3034 unsigned int
3035 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3036 unsigned int offset, enum machine_mode ymode)
3038 int nregs_xmode, nregs_ymode;
3039 int mode_multiple, nregs_multiple;
3040 int y_offset;
3042 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3044 nregs_xmode = hard_regno_nregs[xregno][xmode];
3045 nregs_ymode = hard_regno_nregs[xregno][ymode];
3047 /* If this is a big endian paradoxical subreg, which uses more actual
3048 hard registers than the original register, we must return a negative
3049 offset so that we find the proper highpart of the register. */
3050 if (offset == 0
3051 && nregs_ymode > nregs_xmode
3052 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3053 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3054 return nregs_xmode - nregs_ymode;
3056 if (offset == 0 || nregs_xmode == nregs_ymode)
3057 return 0;
3059 /* The size of YMODE must not be greater than the size of XMODE. */
3060 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3061 gcc_assert (mode_multiple != 0);
3063 y_offset = offset / GET_MODE_SIZE (ymode);
3064 nregs_multiple = nregs_xmode / nregs_ymode;
3065 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
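/* Worked example (assuming a 32-bit target where DImode occupies two
   hard registers and SImode one): with xmode = DImode, ymode = SImode
   and offset = 4, mode_multiple = 2, nregs_multiple = 2 and
   y_offset = 1, so the result is (1 / (2 / 2)) * 1 = 1, i.e. the
   subreg names the second hard register of the pair.  */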
3068 /* This function returns true when the offset is representable via
3069 subreg_offset in the given regno.
3070 xregno - A regno of an inner hard subreg_reg (or what will become one).
3071 xmode - The mode of xregno.
3072 offset - The byte offset.
3073 ymode - The mode of a top level SUBREG (or what may become one).
3074 RETURN - Whether the offset is representable. */
3075 bool
3076 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3077 unsigned int offset, enum machine_mode ymode)
3079 int nregs_xmode, nregs_ymode;
3080 int mode_multiple, nregs_multiple;
3081 int y_offset;
3083 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3085 nregs_xmode = hard_regno_nregs[xregno][xmode];
3086 nregs_ymode = hard_regno_nregs[xregno][ymode];
3088 /* Paradoxical subregs are always valid. */
3089 if (offset == 0
3090 && nregs_ymode > nregs_xmode
3091 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3092 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3093 return true;
3095 /* Lowpart subregs are always valid. */
3096 if (offset == subreg_lowpart_offset (ymode, xmode))
3097 return true;
3099 /* This should always pass, otherwise we don't know how to verify the
3100 constraint. These conditions may be relaxed but subreg_offset would
3101 need to be redesigned. */
3102 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3103 gcc_assert ((GET_MODE_SIZE (ymode) % nregs_ymode) == 0);
3104 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3106 /* The XMODE value can be seen as a vector of NREGS_XMODE
3107 values. The subreg must represent the lowpart of a given field.
3108 Compute which field it is. */
3109 offset -= subreg_lowpart_offset (ymode,
3110 mode_for_size (GET_MODE_BITSIZE (xmode)
3111 / nregs_xmode,
3112 MODE_INT, 0));
3114 /* The size of YMODE must not be greater than the size of XMODE. */
3115 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3116 gcc_assert (mode_multiple != 0);
3118 y_offset = offset / GET_MODE_SIZE (ymode);
3119 nregs_multiple = nregs_xmode / nregs_ymode;
3121 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3122 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3124 return (!(y_offset % (mode_multiple / nregs_multiple)));
3127 /* Return the final regno that a subreg expression refers to. */
3128 unsigned int
3129 subreg_regno (rtx x)
3131 unsigned int ret;
3132 rtx subreg = SUBREG_REG (x);
3133 int regno = REGNO (subreg);
3135 ret = regno + subreg_regno_offset (regno,
3136 GET_MODE (subreg),
3137 SUBREG_BYTE (x),
3138 GET_MODE (x));
3139 return ret;
3142 struct parms_set_data
3144 int nregs;
3145 HARD_REG_SET regs;
3148 /* Helper function for noticing stores to parameter registers. */
3149 static void
3150 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3152 struct parms_set_data *d = data;
3153 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3154 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3156 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3157 d->nregs--;
3161 /* Look backward for the first parameter to be loaded.
3162 Note that loads of all parameters will not necessarily be
3163 found if CSE has eliminated some of them (e.g., an argument
3164 to the outer function is passed down as a parameter).
3165 Do not skip BOUNDARY. */
3166 rtx
3167 find_first_parameter_load (rtx call_insn, rtx boundary)
3169 struct parms_set_data parm;
3170 rtx p, before, first_set;
3172 /* Since different machines initialize their parameter registers
3173 in different orders, assume nothing. Collect the set of all
3174 parameter registers. */
3175 CLEAR_HARD_REG_SET (parm.regs);
3176 parm.nregs = 0;
3177 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3178 if (GET_CODE (XEXP (p, 0)) == USE
3179 && REG_P (XEXP (XEXP (p, 0), 0)))
3181 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3183 /* We only care about registers which can hold function
3184 arguments. */
3185 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3186 continue;
3188 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3189 parm.nregs++;
3191 before = call_insn;
3192 first_set = call_insn;
3194 /* Search backward for the first set of a register in this set. */
3195 while (parm.nregs && before != boundary)
3197 before = PREV_INSN (before);
3199 /* It is possible that some loads got CSEed from one call to
3200 another. Stop in that case. */
3201 if (CALL_P (before))
3202 break;
3204 /* Our caller must either ensure that we will find all sets
3205 (in case the code has not been optimized yet), or guard
3206 against possible labels by setting BOUNDARY to the preceding
3207 CODE_LABEL. */
3208 if (LABEL_P (before))
3210 gcc_assert (before == boundary);
3211 break;
3214 if (INSN_P (before))
3216 int nregs_old = parm.nregs;
3217 note_stores (PATTERN (before), parms_set, &parm);
3218 /* If we found something that did not set a parameter reg,
3219 we're done. Do not keep going, as that might result
3220 in hoisting an insn before the setting of a pseudo
3221 that is used by the hoisted insn. */
3222 if (nregs_old != parm.nregs)
3223 first_set = before;
3224 else
3225 break;
3228 return first_set;
3231 /* Return true if we should avoid inserting code between INSN and the
3232 preceding call instruction. */
3234 bool
3235 keep_with_call_p (rtx insn)
3237 rtx set;
3239 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3241 if (REG_P (SET_DEST (set))
3242 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3243 && fixed_regs[REGNO (SET_DEST (set))]
3244 && general_operand (SET_SRC (set), VOIDmode))
3245 return true;
3246 if (REG_P (SET_SRC (set))
3247 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3248 && REG_P (SET_DEST (set))
3249 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3250 return true;
3251 /* There may be a stack pop just after the call and before the store
3252 of the return register. Search for the actual store when deciding
3253 if we can break or not. */
3254 if (SET_DEST (set) == stack_pointer_rtx)
3256 rtx i2 = next_nonnote_insn (insn);
3257 if (i2 && keep_with_call_p (i2))
3258 return true;
3261 return false;
3264 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3265 to non-complex jumps. That is, direct unconditional, conditional,
3266 and tablejumps, but not computed jumps or returns. It also does
3267 not apply to the fallthru case of a conditional jump. */
3269 bool
3270 label_is_jump_target_p (rtx label, rtx jump_insn)
3272 rtx tmp = JUMP_LABEL (jump_insn);
3274 if (label == tmp)
3275 return true;
3277 if (tablejump_p (jump_insn, NULL, &tmp))
3279 rtvec vec = XVEC (PATTERN (tmp),
3280 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3281 int i, veclen = GET_NUM_ELEM (vec);
3283 for (i = 0; i < veclen; ++i)
3284 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3285 return true;
3288 return false;
3292 /* Return an estimate of the cost of computing rtx X.
3293 One use is in cse, to decide which expression to keep in the hash table.
3294 Another is in rtl generation, to pick the cheapest way to multiply.
3295 Other uses like the latter are expected in the future. */
3297 int
3298 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3300 int i, j;
3301 enum rtx_code code;
3302 const char *fmt;
3303 int total;
3305 if (x == 0)
3306 return 0;
3308 /* Compute the default costs of certain things.
3309 Note that targetm.rtx_costs can override the defaults. */
3311 code = GET_CODE (x);
3312 switch (code)
3314 case MULT:
3315 total = COSTS_N_INSNS (5);
3316 break;
3317 case DIV:
3318 case UDIV:
3319 case MOD:
3320 case UMOD:
3321 total = COSTS_N_INSNS (7);
3322 break;
3323 case USE:
3324 /* Used in loop.c and combine.c as a marker. */
3325 total = 0;
3326 break;
3327 default:
3328 total = COSTS_N_INSNS (1);
3331 switch (code)
3333 case REG:
3334 return 0;
3336 case SUBREG:
3337 total = 0;
3338 /* If we can't tie these modes, make this expensive. The larger
3339 the mode, the more expensive it is. */
3340 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3341 return COSTS_N_INSNS (2
3342 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3343 break;
3345 default:
3346 if (targetm.rtx_costs (x, code, outer_code, &total))
3347 return total;
3348 break;
3351 /* Sum the costs of the sub-rtx's, plus the cost of this operation,
3352 which is already in total. */
3354 fmt = GET_RTX_FORMAT (code);
3355 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3356 if (fmt[i] == 'e')
3357 total += rtx_cost (XEXP (x, i), code);
3358 else if (fmt[i] == 'E')
3359 for (j = 0; j < XVECLEN (x, i); j++)
3360 total += rtx_cost (XVECEXP (x, i, j), code);
3362 return total;
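/* Usage sketch: a cse-style comparison of two candidate expressions,
   passing SET as the outer code since each expression would appear on
   the right-hand side of a set (NEW_EXPR and OLD_EXPR hypothetical):

     if (rtx_cost (new_expr, SET) < rtx_cost (old_expr, SET))
       ... prefer new_expr ...  */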
3365 /* Return cost of address expression X.
3366 Expect that X is a properly formed address reference. */
3368 int
3369 address_cost (rtx x, enum machine_mode mode)
3371 /* We may be asked for the cost of various unusual addresses, such as
3372 operands of a push instruction. It is not worthwhile to complicate
3373 the target hook for such cases. */
3375 if (!memory_address_p (mode, x))
3376 return 1000;
3378 return targetm.address_cost (x);
3381 /* If the target doesn't override, compute the cost as with arithmetic. */
3383 int
3384 default_address_cost (rtx x)
3386 return rtx_cost (x, MEM);
3390 unsigned HOST_WIDE_INT
3391 nonzero_bits (rtx x, enum machine_mode mode)
3393 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
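/* Usage sketch: testing whether VAL (hypothetical) is known to fit in
   an unsigned byte, which can let a later zero-extension be deleted:

     if ((nonzero_bits (val, SImode)
          & ~(unsigned HOST_WIDE_INT) 0xff) == 0)
       ...  */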
3396 unsigned int
3397 num_sign_bit_copies (rtx x, enum machine_mode mode)
3399 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3402 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3403 It avoids exponential behavior in nonzero_bits1 when X has
3404 identical subexpressions on the first or the second level. */
3406 static unsigned HOST_WIDE_INT
3407 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3408 enum machine_mode known_mode,
3409 unsigned HOST_WIDE_INT known_ret)
3411 if (x == known_x && mode == known_mode)
3412 return known_ret;
3414 /* Try to find identical subexpressions. If found, call
3415 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3416 precomputed value for the subexpression as KNOWN_RET. */
3418 if (ARITHMETIC_P (x))
3420 rtx x0 = XEXP (x, 0);
3421 rtx x1 = XEXP (x, 1);
3423 /* Check the first level. */
3424 if (x0 == x1)
3425 return nonzero_bits1 (x, mode, x0, mode,
3426 cached_nonzero_bits (x0, mode, known_x,
3427 known_mode, known_ret));
3429 /* Check the second level. */
3430 if (ARITHMETIC_P (x0)
3431 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3432 return nonzero_bits1 (x, mode, x1, mode,
3433 cached_nonzero_bits (x1, mode, known_x,
3434 known_mode, known_ret));
3436 if (ARITHMETIC_P (x1)
3437 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3438 return nonzero_bits1 (x, mode, x0, mode,
3439 cached_nonzero_bits (x0, mode, known_x,
3440 known_mode, known_ret));
3443 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3446 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3447 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3448 is less useful. We can't allow both, because that results in exponential
3449 run time recursion. There is a nullstone testcase that triggered
3450 this. This macro avoids accidental uses of num_sign_bit_copies. */
3451 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3453 /* Given an expression, X, compute which bits in X can be nonzero.
3454 We don't care about bits outside of those defined in MODE.
3456 For most X this is simply GET_MODE_MASK (MODE), but if X is
3457 an arithmetic operation, we can do better. */
3459 static unsigned HOST_WIDE_INT
3460 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3461 enum machine_mode known_mode,
3462 unsigned HOST_WIDE_INT known_ret)
3464 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3465 unsigned HOST_WIDE_INT inner_nz;
3466 enum rtx_code code;
3467 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3469 /* For floating-point values, assume all bits are needed. */
3470 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3471 return nonzero;
3473 /* If X is wider than MODE, use its mode instead. */
3474 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3476 mode = GET_MODE (x);
3477 nonzero = GET_MODE_MASK (mode);
3478 mode_width = GET_MODE_BITSIZE (mode);
3481 if (mode_width > HOST_BITS_PER_WIDE_INT)
3482 /* Our only callers in this case look for single bit values. So
3483 just return the mode mask. Those tests will then be false. */
3484 return nonzero;
3486 #ifndef WORD_REGISTER_OPERATIONS
3487 /* If MODE is wider than X, but both are a single word for both the host
3488 and target machines, we can compute this from which bits of the
3489 object might be nonzero in its own mode, taking into account the fact
3490 that on many CISC machines, accessing an object in a wider mode
3491 causes the high-order bits to become undefined. So they are
3492 not known to be zero. */
3494 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3495 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3496 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3497 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3499 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3500 known_x, known_mode, known_ret);
3501 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3502 return nonzero;
3504 #endif
3506 code = GET_CODE (x);
3507 switch (code)
3509 case REG:
3510 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3511 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3512 all the bits above ptr_mode are known to be zero. */
3513 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3514 && REG_POINTER (x))
3515 nonzero &= GET_MODE_MASK (ptr_mode);
3516 #endif
3518 /* Include declared information about alignment of pointers. */
3519 /* ??? We don't properly preserve REG_POINTER changes across
3520 pointer-to-integer casts, so we can't trust it except for
3521 things that we know must be pointers. See execute/960116-1.c. */
3522 if ((x == stack_pointer_rtx
3523 || x == frame_pointer_rtx
3524 || x == arg_pointer_rtx)
3525 && REGNO_POINTER_ALIGN (REGNO (x)))
3527 unsigned HOST_WIDE_INT alignment
3528 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3530 #ifdef PUSH_ROUNDING
3531 /* If PUSH_ROUNDING is defined, it is possible for the
3532 stack to be momentarily aligned only to that amount,
3533 so we pick the least alignment. */
3534 if (x == stack_pointer_rtx && PUSH_ARGS)
3535 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3536 alignment);
3537 #endif
3539 nonzero &= ~(alignment - 1);
3543 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3544 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3545 known_mode, known_ret,
3546 &nonzero_for_hook);
3548 if (new)
3549 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3550 known_mode, known_ret);
3552 return nonzero_for_hook;
3555 case CONST_INT:
3556 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3557 /* If X is negative in MODE, sign-extend the value. */
3558 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3559 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3560 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3561 #endif
3563 return INTVAL (x);
3565 case MEM:
3566 #ifdef LOAD_EXTEND_OP
3567 /* On many, if not most, RISC machines, reading a byte from memory
3568 zeros the rest of the register. Noticing that fact saves a lot
3569 of extra zero-extends. */
3570 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3571 nonzero &= GET_MODE_MASK (GET_MODE (x));
3572 #endif
3573 break;
3575 case EQ: case NE:
3576 case UNEQ: case LTGT:
3577 case GT: case GTU: case UNGT:
3578 case LT: case LTU: case UNLT:
3579 case GE: case GEU: case UNGE:
3580 case LE: case LEU: case UNLE:
3581 case UNORDERED: case ORDERED:
3583 /* If this produces an integer result, we know which bits are set.
3584 Code here used to clear bits outside the mode of X, but that is
3585 now done above. */
3587 if (GET_MODE_CLASS (mode) == MODE_INT
3588 && mode_width <= HOST_BITS_PER_WIDE_INT)
3589 nonzero = STORE_FLAG_VALUE;
3590 break;
3592 case NEG:
3593 #if 0
3594 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3595 and num_sign_bit_copies. */
3596 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3597 == GET_MODE_BITSIZE (GET_MODE (x)))
3598 nonzero = 1;
3599 #endif
3601 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3602 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3603 break;
3605 case ABS:
3606 #if 0
3607 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3608 and num_sign_bit_copies. */
3609 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3610 == GET_MODE_BITSIZE (GET_MODE (x)))
3611 nonzero = 1;
3612 #endif
3613 break;
3615 case TRUNCATE:
3616 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3617 known_x, known_mode, known_ret)
3618 & GET_MODE_MASK (mode));
3619 break;
3621 case ZERO_EXTEND:
3622 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3623 known_x, known_mode, known_ret);
3624 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3625 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3626 break;
3628 case SIGN_EXTEND:
3629 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3630 Otherwise, show that all the bits in the outer mode but not in the
3631 inner mode may be nonzero. */
3632 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3633 known_x, known_mode, known_ret);
3634 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3636 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3637 if (inner_nz
3638 & (((HOST_WIDE_INT) 1
3639 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3640 inner_nz |= (GET_MODE_MASK (mode)
3641 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3644 nonzero &= inner_nz;
3645 break;
3647 case AND:
3648 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3649 known_x, known_mode, known_ret)
3650 & cached_nonzero_bits (XEXP (x, 1), mode,
3651 known_x, known_mode, known_ret);
3652 break;
3654 case XOR: case IOR:
3655 case UMIN: case UMAX: case SMIN: case SMAX:
3657 unsigned HOST_WIDE_INT nonzero0 =
3658 cached_nonzero_bits (XEXP (x, 0), mode,
3659 known_x, known_mode, known_ret);
3661 /* Don't call nonzero_bits a second time if it cannot change
3662 anything. */
3663 if ((nonzero & nonzero0) != nonzero)
3664 nonzero &= nonzero0
3665 | cached_nonzero_bits (XEXP (x, 1), mode,
3666 known_x, known_mode, known_ret);
3668 break;
3670 case PLUS: case MINUS:
3671 case MULT:
3672 case DIV: case UDIV:
3673 case MOD: case UMOD:
3674 /* We can apply the rules of arithmetic to compute the number of
3675 high- and low-order zero bits of these operations. We start by
3676 computing the width (position of the highest-order nonzero bit)
3677 and the number of low-order zero bits for each value. */
3679 unsigned HOST_WIDE_INT nz0 =
3680 cached_nonzero_bits (XEXP (x, 0), mode,
3681 known_x, known_mode, known_ret);
3682 unsigned HOST_WIDE_INT nz1 =
3683 cached_nonzero_bits (XEXP (x, 1), mode,
3684 known_x, known_mode, known_ret);
3685 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3686 int width0 = floor_log2 (nz0) + 1;
3687 int width1 = floor_log2 (nz1) + 1;
3688 int low0 = floor_log2 (nz0 & -nz0);
3689 int low1 = floor_log2 (nz1 & -nz1);
3690 HOST_WIDE_INT op0_maybe_minusp
3691 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3692 HOST_WIDE_INT op1_maybe_minusp
3693 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3694 unsigned int result_width = mode_width;
3695 int result_low = 0;
3697 switch (code)
3699 case PLUS:
3700 result_width = MAX (width0, width1) + 1;
3701 result_low = MIN (low0, low1);
3702 break;
3703 case MINUS:
3704 result_low = MIN (low0, low1);
3705 break;
3706 case MULT:
3707 result_width = width0 + width1;
3708 result_low = low0 + low1;
3709 break;
3710 case DIV:
3711 if (width1 == 0)
3712 break;
3713 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3714 result_width = width0;
3715 break;
3716 case UDIV:
3717 if (width1 == 0)
3718 break;
3719 result_width = width0;
3720 break;
3721 case MOD:
3722 if (width1 == 0)
3723 break;
3724 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3725 result_width = MIN (width0, width1);
3726 result_low = MIN (low0, low1);
3727 break;
3728 case UMOD:
3729 if (width1 == 0)
3730 break;
3731 result_width = MIN (width0, width1);
3732 result_low = MIN (low0, low1);
3733 break;
3734 default:
3735 gcc_unreachable ();
3738 if (result_width < mode_width)
3739 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3741 if (result_low > 0)
3742 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3744 #ifdef POINTERS_EXTEND_UNSIGNED
3745 /* If pointers extend unsigned and this is an addition or subtraction
3746 to a pointer in Pmode, all the bits above ptr_mode are known to be
3747 zero. */
3748 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3749 && (code == PLUS || code == MINUS)
3750 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3751 nonzero &= GET_MODE_MASK (ptr_mode);
3752 #endif
3754 break;
3756 case ZERO_EXTRACT:
3757 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3758 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3759 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3760 break;
3762 case SUBREG:
3763 /* If this is a SUBREG formed for a promoted variable that has
3764 been zero-extended, we know that at least the high-order bits
3765 are zero, though others might be too. */
3767 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3768 nonzero = GET_MODE_MASK (GET_MODE (x))
3769 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3770 known_x, known_mode, known_ret);
3772 /* If the inner mode is a single word for both the host and target
3773 machines, we can compute this from which bits of the inner
3774 object might be nonzero. */
3775 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3776 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3777 <= HOST_BITS_PER_WIDE_INT))
3779 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3780 known_x, known_mode, known_ret);
3782 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3783 /* If this is a typical RISC machine, we only have to worry
3784 about the way loads are extended. */
3785 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3786 ? (((nonzero
3787 & (((unsigned HOST_WIDE_INT) 1
3788 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3789 != 0))
3790 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3791 || !MEM_P (SUBREG_REG (x)))
3792 #endif
3794 /* On many CISC machines, accessing an object in a wider mode
3795 causes the high-order bits to become undefined. So they are
3796 not known to be zero. */
3797 if (GET_MODE_SIZE (GET_MODE (x))
3798 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3799 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3800 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3803 break;
3805 case ASHIFTRT:
3806 case LSHIFTRT:
3807 case ASHIFT:
3808 case ROTATE:
3809 /* The nonzero bits are in two classes: any bits within MODE
3810 that aren't in GET_MODE (x) are always significant. The rest of the
3811 nonzero bits are those that are significant in the operand of
3812 the shift when shifted the appropriate number of bits. This
3813 shows that high-order bits are cleared by the right shift and
3814 low-order bits by left shifts. */
3815 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3816 && INTVAL (XEXP (x, 1)) >= 0
3817 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3819 enum machine_mode inner_mode = GET_MODE (x);
3820 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3821 int count = INTVAL (XEXP (x, 1));
3822 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3823 unsigned HOST_WIDE_INT op_nonzero =
3824 cached_nonzero_bits (XEXP (x, 0), mode,
3825 known_x, known_mode, known_ret);
3826 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3827 unsigned HOST_WIDE_INT outer = 0;
3829 if (mode_width > width)
3830 outer = (op_nonzero & nonzero & ~mode_mask);
3832 if (code == LSHIFTRT)
3833 inner >>= count;
3834 else if (code == ASHIFTRT)
3836 inner >>= count;
3838 /* If the sign bit may have been nonzero before the shift, we
3839 need to mark all the places it could have been copied to
3840 by the shift as possibly nonzero. */
3841 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3842 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3844 else if (code == ASHIFT)
3845 inner <<= count;
3846 else
3847 inner = ((inner << (count % width)
3848 | (inner >> (width - (count % width)))) & mode_mask);
3850 nonzero &= (outer | inner);
3852 break;
3854 case FFS:
3855 case POPCOUNT:
3856 /* This is at most the number of bits in the mode. */
3857 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3858 break;
3860 case CLZ:
3861 /* If CLZ has a known value at zero, then the nonzero bits are
3862 that value, plus the number of bits in the mode minus one. */
3863 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3864 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3865 else
3866 nonzero = -1;
3867 break;
3869 case CTZ:
3870 /* If CTZ has a known value at zero, then the nonzero bits are
3871 that value, plus the number of bits in the mode minus one. */
3872 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3873 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3874 else
3875 nonzero = -1;
3876 break;
3878 case PARITY:
3879 nonzero = 1;
3880 break;
3882 case IF_THEN_ELSE:
3884 unsigned HOST_WIDE_INT nonzero_true =
3885 cached_nonzero_bits (XEXP (x, 1), mode,
3886 known_x, known_mode, known_ret);
3888 /* Don't call nonzero_bits a second time if it cannot change
3889 anything. */
3890 if ((nonzero & nonzero_true) != nonzero)
3891 nonzero &= nonzero_true
3892 | cached_nonzero_bits (XEXP (x, 2), mode,
3893 known_x, known_mode, known_ret);
3895 break;
3897 default:
3898 break;
3901 return nonzero;
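/* Worked example: for (ashift:SI X (const_int 3)) the ASHIFT case
   above shifts INNER left by 3, so whatever X is, the three low-order
   bits of the result are known to be zero and the returned mask has
   bits 0..2 clear.  */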
3904 /* See the macro definition above. */
3905 #undef cached_num_sign_bit_copies
3908 /* The function cached_num_sign_bit_copies is a wrapper around
3909 num_sign_bit_copies1. It avoids exponential behavior in
3910 num_sign_bit_copies1 when X has identical subexpressions on the
3911 first or the second level. */
3913 static unsigned int
3914 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3915 enum machine_mode known_mode,
3916 unsigned int known_ret)
3918 if (x == known_x && mode == known_mode)
3919 return known_ret;
3921 /* Try to find identical subexpressions. If found, call
3922 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3923 the precomputed value for the subexpression as KNOWN_RET. */
3925 if (ARITHMETIC_P (x))
3927 rtx x0 = XEXP (x, 0);
3928 rtx x1 = XEXP (x, 1);
3930 /* Check the first level. */
3931 if (x0 == x1)
3932 return
3933 num_sign_bit_copies1 (x, mode, x0, mode,
3934 cached_num_sign_bit_copies (x0, mode, known_x,
3935 known_mode,
3936 known_ret));
3938 /* Check the second level. */
3939 if (ARITHMETIC_P (x0)
3940 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3941 return
3942 num_sign_bit_copies1 (x, mode, x1, mode,
3943 cached_num_sign_bit_copies (x1, mode, known_x,
3944 known_mode,
3945 known_ret));
3947 if (ARITHMETIC_P (x1)
3948 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3949 return
3950 num_sign_bit_copies1 (x, mode, x0, mode,
3951 cached_num_sign_bit_copies (x0, mode, known_x,
3952 known_mode,
3953 known_ret));
3956 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
3959 /* Return the number of bits at the high-order end of X that are known to
3960 be equal to the sign bit. X will be used in mode MODE; if MODE is
3961 VOIDmode, X will be used in its own mode. The returned value will always
3962 be between 1 and the number of bits in MODE. */
3964 static unsigned int
3965 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
3966 enum machine_mode known_mode,
3967 unsigned int known_ret)
3969 enum rtx_code code = GET_CODE (x);
3970 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
3971 int num0, num1, result;
3972 unsigned HOST_WIDE_INT nonzero;
3974 /* If we weren't given a mode, use the mode of X. If the mode is still
3975 VOIDmode, we don't know anything. Likewise if one of the modes is
3976 floating-point. */
3978 if (mode == VOIDmode)
3979 mode = GET_MODE (x);
3981 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
3982 return 1;
3984 /* For a smaller object, just ignore the high bits. */
3985 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
3987 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
3988 known_x, known_mode, known_ret);
3989 return MAX (1,
3990 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
3993 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
3995 #ifndef WORD_REGISTER_OPERATIONS
3996 /* If this machine does not do all register operations on the entire
3997 register and MODE is wider than the mode of X, we can say nothing
3998 at all about the high-order bits. */
3999 return 1;
4000 #else
4001 /* Likewise on machines that do, if the mode of the object is smaller
4002 than a word and loads of that size don't sign extend, we can say
4003 nothing about the high order bits. */
4004 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4005 #ifdef LOAD_EXTEND_OP
4006 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4007 #endif
4009 return 1;
4010 #endif
4013 switch (code)
4015 case REG:
4017 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4018 /* If pointers extend signed and this is a pointer in Pmode, say that
4019 all the bits above ptr_mode are known to be sign bit copies. */
4020 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4021 && REG_POINTER (x))
4022 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4023 #endif
4026 unsigned int copies_for_hook = 1, copies = 1;
4027 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4028 known_mode, known_ret,
4029 &copies_for_hook);
4031 if (new)
4032 copies = cached_num_sign_bit_copies (new, mode, known_x,
4033 known_mode, known_ret);
4035 if (copies > 1 || copies_for_hook > 1)
4036 return MAX (copies, copies_for_hook);
4038 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4040 break;
4042 case MEM:
4043 #ifdef LOAD_EXTEND_OP
4044 /* Some RISC machines sign-extend all loads smaller than a word. */
4045 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4046 return MAX (1, ((int) bitwidth
4047 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4048 #endif
4049 break;
4051 case CONST_INT:
4052 /* If the constant is negative, take its 1's complement and remask.
4053 Then see how many zero bits we have. */
4054 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4055 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4056 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4057 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4059 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4061 case SUBREG:
4062 /* If this is a SUBREG for a promoted object that is sign-extended
4063 and we are looking at it in a wider mode, we know that at least the
4064 high-order bits are sign bit copies. */
4066 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4068 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4069 known_x, known_mode, known_ret);
4070 return MAX ((int) bitwidth
4071 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4072 num0);
4075 /* For a smaller object, just ignore the high bits. */
4076 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4078 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4079 known_x, known_mode, known_ret);
4080 return MAX (1, (num0
4081 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4082 - bitwidth)));
4085 #ifdef WORD_REGISTER_OPERATIONS
4086 #ifdef LOAD_EXTEND_OP
4087 /* For paradoxical SUBREGs on machines where all register operations
4088 affect the entire register, just look inside. Note that we are
4089 passing MODE to the recursive call, so the number of sign bit copies
4090 will remain relative to that mode, not the inner mode. */
4092 /* This works only if loads sign extend. Otherwise, if we get a
4093 reload for the inner part, it may be loaded from the stack, and
4094 then we lose all sign bit copies that existed before the store
4095 to the stack. */
4097 if ((GET_MODE_SIZE (GET_MODE (x))
4098 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4099 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4100 && MEM_P (SUBREG_REG (x)))
4101 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4102 known_x, known_mode, known_ret);
4103 #endif
4104 #endif
4105 break;
4107 case SIGN_EXTRACT:
4108 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4109 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4110 break;
4112 case SIGN_EXTEND:
4113 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4114 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4115 known_x, known_mode, known_ret));
4117 case TRUNCATE:
4118 /* For a smaller object, just ignore the high bits. */
4119 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4120 known_x, known_mode, known_ret);
4121 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4122 - bitwidth)));
4124 case NOT:
4125 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4126 known_x, known_mode, known_ret);
4128 case ROTATE: case ROTATERT:
4129 /* If we are rotating left by a number of bits less than the number
4130 of sign bit copies, we can just subtract that amount from the
4131 number. */
4132 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4133 && INTVAL (XEXP (x, 1)) >= 0
4134 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4136 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4137 known_x, known_mode, known_ret);
4138 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4139 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4141 break;
4143 case NEG:
4144 /* In general, this subtracts one sign bit copy. But if the value
4145 is known to be positive, the number of sign bit copies is the
4146 same as that of the input. Finally, if the input has just one bit
4147 that might be nonzero, all the bits are copies of the sign bit. */
4148 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4149 known_x, known_mode, known_ret);
4150 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4151 return num0 > 1 ? num0 - 1 : 1;
4153 nonzero = nonzero_bits (XEXP (x, 0), mode);
4154 if (nonzero == 1)
4155 return bitwidth;
4157 if (num0 > 1
4158 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4159 num0--;
4161 return num0;
4163 case IOR: case AND: case XOR:
4164 case SMIN: case SMAX: case UMIN: case UMAX:
4165 /* Logical operations will preserve the number of sign-bit copies.
4166 MIN and MAX operations always return one of the operands. */
4167 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4168 known_x, known_mode, known_ret);
4169 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4170 known_x, known_mode, known_ret);
4171 return MIN (num0, num1);
4173 case PLUS: case MINUS:
4174 /* For addition and subtraction, we can have a 1-bit carry. However,
4175 if we are subtracting 1 from a positive number, there will not
4176 be such a carry. Furthermore, if the positive number is known to
4177 be 0 or 1, we know the result is either -1 or 0. */
4179 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4180 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4182 nonzero = nonzero_bits (XEXP (x, 0), mode);
4183 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4184 return (nonzero == 1 || nonzero == 0 ? bitwidth
4185 : bitwidth - floor_log2 (nonzero) - 1);
4188 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4189 known_x, known_mode, known_ret);
4190 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4191 known_x, known_mode, known_ret);
4192 result = MAX (1, MIN (num0, num1) - 1);
4194 #ifdef POINTERS_EXTEND_UNSIGNED
4195 /* If pointers extend signed and this is an addition or subtraction
4196 to a pointer in Pmode, all the bits above ptr_mode are known to be
4197 sign bit copies. */
4198 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4199 && (code == PLUS || code == MINUS)
4200 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4201 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4202 - GET_MODE_BITSIZE (ptr_mode) + 1),
4203 result);
4204 #endif
4205 return result;
4207 case MULT:
4208 /* The number of bits of the product is the sum of the number of
4209 bits of both terms. However, unless one of the terms is known
4210 to be positive, we must allow for an additional bit since negating
4211 a negative number can remove one sign bit copy. */
4213 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4214 known_x, known_mode, known_ret);
4215 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4216 known_x, known_mode, known_ret);
4218 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4219 if (result > 0
4220 && (bitwidth > HOST_BITS_PER_WIDE_INT
4221 || (((nonzero_bits (XEXP (x, 0), mode)
4222 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4223 && ((nonzero_bits (XEXP (x, 1), mode)
4224 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4225 result--;
4227 return MAX (1, result);
4229 case UDIV:
4230 /* The result must be <= the first operand. If the first operand
4231 has the high bit set, we know nothing about the number of sign
4232 bit copies. */
4233 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4234 return 1;
4235 else if ((nonzero_bits (XEXP (x, 0), mode)
4236 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4237 return 1;
4238 else
4239 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4240 known_x, known_mode, known_ret);
4242 case UMOD:
4243 /* The result must be <= the second operand. */
4244 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4245 known_x, known_mode, known_ret);
4247 case DIV:
4248 /* Similar to unsigned division, except that we have to worry about
4249 the case where the divisor is negative, in which case we have
4250 to add 1. */
4251 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4252 known_x, known_mode, known_ret);
4253 if (result > 1
4254 && (bitwidth > HOST_BITS_PER_WIDE_INT
4255 || (nonzero_bits (XEXP (x, 1), mode)
4256 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4257 result--;
4259 return result;
4261 case MOD:
4262 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4263 known_x, known_mode, known_ret);
4264 if (result > 1
4265 && (bitwidth > HOST_BITS_PER_WIDE_INT
4266 || (nonzero_bits (XEXP (x, 1), mode)
4267 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4268 result--;
4270 return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
         sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) < 0
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
        return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
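
      /* So, e.g., (ashiftrt:SI x (const_int 8)) has eight more sign bit
         copies than x (capped at the bitwidth), while
         (ashift:SI x (const_int 8)) has eight fewer, but never less
         than one.  */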

    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU:  case GTU:  case LEU:  case LTU:
    case UNORDERED:  case ORDERED:
      /* If the constant is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
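      /* Both common definitions work out (illustrative, SImode): with
         STORE_FLAG_VALUE == 1 the result is 0 or 1 and has 31 sign bit
         copies; with STORE_FLAG_VALUE == -1 the complement below is 0
         and all 32 bits match.  */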
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */
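  /* E.g. (illustrative): a nonzero_bits mask of 0xff means the value
     lies in [0, 255], so 32 - floor_log2 (0xff) - 1 == 24 high bits of
     an SImode value match its (zero) sign bit.  */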

  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}

/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}
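
/* Usage sketch (hypothetical pattern): for
     (set (reg:SI 100) (plus:SI (reg:SI 101) (const_int 4)))
   insn_rtx_cost returns the target's rtx_cost for the PLUS source,
   falling back to COSTS_N_INSNS (1) when that cost is zero.  A PARALLEL
   containing two SETs, or none, yields 0.  */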

/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands; (cc0) will have been replaced.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to canonicalizing
   it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P is nonzero, the condition must be valid at both
   *EARLIEST and at INSN.  */
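
/* For instance (hypothetical operands), canonicalizing
   (gt (const_int 0) (reg:SI 100)) yields (lt (reg:SI 100) (const_int 0)):
   the constant is moved into the second position and the code swapped to
   match, per rules (1)-(3) above.  */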

rtx
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
                        rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx prev = insn;
  rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  enum machine_mode mode;

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE that
     find_comparison_args does in cse.c.  */
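  /* E.g. (hypothetical insns): if COND is (ne (reg:SI 100) (const_int 0))
     and the previous insn is
       (set (reg:SI 100) (gt (reg:SI 101) (reg:SI 102))),
     the loop below replaces the condition with
     (gt (reg:SI 101) (reg:SI 102)).  */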

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
         && op1 == CONST0_RTX (GET_MODE (op0))
         && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

#ifdef HAVE_cc0
      /* If comparison with cc0, import actual comparison from compare
         insn.  */
      if (op0 == cc0_rtx)
        {
          if ((prev = prev_nonnote_insn (prev)) == 0
              || !NONJUMP_INSN_P (prev)
              || (set = single_set (prev)) == 0
              || SET_DEST (set) != cc0_rtx)
            return 0;

          op0 = SET_SRC (set);
          op1 = CONST0_RTX (GET_MODE (op0));
          if (earliest)
            *earliest = prev;
        }
#endif

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
        {
          op1 = XEXP (op0, 1);
          op0 = XEXP (op0, 0);
          continue;
        }
      else if (!REG_P (op0))
        break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
         stop if it isn't a single set or if it has a REG_INC note because
         we don't want to bother dealing with it.  */

      if ((prev = prev_nonnote_insn (prev)) == 0
          || !NONJUMP_INSN_P (prev)
          || FIND_REG_INC_NOTE (prev, NULL_RTX))
        break;

      set = set_of (op0, prev);

      if (set
          && (GET_CODE (set) != SET
              || !rtx_equal_p (SET_DEST (set), op0)))
        break;

      /* If this is setting OP0, get what it sets it to if it looks
         relevant.  */
      if (set)
        {
          enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* ??? We may not combine comparisons done in a CCmode with
             comparisons not done in a CCmode.  This is to aid targets
             like Alpha that have an IEEE compliant EQ instruction, and
             a non-IEEE compliant BEQ instruction.  The use of CCmode is
             actually artificial, simply to prevent the combination, but
             should not affect other platforms.

             However, we must allow VOIDmode comparisons to match either
             CCmode or non-CCmode comparison, because some ports have
             modeless comparisons inside branch patterns.

             ??? This mode check should perhaps look more like the mode check
             in simplify_comparison in combine.  */

          if ((GET_CODE (SET_SRC (set)) == COMPARE
               || (((code == NE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     )
                    && COMPARISON_P (SET_SRC (set))))
              && (((GET_MODE_CLASS (mode) == MODE_CC)
                   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                  || mode == VOIDmode || inner_mode == VOIDmode)))
            x = SET_SRC (set);
          else if (((code == EQ
                     || (code == GE
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == GE
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     )
                    && COMPARISON_P (SET_SRC (set))
                    && (((GET_MODE_CLASS (mode) == MODE_CC)
                         == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                        || mode == VOIDmode || inner_mode == VOIDmode)))
            {
              reverse_code = 1;
              x = SET_SRC (set);
            }
          else
            break;
        }
      else if (reg_set_p (op0, prev))
        /* If this sets OP0, but not directly, we have to give up.  */
        break;

      if (x)
        {
          /* If the caller is expecting the condition to be valid at INSN,
             make sure X doesn't change before INSN.  */
          if (valid_at_insn_p)
            if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
              break;
          if (COMPARISON_P (x))
            code = GET_CODE (x);
          if (reverse_code)
            {
              code = reversed_comparison_code (x, prev);
              if (code == UNKNOWN)
                return 0;
              reverse_code = 0;
            }

          op0 = XEXP (x, 0), op1 = XEXP (x, 1);
          if (earliest)
            *earliest = prev;
        }
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && GET_CODE (op1) == CONST_INT
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
          break;

        default:
          break;
        }
    }
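
  /* E.g. (hypothetical operand): in SImode, (le x (const_int 4)) becomes
     (lt x (const_int 5)) and (geu x (const_int 1)) becomes
     (gtu x (const_int 0)); the guards above skip the boundary constants
     for which adding or subtracting 1 would wrap around.  */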

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}

/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare to a CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}
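
/* E.g. (hypothetical insn): for a jump whose pattern is
     (set (pc) (if_then_else (lt (reg:SI 100) (const_int 0))
                             (label_ref 23) (pc)))
   this returns the canonicalized (lt (reg:SI 100) (const_int 0)); had the
   label_ref and (pc) arms been swapped, the condition would be reversed
   first.  */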

/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
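      /* E.g.: PLUS, whose format string is "ee", maps to 0, while
         CONST_INT, whose format is "w" and so has no rtx operands,
         maps to -1.  */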