1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
20 02110-1301, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "target.h"
33 #include "output.h"
34 #include "tm_p.h"
35 #include "flags.h"
36 #include "real.h"
37 #include "regs.h"
38 #include "function.h"
40 /* Forward declarations */
41 static int global_reg_mentioned_p_1 (rtx *, void *);
42 static void set_of_1 (rtx, rtx, void *);
43 static bool covers_regno_p (rtx, unsigned int);
44 static bool covers_regno_no_parallel_p (rtx, unsigned int);
45 static int rtx_referenced_p_1 (rtx *, void *);
46 static int computed_jump_p_1 (rtx);
47 static void parms_set (rtx, rtx, void *);
49 static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
50 rtx, enum machine_mode,
51 unsigned HOST_WIDE_INT);
52 static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
53 enum machine_mode,
54 unsigned HOST_WIDE_INT);
55 static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
56 enum machine_mode,
57 unsigned int);
58 static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
59 enum machine_mode, unsigned int);
61 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
62 -1 if a code has no such operand. */
63 static int non_rtx_starting_operands[NUM_RTX_CODE];
65 /* Bit flags that specify the machine subtype we are compiling for.
66 Bits are tested using macros TARGET_... defined in the tm.h file
67 and set by `-m...' switches. Must be defined in rtlanal.c. */
69 int target_flags;
71 /* Return 1 if the value of X is unstable
72 (would be different at a different point in the program).
73 The frame pointer, arg pointer, etc. are considered stable
74 (within one function) and so is anything marked `unchanging'. */
76 int
77 rtx_unstable_p (rtx x)
79 RTX_CODE code = GET_CODE (x);
80 int i;
81 const char *fmt;
83 switch (code)
85 case MEM:
86 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
88 case CONST:
89 case CONST_INT:
90 case CONST_DOUBLE:
91 case CONST_VECTOR:
92 case SYMBOL_REF:
93 case LABEL_REF:
94 return 0;
96 case REG:
97 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
98 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
99 /* The arg pointer varies if it is not a fixed register. */
100 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
101 return 0;
102 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
103 /* ??? When call-clobbered, the value is stable modulo the restore
104 that must happen after a call. This currently screws up local-alloc
105 into believing that the restore is not needed. */
106 if (x == pic_offset_table_rtx)
107 return 0;
108 #endif
109 return 1;
111 case ASM_OPERANDS:
112 if (MEM_VOLATILE_P (x))
113 return 1;
115 /* Fall through. */
117 default:
118 break;
121 fmt = GET_RTX_FORMAT (code);
122 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
123 if (fmt[i] == 'e')
125 if (rtx_unstable_p (XEXP (x, i)))
126 return 1;
128 else if (fmt[i] == 'E')
130 int j;
131 for (j = 0; j < XVECLEN (x, i); j++)
132 if (rtx_unstable_p (XVECEXP (x, i, j)))
133 return 1;
136 return 0;
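/* Editorial usage sketch (not part of the original file): a pass that wants
   to re-read a value at a later point in the same function can gate on
   rtx_unstable_p as below.  The helper name is hypothetical.  */
#if 0
static int
value_survives_motion_p (rtx x)
{
  /* Constants, the frame/arg pointers, and read-only memory with a stable
     address keep their value within one function.  */
  return !rtx_unstable_p (x);
}
#endif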
139 /* Return 1 if X has a value that can vary even between two
140 executions of the program. 0 means X can be compared reliably
141 against certain constants or near-constants.
142 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
143 zero, we are slightly more conservative.
144 The frame pointer and the arg pointer are considered constant. */
147 rtx_varies_p (rtx x, int for_alias)
149 RTX_CODE code;
150 int i;
151 const char *fmt;
153 if (!x)
154 return 0;
156 code = GET_CODE (x);
157 switch (code)
159 case MEM:
160 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
162 case CONST:
163 case CONST_INT:
164 case CONST_DOUBLE:
165 case CONST_VECTOR:
166 case SYMBOL_REF:
167 case LABEL_REF:
168 return 0;
170 case REG:
171 /* Note that we have to test for the actual rtx used for the frame
172 and arg pointers and not just the register number in case we have
173 eliminated the frame and/or arg pointer and are using it
174 for pseudos. */
175 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
176 /* The arg pointer varies if it is not a fixed register. */
177 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
178 return 0;
179 if (x == pic_offset_table_rtx
180 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
181 /* ??? When call-clobbered, the value is stable modulo the restore
182 that must happen after a call. This currently screws up
183 local-alloc into believing that the restore is not needed, so we
184 must return 0 only if we are called from alias analysis. */
185 && for_alias
186 #endif
188 return 0;
189 return 1;
191 case LO_SUM:
192 /* The operand 0 of a LO_SUM is considered constant
193 (in fact it is related specifically to operand 1)
194 during alias analysis. */
195 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
196 || rtx_varies_p (XEXP (x, 1), for_alias);
198 case ASM_OPERANDS:
199 if (MEM_VOLATILE_P (x))
200 return 1;
202 /* Fall through. */
204 default:
205 break;
208 fmt = GET_RTX_FORMAT (code);
209 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
210 if (fmt[i] == 'e')
212 if (rtx_varies_p (XEXP (x, i), for_alias))
213 return 1;
215 else if (fmt[i] == 'E')
217 int j;
218 for (j = 0; j < XVECLEN (x, i); j++)
219 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
220 return 1;
223 return 0;
226 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
229 rtx_addr_can_trap_p (rtx x)
231 enum rtx_code code = GET_CODE (x);
233 switch (code)
235 case SYMBOL_REF:
236 return SYMBOL_REF_WEAK (x);
238 case LABEL_REF:
239 return 0;
241 case REG:
242 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
243 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
244 || x == stack_pointer_rtx
245 /* The arg pointer varies if it is not a fixed register. */
246 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
247 return 0;
248 /* All of the virtual frame registers are stack references. */
249 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
250 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
251 return 0;
252 return 1;
254 case CONST:
255 return rtx_addr_can_trap_p (XEXP (x, 0));
257 case PLUS:
258 /* An address is assumed not to trap if it is an address that can't
259 trap plus a constant integer or it is the pic register plus a
260 constant. */
261 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
262 && GET_CODE (XEXP (x, 1)) == CONST_INT)
263 || (XEXP (x, 0) == pic_offset_table_rtx
264 && CONSTANT_P (XEXP (x, 1))));
266 case LO_SUM:
267 case PRE_MODIFY:
268 return rtx_addr_can_trap_p (XEXP (x, 1));
270 case PRE_DEC:
271 case PRE_INC:
272 case POST_DEC:
273 case POST_INC:
274 case POST_MODIFY:
275 return rtx_addr_can_trap_p (XEXP (x, 0));
277 default:
278 break;
281   /* If it isn't one of the cases above, it can cause a trap.  */
282 return 1;
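/* Editorial usage sketch (not part of the original file): a typical caller
   combines the MEM's no-trap flag with an address check before hoisting a
   load; the helper name is hypothetical.  */
#if 0
static int
load_may_fault_p (rtx mem)
{
  /* A weak SYMBOL_REF can be a null address, and an arbitrary register
     address can point anywhere, so both count as possibly trapping.  */
  return !MEM_NOTRAP_P (mem) && rtx_addr_can_trap_p (XEXP (mem, 0));
}
#endif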
285 /* Return true if X is an address that is known to not be zero. */
287 bool
288 nonzero_address_p (rtx x)
290 enum rtx_code code = GET_CODE (x);
292 switch (code)
294 case SYMBOL_REF:
295 return !SYMBOL_REF_WEAK (x);
297 case LABEL_REF:
298 return true;
300 case REG:
301 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
302 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
303 || x == stack_pointer_rtx
304 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
305 return true;
306 /* All of the virtual frame registers are stack references. */
307 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
308 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
309 return true;
310 return false;
312 case CONST:
313 return nonzero_address_p (XEXP (x, 0));
315 case PLUS:
316 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
318 /* Pointers aren't allowed to wrap. If we've got a register
319 that is known to be a pointer, and a positive offset, then
320 the composite can't be zero. */
321 if (INTVAL (XEXP (x, 1)) > 0
322 && REG_P (XEXP (x, 0))
323 && REG_POINTER (XEXP (x, 0)))
324 return true;
326 return nonzero_address_p (XEXP (x, 0));
328 /* Handle PIC references. */
329 else if (XEXP (x, 0) == pic_offset_table_rtx
330 && CONSTANT_P (XEXP (x, 1)))
331 return true;
332 return false;
334 case PRE_MODIFY:
335 /* Similar to the above; allow positive offsets. Further, since
336 auto-inc is only allowed in memories, the register must be a
337 pointer. */
338 if (GET_CODE (XEXP (x, 1)) == CONST_INT
339 && INTVAL (XEXP (x, 1)) > 0)
340 return true;
341 return nonzero_address_p (XEXP (x, 0));
343 case PRE_INC:
344 /* Similarly. Further, the offset is always positive. */
345 return true;
347 case PRE_DEC:
348 case POST_DEC:
349 case POST_INC:
350 case POST_MODIFY:
351 return nonzero_address_p (XEXP (x, 0));
353 case LO_SUM:
354 return nonzero_address_p (XEXP (x, 1));
356 default:
357 break;
360   /* If it isn't one of the cases above, it might be zero.  */
361 return false;
364 /* Return 1 if X refers to a memory location whose address
365 cannot be compared reliably with constant addresses,
366 or if X refers to a BLKmode memory object.
367 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
368 zero, we are slightly more conservative. */
371 rtx_addr_varies_p (rtx x, int for_alias)
373 enum rtx_code code;
374 int i;
375 const char *fmt;
377 if (x == 0)
378 return 0;
380 code = GET_CODE (x);
381 if (code == MEM)
382 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
384 fmt = GET_RTX_FORMAT (code);
385 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
386 if (fmt[i] == 'e')
388 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
389 return 1;
391 else if (fmt[i] == 'E')
393 int j;
394 for (j = 0; j < XVECLEN (x, i); j++)
395 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
396 return 1;
398 return 0;
401 /* Return the value of the integer term in X, if one is apparent;
402 otherwise return 0.
403 Only obvious integer terms are detected.
404 This is used in cse.c with the `related_value' field. */
406 HOST_WIDE_INT
407 get_integer_term (rtx x)
409 if (GET_CODE (x) == CONST)
410 x = XEXP (x, 0);
412 if (GET_CODE (x) == MINUS
413 && GET_CODE (XEXP (x, 1)) == CONST_INT)
414 return - INTVAL (XEXP (x, 1));
415 if (GET_CODE (x) == PLUS
416 && GET_CODE (XEXP (x, 1)) == CONST_INT)
417 return INTVAL (XEXP (x, 1));
418 return 0;
421 /* If X is a constant, return the value sans apparent integer term;
422 otherwise return 0.
423 Only obvious integer terms are detected. */
426 get_related_value (rtx x)
428 if (GET_CODE (x) != CONST)
429 return 0;
430 x = XEXP (x, 0);
431 if (GET_CODE (x) == PLUS
432 && GET_CODE (XEXP (x, 1)) == CONST_INT)
433 return XEXP (x, 0);
434 else if (GET_CODE (x) == MINUS
435 && GET_CODE (XEXP (x, 1)) == CONST_INT)
436 return XEXP (x, 0);
437 return 0;
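/* Editorial usage sketch (not part of the original file): cse.c pairs
   get_related_value with get_integer_term to split a CONST address into a
   symbolic base and an integer offset.  For
   (const (plus (symbol_ref "x") (const_int 12))) the base is the SYMBOL_REF
   and the offset is 12.  The helper below is hypothetical.  */
#if 0
static void
split_const_address (rtx x, rtx *base, HOST_WIDE_INT *offset)
{
  *base = get_related_value (x);   /* 0 if X has no such form.  */
  *offset = get_integer_term (x);  /* 0 if no integer term is apparent.  */
}
#endif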
440 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
441 a global register. */
443 static int
444 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
446 int regno;
447 rtx x = *loc;
449 if (! x)
450 return 0;
452 switch (GET_CODE (x))
454 case SUBREG:
455 if (REG_P (SUBREG_REG (x)))
457 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
458 && global_regs[subreg_regno (x)])
459 return 1;
460 return 0;
462 break;
464 case REG:
465 regno = REGNO (x);
466 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
467 return 1;
468 return 0;
470 case SCRATCH:
471 case PC:
472 case CC0:
473 case CONST_INT:
474 case CONST_DOUBLE:
475 case CONST:
476 case LABEL_REF:
477 return 0;
479 case CALL:
480 /* A non-constant call might use a global register. */
481 return 1;
483 default:
484 break;
487 return 0;
490 /* Returns nonzero if X mentions a global register. */
493 global_reg_mentioned_p (rtx x)
495 if (INSN_P (x))
497 if (CALL_P (x))
499 if (! CONST_OR_PURE_CALL_P (x))
500 return 1;
501 x = CALL_INSN_FUNCTION_USAGE (x);
502 if (x == 0)
503 return 0;
505 else
506 x = PATTERN (x);
509 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
512 /* Return the number of places FIND appears within X. If COUNT_DEST is
513 zero, we do not count occurrences inside the destination of a SET. */
516 count_occurrences (rtx x, rtx find, int count_dest)
518 int i, j;
519 enum rtx_code code;
520 const char *format_ptr;
521 int count;
523 if (x == find)
524 return 1;
526 code = GET_CODE (x);
528 switch (code)
530 case REG:
531 case CONST_INT:
532 case CONST_DOUBLE:
533 case CONST_VECTOR:
534 case SYMBOL_REF:
535 case CODE_LABEL:
536 case PC:
537 case CC0:
538 return 0;
540 case MEM:
541 if (MEM_P (find) && rtx_equal_p (x, find))
542 return 1;
543 break;
545 case SET:
546 if (SET_DEST (x) == find && ! count_dest)
547 return count_occurrences (SET_SRC (x), find, count_dest);
548 break;
550 default:
551 break;
554 format_ptr = GET_RTX_FORMAT (code);
555 count = 0;
557 for (i = 0; i < GET_RTX_LENGTH (code); i++)
559 switch (*format_ptr++)
561 case 'e':
562 count += count_occurrences (XEXP (x, i), find, count_dest);
563 break;
565 case 'E':
566 for (j = 0; j < XVECLEN (x, i); j++)
567 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
568 break;
571 return count;
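/* Editorial usage sketch (not part of the original file): count_occurrences
   is commonly used to judge how expensive a substitution would be.  The
   hypothetical helper below counts uses of REG in INSN's pattern.  */
#if 0
static int
uses_of_reg (rtx insn, rtx reg)
{
  /* COUNT_DEST == 0, so an occurrence that is exactly the destination
     of a SET is not counted as a use.  */
  return count_occurrences (PATTERN (insn), reg, 0);
}
#endif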
574 /* Nonzero if register REG appears somewhere within IN.
575 Also works if REG is not a register; in this case it checks
576 for a subexpression of IN that is Lisp "equal" to REG. */
579 reg_mentioned_p (rtx reg, rtx in)
581 const char *fmt;
582 int i;
583 enum rtx_code code;
585 if (in == 0)
586 return 0;
588 if (reg == in)
589 return 1;
591 if (GET_CODE (in) == LABEL_REF)
592 return reg == XEXP (in, 0);
594 code = GET_CODE (in);
596 switch (code)
598 /* Compare registers by number. */
599 case REG:
600 return REG_P (reg) && REGNO (in) == REGNO (reg);
602 /* These codes have no constituent expressions
603 and are unique. */
604 case SCRATCH:
605 case CC0:
606 case PC:
607 return 0;
609 case CONST_INT:
610 case CONST_VECTOR:
611 case CONST_DOUBLE:
612 /* These are kept unique for a given value. */
613 return 0;
615 default:
616 break;
619 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
620 return 1;
622 fmt = GET_RTX_FORMAT (code);
624 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
626 if (fmt[i] == 'E')
628 int j;
629 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
630 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
631 return 1;
633 else if (fmt[i] == 'e'
634 && reg_mentioned_p (reg, XEXP (in, i)))
635 return 1;
637 return 0;
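/* Editorial usage sketch (not part of the original file): reg_mentioned_p
   answers "does REG appear anywhere inside IN?", which makes it a handy
   guard before rewriting REG; the helper name is hypothetical.  */
#if 0
static int
insn_independent_of_reg_p (rtx insn, rtx reg)
{
  /* If REG is never mentioned in INSN's pattern, changing REG elsewhere
     cannot change what INSN computes.  */
  return !reg_mentioned_p (reg, PATTERN (insn));
}
#endif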
640 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
641 no CODE_LABEL insn. */
644 no_labels_between_p (rtx beg, rtx end)
646 rtx p;
647 if (beg == end)
648 return 0;
649 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
650 if (LABEL_P (p))
651 return 0;
652 return 1;
655 /* Nonzero if register REG is used in an insn between
656 FROM_INSN and TO_INSN (exclusive of those two). */
659 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
661 rtx insn;
663 if (from_insn == to_insn)
664 return 0;
666 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
667 if (INSN_P (insn)
668 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
669 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
670 return 1;
671 return 0;
674 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
675 is entirely replaced by a new value and the only use is as a SET_DEST,
676 we do not consider it a reference. */
679 reg_referenced_p (rtx x, rtx body)
681 int i;
683 switch (GET_CODE (body))
685 case SET:
686 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
687 return 1;
689 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
690 of a REG that occupies all of the REG, the insn references X if
691 it is mentioned in the destination. */
692 if (GET_CODE (SET_DEST (body)) != CC0
693 && GET_CODE (SET_DEST (body)) != PC
694 && !REG_P (SET_DEST (body))
695 && ! (GET_CODE (SET_DEST (body)) == SUBREG
696 && REG_P (SUBREG_REG (SET_DEST (body)))
697 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
698 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
699 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
700 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
701 && reg_overlap_mentioned_p (x, SET_DEST (body)))
702 return 1;
703 return 0;
705 case ASM_OPERANDS:
706 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
707 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
708 return 1;
709 return 0;
711 case CALL:
712 case USE:
713 case IF_THEN_ELSE:
714 return reg_overlap_mentioned_p (x, body);
716 case TRAP_IF:
717 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
719 case PREFETCH:
720 return reg_overlap_mentioned_p (x, XEXP (body, 0));
722 case UNSPEC:
723 case UNSPEC_VOLATILE:
724 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
725 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
726 return 1;
727 return 0;
729 case PARALLEL:
730 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
731 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
732 return 1;
733 return 0;
735 case CLOBBER:
736 if (MEM_P (XEXP (body, 0)))
737 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
738 return 1;
739 return 0;
741 case COND_EXEC:
742 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
743 return 1;
744 return reg_referenced_p (x, COND_EXEC_CODE (body));
746 default:
747 return 0;
751 /* Nonzero if register REG is set or clobbered in an insn between
752 FROM_INSN and TO_INSN (exclusive of those two). */
755 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
757 rtx insn;
759 if (from_insn == to_insn)
760 return 0;
762 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
763 if (INSN_P (insn) && reg_set_p (reg, insn))
764 return 1;
765 return 0;
768 /* Internals of reg_set_between_p. */
770 reg_set_p (rtx reg, rtx insn)
772 /* We can be passed an insn or part of one. If we are passed an insn,
773 check if a side-effect of the insn clobbers REG. */
774 if (INSN_P (insn)
775 && (FIND_REG_INC_NOTE (insn, reg)
776 || (CALL_P (insn)
777 && ((REG_P (reg)
778 && REGNO (reg) < FIRST_PSEUDO_REGISTER
779 && TEST_HARD_REG_BIT (regs_invalidated_by_call,
780 REGNO (reg)))
781 || MEM_P (reg)
782 || find_reg_fusage (insn, CLOBBER, reg)))))
783 return 1;
785 return set_of (reg, insn) != NULL_RTX;
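/* Editorial usage sketch (not part of the original file): a typical safety
   check before moving an insn that reads REG across the range (BEFORE,
   AFTER); the helper name is hypothetical.  */
#if 0
static int
reg_unchanged_over_range_p (rtx reg, rtx before, rtx after)
{
  /* True when no insn strictly between BEFORE and AFTER sets or clobbers
     REG; the two endpoints themselves are not examined.  */
  return !reg_set_between_p (reg, before, after);
}
#endif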
788 /* Similar to reg_set_between_p, but check all registers in X. Return 0
789 only if none of them are modified between START and END. Return 1 if
790    X contains a MEM; this routine does use memory aliasing.  */
793 modified_between_p (rtx x, rtx start, rtx end)
795 enum rtx_code code = GET_CODE (x);
796 const char *fmt;
797 int i, j;
798 rtx insn;
800 if (start == end)
801 return 0;
803 switch (code)
805 case CONST_INT:
806 case CONST_DOUBLE:
807 case CONST_VECTOR:
808 case CONST:
809 case SYMBOL_REF:
810 case LABEL_REF:
811 return 0;
813 case PC:
814 case CC0:
815 return 1;
817 case MEM:
818 if (modified_between_p (XEXP (x, 0), start, end))
819 return 1;
820 if (MEM_READONLY_P (x))
821 return 0;
822 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
823 if (memory_modified_in_insn_p (x, insn))
824 return 1;
825 return 0;
826 break;
828 case REG:
829 return reg_set_between_p (x, start, end);
831 default:
832 break;
835 fmt = GET_RTX_FORMAT (code);
836 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
838 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
839 return 1;
841 else if (fmt[i] == 'E')
842 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
843 if (modified_between_p (XVECEXP (x, i, j), start, end))
844 return 1;
847 return 0;
850 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
851 of them are modified in INSN. Return 1 if X contains a MEM; this routine
852 does use memory aliasing. */
855 modified_in_p (rtx x, rtx insn)
857 enum rtx_code code = GET_CODE (x);
858 const char *fmt;
859 int i, j;
861 switch (code)
863 case CONST_INT:
864 case CONST_DOUBLE:
865 case CONST_VECTOR:
866 case CONST:
867 case SYMBOL_REF:
868 case LABEL_REF:
869 return 0;
871 case PC:
872 case CC0:
873 return 1;
875 case MEM:
876 if (modified_in_p (XEXP (x, 0), insn))
877 return 1;
878 if (MEM_READONLY_P (x))
879 return 0;
880 if (memory_modified_in_insn_p (x, insn))
881 return 1;
882 return 0;
883 break;
885 case REG:
886 return reg_set_p (x, insn);
888 default:
889 break;
892 fmt = GET_RTX_FORMAT (code);
893 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
895 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
896 return 1;
898 else if (fmt[i] == 'E')
899 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
900 if (modified_in_p (XVECEXP (x, i, j), insn))
901 return 1;
904 return 0;
907 /* Helper function for set_of. */
908 struct set_of_data
910 rtx found;
911 rtx pat;
914 static void
915 set_of_1 (rtx x, rtx pat, void *data1)
917 struct set_of_data *data = (struct set_of_data *) (data1);
918 if (rtx_equal_p (x, data->pat)
919 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
920 data->found = pat;
923 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
924 (either directly or via STRICT_LOW_PART and similar modifiers). */
926 set_of (rtx pat, rtx insn)
928 struct set_of_data data;
929 data.found = NULL_RTX;
930 data.pat = pat;
931 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
932 return data.found;
935 /* Given an INSN, return a SET expression if this insn has only a single SET.
936    It may also have CLOBBERs, USEs, or SETs whose output
937 will not be used, which we ignore. */
940 single_set_2 (rtx insn, rtx pat)
942 rtx set = NULL;
943 int set_verified = 1;
944 int i;
946 if (GET_CODE (pat) == PARALLEL)
948 for (i = 0; i < XVECLEN (pat, 0); i++)
950 rtx sub = XVECEXP (pat, 0, i);
951 switch (GET_CODE (sub))
953 case USE:
954 case CLOBBER:
955 break;
957 case SET:
958         /* We can consider insns having multiple sets, where all
959            but one are dead, as single set insns.  In the common case
960            only a single set is present in the pattern, so we want
961            to avoid checking for REG_UNUSED notes unless necessary.
963 When we reach set first time, we just expect this is
964 the single set we are looking for and only when more
965 sets are found in the insn, we check them. */
966 if (!set_verified)
968 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
969 && !side_effects_p (set))
970 set = NULL;
971 else
972 set_verified = 1;
974 if (!set)
975 set = sub, set_verified = 0;
976 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
977 || side_effects_p (sub))
978 return NULL_RTX;
979 break;
981 default:
982 return NULL_RTX;
986 return set;
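/* Editorial usage sketch (not part of the original file): most callers
   reach single_set_2 through the single_set wrapper declared in rtl.h and
   then work on the SET's operands; the helper below is hypothetical.  */
#if 0
static rtx
source_of_simple_reg_set (rtx insn)
{
  rtx set = single_set (insn);

  /* single_set returns NULL_RTX for non-SET patterns and for PARALLELs
     with more than one live SET.  */
  if (set && REG_P (SET_DEST (set)))
    return SET_SRC (set);
  return NULL_RTX;
}
#endif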
989 /* Given an INSN, return nonzero if it has more than one SET, else return
990 zero. */
993 multiple_sets (rtx insn)
995 int found;
996 int i;
998 /* INSN must be an insn. */
999 if (! INSN_P (insn))
1000 return 0;
1002 /* Only a PARALLEL can have multiple SETs. */
1003 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1005 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1006 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1008 /* If we have already found a SET, then return now. */
1009 if (found)
1010 return 1;
1011 else
1012 found = 1;
1016 /* Either zero or one SET. */
1017 return 0;
1020 /* Return nonzero if the destination of SET equals the source
1021 and there are no side effects. */
1024 set_noop_p (rtx set)
1026 rtx src = SET_SRC (set);
1027 rtx dst = SET_DEST (set);
1029 if (dst == pc_rtx && src == pc_rtx)
1030 return 1;
1032 if (MEM_P (dst) && MEM_P (src))
1033 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1035 if (GET_CODE (dst) == ZERO_EXTRACT)
1036 return rtx_equal_p (XEXP (dst, 0), src)
1037 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1038 && !side_effects_p (src);
1040 if (GET_CODE (dst) == STRICT_LOW_PART)
1041 dst = XEXP (dst, 0);
1043 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1045 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1046 return 0;
1047 src = SUBREG_REG (src);
1048 dst = SUBREG_REG (dst);
1051 return (REG_P (src) && REG_P (dst)
1052 && REGNO (src) == REGNO (dst));
1055 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1056 value to itself. */
1059 noop_move_p (rtx insn)
1061 rtx pat = PATTERN (insn);
1063 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1064 return 1;
1066 /* Insns carrying these notes are useful later on. */
1067 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1068 return 0;
1070   /* For now treat an insn with a REG_RETVAL note as a
1071      special insn which should not be considered a no-op.  */
1072 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1073 return 0;
1075 if (GET_CODE (pat) == SET && set_noop_p (pat))
1076 return 1;
1078 if (GET_CODE (pat) == PARALLEL)
1080 int i;
1081 /* If nothing but SETs of registers to themselves,
1082 this insn can also be deleted. */
1083 for (i = 0; i < XVECLEN (pat, 0); i++)
1085 rtx tem = XVECEXP (pat, 0, i);
1087 if (GET_CODE (tem) == USE
1088 || GET_CODE (tem) == CLOBBER)
1089 continue;
1091 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1092 return 0;
1095 return 1;
1097 return 0;
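/* Editorial usage sketch (not part of the original file): cleanup passes
   drop no-op moves roughly as below.  delete_insn lives elsewhere in the
   compiler, and the loop is schematic.  */
#if 0
static void
remove_noop_moves_sketch (rtx first)
{
  rtx insn, next;

  for (insn = first; insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn) && noop_move_p (insn))
        delete_insn (insn);
    }
}
#endif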
1101 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1102 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1103 If the object was modified, if we hit a partial assignment to X, or hit a
1104 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1105 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1106 be the src. */
1109 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1111 rtx p;
1113 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1114 p = PREV_INSN (p))
1115 if (INSN_P (p))
1117 rtx set = single_set (p);
1118 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1120 if (set && rtx_equal_p (x, SET_DEST (set)))
1122 rtx src = SET_SRC (set);
1124 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1125 src = XEXP (note, 0);
1127 if ((valid_to == NULL_RTX
1128 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1129 /* Reject hard registers because we don't usually want
1130 to use them; we'd rather use a pseudo. */
1131 && (! (REG_P (src)
1132 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1134 *pinsn = p;
1135 return src;
1139 /* If set in non-simple way, we don't have a value. */
1140 if (reg_set_p (x, p))
1141 break;
1144 return x;
1147 /* Return nonzero if register in range [REGNO, ENDREGNO)
1148 appears either explicitly or implicitly in X
1149 other than being stored into.
1151 References contained within the substructure at LOC do not count.
1152 LOC may be zero, meaning don't ignore anything. */
1155 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1156 rtx *loc)
1158 int i;
1159 unsigned int x_regno;
1160 RTX_CODE code;
1161 const char *fmt;
1163 repeat:
1164   /* The contents of a REG_NONNEG note are always zero, so we must come here
1165 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1166 if (x == 0)
1167 return 0;
1169 code = GET_CODE (x);
1171 switch (code)
1173 case REG:
1174 x_regno = REGNO (x);
1176       /* If we are modifying the stack, frame, or argument pointer, it will
1177 clobber a virtual register. In fact, we could be more precise,
1178 but it isn't worth it. */
1179 if ((x_regno == STACK_POINTER_REGNUM
1180 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1181 || x_regno == ARG_POINTER_REGNUM
1182 #endif
1183 || x_regno == FRAME_POINTER_REGNUM)
1184 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1185 return 1;
1187 return (endregno > x_regno
1188 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1189 ? hard_regno_nregs[x_regno][GET_MODE (x)]
1190 : 1));
1192 case SUBREG:
1193 /* If this is a SUBREG of a hard reg, we can see exactly which
1194 registers are being modified. Otherwise, handle normally. */
1195 if (REG_P (SUBREG_REG (x))
1196 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1198 unsigned int inner_regno = subreg_regno (x);
1199 unsigned int inner_endregno
1200 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1201 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
1203 return endregno > inner_regno && regno < inner_endregno;
1205 break;
1207 case CLOBBER:
1208 case SET:
1209 if (&SET_DEST (x) != loc
1210 /* Note setting a SUBREG counts as referring to the REG it is in for
1211 a pseudo but not for hard registers since we can
1212 treat each word individually. */
1213 && ((GET_CODE (SET_DEST (x)) == SUBREG
1214 && loc != &SUBREG_REG (SET_DEST (x))
1215 && REG_P (SUBREG_REG (SET_DEST (x)))
1216 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1217 && refers_to_regno_p (regno, endregno,
1218 SUBREG_REG (SET_DEST (x)), loc))
1219 || (!REG_P (SET_DEST (x))
1220 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1221 return 1;
1223 if (code == CLOBBER || loc == &SET_SRC (x))
1224 return 0;
1225 x = SET_SRC (x);
1226 goto repeat;
1228 default:
1229 break;
1232 /* X does not match, so try its subexpressions. */
1234 fmt = GET_RTX_FORMAT (code);
1235 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1237 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1239 if (i == 0)
1241 x = XEXP (x, 0);
1242 goto repeat;
1244 else
1245 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1246 return 1;
1248 else if (fmt[i] == 'E')
1250 int j;
1251 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1252 if (loc != &XVECEXP (x, i, j)
1253 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1254 return 1;
1257 return 0;
1260 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1261 we check if any register number in X conflicts with the relevant register
1262 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1263 contains a MEM (we don't bother checking for memory addresses that can't
1264    conflict because we expect this to be a rare case).  */
1267 reg_overlap_mentioned_p (rtx x, rtx in)
1269 unsigned int regno, endregno;
1271 /* If either argument is a constant, then modifying X can not
1272      affect IN.  Here we look at IN; we can profitably combine
1273 CONSTANT_P (x) with the switch statement below. */
1274 if (CONSTANT_P (in))
1275 return 0;
1277 recurse:
1278 switch (GET_CODE (x))
1280 case STRICT_LOW_PART:
1281 case ZERO_EXTRACT:
1282 case SIGN_EXTRACT:
1283 /* Overly conservative. */
1284 x = XEXP (x, 0);
1285 goto recurse;
1287 case SUBREG:
1288 regno = REGNO (SUBREG_REG (x));
1289 if (regno < FIRST_PSEUDO_REGISTER)
1290 regno = subreg_regno (x);
1291 goto do_reg;
1293 case REG:
1294 regno = REGNO (x);
1295 do_reg:
1296 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1297 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1298 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1300 case MEM:
1302 const char *fmt;
1303 int i;
1305 if (MEM_P (in))
1306 return 1;
1308 fmt = GET_RTX_FORMAT (GET_CODE (in));
1309 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1310 if (fmt[i] == 'e')
1312 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1313 return 1;
1315 else if (fmt[i] == 'E')
1317 int j;
1318 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1319 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1320 return 1;
1323 return 0;
1326 case SCRATCH:
1327 case PC:
1328 case CC0:
1329 return reg_mentioned_p (x, in);
1331 case PARALLEL:
1333 int i;
1335 /* If any register in here refers to it we return true. */
1336 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1337 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1338 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1339 return 1;
1340 return 0;
1343 default:
1344 gcc_assert (CONSTANT_P (x));
1345 return 0;
1349 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1350 (X would be the pattern of an insn).
1351 FUN receives two arguments:
1352 the REG, MEM, CC0 or PC being stored in or clobbered,
1353 the SET or CLOBBER rtx that does the store.
1355 If the item being stored in or clobbered is a SUBREG of a hard register,
1356 the SUBREG will be passed. */
1358 void
1359 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1361 int i;
1363 if (GET_CODE (x) == COND_EXEC)
1364 x = COND_EXEC_CODE (x);
1366 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1368 rtx dest = SET_DEST (x);
1370 while ((GET_CODE (dest) == SUBREG
1371 && (!REG_P (SUBREG_REG (dest))
1372 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1373 || GET_CODE (dest) == ZERO_EXTRACT
1374 || GET_CODE (dest) == STRICT_LOW_PART)
1375 dest = XEXP (dest, 0);
1377 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1378 each of whose first operand is a register. */
1379 if (GET_CODE (dest) == PARALLEL)
1381 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1382 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1383 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1385 else
1386 (*fun) (dest, x, data);
1389 else if (GET_CODE (x) == PARALLEL)
1390 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1391 note_stores (XVECEXP (x, 0, i), fun, data);
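/* Editorial usage sketch (not part of the original file): note_stores is
   driven by a small callback.  The callback below records which hard
   registers an insn stores into; a complete version would also mark every
   additional hard register covered by a multi-word store.  */
#if 0
static void
record_hard_reg_store (rtx dest, rtx set ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *stored = (HARD_REG_SET *) data;

  if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    SET_HARD_REG_BIT (*stored, REGNO (dest));
}

/* Typical call site:
     note_stores (PATTERN (insn), record_hard_reg_store, &stored);  */
#endif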
1394 /* Like note_stores, but call FUN for each expression that is being
1395 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1396 FUN for each expression, not any interior subexpressions. FUN receives a
1397 pointer to the expression and the DATA passed to this function.
1399 Note that this is not quite the same test as that done in reg_referenced_p
1400 since that considers something as being referenced if it is being
1401 partially set, while we do not. */
1403 void
1404 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1406 rtx body = *pbody;
1407 int i;
1409 switch (GET_CODE (body))
1411 case COND_EXEC:
1412 (*fun) (&COND_EXEC_TEST (body), data);
1413 note_uses (&COND_EXEC_CODE (body), fun, data);
1414 return;
1416 case PARALLEL:
1417 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1418 note_uses (&XVECEXP (body, 0, i), fun, data);
1419 return;
1421 case USE:
1422 (*fun) (&XEXP (body, 0), data);
1423 return;
1425 case ASM_OPERANDS:
1426 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1427 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1428 return;
1430 case TRAP_IF:
1431 (*fun) (&TRAP_CONDITION (body), data);
1432 return;
1434 case PREFETCH:
1435 (*fun) (&XEXP (body, 0), data);
1436 return;
1438 case UNSPEC:
1439 case UNSPEC_VOLATILE:
1440 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1441 (*fun) (&XVECEXP (body, 0, i), data);
1442 return;
1444 case CLOBBER:
1445 if (MEM_P (XEXP (body, 0)))
1446 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1447 return;
1449 case SET:
1451 rtx dest = SET_DEST (body);
1453         /* For sets we replace everything in the source plus registers in the
1454            memory expression of the store and the operands of a ZERO_EXTRACT.  */
1455 (*fun) (&SET_SRC (body), data);
1457 if (GET_CODE (dest) == ZERO_EXTRACT)
1459 (*fun) (&XEXP (dest, 1), data);
1460 (*fun) (&XEXP (dest, 2), data);
1463 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1464 dest = XEXP (dest, 0);
1466 if (MEM_P (dest))
1467 (*fun) (&XEXP (dest, 0), data);
1469 return;
1471 default:
1472 /* All the other possibilities never store. */
1473 (*fun) (pbody, data);
1474 return;
1478 /* Return nonzero if X's old contents don't survive after INSN.
1479 This will be true if X is (cc0) or if X is a register and
1480    X dies in INSN or INSN entirely sets X.
1482 "Entirely set" means set directly and not through a SUBREG, or
1483 ZERO_EXTRACT, so no trace of the old contents remains.
1484 Likewise, REG_INC does not count.
1486 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1487 but for this use that makes no difference, since regs don't overlap
1488 during their lifetimes. Therefore, this function may be used
1489 at any time after deaths have been computed (in flow.c).
1491 If REG is a hard reg that occupies multiple machine registers, this
1492 function will only return 1 if each of those registers will be replaced
1493 by INSN. */
1496 dead_or_set_p (rtx insn, rtx x)
1498 unsigned int regno, last_regno;
1499 unsigned int i;
1501 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1502 if (GET_CODE (x) == CC0)
1503 return 1;
1505 gcc_assert (REG_P (x));
1507 regno = REGNO (x);
1508 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1509 : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
1511 for (i = regno; i <= last_regno; i++)
1512 if (! dead_or_set_regno_p (insn, i))
1513 return 0;
1515 return 1;
1518 /* Return TRUE iff DEST is a register or subreg of a register and
1519 doesn't change the number of words of the inner register, and any
1520 part of the register is TEST_REGNO. */
1522 static bool
1523 covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
1525 unsigned int regno, endregno;
1527 if (GET_CODE (dest) == SUBREG
1528 && (((GET_MODE_SIZE (GET_MODE (dest))
1529 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1530 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1531 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1532 dest = SUBREG_REG (dest);
1534 if (!REG_P (dest))
1535 return false;
1537 regno = REGNO (dest);
1538 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1539 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1540 return (test_regno >= regno && test_regno < endregno);
1543 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1544 any member matches the covers_regno_no_parallel_p criteria. */
1546 static bool
1547 covers_regno_p (rtx dest, unsigned int test_regno)
1549 if (GET_CODE (dest) == PARALLEL)
1551 /* Some targets place small structures in registers for return
1552 values of functions, and those registers are wrapped in
1553 PARALLELs that we may see as the destination of a SET. */
1554 int i;
1556 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1558 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1559 if (inner != NULL_RTX
1560 && covers_regno_no_parallel_p (inner, test_regno))
1561 return true;
1564 return false;
1566 else
1567 return covers_regno_no_parallel_p (dest, test_regno);
1570 /* Utility function for dead_or_set_p to check an individual register. Also
1571 called from flow.c. */
1574 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1576 rtx pattern;
1578 /* See if there is a death note for something that includes TEST_REGNO. */
1579 if (find_regno_note (insn, REG_DEAD, test_regno))
1580 return 1;
1582 if (CALL_P (insn)
1583 && find_regno_fusage (insn, CLOBBER, test_regno))
1584 return 1;
1586 pattern = PATTERN (insn);
1588 if (GET_CODE (pattern) == COND_EXEC)
1589 pattern = COND_EXEC_CODE (pattern);
1591 if (GET_CODE (pattern) == SET)
1592 return covers_regno_p (SET_DEST (pattern), test_regno);
1593 else if (GET_CODE (pattern) == PARALLEL)
1595 int i;
1597 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1599 rtx body = XVECEXP (pattern, 0, i);
1601 if (GET_CODE (body) == COND_EXEC)
1602 body = COND_EXEC_CODE (body);
1604 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1605 && covers_regno_p (SET_DEST (body), test_regno))
1606 return 1;
1610 return 0;
1613 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1614 If DATUM is nonzero, look for one whose datum is DATUM. */
1617 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1619 rtx link;
1621 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1622 if (! INSN_P (insn))
1623 return 0;
1624 if (datum == 0)
1626 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1627 if (REG_NOTE_KIND (link) == kind)
1628 return link;
1629 return 0;
1632 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1633 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1634 return link;
1635 return 0;
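/* Editorial usage sketch (not part of the original file): a common idiom
   fetches a REG_EQUAL note and prefers its operand over the raw SET_SRC;
   the helper name is hypothetical.  */
#if 0
static rtx
known_value_of_insn (rtx insn)
{
  rtx set = single_set (insn);
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  if (note)
    return XEXP (note, 0);  /* The value the note records.  */
  return set ? SET_SRC (set) : NULL_RTX;
}
#endif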
1638 /* Return the reg-note of kind KIND in insn INSN which applies to register
1639 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1640 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1641 it might be the case that the note overlaps REGNO. */
1644 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1646 rtx link;
1648 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1649 if (! INSN_P (insn))
1650 return 0;
1652 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1653 if (REG_NOTE_KIND (link) == kind
1654 /* Verify that it is a register, so that scratch and MEM won't cause a
1655 problem here. */
1656 && REG_P (XEXP (link, 0))
1657 && REGNO (XEXP (link, 0)) <= regno
1658 && ((REGNO (XEXP (link, 0))
1659 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1660 : hard_regno_nregs[REGNO (XEXP (link, 0))]
1661 [GET_MODE (XEXP (link, 0))]))
1662 > regno))
1663 return link;
1664 return 0;
1667 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1668 has such a note. */
1671 find_reg_equal_equiv_note (rtx insn)
1673 rtx link;
1675 if (!INSN_P (insn))
1676 return 0;
1677 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1678 if (REG_NOTE_KIND (link) == REG_EQUAL
1679 || REG_NOTE_KIND (link) == REG_EQUIV)
1681 if (single_set (insn) == 0)
1682 return 0;
1683 return link;
1685 return NULL;
1688 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1689 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1692 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1694 /* If it's not a CALL_INSN, it can't possibly have a
1695 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1696 if (!CALL_P (insn))
1697 return 0;
1699 gcc_assert (datum);
1701 if (!REG_P (datum))
1703 rtx link;
1705 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1706 link;
1707 link = XEXP (link, 1))
1708 if (GET_CODE (XEXP (link, 0)) == code
1709 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1710 return 1;
1712 else
1714 unsigned int regno = REGNO (datum);
1716 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1717 to pseudo registers, so don't bother checking. */
1719 if (regno < FIRST_PSEUDO_REGISTER)
1721 unsigned int end_regno
1722 = regno + hard_regno_nregs[regno][GET_MODE (datum)];
1723 unsigned int i;
1725 for (i = regno; i < end_regno; i++)
1726 if (find_regno_fusage (insn, code, i))
1727 return 1;
1731 return 0;
1734 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1735 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1738 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
1740 rtx link;
1742 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1743 to pseudo registers, so don't bother checking. */
1745 if (regno >= FIRST_PSEUDO_REGISTER
1746 || !CALL_P (insn) )
1747 return 0;
1749 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1751 unsigned int regnote;
1752 rtx op, reg;
1754 if (GET_CODE (op = XEXP (link, 0)) == code
1755 && REG_P (reg = XEXP (op, 0))
1756 && (regnote = REGNO (reg)) <= regno
1757 && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
1758 return 1;
1761 return 0;
1764 /* Return true if INSN is a call to a pure function. */
1767 pure_call_p (rtx insn)
1769 rtx link;
1771 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1772 return 0;
1774 /* Look for the note that differentiates const and pure functions. */
1775 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1777 rtx u, m;
1779 if (GET_CODE (u = XEXP (link, 0)) == USE
1780 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1781 && GET_CODE (XEXP (m, 0)) == SCRATCH)
1782 return 1;
1785 return 0;
1788 /* Remove register note NOTE from the REG_NOTES of INSN. */
1790 void
1791 remove_note (rtx insn, rtx note)
1793 rtx link;
1795 if (note == NULL_RTX)
1796 return;
1798 if (REG_NOTES (insn) == note)
1800 REG_NOTES (insn) = XEXP (note, 1);
1801 return;
1804 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1805 if (XEXP (link, 1) == note)
1807 XEXP (link, 1) = XEXP (note, 1);
1808 return;
1811 gcc_unreachable ();
1814 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1815 return 1 if it is found. A simple equality test is used to determine if
1816 NODE matches. */
1819 in_expr_list_p (rtx listp, rtx node)
1821 rtx x;
1823 for (x = listp; x; x = XEXP (x, 1))
1824 if (node == XEXP (x, 0))
1825 return 1;
1827 return 0;
1830 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1831 remove that entry from the list if it is found.
1833 A simple equality test is used to determine if NODE matches. */
1835 void
1836 remove_node_from_expr_list (rtx node, rtx *listp)
1838 rtx temp = *listp;
1839 rtx prev = NULL_RTX;
1841 while (temp)
1843 if (node == XEXP (temp, 0))
1845 /* Splice the node out of the list. */
1846 if (prev)
1847 XEXP (prev, 1) = XEXP (temp, 1);
1848 else
1849 *listp = XEXP (temp, 1);
1851 return;
1854 prev = temp;
1855 temp = XEXP (temp, 1);
1859 /* Nonzero if X contains any volatile instructions. These are instructions
1860    which may cause unpredictable machine state, and thus no
1861 instructions should be moved or combined across them. This includes
1862 only volatile asms and UNSPEC_VOLATILE instructions. */
1865 volatile_insn_p (rtx x)
1867 RTX_CODE code;
1869 code = GET_CODE (x);
1870 switch (code)
1872 case LABEL_REF:
1873 case SYMBOL_REF:
1874 case CONST_INT:
1875 case CONST:
1876 case CONST_DOUBLE:
1877 case CONST_VECTOR:
1878 case CC0:
1879 case PC:
1880 case REG:
1881 case SCRATCH:
1882 case CLOBBER:
1883 case ADDR_VEC:
1884 case ADDR_DIFF_VEC:
1885 case CALL:
1886 case MEM:
1887 return 0;
1889 case UNSPEC_VOLATILE:
1890 /* case TRAP_IF: This isn't clear yet. */
1891 return 1;
1893 case ASM_INPUT:
1894 case ASM_OPERANDS:
1895 if (MEM_VOLATILE_P (x))
1896 return 1;
1898 default:
1899 break;
1902 /* Recursively scan the operands of this expression. */
1905 const char *fmt = GET_RTX_FORMAT (code);
1906 int i;
1908 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1910 if (fmt[i] == 'e')
1912 if (volatile_insn_p (XEXP (x, i)))
1913 return 1;
1915 else if (fmt[i] == 'E')
1917 int j;
1918 for (j = 0; j < XVECLEN (x, i); j++)
1919 if (volatile_insn_p (XVECEXP (x, i, j)))
1920 return 1;
1924 return 0;
1927 /* Nonzero if X contains any volatile memory references
1928 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
1931 volatile_refs_p (rtx x)
1933 RTX_CODE code;
1935 code = GET_CODE (x);
1936 switch (code)
1938 case LABEL_REF:
1939 case SYMBOL_REF:
1940 case CONST_INT:
1941 case CONST:
1942 case CONST_DOUBLE:
1943 case CONST_VECTOR:
1944 case CC0:
1945 case PC:
1946 case REG:
1947 case SCRATCH:
1948 case CLOBBER:
1949 case ADDR_VEC:
1950 case ADDR_DIFF_VEC:
1951 return 0;
1953 case UNSPEC_VOLATILE:
1954 return 1;
1956 case MEM:
1957 case ASM_INPUT:
1958 case ASM_OPERANDS:
1959 if (MEM_VOLATILE_P (x))
1960 return 1;
1962 default:
1963 break;
1966 /* Recursively scan the operands of this expression. */
1969 const char *fmt = GET_RTX_FORMAT (code);
1970 int i;
1972 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1974 if (fmt[i] == 'e')
1976 if (volatile_refs_p (XEXP (x, i)))
1977 return 1;
1979 else if (fmt[i] == 'E')
1981 int j;
1982 for (j = 0; j < XVECLEN (x, i); j++)
1983 if (volatile_refs_p (XVECEXP (x, i, j)))
1984 return 1;
1988 return 0;
1991 /* Similar to above, except that it also rejects register pre- and post-
1992 incrementing. */
1995 side_effects_p (rtx x)
1997 RTX_CODE code;
1999 code = GET_CODE (x);
2000 switch (code)
2002 case LABEL_REF:
2003 case SYMBOL_REF:
2004 case CONST_INT:
2005 case CONST:
2006 case CONST_DOUBLE:
2007 case CONST_VECTOR:
2008 case CC0:
2009 case PC:
2010 case REG:
2011 case SCRATCH:
2012 case ADDR_VEC:
2013 case ADDR_DIFF_VEC:
2014 return 0;
2016 case CLOBBER:
2017 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2018 when some combination can't be done. If we see one, don't think
2019 that we can simplify the expression. */
2020 return (GET_MODE (x) != VOIDmode);
2022 case PRE_INC:
2023 case PRE_DEC:
2024 case POST_INC:
2025 case POST_DEC:
2026 case PRE_MODIFY:
2027 case POST_MODIFY:
2028 case CALL:
2029 case UNSPEC_VOLATILE:
2030 /* case TRAP_IF: This isn't clear yet. */
2031 return 1;
2033 case MEM:
2034 case ASM_INPUT:
2035 case ASM_OPERANDS:
2036 if (MEM_VOLATILE_P (x))
2037 return 1;
2039 default:
2040 break;
2043 /* Recursively scan the operands of this expression. */
2046 const char *fmt = GET_RTX_FORMAT (code);
2047 int i;
2049 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2051 if (fmt[i] == 'e')
2053 if (side_effects_p (XEXP (x, i)))
2054 return 1;
2056 else if (fmt[i] == 'E')
2058 int j;
2059 for (j = 0; j < XVECLEN (x, i); j++)
2060 if (side_effects_p (XVECEXP (x, i, j)))
2061 return 1;
2065 return 0;
2068 /* Return nonzero if evaluating rtx X might cause a trap. */
2071 may_trap_p (rtx x)
2073 int i;
2074 enum rtx_code code;
2075 const char *fmt;
2077 if (x == 0)
2078 return 0;
2079 code = GET_CODE (x);
2080 switch (code)
2082 /* Handle these cases quickly. */
2083 case CONST_INT:
2084 case CONST_DOUBLE:
2085 case CONST_VECTOR:
2086 case SYMBOL_REF:
2087 case LABEL_REF:
2088 case CONST:
2089 case PC:
2090 case CC0:
2091 case REG:
2092 case SCRATCH:
2093 return 0;
2095 case ASM_INPUT:
2096 case UNSPEC_VOLATILE:
2097 case TRAP_IF:
2098 return 1;
2100 case ASM_OPERANDS:
2101 return MEM_VOLATILE_P (x);
2103 /* Memory ref can trap unless it's a static var or a stack slot. */
2104 case MEM:
2105 if (MEM_NOTRAP_P (x))
2106 return 0;
2107 return rtx_addr_can_trap_p (XEXP (x, 0));
2109 /* Division by a non-constant might trap. */
2110 case DIV:
2111 case MOD:
2112 case UDIV:
2113 case UMOD:
2114 if (HONOR_SNANS (GET_MODE (x)))
2115 return 1;
2116 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2117 return flag_trapping_math;
2118 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2119 return 1;
2120 break;
2122 case EXPR_LIST:
2123 /* An EXPR_LIST is used to represent a function call. This
2124 certainly may trap. */
2125 return 1;
2127 case GE:
2128 case GT:
2129 case LE:
2130 case LT:
2131 case LTGT:
2132 case COMPARE:
2133 /* Some floating point comparisons may trap. */
2134 if (!flag_trapping_math)
2135 break;
2136 /* ??? There is no machine independent way to check for tests that trap
2137 when COMPARE is used, though many targets do make this distinction.
2138 For instance, sparc uses CCFPE for compares which generate exceptions
2139 and CCFP for compares which do not generate exceptions. */
2140 if (HONOR_NANS (GET_MODE (x)))
2141 return 1;
2142 /* But often the compare has some CC mode, so check operand
2143 modes as well. */
2144 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2145 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2146 return 1;
2147 break;
2149 case EQ:
2150 case NE:
2151 if (HONOR_SNANS (GET_MODE (x)))
2152 return 1;
2153 /* Often comparison is CC mode, so check operand modes. */
2154 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2155 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2156 return 1;
2157 break;
2159 case FIX:
2160 /* Conversion of floating point might trap. */
2161 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2162 return 1;
2163 break;
2165 case NEG:
2166 case ABS:
2167 case SUBREG:
2168 /* These operations don't trap even with floating point. */
2169 break;
2171 default:
2172 /* Any floating arithmetic may trap. */
2173 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2174 && flag_trapping_math)
2175 return 1;
2178 fmt = GET_RTX_FORMAT (code);
2179 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2181 if (fmt[i] == 'e')
2183 if (may_trap_p (XEXP (x, i)))
2184 return 1;
2186 else if (fmt[i] == 'E')
2188 int j;
2189 for (j = 0; j < XVECLEN (x, i); j++)
2190 if (may_trap_p (XVECEXP (x, i, j)))
2191 return 1;
2194 return 0;
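/* Editorial usage sketch (not part of the original file): speculative code
   motion gates on both trapping and other side effects, roughly as below;
   the helper name is hypothetical.  */
#if 0
static int
safe_to_speculate_p (rtx x)
{
  /* X may be evaluated before its guarding condition only if doing so can
     neither fault (may_trap_p) nor perform a visible action
     (side_effects_p).  */
  return !may_trap_p (x) && !side_effects_p (x);
}
#endif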
2197 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2198 i.e., an inequality. */
2201 inequality_comparisons_p (rtx x)
2203 const char *fmt;
2204 int len, i;
2205 enum rtx_code code = GET_CODE (x);
2207 switch (code)
2209 case REG:
2210 case SCRATCH:
2211 case PC:
2212 case CC0:
2213 case CONST_INT:
2214 case CONST_DOUBLE:
2215 case CONST_VECTOR:
2216 case CONST:
2217 case LABEL_REF:
2218 case SYMBOL_REF:
2219 return 0;
2221 case LT:
2222 case LTU:
2223 case GT:
2224 case GTU:
2225 case LE:
2226 case LEU:
2227 case GE:
2228 case GEU:
2229 return 1;
2231 default:
2232 break;
2235 len = GET_RTX_LENGTH (code);
2236 fmt = GET_RTX_FORMAT (code);
2238 for (i = 0; i < len; i++)
2240 if (fmt[i] == 'e')
2242 if (inequality_comparisons_p (XEXP (x, i)))
2243 return 1;
2245 else if (fmt[i] == 'E')
2247 int j;
2248 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2249 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2250 return 1;
2254 return 0;
2257 /* Replace any occurrence of FROM in X with TO.  The function does
2258    not descend into a CONST_DOUBLE when replacing.
2260 Note that copying is not done so X must not be shared unless all copies
2261 are to be modified. */
2264 replace_rtx (rtx x, rtx from, rtx to)
2266 int i, j;
2267 const char *fmt;
2269   /* The following prevents loops from occurring when we change a MEM in
2270      a CONST_DOUBLE into the same CONST_DOUBLE.  */
2271 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2272 return x;
2274 if (x == from)
2275 return to;
2277 /* Allow this function to make replacements in EXPR_LISTs. */
2278 if (x == 0)
2279 return 0;
2281 if (GET_CODE (x) == SUBREG)
2283 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2285 if (GET_CODE (new) == CONST_INT)
2287 x = simplify_subreg (GET_MODE (x), new,
2288 GET_MODE (SUBREG_REG (x)),
2289 SUBREG_BYTE (x));
2290 gcc_assert (x);
2292 else
2293 SUBREG_REG (x) = new;
2295 return x;
2297 else if (GET_CODE (x) == ZERO_EXTEND)
2299 rtx new = replace_rtx (XEXP (x, 0), from, to);
2301 if (GET_CODE (new) == CONST_INT)
2303 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2304 new, GET_MODE (XEXP (x, 0)));
2305 gcc_assert (x);
2307 else
2308 XEXP (x, 0) = new;
2310 return x;
2313 fmt = GET_RTX_FORMAT (GET_CODE (x));
2314 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2316 if (fmt[i] == 'e')
2317 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2318 else if (fmt[i] == 'E')
2319 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2320 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2323 return x;
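/* Editorial usage sketch (not part of the original file): since replace_rtx
   modifies X in place, callers copy shared rtl first; the helper name is
   hypothetical.  */
#if 0
static rtx
substitute_in_pattern (rtx pat, rtx from, rtx to)
{
  /* copy_rtx guards against clobbering a pattern shared with other insns.  */
  return replace_rtx (copy_rtx (pat), from, to);
}
#endif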
2326 /* Throughout the rtx X, replace many registers according to REG_MAP.
2327 Return the replacement for X (which may be X with altered contents).
2328 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2329 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2331 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2332 should not be mapped to pseudos or vice versa since validate_change
2333 is not called.
2335 If REPLACE_DEST is 1, replacements are also done in destinations;
2336 otherwise, only sources are replaced. */
2339 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2341 enum rtx_code code;
2342 int i;
2343 const char *fmt;
2345 if (x == 0)
2346 return x;
2348 code = GET_CODE (x);
2349 switch (code)
2351 case SCRATCH:
2352 case PC:
2353 case CC0:
2354 case CONST_INT:
2355 case CONST_DOUBLE:
2356 case CONST_VECTOR:
2357 case CONST:
2358 case SYMBOL_REF:
2359 case LABEL_REF:
2360 return x;
2362 case REG:
2363 /* Verify that the register has an entry before trying to access it. */
2364 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2366 /* SUBREGs can't be shared. Always return a copy to ensure that if
2367 this replacement occurs more than once then each instance will
2368 get distinct rtx. */
2369 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2370 return copy_rtx (reg_map[REGNO (x)]);
2371 return reg_map[REGNO (x)];
2373 return x;
2375 case SUBREG:
2376 /* Prevent making nested SUBREGs. */
2377 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2378 && reg_map[REGNO (SUBREG_REG (x))] != 0
2379 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2381 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2382 return simplify_gen_subreg (GET_MODE (x), map_val,
2383 GET_MODE (SUBREG_REG (x)),
2384 SUBREG_BYTE (x));
2386 break;
2388 case SET:
2389 if (replace_dest)
2390 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2392 else if (MEM_P (SET_DEST (x))
2393 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2394 /* Even if we are not to replace destinations, replace a register if
2395 it is CONTAINED in the destination (i.e., the destination is memory
2396 or a STRICT_LOW_PART). */
2397 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2398 reg_map, nregs, 0);
2399 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2400 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2401 break;
2403 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2404 return x;
2406 default:
2407 break;
2410 fmt = GET_RTX_FORMAT (code);
2411 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2413 if (fmt[i] == 'e')
2414 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2415 else if (fmt[i] == 'E')
2417 int j;
2418 for (j = 0; j < XVECLEN (x, i); j++)
2419 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2420 nregs, replace_dest);
2423 return x;
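/* Illustrative sketch, not part of the original file: renaming a single
   pseudo via replace_regs.  The map is indexed by register number and
   null entries are left untouched.  */
#if 0
static rtx
rename_one_pseudo (rtx x, unsigned int old_regno, rtx new_reg)
{
  unsigned int nregs = max_reg_num ();
  rtx *reg_map = xcalloc (nregs, sizeof (rtx));

  reg_map[old_regno] = new_reg;
  x = replace_regs (x, reg_map, nregs, 1);
  free (reg_map);
  return x;
}
#endif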
2426 /* Replace occurrences of the old label in *X with the new one.
2427 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2430 replace_label (rtx *x, void *data)
2432 rtx l = *x;
2433 rtx old_label = ((replace_label_data *) data)->r1;
2434 rtx new_label = ((replace_label_data *) data)->r2;
2435 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2437 if (l == NULL_RTX)
2438 return 0;
2440 if (GET_CODE (l) == SYMBOL_REF
2441 && CONSTANT_POOL_ADDRESS_P (l))
2443 rtx c = get_pool_constant (l);
2444 if (rtx_referenced_p (old_label, c))
2446 rtx new_c, new_l;
2447 replace_label_data *d = (replace_label_data *) data;
2449 /* Create a copy of constant C; replace the label inside
2450 but do not update LABEL_NUSES because uses in constant pool
2451 are not counted. */
2452 new_c = copy_rtx (c);
2453 d->update_label_nuses = false;
2454 for_each_rtx (&new_c, replace_label, data);
2455 d->update_label_nuses = update_label_nuses;
2457 /* Add the new constant NEW_C to the constant pool and replace
2458 the old reference to the constant with the new reference. */
2459 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2460 *x = replace_rtx (l, l, new_l);
2462 return 0;
2465 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2466 field. This is not handled by for_each_rtx because it doesn't
2467 handle unprinted ('0') fields. */
2468 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2469 JUMP_LABEL (l) = new_label;
2471 if ((GET_CODE (l) == LABEL_REF
2472 || GET_CODE (l) == INSN_LIST)
2473 && XEXP (l, 0) == old_label)
2475 XEXP (l, 0) = new_label;
2476 if (update_label_nuses)
2478 ++LABEL_NUSES (new_label);
2479 --LABEL_NUSES (old_label);
2481 return 0;
2484 return 0;
2487 /* When *BODY is equal to X, or X is directly referenced by *BODY,
2488 return nonzero so that FOR_EACH_RTX stops traversing and returns
2489 nonzero too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2491 static int
2492 rtx_referenced_p_1 (rtx *body, void *x)
2494 rtx y = (rtx) x;
2496 if (*body == NULL_RTX)
2497 return y == NULL_RTX;
2499 /* Return true if a label_ref *BODY refers to label Y. */
2500 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2501 return XEXP (*body, 0) == y;
2503 /* If *BODY is a reference to pool constant traverse the constant. */
2504 if (GET_CODE (*body) == SYMBOL_REF
2505 && CONSTANT_POOL_ADDRESS_P (*body))
2506 return rtx_referenced_p (y, get_pool_constant (*body));
2508 /* By default, compare the RTL expressions. */
2509 return rtx_equal_p (*body, y);
2512 /* Return true if X is referenced in BODY. */
2515 rtx_referenced_p (rtx x, rtx body)
2517 return for_each_rtx (&body, rtx_referenced_p_1, x);
2520 /* If INSN is a tablejump, return true and store the label (which precedes
2521 the jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2523 bool
2524 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2526 rtx label, table;
2528 if (JUMP_P (insn)
2529 && (label = JUMP_LABEL (insn)) != NULL_RTX
2530 && (table = next_active_insn (label)) != NULL_RTX
2531 && JUMP_P (table)
2532 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2533 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2535 if (labelp)
2536 *labelp = label;
2537 if (tablep)
2538 *tablep = table;
2539 return true;
2541 return false;
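/* Illustrative sketch, not part of the original file: using tablejump_p
   to inspect the dispatch table of a jump.  The vector is operand 0 of
   an ADDR_VEC and operand 1 of an ADDR_DIFF_VEC.  */
#if 0
static int
count_table_entries (rtx insn)
{
  rtx table;

  if (!tablejump_p (insn, NULL, &table))
    return 0;
  return XVECLEN (PATTERN (table),
		  GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
}
#endif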
2544 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2545 constant that is not in the constant pool and not in the condition
2546 of an IF_THEN_ELSE. */
2548 static int
2549 computed_jump_p_1 (rtx x)
2551 enum rtx_code code = GET_CODE (x);
2552 int i, j;
2553 const char *fmt;
2555 switch (code)
2557 case LABEL_REF:
2558 case PC:
2559 return 0;
2561 case CONST:
2562 case CONST_INT:
2563 case CONST_DOUBLE:
2564 case CONST_VECTOR:
2565 case SYMBOL_REF:
2566 case REG:
2567 return 1;
2569 case MEM:
2570 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2571 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2573 case IF_THEN_ELSE:
2574 return (computed_jump_p_1 (XEXP (x, 1))
2575 || computed_jump_p_1 (XEXP (x, 2)));
2577 default:
2578 break;
2581 fmt = GET_RTX_FORMAT (code);
2582 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2584 if (fmt[i] == 'e'
2585 && computed_jump_p_1 (XEXP (x, i)))
2586 return 1;
2588 else if (fmt[i] == 'E')
2589 for (j = 0; j < XVECLEN (x, i); j++)
2590 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2591 return 1;
2594 return 0;
2597 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2599 Tablejumps and casesi insns are not considered indirect jumps;
2600 we can recognize them by a (use (label_ref)). */
2603 computed_jump_p (rtx insn)
2605 int i;
2606 if (JUMP_P (insn))
2608 rtx pat = PATTERN (insn);
2610 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2611 return 0;
2612 else if (GET_CODE (pat) == PARALLEL)
2614 int len = XVECLEN (pat, 0);
2615 int has_use_labelref = 0;
2617 for (i = len - 1; i >= 0; i--)
2618 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2619 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2620 == LABEL_REF))
2621 has_use_labelref = 1;
2623 if (! has_use_labelref)
2624 for (i = len - 1; i >= 0; i--)
2625 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2626 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2627 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2628 return 1;
2630 else if (GET_CODE (pat) == SET
2631 && SET_DEST (pat) == pc_rtx
2632 && computed_jump_p_1 (SET_SRC (pat)))
2633 return 1;
2635 return 0;
2638 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2639 calls. Processes the subexpressions of EXP and passes them to F. */
2640 static int
2641 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2643 int result, i, j;
2644 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2645 rtx *x;
2647 for (; format[n] != '\0'; n++)
2649 switch (format[n])
2651 case 'e':
2652 /* Call F on X. */
2653 x = &XEXP (exp, n);
2654 result = (*f) (x, data);
2655 if (result == -1)
2656 /* Do not traverse sub-expressions. */
2657 continue;
2658 else if (result != 0)
2659 /* Stop the traversal. */
2660 return result;
2662 if (*x == NULL_RTX)
2663 /* There are no sub-expressions. */
2664 continue;
2666 i = non_rtx_starting_operands[GET_CODE (*x)];
2667 if (i >= 0)
2669 result = for_each_rtx_1 (*x, i, f, data);
2670 if (result != 0)
2671 return result;
2673 break;
2675 case 'V':
2676 case 'E':
2677 if (XVEC (exp, n) == 0)
2678 continue;
2679 for (j = 0; j < XVECLEN (exp, n); ++j)
2681 /* Call F on X. */
2682 x = &XVECEXP (exp, n, j);
2683 result = (*f) (x, data);
2684 if (result == -1)
2685 /* Do not traverse sub-expressions. */
2686 continue;
2687 else if (result != 0)
2688 /* Stop the traversal. */
2689 return result;
2691 if (*x == NULL_RTX)
2692 /* There are no sub-expressions. */
2693 continue;
2695 i = non_rtx_starting_operands[GET_CODE (*x)];
2696 if (i >= 0)
2698 result = for_each_rtx_1 (*x, i, f, data);
2699 if (result != 0)
2700 return result;
2703 break;
2705 default:
2706 /* Nothing to do. */
2707 break;
2711 return 0;
2714 /* Traverse X via depth-first search, calling F for each
2715 sub-expression (including X itself). F is also passed the DATA.
2716 If F returns -1, do not traverse sub-expressions, but continue
2717 traversing the rest of the tree. If F ever returns any other
2718 nonzero value, stop the traversal, and return the value returned
2719 by F. Otherwise, return 0. This function does not traverse inside
2720 tree structure that contains RTX_EXPRs, or into sub-expressions
2721 whose format code is `0' since it is not known whether or not those
2722 codes are actually RTL.
2724 This routine is very general, and could (should?) be used to
2725 implement many of the other routines in this file. */
2728 for_each_rtx (rtx *x, rtx_function f, void *data)
2730 int result;
2731 int i;
2733 /* Call F on X. */
2734 result = (*f) (x, data);
2735 if (result == -1)
2736 /* Do not traverse sub-expressions. */
2737 return 0;
2738 else if (result != 0)
2739 /* Stop the traversal. */
2740 return result;
2742 if (*x == NULL_RTX)
2743 /* There are no sub-expressions. */
2744 return 0;
2746 i = non_rtx_starting_operands[GET_CODE (*x)];
2747 if (i < 0)
2748 return 0;
2750 return for_each_rtx_1 (*x, i, f, data);
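/* Illustrative sketch, not part of the original file: a for_each_rtx
   callback that counts REG references.  Returning 0 continues the walk;
   -1 would skip the sub-expressions of *X; any other nonzero value
   would abort the traversal and be returned by for_each_rtx.  */
#if 0
static int
count_regs_1 (rtx *x, void *data)
{
  if (*x != NULL_RTX && REG_P (*x))
    ++*(int *) data;
  return 0;
}

static int
count_regs (rtx x)
{
  int n = 0;
  for_each_rtx (&x, count_regs_1, &n);
  return n;
}
#endif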
2754 /* Searches X for any reference to REGNO, returning the rtx of the
2755 reference found if any. Otherwise, returns NULL_RTX. */
2758 regno_use_in (unsigned int regno, rtx x)
2760 const char *fmt;
2761 int i, j;
2762 rtx tem;
2764 if (REG_P (x) && REGNO (x) == regno)
2765 return x;
2767 fmt = GET_RTX_FORMAT (GET_CODE (x));
2768 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2770 if (fmt[i] == 'e')
2772 if ((tem = regno_use_in (regno, XEXP (x, i))))
2773 return tem;
2775 else if (fmt[i] == 'E')
2776 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2777 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2778 return tem;
2781 return NULL_RTX;
2784 /* Return a value indicating whether OP, an operand of a commutative
2785 operation, is preferred as the first or second operand. The higher
2786 the value, the stronger the preference for being the first operand.
2787 We use negative values to indicate a preference for the second operand
2788 and positive values for the first. */
2791 commutative_operand_precedence (rtx op)
2793 enum rtx_code code = GET_CODE (op);
2795 /* Constants always become the second operand. Prefer "nice" constants. */
2796 if (code == CONST_INT)
2797 return -7;
2798 if (code == CONST_DOUBLE)
2799 return -6;
2800 op = avoid_constant_pool_reference (op);
2801 code = GET_CODE (op);
2803 switch (GET_RTX_CLASS (code))
2805 case RTX_CONST_OBJ:
2806 if (code == CONST_INT)
2807 return -5;
2808 if (code == CONST_DOUBLE)
2809 return -4;
2810 return -3;
2812 case RTX_EXTRA:
2813 /* SUBREGs of objects should come second. */
2814 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2815 return -2;
2817 if (!CONSTANT_P (op))
2818 return 0;
2819 else
2820 /* As for RTX_CONST_OBJ. */
2821 return -3;
2823 case RTX_OBJ:
2824 /* Complex expressions should come first, so decrease the priority
2825 of objects. */
2826 return -1;
2828 case RTX_COMM_ARITH:
2829 /* Prefer operands that are themselves commutative to be first.
2830 This helps to make things linear. In particular,
2831 (and (and (reg) (reg)) (not (reg))) is canonical. */
2832 return 4;
2834 case RTX_BIN_ARITH:
2835 /* If only one operand is a binary expression, it will be the first
2836 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2837 is canonical, although it will usually be further simplified. */
2838 return 2;
2840 case RTX_UNARY:
2841 /* Then prefer NEG and NOT. */
2842 if (code == NEG || code == NOT)
2843 return 1;
2845 default:
2846 return 0;
2850 /* Return 1 iff it is necessary to swap operands of commutative operation
2851 in order to canonicalize expression. */
2854 swap_commutative_operands_p (rtx x, rtx y)
2856 return (commutative_operand_precedence (x)
2857 < commutative_operand_precedence (y));
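/* Illustrative sketch, not part of the original file: the usual
   canonicalization idiom built on the predicate above.  */
#if 0
static void
canonicalize_operands (rtx *op0, rtx *op1)
{
  if (swap_commutative_operands_p (*op0, *op1))
    {
      rtx tem = *op0;
      *op0 = *op1;
      *op1 = tem;
    }
}
#endif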
2860 /* Return 1 if X is an autoincrement side effect and the register is
2861 not the stack pointer. */
2863 auto_inc_p (rtx x)
2865 switch (GET_CODE (x))
2867 case PRE_INC:
2868 case POST_INC:
2869 case PRE_DEC:
2870 case POST_DEC:
2871 case PRE_MODIFY:
2872 case POST_MODIFY:
2873 /* There are no REG_INC notes for SP. */
2874 if (XEXP (x, 0) != stack_pointer_rtx)
2875 return 1;
2876 default:
2877 break;
2879 return 0;
2882 /* Return 1 if the sequence of instructions beginning with FROM and up
2883 to and including TO is safe to move. If NEW_TO is non-NULL, and
2884 the sequence is not already safe to move, but can be easily
2885 extended to a sequence which is safe, then NEW_TO will point to the
2886 end of the extended sequence.
2888 For now, this function only checks that the region contains whole
2889 exception regions, but it could be extended to check additional
2890 conditions as well. */
2893 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
2895 int eh_region_count = 0;
2896 int past_to_p = 0;
2897 rtx r = from;
2899 /* By default, assume the end of the region will be what was
2900 suggested. */
2901 if (new_to)
2902 *new_to = to;
2904 while (r)
2906 if (NOTE_P (r))
2908 switch (NOTE_LINE_NUMBER (r))
2910 case NOTE_INSN_EH_REGION_BEG:
2911 ++eh_region_count;
2912 break;
2914 case NOTE_INSN_EH_REGION_END:
2915 if (eh_region_count == 0)
2916 /* This sequence of instructions contains the end of
2917 an exception region, but not the beginning. Moving
2918 it will cause chaos. */
2919 return 0;
2921 --eh_region_count;
2922 break;
2924 default:
2925 break;
2928 else if (past_to_p)
2929 /* If we've passed TO, and we see a non-note instruction, we
2930 can't extend the sequence to a movable sequence. */
2931 return 0;
2933 if (r == to)
2935 if (!new_to)
2936 /* It's OK to move the sequence if there were matched sets of
2937 exception region notes. */
2938 return eh_region_count == 0;
2940 past_to_p = 1;
2943 /* It's OK to move the sequence if there were matched sets of
2944 exception region notes. */
2945 if (past_to_p && eh_region_count == 0)
2947 *new_to = r;
2948 return 1;
2951 /* Go to the next instruction. */
2952 r = NEXT_INSN (r);
2955 return 0;
2958 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2960 loc_mentioned_in_p (rtx *loc, rtx in)
2962 enum rtx_code code = GET_CODE (in);
2963 const char *fmt = GET_RTX_FORMAT (code);
2964 int i, j;
2966 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2968 if (loc == &in->u.fld[i].rt_rtx)
2969 return 1;
2970 if (fmt[i] == 'e')
2972 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2973 return 1;
2975 else if (fmt[i] == 'E')
2976 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2977 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2978 return 1;
2980 return 0;
2983 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2984 and SUBREG_BYTE, return the bit offset where the subreg begins
2985 (counting from the least significant bit of the operand). */
2987 unsigned int
2988 subreg_lsb_1 (enum machine_mode outer_mode,
2989 enum machine_mode inner_mode,
2990 unsigned int subreg_byte)
2992 unsigned int bitpos;
2993 unsigned int byte;
2994 unsigned int word;
2996 /* A paradoxical subreg begins at bit position 0. */
2997 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2998 return 0;
3000 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3001 /* If the subreg crosses a word boundary ensure that
3002 it also begins and ends on a word boundary. */
3003 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3004 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3005 && (subreg_byte % UNITS_PER_WORD
3006 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3008 if (WORDS_BIG_ENDIAN)
3009 word = (GET_MODE_SIZE (inner_mode)
3010 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3011 else
3012 word = subreg_byte / UNITS_PER_WORD;
3013 bitpos = word * BITS_PER_WORD;
3015 if (BYTES_BIG_ENDIAN)
3016 byte = (GET_MODE_SIZE (inner_mode)
3017 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3018 else
3019 byte = subreg_byte % UNITS_PER_WORD;
3020 bitpos += byte * BITS_PER_UNIT;
3022 return bitpos;
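/* For example, on a little-endian target with 4-byte words,
   (subreg:SI (reg:DI) 4) yields word = 4 / UNITS_PER_WORD = 1 and
   byte = 0, so subreg_lsb_1 returns bit 32: the subreg names the
   high half of the DImode value.  */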
3025 /* Given a subreg X, return the bit offset where the subreg begins
3026 (counting from the least significant bit of the reg). */
3028 unsigned int
3029 subreg_lsb (rtx x)
3031 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3032 SUBREG_BYTE (x));
3035 /* This function returns the regno offset of a subreg expression.
3036 xregno - A regno of an inner hard subreg_reg (or what will become one).
3037 xmode - The mode of xregno.
3038 offset - The byte offset.
3039 ymode - The mode of a top level SUBREG (or what may become one).
3040 RETURN - The regno offset which would be used. */
3041 unsigned int
3042 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3043 unsigned int offset, enum machine_mode ymode)
3045 int nregs_xmode, nregs_ymode, nregs_xmode_unit_int;
3046 int mode_multiple, nregs_multiple;
3047 int y_offset;
3048 enum machine_mode xmode_unit, xmode_unit_int;
3050 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3052 if (GET_MODE_INNER (xmode) == VOIDmode)
3053 xmode_unit = xmode;
3054 else
3055 xmode_unit = GET_MODE_INNER (xmode);
3057 if (FLOAT_MODE_P (xmode_unit))
3059 xmode_unit_int = int_mode_for_mode (xmode_unit);
3060 if (xmode_unit_int == BLKmode)
3061 /* It's probably bad to be here; a port should have an integer mode
3062 that's the same size as anything of which it takes a SUBREG. */
3063 xmode_unit_int = xmode_unit;
3065 else
3066 xmode_unit_int = xmode_unit;
3068 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
3070 /* Adjust nregs_xmode to allow for 'holes'. */
3071 if (nregs_xmode_unit_int != hard_regno_nregs[xregno][xmode_unit])
3072 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
3073 else
3074 nregs_xmode = hard_regno_nregs[xregno][xmode];
3076 nregs_ymode = hard_regno_nregs[xregno][ymode];
3078 /* If this is a big endian paradoxical subreg, which uses more actual
3079 hard registers than the original register, we must return a negative
3080 offset so that we find the proper highpart of the register. */
3081 if (offset == 0
3082 && nregs_ymode > nregs_xmode
3083 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3084 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3085 return nregs_xmode - nregs_ymode;
3087 if (offset == 0 || nregs_xmode == nregs_ymode)
3088 return 0;
3090 /* Size of ymode must not be greater than the size of xmode. */
3091 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3092 gcc_assert (mode_multiple != 0);
3094 y_offset = offset / GET_MODE_SIZE (ymode);
3095 nregs_multiple = nregs_xmode / nregs_ymode;
3096 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
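/* For example, on a target where DImode occupies two consecutive SImode
   hard registers, (subreg:SI (reg:DI 2) 4) gives nregs_xmode = 2,
   nregs_ymode = 1, mode_multiple = 2, nregs_multiple = 2 and
   y_offset = 1, so the result is a regno offset of 1: the second
   register of the pair.  */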
3099 /* This function returns true when the offset is representable via
3100 subreg_offset in the given regno.
3101 xregno - A regno of an inner hard subreg_reg (or what will become one).
3102 xmode - The mode of xregno.
3103 offset - The byte offset.
3104 ymode - The mode of a top level SUBREG (or what may become one).
3105 RETURN - Whether the offset is representable. */
3106 bool
3107 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3108 unsigned int offset, enum machine_mode ymode)
3110 int nregs_xmode, nregs_ymode, nregs_xmode_unit, nregs_xmode_unit_int;
3111 int mode_multiple, nregs_multiple;
3112 int y_offset;
3113 enum machine_mode xmode_unit, xmode_unit_int;
3115 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3117 if (GET_MODE_INNER (xmode) == VOIDmode)
3118 xmode_unit = xmode;
3119 else
3120 xmode_unit = GET_MODE_INNER (xmode);
3122 if (FLOAT_MODE_P (xmode_unit))
3124 xmode_unit_int = int_mode_for_mode (xmode_unit);
3125 if (xmode_unit_int == BLKmode)
3126 /* It's probably bad to be here; a port should have an integer mode
3127 that's the same size as anything of which it takes a SUBREG. */
3128 xmode_unit_int = xmode_unit;
3130 else
3131 xmode_unit_int = xmode_unit;
3133 nregs_xmode_unit = hard_regno_nregs[xregno][xmode_unit];
3134 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
3136 /* If there are holes in a non-scalar mode in registers, we expect
3137 that it is made up of its units concatenated together. */
3138 if (nregs_xmode_unit != nregs_xmode_unit_int)
3140 gcc_assert (nregs_xmode_unit * GET_MODE_NUNITS (xmode)
3141 == hard_regno_nregs[xregno][xmode]);
3143 /* You can only ask for a SUBREG of a value with holes in the middle
3144 if you don't cross the holes. (Such a SUBREG should be done by
3145 picking a different register class, or doing it in memory if
3146 necessary.) An example of a value with holes is XCmode on 32-bit
3147 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3148 3 for each part, but in memory it's two 128-bit parts.
3149 Padding is assumed to be at the end (not necessarily the 'high part')
3150 of each unit. */
3151 if (nregs_xmode_unit != nregs_xmode_unit_int
3152 && (offset / GET_MODE_SIZE (xmode_unit_int) + 1
3153 < GET_MODE_NUNITS (xmode))
3154 && (offset / GET_MODE_SIZE (xmode_unit_int)
3155 != ((offset + GET_MODE_SIZE (ymode) - 1)
3156 / GET_MODE_SIZE (xmode_unit_int))))
3157 return false;
3159 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
3161 else
3162 nregs_xmode = hard_regno_nregs[xregno][xmode];
3164 nregs_ymode = hard_regno_nregs[xregno][ymode];
3166 /* Paradoxical subregs are otherwise valid. */
3167 if (offset == 0
3168 && nregs_ymode > nregs_xmode
3169 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3170 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3171 return true;
3173 /* Lowpart subregs are otherwise valid. */
3174 if (offset == subreg_lowpart_offset (ymode, xmode))
3175 return true;
3177 /* This should always pass, otherwise we don't know how to verify
3178 the constraint. These conditions may be relaxed but
3179 subreg_regno_offset would need to be redesigned. */
3180 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3181 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3183 /* The XMODE value can be seen as a vector of NREGS_XMODE
3184 values. The subreg must represent the lowpart of a given field.
3185 Compute what field it is. */
3186 offset -= subreg_lowpart_offset (ymode,
3187 mode_for_size (GET_MODE_BITSIZE (xmode)
3188 / nregs_xmode,
3189 MODE_INT, 0));
3191 /* Size of ymode must not be greater than the size of xmode. */
3192 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3193 gcc_assert (mode_multiple != 0);
3195 y_offset = offset / GET_MODE_SIZE (ymode);
3196 nregs_multiple = nregs_xmode / nregs_ymode;
3198 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3199 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3201 return (!(y_offset % (mode_multiple / nregs_multiple)));
3204 /* Return the final regno that a subreg expression refers to. */
3205 unsigned int
3206 subreg_regno (rtx x)
3208 unsigned int ret;
3209 rtx subreg = SUBREG_REG (x);
3210 int regno = REGNO (subreg);
3212 ret = regno + subreg_regno_offset (regno,
3213 GET_MODE (subreg),
3214 SUBREG_BYTE (x),
3215 GET_MODE (x));
3216 return ret;
3219 struct parms_set_data
3221 int nregs;
3222 HARD_REG_SET regs;
3225 /* Helper function for noticing stores to parameter registers. */
3226 static void
3227 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3229 struct parms_set_data *d = data;
3230 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3231 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3233 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3234 d->nregs--;
3238 /* Look backward for first parameter to be loaded.
3239 Note that loads of all parameters will not necessarily be
3240 found if CSE has eliminated some of them (e.g., an argument
3241 to the outer function is passed down as a parameter).
3242 Do not skip BOUNDARY. */
3244 find_first_parameter_load (rtx call_insn, rtx boundary)
3246 struct parms_set_data parm;
3247 rtx p, before, first_set;
3249 /* Since different machines initialize their parameter registers
3250 in different orders, assume nothing. Collect the set of all
3251 parameter registers. */
3252 CLEAR_HARD_REG_SET (parm.regs);
3253 parm.nregs = 0;
3254 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3255 if (GET_CODE (XEXP (p, 0)) == USE
3256 && REG_P (XEXP (XEXP (p, 0), 0)))
3258 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3260 /* We only care about registers which can hold function
3261 arguments. */
3262 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3263 continue;
3265 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3266 parm.nregs++;
3268 before = call_insn;
3269 first_set = call_insn;
3271 /* Search backward for the first set of a register in this set. */
3272 while (parm.nregs && before != boundary)
3274 before = PREV_INSN (before);
3276 /* It is possible that some loads got CSEed from one call to
3277 another. Stop in that case. */
3278 if (CALL_P (before))
3279 break;
3281 /* Our caller must either ensure that we will find all sets
3282 (in case the code has not been optimized yet), or take care
3283 of possible labels by setting BOUNDARY to the preceding
3284 CODE_LABEL. */
3285 if (LABEL_P (before))
3287 gcc_assert (before == boundary);
3288 break;
3291 if (INSN_P (before))
3293 int nregs_old = parm.nregs;
3294 note_stores (PATTERN (before), parms_set, &parm);
3295 /* If we found something that did not set a parameter reg,
3296 we're done. Do not keep going, as that might result
3297 in hoisting an insn before the setting of a pseudo
3298 that is used by the hoisted insn. */
3299 if (nregs_old != parm.nregs)
3300 first_set = before;
3301 else
3302 break;
3305 return first_set;
3308 /* Return true if we should avoid inserting code between INSN and preceding
3309 call instruction. */
3311 bool
3312 keep_with_call_p (rtx insn)
3314 rtx set;
3316 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3318 if (REG_P (SET_DEST (set))
3319 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3320 && fixed_regs[REGNO (SET_DEST (set))]
3321 && general_operand (SET_SRC (set), VOIDmode))
3322 return true;
3323 if (REG_P (SET_SRC (set))
3324 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3325 && REG_P (SET_DEST (set))
3326 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3327 return true;
3328 /* There may be a stack pop just after the call and before the store
3329 of the return register. Search for the actual store when deciding
3330 if we can break or not. */
3331 if (SET_DEST (set) == stack_pointer_rtx)
3333 rtx i2 = next_nonnote_insn (insn);
3334 if (i2 && keep_with_call_p (i2))
3335 return true;
3338 return false;
3341 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3342 to non-complex jumps. That is, direct unconditional, conditional,
3343 and tablejumps, but not computed jumps or returns. It also does
3344 not apply to the fallthru case of a conditional jump. */
3346 bool
3347 label_is_jump_target_p (rtx label, rtx jump_insn)
3349 rtx tmp = JUMP_LABEL (jump_insn);
3351 if (label == tmp)
3352 return true;
3354 if (tablejump_p (jump_insn, NULL, &tmp))
3356 rtvec vec = XVEC (PATTERN (tmp),
3357 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3358 int i, veclen = GET_NUM_ELEM (vec);
3360 for (i = 0; i < veclen; ++i)
3361 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3362 return true;
3365 return false;
3369 /* Return an estimate of the cost of computing rtx X.
3370 One use is in cse, to decide which expression to keep in the hash table.
3371 Another is in rtl generation, to pick the cheapest way to multiply.
3372 Other uses like the latter are expected in the future. */
3375 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3377 int i, j;
3378 enum rtx_code code;
3379 const char *fmt;
3380 int total;
3382 if (x == 0)
3383 return 0;
3385 /* Compute the default costs of certain things.
3386 Note that targetm.rtx_costs can override the defaults. */
3388 code = GET_CODE (x);
3389 switch (code)
3391 case MULT:
3392 total = COSTS_N_INSNS (5);
3393 break;
3394 case DIV:
3395 case UDIV:
3396 case MOD:
3397 case UMOD:
3398 total = COSTS_N_INSNS (7);
3399 break;
3400 case USE:
3401 /* Used in loop.c and combine.c as a marker. */
3402 total = 0;
3403 break;
3404 default:
3405 total = COSTS_N_INSNS (1);
3408 switch (code)
3410 case REG:
3411 return 0;
3413 case SUBREG:
3414 total = 0;
3415 /* If we can't tie these modes, make this expensive. The larger
3416 the mode, the more expensive it is. */
3417 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3418 return COSTS_N_INSNS (2
3419 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3420 break;
3422 default:
3423 if (targetm.rtx_costs (x, code, outer_code, &total))
3424 return total;
3425 break;
3428 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3429 which is already in total. */
3431 fmt = GET_RTX_FORMAT (code);
3432 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3433 if (fmt[i] == 'e')
3434 total += rtx_cost (XEXP (x, i), code);
3435 else if (fmt[i] == 'E')
3436 for (j = 0; j < XVECLEN (x, i); j++)
3437 total += rtx_cost (XVECEXP (x, i, j), code);
3439 return total;
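/* Illustrative sketch, not part of the original file: using rtx_cost to
   keep the cheaper of two equivalent expressions, much as cse does.  */
#if 0
static rtx
cheaper_of (rtx a, rtx b)
{
  return rtx_cost (a, SET) <= rtx_cost (b, SET) ? a : b;
}
#endif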
3442 /* Return cost of address expression X.
3443 Expect that X is properly formed address reference. */
3446 address_cost (rtx x, enum machine_mode mode)
3448 /* We may be asked for the cost of various unusual addresses, such as the
3449 operands of push instructions. It is not worthwhile to complicate the
3450 target hook with such cases. */
3452 if (!memory_address_p (mode, x))
3453 return 1000;
3455 return targetm.address_cost (x);
3458 /* If the target doesn't override, compute the cost as with arithmetic. */
3461 default_address_cost (rtx x)
3463 return rtx_cost (x, MEM);
3467 unsigned HOST_WIDE_INT
3468 nonzero_bits (rtx x, enum machine_mode mode)
3470 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3473 unsigned int
3474 num_sign_bit_copies (rtx x, enum machine_mode mode)
3476 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
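/* Illustrative sketch, not part of the original file: a typical query,
   proving that X uses only its low eight bits in SImode.  */
#if 0
static bool
fits_in_byte_p (rtx x)
{
  return (nonzero_bits (x, SImode)
	  & ~(unsigned HOST_WIDE_INT) 0xff) == 0;
}
#endif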
3479 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3480 It avoids exponential behavior in nonzero_bits1 when X has
3481 identical subexpressions on the first or the second level. */
3483 static unsigned HOST_WIDE_INT
3484 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3485 enum machine_mode known_mode,
3486 unsigned HOST_WIDE_INT known_ret)
3488 if (x == known_x && mode == known_mode)
3489 return known_ret;
3491 /* Try to find identical subexpressions. If found call
3492 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3493 precomputed value for the subexpression as KNOWN_RET. */
3495 if (ARITHMETIC_P (x))
3497 rtx x0 = XEXP (x, 0);
3498 rtx x1 = XEXP (x, 1);
3500 /* Check the first level. */
3501 if (x0 == x1)
3502 return nonzero_bits1 (x, mode, x0, mode,
3503 cached_nonzero_bits (x0, mode, known_x,
3504 known_mode, known_ret));
3506 /* Check the second level. */
3507 if (ARITHMETIC_P (x0)
3508 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3509 return nonzero_bits1 (x, mode, x1, mode,
3510 cached_nonzero_bits (x1, mode, known_x,
3511 known_mode, known_ret));
3513 if (ARITHMETIC_P (x1)
3514 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3515 return nonzero_bits1 (x, mode, x0, mode,
3516 cached_nonzero_bits (x0, mode, known_x,
3517 known_mode, known_ret));
3520 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3523 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3524 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3525 is less useful. We can't allow both, because that results in exponential
3526 run time recursion. There is a nullstone testcase that triggered
3527 this. This macro avoids accidental uses of num_sign_bit_copies. */
3528 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3530 /* Given an expression, X, compute which bits in X can be nonzero.
3531 We don't care about bits outside of those defined in MODE.
3533 For most X this is simply GET_MODE_MASK (MODE), but if X is
3534 an arithmetic operation, we can do better. */
3536 static unsigned HOST_WIDE_INT
3537 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3538 enum machine_mode known_mode,
3539 unsigned HOST_WIDE_INT known_ret)
3541 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3542 unsigned HOST_WIDE_INT inner_nz;
3543 enum rtx_code code;
3544 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3546 /* For floating-point values, assume all bits are needed. */
3547 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3548 return nonzero;
3550 /* If X is wider than MODE, use its mode instead. */
3551 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3553 mode = GET_MODE (x);
3554 nonzero = GET_MODE_MASK (mode);
3555 mode_width = GET_MODE_BITSIZE (mode);
3558 if (mode_width > HOST_BITS_PER_WIDE_INT)
3559 /* Our only callers in this case look for single bit values. So
3560 just return the mode mask. Those tests will then be false. */
3561 return nonzero;
3563 #ifndef WORD_REGISTER_OPERATIONS
3564 /* If MODE is wider than X, but both are a single word for both the host
3565 and target machines, we can compute this from which bits of the
3566 object might be nonzero in its own mode, taking into account the fact
3567 that on many CISC machines, accessing an object in a wider mode
3568 causes the high-order bits to become undefined. So they are
3569 not known to be zero. */
3571 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3572 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3573 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3574 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3576 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3577 known_x, known_mode, known_ret);
3578 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3579 return nonzero;
3581 #endif
3583 code = GET_CODE (x);
3584 switch (code)
3586 case REG:
3587 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3588 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3589 all the bits above ptr_mode are known to be zero. */
3590 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3591 && REG_POINTER (x))
3592 nonzero &= GET_MODE_MASK (ptr_mode);
3593 #endif
3595 /* Include declared information about alignment of pointers. */
3596 /* ??? We don't properly preserve REG_POINTER changes across
3597 pointer-to-integer casts, so we can't trust it except for
3598 things that we know must be pointers. See execute/960116-1.c. */
3599 if ((x == stack_pointer_rtx
3600 || x == frame_pointer_rtx
3601 || x == arg_pointer_rtx)
3602 && REGNO_POINTER_ALIGN (REGNO (x)))
3604 unsigned HOST_WIDE_INT alignment
3605 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3607 #ifdef PUSH_ROUNDING
3608 /* If PUSH_ROUNDING is defined, it is possible for the
3609 stack to be momentarily aligned only to that amount,
3610 so we pick the least alignment. */
3611 if (x == stack_pointer_rtx && PUSH_ARGS)
3612 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3613 alignment);
3614 #endif
3616 nonzero &= ~(alignment - 1);
3620 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3621 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3622 known_mode, known_ret,
3623 &nonzero_for_hook);
3625 if (new)
3626 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3627 known_mode, known_ret);
3629 return nonzero_for_hook;
3632 case CONST_INT:
3633 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3634 /* If X is negative in MODE, sign-extend the value. */
3635 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3636 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3637 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3638 #endif
3640 return INTVAL (x);
3642 case MEM:
3643 #ifdef LOAD_EXTEND_OP
3644 /* In many, if not most, RISC machines, reading a byte from memory
3645 zeros the rest of the register. Noticing that fact saves a lot
3646 of extra zero-extends. */
3647 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3648 nonzero &= GET_MODE_MASK (GET_MODE (x));
3649 #endif
3650 break;
3652 case EQ: case NE:
3653 case UNEQ: case LTGT:
3654 case GT: case GTU: case UNGT:
3655 case LT: case LTU: case UNLT:
3656 case GE: case GEU: case UNGE:
3657 case LE: case LEU: case UNLE:
3658 case UNORDERED: case ORDERED:
3659 /* If this produces an integer result, we know which bits are set.
3660 Code here used to clear bits outside the mode of X, but that is
3661 now done above. */
3662 /* Mind that MODE is the mode the caller wants to look at this
3663 operation in, and not the actual operation mode. We can wind
3664 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3665 that describes the results of a vector compare. */
3666 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3667 && mode_width <= HOST_BITS_PER_WIDE_INT)
3668 nonzero = STORE_FLAG_VALUE;
3669 break;
3671 case NEG:
3672 #if 0
3673 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3674 and num_sign_bit_copies. */
3675 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3676 == GET_MODE_BITSIZE (GET_MODE (x)))
3677 nonzero = 1;
3678 #endif
3680 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3681 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3682 break;
3684 case ABS:
3685 #if 0
3686 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3687 and num_sign_bit_copies. */
3688 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3689 == GET_MODE_BITSIZE (GET_MODE (x)))
3690 nonzero = 1;
3691 #endif
3692 break;
3694 case TRUNCATE:
3695 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3696 known_x, known_mode, known_ret)
3697 & GET_MODE_MASK (mode));
3698 break;
3700 case ZERO_EXTEND:
3701 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3702 known_x, known_mode, known_ret);
3703 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3704 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3705 break;
3707 case SIGN_EXTEND:
3708 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3709 Otherwise, show that all the bits in the outer mode that are not in
3710 the inner mode may be nonzero. */
3711 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3712 known_x, known_mode, known_ret);
3713 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3715 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3716 if (inner_nz
3717 & (((HOST_WIDE_INT) 1
3718 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3719 inner_nz |= (GET_MODE_MASK (mode)
3720 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3723 nonzero &= inner_nz;
3724 break;
3726 case AND:
3727 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3728 known_x, known_mode, known_ret)
3729 & cached_nonzero_bits (XEXP (x, 1), mode,
3730 known_x, known_mode, known_ret);
3731 break;
3733 case XOR: case IOR:
3734 case UMIN: case UMAX: case SMIN: case SMAX:
3736 unsigned HOST_WIDE_INT nonzero0 =
3737 cached_nonzero_bits (XEXP (x, 0), mode,
3738 known_x, known_mode, known_ret);
3740 /* Don't call nonzero_bits for the second time if it cannot change
3741 anything. */
3742 if ((nonzero & nonzero0) != nonzero)
3743 nonzero &= nonzero0
3744 | cached_nonzero_bits (XEXP (x, 1), mode,
3745 known_x, known_mode, known_ret);
3747 break;
3749 case PLUS: case MINUS:
3750 case MULT:
3751 case DIV: case UDIV:
3752 case MOD: case UMOD:
3753 /* We can apply the rules of arithmetic to compute the number of
3754 high- and low-order zero bits of these operations. We start by
3755 computing the width (position of the highest-order nonzero bit)
3756 and the number of low-order zero bits for each value. */
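/* For example, if nz0 == 0x0c (width 4, two low zero bits) and
   nz1 == 0x06 (width 3, one low zero bit), then for PLUS we get
   result_width = MAX (4, 3) + 1 = 5 and result_low = MIN (2, 1) = 1,
   so at most the bits 0x1e can be nonzero in the sum.  */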
3758 unsigned HOST_WIDE_INT nz0 =
3759 cached_nonzero_bits (XEXP (x, 0), mode,
3760 known_x, known_mode, known_ret);
3761 unsigned HOST_WIDE_INT nz1 =
3762 cached_nonzero_bits (XEXP (x, 1), mode,
3763 known_x, known_mode, known_ret);
3764 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3765 int width0 = floor_log2 (nz0) + 1;
3766 int width1 = floor_log2 (nz1) + 1;
3767 int low0 = floor_log2 (nz0 & -nz0);
3768 int low1 = floor_log2 (nz1 & -nz1);
3769 HOST_WIDE_INT op0_maybe_minusp
3770 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3771 HOST_WIDE_INT op1_maybe_minusp
3772 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3773 unsigned int result_width = mode_width;
3774 int result_low = 0;
3776 switch (code)
3778 case PLUS:
3779 result_width = MAX (width0, width1) + 1;
3780 result_low = MIN (low0, low1);
3781 break;
3782 case MINUS:
3783 result_low = MIN (low0, low1);
3784 break;
3785 case MULT:
3786 result_width = width0 + width1;
3787 result_low = low0 + low1;
3788 break;
3789 case DIV:
3790 if (width1 == 0)
3791 break;
3792 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3793 result_width = width0;
3794 break;
3795 case UDIV:
3796 if (width1 == 0)
3797 break;
3798 result_width = width0;
3799 break;
3800 case MOD:
3801 if (width1 == 0)
3802 break;
3803 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3804 result_width = MIN (width0, width1);
3805 result_low = MIN (low0, low1);
3806 break;
3807 case UMOD:
3808 if (width1 == 0)
3809 break;
3810 result_width = MIN (width0, width1);
3811 result_low = MIN (low0, low1);
3812 break;
3813 default:
3814 gcc_unreachable ();
3817 if (result_width < mode_width)
3818 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3820 if (result_low > 0)
3821 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3823 #ifdef POINTERS_EXTEND_UNSIGNED
3824 /* If pointers extend unsigned and this is an addition or subtraction
3825 to a pointer in Pmode, all the bits above ptr_mode are known to be
3826 zero. */
3827 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3828 && (code == PLUS || code == MINUS)
3829 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3830 nonzero &= GET_MODE_MASK (ptr_mode);
3831 #endif
3833 break;
3835 case ZERO_EXTRACT:
3836 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3837 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3838 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3839 break;
3841 case SUBREG:
3842 /* If this is a SUBREG formed for a promoted variable that has
3843 been zero-extended, we know that at least the high-order bits
3844 are zero, though others might be too. */
3846 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3847 nonzero = GET_MODE_MASK (GET_MODE (x))
3848 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3849 known_x, known_mode, known_ret);
3851 /* If the inner mode is a single word for both the host and target
3852 machines, we can compute this from which bits of the inner
3853 object might be nonzero. */
3854 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3855 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3856 <= HOST_BITS_PER_WIDE_INT))
3858 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3859 known_x, known_mode, known_ret);
3861 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3862 /* If this is a typical RISC machine, we only have to worry
3863 about the way loads are extended. */
3864 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3865 ? (((nonzero
3866 & (((unsigned HOST_WIDE_INT) 1
3867 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3868 != 0))
3869 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3870 || !MEM_P (SUBREG_REG (x)))
3871 #endif
3873 /* On many CISC machines, accessing an object in a wider mode
3874 causes the high-order bits to become undefined. So they are
3875 not known to be zero. */
3876 if (GET_MODE_SIZE (GET_MODE (x))
3877 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3878 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3879 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3882 break;
3884 case ASHIFTRT:
3885 case LSHIFTRT:
3886 case ASHIFT:
3887 case ROTATE:
3888 /* The nonzero bits are in two classes: any bits within MODE
3889 that aren't in GET_MODE (x) are always significant. The rest of the
3890 nonzero bits are those that are significant in the operand of
3891 the shift when shifted the appropriate number of bits. This
3892 shows that high-order bits are cleared by the right shift and
3893 low-order bits by left shifts. */
3894 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3895 && INTVAL (XEXP (x, 1)) >= 0
3896 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3898 enum machine_mode inner_mode = GET_MODE (x);
3899 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3900 int count = INTVAL (XEXP (x, 1));
3901 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3902 unsigned HOST_WIDE_INT op_nonzero =
3903 cached_nonzero_bits (XEXP (x, 0), mode,
3904 known_x, known_mode, known_ret);
3905 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3906 unsigned HOST_WIDE_INT outer = 0;
3908 if (mode_width > width)
3909 outer = (op_nonzero & nonzero & ~mode_mask);
3911 if (code == LSHIFTRT)
3912 inner >>= count;
3913 else if (code == ASHIFTRT)
3915 inner >>= count;
3917 /* If the sign bit may have been nonzero before the shift, we
3918 need to mark all the places it could have been copied to
3919 by the shift as possibly nonzero. */
3920 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3921 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3923 else if (code == ASHIFT)
3924 inner <<= count;
3925 else
3926 inner = ((inner << (count % width)
3927 | (inner >> (width - (count % width)))) & mode_mask);
3929 nonzero &= (outer | inner);
3931 break;
3933 case FFS:
3934 case POPCOUNT:
3935 /* This is at most the number of bits in the mode. */
3936 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3937 break;
3939 case CLZ:
3940 /* If CLZ has a known value at zero, then the nonzero bits are
3941 that value, plus the number of bits in the mode minus one. */
3942 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3943 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3944 else
3945 nonzero = -1;
3946 break;
3948 case CTZ:
3949 /* If CTZ has a known value at zero, then the nonzero bits are
3950 that value, plus the number of bits in the mode minus one. */
3951 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3952 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3953 else
3954 nonzero = -1;
3955 break;
3957 case PARITY:
3958 nonzero = 1;
3959 break;
3961 case IF_THEN_ELSE:
3963 unsigned HOST_WIDE_INT nonzero_true =
3964 cached_nonzero_bits (XEXP (x, 1), mode,
3965 known_x, known_mode, known_ret);
3967 /* Don't call nonzero_bits for the second time if it cannot change
3968 anything. */
3969 if ((nonzero & nonzero_true) != nonzero)
3970 nonzero &= nonzero_true
3971 | cached_nonzero_bits (XEXP (x, 2), mode,
3972 known_x, known_mode, known_ret);
3974 break;
3976 default:
3977 break;
3980 return nonzero;
3983 /* See the macro definition above. */
3984 #undef cached_num_sign_bit_copies
3987 /* The function cached_num_sign_bit_copies is a wrapper around
3988 num_sign_bit_copies1. It avoids exponential behavior in
3989 num_sign_bit_copies1 when X has identical subexpressions on the
3990 first or the second level. */
3992 static unsigned int
3993 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3994 enum machine_mode known_mode,
3995 unsigned int known_ret)
3997 if (x == known_x && mode == known_mode)
3998 return known_ret;
4000 /* Try to find identical subexpressions. If found call
4001 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4002 the precomputed value for the subexpression as KNOWN_RET. */
4004 if (ARITHMETIC_P (x))
4006 rtx x0 = XEXP (x, 0);
4007 rtx x1 = XEXP (x, 1);
4009 /* Check the first level. */
4010 if (x0 == x1)
4011 return
4012 num_sign_bit_copies1 (x, mode, x0, mode,
4013 cached_num_sign_bit_copies (x0, mode, known_x,
4014 known_mode,
4015 known_ret));
4017 /* Check the second level. */
4018 if (ARITHMETIC_P (x0)
4019 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4020 return
4021 num_sign_bit_copies1 (x, mode, x1, mode,
4022 cached_num_sign_bit_copies (x1, mode, known_x,
4023 known_mode,
4024 known_ret));
4026 if (ARITHMETIC_P (x1)
4027 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4028 return
4029 num_sign_bit_copies1 (x, mode, x0, mode,
4030 cached_num_sign_bit_copies (x0, mode, known_x,
4031 known_mode,
4032 known_ret));
4035 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4038 /* Return the number of bits at the high-order end of X that are known to
4039 be equal to the sign bit. X will be used in mode MODE; if MODE is
4040 VOIDmode, X will be used in its own mode. The returned value will always
4041 be between 1 and the number of bits in MODE. */
4043 static unsigned int
4044 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
4045 enum machine_mode known_mode,
4046 unsigned int known_ret)
4048 enum rtx_code code = GET_CODE (x);
4049 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4050 int num0, num1, result;
4051 unsigned HOST_WIDE_INT nonzero;
4053 /* If we weren't given a mode, use the mode of X. If the mode is still
4054 VOIDmode, we don't know anything. Likewise if one of the modes is
4055 floating-point. */
4057 if (mode == VOIDmode)
4058 mode = GET_MODE (x);
4060 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4061 return 1;
4063 /* For a smaller object, just ignore the high bits. */
4064 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4066 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4067 known_x, known_mode, known_ret);
4068 return MAX (1,
4069 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4072 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4074 #ifndef WORD_REGISTER_OPERATIONS
4075 /* If this machine does not do all register operations on the entire
4076 register and MODE is wider than the mode of X, we can say nothing
4077 at all about the high-order bits. */
4078 return 1;
4079 #else
4080 /* Likewise on machines that do, if the mode of the object is smaller
4081 than a word and loads of that size don't sign extend, we can say
4082 nothing about the high order bits. */
4083 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4084 #ifdef LOAD_EXTEND_OP
4085 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4086 #endif
4088 return 1;
4089 #endif
4092 switch (code)
4094 case REG:
4096 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4097 /* If pointers extend signed and this is a pointer in Pmode, say that
4098 all the bits above ptr_mode are known to be sign bit copies. */
4099 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4100 && REG_POINTER (x))
4101 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4102 #endif
4105 unsigned int copies_for_hook = 1, copies = 1;
4106 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4107 known_mode, known_ret,
4108 &copies_for_hook);
4110 if (new)
4111 copies = cached_num_sign_bit_copies (new, mode, known_x,
4112 known_mode, known_ret);
4114 if (copies > 1 || copies_for_hook > 1)
4115 return MAX (copies, copies_for_hook);
4117 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4119 break;
4121 case MEM:
4122 #ifdef LOAD_EXTEND_OP
4123 /* Some RISC machines sign-extend all loads of smaller than a word. */
4124 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4125 return MAX (1, ((int) bitwidth
4126 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4127 #endif
4128 break;
4130 case CONST_INT:
4131 /* If the constant is negative, take its 1's complement and remask.
4132 Then see how many zero bits we have. */
4133 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4134 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4135 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4136 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4138 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4140 case SUBREG:
4141 /* If this is a SUBREG for a promoted object that is sign-extended
4142 and we are looking at it in a wider mode, we know that at least the
4143 high-order bits are known to be sign bit copies. */
4145 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4147 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4148 known_x, known_mode, known_ret);
4149 return MAX ((int) bitwidth
4150 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4151 num0);
4154 /* For a smaller object, just ignore the high bits. */
4155 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4157 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4158 known_x, known_mode, known_ret);
4159 return MAX (1, (num0
4160 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4161 - bitwidth)));
4164 #ifdef WORD_REGISTER_OPERATIONS
4165 #ifdef LOAD_EXTEND_OP
4166 /* For paradoxical SUBREGs on machines where all register operations
4167 affect the entire register, just look inside. Note that we are
4168 passing MODE to the recursive call, so the number of sign bit copies
4169 will remain relative to that mode, not the inner mode. */
4171 /* This works only if loads sign extend. Otherwise, if we get a
4172 reload for the inner part, it may be loaded from the stack, and
4173 then we lose all sign bit copies that existed before the store
4174 to the stack. */
4176 if ((GET_MODE_SIZE (GET_MODE (x))
4177 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4178 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4179 && MEM_P (SUBREG_REG (x)))
4180 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4181 known_x, known_mode, known_ret);
4182 #endif
4183 #endif
4184 break;
4186 case SIGN_EXTRACT:
4187 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4188 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4189 break;
4191 case SIGN_EXTEND:
4192 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4193 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4194 known_x, known_mode, known_ret));
4196 case TRUNCATE:
4197 /* For a smaller object, just ignore the high bits. */
4198 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4199 known_x, known_mode, known_ret);
4200 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4201 - bitwidth)));
4203 case NOT:
4204 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4205 known_x, known_mode, known_ret);
4207 case ROTATE: case ROTATERT:
4208 /* If we are rotating left by a number of bits less than the number
4209 of sign bit copies, we can just subtract that amount from the
4210 number. */
4211 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4212 && INTVAL (XEXP (x, 1)) >= 0
4213 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4215 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4216 known_x, known_mode, known_ret);
4217 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4218 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4220 break;
4222 case NEG:
4223 /* In general, this subtracts one sign bit copy. But if the value
4224 is known to be positive, the number of sign bit copies is the
4225 same as that of the input. Finally, if the input has just one bit
4226 that might be nonzero, all the bits are copies of the sign bit. */
4227 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4228 known_x, known_mode, known_ret);
4229 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4230 return num0 > 1 ? num0 - 1 : 1;
4232 nonzero = nonzero_bits (XEXP (x, 0), mode);
4233 if (nonzero == 1)
4234 return bitwidth;
4236 if (num0 > 1
4237 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4238 num0--;
4240 return num0;
4242 case IOR: case AND: case XOR:
4243 case SMIN: case SMAX: case UMIN: case UMAX:
4244 /* Logical operations will preserve the number of sign-bit copies.
4245 MIN and MAX operations always return one of the operands. */
4246 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4247 known_x, known_mode, known_ret);
4248 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4249 known_x, known_mode, known_ret);
4250 return MIN (num0, num1);
4252 case PLUS: case MINUS:
4253 /* For addition and subtraction, we can have a 1-bit carry. However,
4254 if we are subtracting 1 from a positive number, there will not
4255 be such a carry. Furthermore, if the positive number is known to
4256 be 0 or 1, we know the result is either -1 or 0. */
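/* For example, in QImode, if nonzero_bits proves XEXP (x, 0) is 0 or 1,
   then (plus:QI x -1) is either 0 or -1, and all eight bits are copies
   of the sign bit.  */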
4258 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4259 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4261 nonzero = nonzero_bits (XEXP (x, 0), mode);
4262 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4263 return (nonzero == 1 || nonzero == 0 ? bitwidth
4264 : bitwidth - floor_log2 (nonzero) - 1);
4267 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4268 known_x, known_mode, known_ret);
4269 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4270 known_x, known_mode, known_ret);
4271 result = MAX (1, MIN (num0, num1) - 1);
4273 #ifdef POINTERS_EXTEND_UNSIGNED
4274 /* If pointers extend signed and this is an addition or subtraction
4275 to a pointer in Pmode, all the bits above ptr_mode are known to be
4276 sign bit copies. */
4277 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4278 && (code == PLUS || code == MINUS)
4279 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4280 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4281 - GET_MODE_BITSIZE (ptr_mode) + 1),
4282 result);
4283 #endif
4284 return result;
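/* Worked example (illustrative values): (plus x -1) in a 32-bit mode
   where nonzero_bits (x) == 1 means x is 0 or 1, so x - 1 is -1 or 0
   and the special case above reports all 32 bits as copies.  On the
   generic path, operands with 20 and 24 copies yield
   MAX (1, MIN (20, 24) - 1) = 19, allowing for the carry bit.  */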
4286 case MULT:
4287 /* The number of bits of the product is the sum of the number of
4288 bits of both terms. However, unless one of the terms is known
4289 to be positive, we must allow for an additional bit since negating
4290 a negative number can remove one sign bit copy. */
4292 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4293 known_x, known_mode, known_ret);
4294 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4295 known_x, known_mode, known_ret);
4297 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4298 if (result > 0
4299 && (bitwidth > HOST_BITS_PER_WIDE_INT
4300 || (((nonzero_bits (XEXP (x, 0), mode)
4301 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4302 && ((nonzero_bits (XEXP (x, 1), mode)
4303 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4304 result--;
4306 return MAX (1, result);
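/* Worked example (illustrative values): with bitwidth == 32, num0 == 20
   and num1 == 24, the product needs (32 - 20) + (32 - 24) = 20
   significant bits, so result == 12; if both operands might be
   negative, one further bit is reserved, giving 11 copies.  */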
4308 case UDIV:
4309 /* The result must be <= the first operand. If the first operand
4310 has the high bit set, we know nothing about the number of sign
4311 bit copies. */
4312 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4313 return 1;
4314 else if ((nonzero_bits (XEXP (x, 0), mode)
4315 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4316 return 1;
4317 else
4318 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4319 known_x, known_mode, known_ret);
4321 case UMOD:
4322 /* The result must be <= the second operand. */
4323 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4324 known_x, known_mode, known_ret);
4326 case DIV:
4327 /* Similar to unsigned division, except that we have to worry about
4328 the case where the divisor is negative, in which case we may
4329 lose one sign bit copy. */
4330 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4331 known_x, known_mode, known_ret);
4332 if (result > 1
4333 && (bitwidth > HOST_BITS_PER_WIDE_INT
4334 || (nonzero_bits (XEXP (x, 1), mode)
4335 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4336 result--;
4338 return result;
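/* For example, a 32-bit (div x y) where x has 12 sign-bit copies
   reports 11 copies unless nonzero_bits can prove y nonnegative,
   covering the case where a negative divisor flips the sign.  */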
4340 case MOD:
4341 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4342 known_x, known_mode, known_ret);
4343 if (result > 1
4344 && (bitwidth > HOST_BITS_PER_WIDE_INT
4345 || (nonzero_bits (XEXP (x, 1), mode)
4346 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4347 result--;
4349 return result;
4351 case ASHIFTRT:
4352 /* A right shift by a constant increases the number of bits that
4353 are copies of the sign bit. */
4354 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4355 known_x, known_mode, known_ret);
4356 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4357 && INTVAL (XEXP (x, 1)) > 0)
4358 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4360 return num0;
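/* For example, (ashiftrt x 4) in a 32-bit mode where x has 8 copies
   yields MIN (32, 8 + 4) = 12, since the arithmetic shift replicates
   the sign bit into the vacated high positions.  */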
4362 case ASHIFT:
4363 /* Left shifts destroy copies. */
4364 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4365 || INTVAL (XEXP (x, 1)) < 0
4366 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4367 return 1;
4369 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4370 known_x, known_mode, known_ret);
4371 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
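/* Conversely, (ashift x 4) can destroy up to 4 copies: 8 copies in
   become MAX (1, 8 - 4) = 4 copies out.  */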
4373 case IF_THEN_ELSE:
4374 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4375 known_x, known_mode, known_ret);
4376 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4377 known_x, known_mode, known_ret);
4378 return MIN (num0, num1);
4380 case EQ: case NE: case GE: case GT: case LE: case LT:
4381 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4382 case GEU: case GTU: case LEU: case LTU:
4383 case UNORDERED: case ORDERED:
4384 /* If the constant is negative, take its 1's complement and remask.
4385 Then see how many zero bits we have. */
4386 nonzero = STORE_FLAG_VALUE;
4387 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4388 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4389 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4391 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
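/* For example, with STORE_FLAG_VALUE == 1 in a 32-bit mode, nonzero
   stays 1 and a comparison yields 32 - floor_log2 (1) - 1 = 31 copies;
   with STORE_FLAG_VALUE == -1 the complement-and-remask step leaves
   nonzero == 0 and the full bitwidth is returned.  */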
4393 default:
4394 break;
4397 /* If we haven't been able to figure it out by one of the above rules,
4398 see if some of the high-order bits are known to be zero. If so,
4399 count those bits and return one less than that amount. If we can't
4400 safely compute the mask for this mode, give up and return 1. */
4402 bitwidth = GET_MODE_BITSIZE (mode);
4403 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4404 return 1;
4406 nonzero = nonzero_bits (x, mode);
4407 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4408 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
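/* For example, if nonzero_bits proves that only the low 4 bits of a
   32-bit value can be set, floor_log2 (nonzero) == 3 and the fallback
   reports 32 - 3 - 1 = 28 copies (the known-zero high bits).  */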
4411 /* Calculate the rtx_cost of a single instruction. A return value of
4412 zero indicates an instruction pattern without a known cost. */
4414 int
4415 insn_rtx_cost (rtx pat)
4417 int i, cost;
4418 rtx set;
4420 /* Extract the single set rtx from the instruction pattern.
4421 We can't use single_set since we only have the pattern. */
4422 if (GET_CODE (pat) == SET)
4423 set = pat;
4424 else if (GET_CODE (pat) == PARALLEL)
4426 set = NULL_RTX;
4427 for (i = 0; i < XVECLEN (pat, 0); i++)
4429 rtx x = XVECEXP (pat, 0, i);
4430 if (GET_CODE (x) == SET)
4432 if (set)
4433 return 0;
4434 set = x;
4437 if (!set)
4438 return 0;
4440 else
4441 return 0;
4443 cost = rtx_cost (SET_SRC (set), SET);
4444 return cost > 0 ? cost : COSTS_N_INSNS (1);
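/* Usage note (hypothetical caller, not from this file): passing
   PATTERN (insn) here returns 0 for anything without exactly one SET,
   e.g. a PARALLEL containing two SETs, and otherwise a positive cost
   (COSTS_N_INSNS (1) when rtx_cost reports none), so 0 can safely be
   treated as "cost unknown".  */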
4447 /* Given an insn INSN and condition COND, return the condition in a
4448 canonical form to simplify testing by callers. Specifically:
4450 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4451 (2) Both operands will be machine operands; (cc0) will have been replaced.
4452 (3) If an operand is a constant, it will be the second operand.
4453 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4454 for GE, GEU, and LEU.
4456 If the condition cannot be understood, or is an inequality floating-point
4457 comparison which needs to be reversed, 0 will be returned.
4459 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4461 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4462 insn used in locating the condition was found. If a replacement test
4463 of the condition is desired, it should be placed in front of that
4464 insn and we will be sure that the inputs are still valid.
4466 If WANT_REG is nonzero, we wish the condition to be relative to that
4467 register, if possible. Therefore, do not canonicalize the condition
4468 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4469 to be a compare to a CC mode register.
4471 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4472 and at INSN. */
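/* For example (illustrative operands), (le (reg:SI 60) (const_int 4))
   canonicalizes to (lt (reg:SI 60) (const_int 5)) by rule (4), and a
   leading constant as in (lt (const_int 0) (reg:SI 60)) is swapped to
   (gt (reg:SI 60) (const_int 0)) by rule (3).  */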
4474 rtx
4475 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4476 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4478 enum rtx_code code;
4479 rtx prev = insn;
4480 rtx set;
4481 rtx tem;
4482 rtx op0, op1;
4483 int reverse_code = 0;
4484 enum machine_mode mode;
4486 code = GET_CODE (cond);
4487 mode = GET_MODE (cond);
4488 op0 = XEXP (cond, 0);
4489 op1 = XEXP (cond, 1);
4491 if (reverse)
4492 code = reversed_comparison_code (cond, insn);
4493 if (code == UNKNOWN)
4494 return 0;
4496 if (earliest)
4497 *earliest = insn;
4499 /* If we are comparing a register with zero, see if the register is set
4500 in the previous insn to a COMPARE or a comparison operation. Perform
4501 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4502 in cse.c */
4504 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4505 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4506 && op1 == CONST0_RTX (GET_MODE (op0))
4507 && op0 != want_reg)
4509 /* Set nonzero when we find something of interest. */
4510 rtx x = 0;
4512 #ifdef HAVE_cc0
4513 /* If comparison with cc0, import actual comparison from compare
4514 insn. */
4515 if (op0 == cc0_rtx)
4517 if ((prev = prev_nonnote_insn (prev)) == 0
4518 || !NONJUMP_INSN_P (prev)
4519 || (set = single_set (prev)) == 0
4520 || SET_DEST (set) != cc0_rtx)
4521 return 0;
4523 op0 = SET_SRC (set);
4524 op1 = CONST0_RTX (GET_MODE (op0));
4525 if (earliest)
4526 *earliest = prev;
4528 #endif
4530 /* If this is a COMPARE, pick up the two things being compared. */
4531 if (GET_CODE (op0) == COMPARE)
4533 op1 = XEXP (op0, 1);
4534 op0 = XEXP (op0, 0);
4535 continue;
4537 else if (!REG_P (op0))
4538 break;
4540 /* Go back to the previous insn. Stop if it is not an INSN. We also
4541 stop if it isn't a single set or if it has a REG_INC note because
4542 we don't want to bother dealing with it. */
4544 if ((prev = prev_nonnote_insn (prev)) == 0
4545 || !NONJUMP_INSN_P (prev)
4546 || FIND_REG_INC_NOTE (prev, NULL_RTX))
4547 break;
4549 set = set_of (op0, prev);
4551 if (set
4552 && (GET_CODE (set) != SET
4553 || !rtx_equal_p (SET_DEST (set), op0)))
4554 break;
4556 /* If this is setting OP0, get what it sets it to if it looks
4557 relevant. */
4558 if (set)
4560 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4561 #ifdef FLOAT_STORE_FLAG_VALUE
4562 REAL_VALUE_TYPE fsfv;
4563 #endif
4565 /* ??? We may not combine comparisons done in a CCmode with
4566 comparisons not done in a CCmode. This is to aid targets
4567 like Alpha that have an IEEE compliant EQ instruction, and
4568 a non-IEEE compliant BEQ instruction. The use of CCmode is
4569 actually artificial, simply to prevent the combination, but
4570 should not affect other platforms.
4572 However, we must allow VOIDmode comparisons to match either
4573 CCmode or non-CCmode comparison, because some ports have
4574 modeless comparisons inside branch patterns.
4576 ??? This mode check should perhaps look more like the mode check
4577 in simplify_comparison in combine. */
4579 if ((GET_CODE (SET_SRC (set)) == COMPARE
4580 || (((code == NE
4581 || (code == LT
4582 && GET_MODE_CLASS (inner_mode) == MODE_INT
4583 && (GET_MODE_BITSIZE (inner_mode)
4584 <= HOST_BITS_PER_WIDE_INT)
4585 && (STORE_FLAG_VALUE
4586 & ((HOST_WIDE_INT) 1
4587 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4588 #ifdef FLOAT_STORE_FLAG_VALUE
4589 || (code == LT
4590 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
4591 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4592 REAL_VALUE_NEGATIVE (fsfv)))
4593 #endif
4595 && COMPARISON_P (SET_SRC (set))))
4596 && (((GET_MODE_CLASS (mode) == MODE_CC)
4597 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4598 || mode == VOIDmode || inner_mode == VOIDmode))
4599 x = SET_SRC (set);
4600 else if (((code == EQ
4601 || (code == GE
4602 && (GET_MODE_BITSIZE (inner_mode)
4603 <= HOST_BITS_PER_WIDE_INT)
4604 && GET_MODE_CLASS (inner_mode) == MODE_INT
4605 && (STORE_FLAG_VALUE
4606 & ((HOST_WIDE_INT) 1
4607 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4608 #ifdef FLOAT_STORE_FLAG_VALUE
4609 || (code == GE
4610 && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
4611 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4612 REAL_VALUE_NEGATIVE (fsfv)))
4613 #endif
4615 && COMPARISON_P (SET_SRC (set))
4616 && (((GET_MODE_CLASS (mode) == MODE_CC)
4617 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4618 || mode == VOIDmode || inner_mode == VOIDmode))
4621 reverse_code = 1;
4622 x = SET_SRC (set);
4624 else
4625 break;
4628 else if (reg_set_p (op0, prev))
4629 /* If this sets OP0, but not directly, we have to give up. */
4630 break;
4632 if (x)
4634 /* If the caller is expecting the condition to be valid at INSN,
4635 make sure X doesn't change before INSN. */
4636 if (valid_at_insn_p)
4637 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4638 break;
4639 if (COMPARISON_P (x))
4640 code = GET_CODE (x);
4641 if (reverse_code)
4643 code = reversed_comparison_code (x, prev);
4644 if (code == UNKNOWN)
4645 return 0;
4646 reverse_code = 0;
4649 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4650 if (earliest)
4651 *earliest = prev;
4655 /* If constant is first, put it last. */
4656 if (CONSTANT_P (op0))
4657 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4659 /* If OP0 is the result of a comparison, we weren't able to find what
4660 was really being compared, so fail. */
4661 if (!allow_cc_mode
4662 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4663 return 0;
4665 /* Canonicalize any ordered comparison with integers involving equality
4666 if we can do computations in the relevant mode and we do not
4667 overflow. */
4669 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4670 && GET_CODE (op1) == CONST_INT
4671 && GET_MODE (op0) != VOIDmode
4672 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4674 HOST_WIDE_INT const_val = INTVAL (op1);
4675 unsigned HOST_WIDE_INT uconst_val = const_val;
4676 unsigned HOST_WIDE_INT max_val
4677 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4679 switch (code)
4681 case LE:
4682 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4683 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4684 break;
4686 /* When cross-compiling, const_val might be sign-extended from
4687 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT. */
4688 case GE:
4689 if ((HOST_WIDE_INT) (const_val & max_val)
4690 != (((HOST_WIDE_INT) 1
4691 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4692 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4693 break;
4695 case LEU:
4696 if (uconst_val < max_val)
4697 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4698 break;
4700 case GEU:
4701 if (uconst_val != 0)
4702 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4703 break;
4705 default:
4706 break;
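/* For example, in SImode (le x 0x7fffffff) is left alone, since adding
   1 to the constant would overflow, while (leu x 3) becomes (ltu x 4)
   and (geu x 3) becomes (gtu x 2).  */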
4710 /* Never return CC0; return zero instead. */
4711 if (CC0_P (op0))
4712 return 0;
4714 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4717 /* Given a jump insn JUMP, return the condition that will cause it to branch
4718 to its JUMP_LABEL. If the condition cannot be understood, or is an
4719 inequality floating-point comparison which needs to be reversed, 0 will
4720 be returned.
4722 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4723 insn used in locating the condition was found. If a replacement test
4724 of the condition is desired, it should be placed in front of that
4725 insn and we will be sure that the inputs are still valid. If EARLIEST
4726 is null, the returned condition will be valid at INSN.
4728 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4729 compare to a CC mode register.
4731 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4733 rtx
4734 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4736 rtx cond;
4737 int reverse;
4738 rtx set;
4740 /* If this is not a standard conditional jump, we can't parse it. */
4741 if (!JUMP_P (jump)
4742 || ! any_condjump_p (jump))
4743 return 0;
4744 set = pc_set (jump);
4746 cond = XEXP (SET_SRC (set), 0);
4748 /* If this branches to JUMP_LABEL when the condition is false, reverse
4749 the condition. */
4750 reverse
4751 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4752 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4754 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4755 allow_cc_mode, valid_at_insn_p);
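/* For example, a jump whose pc_set is
   (set (pc) (if_then_else (eq r c) (label_ref L) (pc)))
   hands (eq r c) to canonicalize_condition directly, while the
   inverted form with (pc) and (label_ref L) exchanged is detected via
   JUMP_LABEL and reversed to NE first.  */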
4759 /* Initialize non_rtx_starting_operands, which is used to speed up
4760 for_each_rtx. */
4761 void
4762 init_rtlanal (void)
4764 int i;
4765 for (i = 0; i < NUM_RTX_CODE; i++)
4767 const char *format = GET_RTX_FORMAT (i);
4768 const char *first = strpbrk (format, "eEV");
4769 non_rtx_starting_operands[i] = first ? first - format : -1;
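/* For example, PLUS has RTL format "ee", so its entry becomes 0, while
   CONST_INT ("w") has no rtx operands at all and gets -1, letting
   for_each_rtx skip such codes quickly.  */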