/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "basic-block.h"
#include "real.h"
#include "regs.h"
#include "function.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
/* Return 0 if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    case PLUS:
      /* An address is assumed not to trap if it is an address that can't
         trap plus a constant integer or it is the pic register plus a
         constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
                 && GET_CODE (XEXP (x, 1)) == CONST_INT)
                || (XEXP (x, 0) == pic_offset_table_rtx
                    && CONSTANT_P (XEXP (x, 1))));

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 1));

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}
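
/* For example, (plus (reg fp) (const_int -4)), a frame slot, cannot
   trap by the PLUS rule above, since the frame pointer cannot trap and
   the offset is a CONST_INT.  A bare SYMBOL_REF, by contrast, can trap
   exactly when the symbol is weak, because an undefined weak symbol
   may resolve to address zero.  */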
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))
            return true;

          return nonzero_address_p (XEXP (x, 0));
        }
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
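
/* For example, (plus (reg/f:SI 60) (const_int 4)) is known nonzero when
   register 60 is marked REG_POINTER: a valid pointer plus a positive
   offset cannot wrap around to zero.  */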
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
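
/* For example, given (const (plus (symbol_ref "x") (const_int 12))),
   get_integer_term returns 12 and get_related_value returns
   (symbol_ref "x"), which lets cse.c recognize constants that differ
   only by an integer offset from the same symbol.  */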
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
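
/* For example, counting occurrences of (reg:SI 60) within
   (plus:SI (reg:SI 60) (reg:SI 60)) yields 2.  Matching is by pointer
   equality except for MEMs, which are compared with rtx_equal_p since
   equivalent MEMs are not shared.  */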
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }

  return 0;
}
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn)
                && (find_reg_fusage (insn, USE, reg)
                    || find_reg_fusage (insn, CLOBBER, reg)))))
      return 1;
  return 0;
}
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}
/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                         REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Do not
   consider non-registers one way or the other.  */

int
regs_set_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case PC:
    case CC0:
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (regs_set_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (MEM_READONLY_P (x))
        return 0;
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (MEM_READONLY_P (x))
        return 0;
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead as single set insns.  In common case
                 only single set is present in the pattern so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach set first time, we just expect this is
                 the single set we are looking for and only when more
                 sets are found in the insn, we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
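
/* Thus a pattern such as
     (parallel [(set (reg:SI 60) (plus:SI (reg:SI 61) (reg:SI 62)))
                (clobber (reg:CC 17))])
   still counts as a single set, and so does a PARALLEL whose extra
   SETs are all dead, i.e. marked with REG_UNUSED notes.  */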
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == SIGN_EXTRACT
      || GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
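
/* For example, (set (reg:SI 60) (reg:SI 60)) is a no-op, and so is a
   MEM-to-MEM move whose two operands are rtx_equal_p, provided the
   address has no side effects such as auto-increment.  */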
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}
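
/* Note that the hard-register range checks above account for multi-word
   values: a DImode value living in hard register 0 on a 32-bit target
   occupies registers 0 and 1, so a query for register 1 reports an
   overlap.  */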
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
            return 1;

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == SIGN_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
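
/* For example, given the pattern
     (parallel [(set (reg:SI 60) (reg:SI 61))
                (clobber (reg:CC 17))])
   FUN is called twice: once with (reg:SI 60) and the SET, and once
   with (reg:CC 17) and the CLOBBER.  */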
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG,
   ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  unsigned int regno, endregno;
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    {
      rtx dest = SET_DEST (pattern);

      /* A value is totally replaced if it is the destination or the
         destination is a SUBREG of REGNO that does not change the number of
         words in it.  */
      if (GET_CODE (dest) == SUBREG
          && (((GET_MODE_SIZE (GET_MODE (dest))
                + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
              == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
        dest = SUBREG_REG (dest);

      if (!REG_P (dest))
        return 0;

      regno = REGNO (dest);
      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
                  : regno + hard_regno_nregs[regno][GET_MODE (dest)]);

      return (test_regno >= regno && test_regno < endregno);
    }
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
            {
              rtx dest = SET_DEST (body);

              if (GET_CODE (dest) == SUBREG
                  && (((GET_MODE_SIZE (GET_MODE (dest))
                        + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                           + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
                dest = SUBREG_REG (dest);

              if (!REG_P (dest))
                continue;

              regno = REGNO (dest);
              endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
                          : regno + hard_regno_nregs[regno][GET_MODE (dest)]);

              if (test_regno >= regno && test_regno < endregno)
                return 1;
            }
        }
    }

  return 0;
}
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}
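
/* For example, a REG_DEAD note whose datum is a DImode hard register 4
   also answers a query for register 5 on a 32-bit target, because the
   note's register spans two hard registers.  */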
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}
/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  gcc_unreachable ();
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no other
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
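
/* For example, (mem (post_inc (reg:SI 60))) has a side effect (the
   register increment) even when the memory reference itself is not
   volatile, so side_effects_p returns 1 for it while volatile_refs_p
   returns 0.  */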
/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (MEM_NOTRAP_P (x))
        return 0;
      return rtx_addr_can_trap_p (XEXP (x, 0));

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      if (! CONSTANT_P (XEXP (x, 1))
          || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
              && flag_trapping_math))
        return 1;
      if (XEXP (x, 1) == const0_rtx)
        return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
        break;
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
        return 1;
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case EQ:
    case NE:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
        return 1;
      break;

    case NEG:
    case ABS:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
          && flag_trapping_math)
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (may_trap_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p (XVECEXP (x, i, j)))
              return 1;
        }
    }
  return 0;
}
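
/* For example, (div:SI (reg:SI 60) (reg:SI 61)) may trap because the
   divisor might be zero, while (div:SI (reg:SI 60) (const_int 4)) may
   not: the divisor is a nonzero constant and the operands themselves
   cannot trap.  */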
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 0;

    case LT:
    case LTU:
    case GT:
    case GTU:
    case LE:
    case LEU:
    case GE:
    case GEU:
      return 1;

    default:
      break;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          if (inequality_comparisons_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}
/* Replace any occurrence of FROM in X with TO.  The function does
   not enter into CONST_DOUBLE for the replace.

   Note that copying is not done so X must not be shared unless all copies
   are to be modified.  */

rtx
replace_rtx (rtx x, rtx from, rtx to)
{
  int i, j;
  const char *fmt;

  /* The following prevents loops from occurring when we change MEM in
     CONST_DOUBLE onto the same CONST_DOUBLE.  */
  if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
    return x;

  if (x == from)
    return to;

  /* Allow this function to make replacements in EXPR_LISTs.  */
  if (x == 0)
    return 0;

  if (GET_CODE (x) == SUBREG)
    {
      rtx new = replace_rtx (SUBREG_REG (x), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_subreg (GET_MODE (x), new,
                               GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));
          gcc_assert (x);
        }
      else
        SUBREG_REG (x) = new;

      return x;
    }
  else if (GET_CODE (x) == ZERO_EXTEND)
    {
      rtx new = replace_rtx (XEXP (x, 0), from, to);

      if (GET_CODE (new) == CONST_INT)
        {
          x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
                                        new, GET_MODE (XEXP (x, 0)));
          gcc_assert (x);
        }
      else
        XEXP (x, 0) = new;

      return x;
    }

  fmt = GET_RTX_FORMAT (GET_CODE (x));
  for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
    }

  return x;
}
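
/* Note that the replacement is destructive: subexpressions of X are
   rewritten in place.  Callers that need the original intact should
   copy it first, e.g. with copy_rtx, since the comment above requires
   X to be unshared.  */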
2349 /* Throughout the rtx X, replace many registers according to REG_MAP.
2350 Return the replacement for X (which may be X with altered contents).
2351 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2352 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2354 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2355 should not be mapped to pseudos or vice versa since validate_change
2356 is not called.
2358 If REPLACE_DEST is 1, replacements are also done in destinations;
2359 otherwise, only sources are replaced. */
2362 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2364 enum rtx_code code;
2365 int i;
2366 const char *fmt;
2368 if (x == 0)
2369 return x;
2371 code = GET_CODE (x);
2372 switch (code)
2374 case SCRATCH:
2375 case PC:
2376 case CC0:
2377 case CONST_INT:
2378 case CONST_DOUBLE:
2379 case CONST_VECTOR:
2380 case CONST:
2381 case SYMBOL_REF:
2382 case LABEL_REF:
2383 return x;
2385 case REG:
2386 /* Verify that the register has an entry before trying to access it. */
2387 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2389 /* SUBREGs can't be shared. Always return a copy to ensure that if
2390 this replacement occurs more than once then each instance will
2391 get distinct rtx. */
2392 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2393 return copy_rtx (reg_map[REGNO (x)]);
2394 return reg_map[REGNO (x)];
2396 return x;
2398 case SUBREG:
2399 /* Prevent making nested SUBREGs. */
2400 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2401 && reg_map[REGNO (SUBREG_REG (x))] != 0
2402 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2404 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2405 return simplify_gen_subreg (GET_MODE (x), map_val,
2406 GET_MODE (SUBREG_REG (x)),
2407 SUBREG_BYTE (x));
2409 break;
2411 case SET:
2412 if (replace_dest)
2413 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2415 else if (MEM_P (SET_DEST (x))
2416 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2417 /* Even if we are not to replace destinations, replace the register if it
2418 is CONTAINED in the destination (i.e. the destination is memory or
2419 a STRICT_LOW_PART). */
2420 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2421 reg_map, nregs, 0);
2422 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2423 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2424 break;
2426 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2427 return x;
2429 default:
2430 break;
2433 fmt = GET_RTX_FORMAT (code);
2434 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2436 if (fmt[i] == 'e')
2437 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2438 else if (fmt[i] == 'E')
2440 int j;
2441 for (j = 0; j < XVECLEN (x, i); j++)
2442 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2443 nregs, replace_dest);
2446 return x;
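/* Illustrative usage sketch (not part of the original source): to map
   pseudo P onto pseudo Q throughout an insn, including destinations,
   a caller might build the map as follows.

     rtx *map = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
     map[REGNO (p)] = q;
     PATTERN (insn) = replace_regs (PATTERN (insn), map, max_reg_num (), 1);

   Remember that hard registers must not be mapped to pseudos or vice
   versa, since validate_change is not called. */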
2449 /* Replace occurrences of the old label in *X with the new one.
2450 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2452 int
2453 replace_label (rtx *x, void *data)
2455 rtx l = *x;
2456 rtx old_label = ((replace_label_data *) data)->r1;
2457 rtx new_label = ((replace_label_data *) data)->r2;
2458 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2460 if (l == NULL_RTX)
2461 return 0;
2463 if (GET_CODE (l) == SYMBOL_REF
2464 && CONSTANT_POOL_ADDRESS_P (l))
2466 rtx c = get_pool_constant (l);
2467 if (rtx_referenced_p (old_label, c))
2469 rtx new_c, new_l;
2470 replace_label_data *d = (replace_label_data *) data;
2472 /* Create a copy of constant C; replace the label inside
2473 but do not update LABEL_NUSES because uses in constant pool
2474 are not counted. */
2475 new_c = copy_rtx (c);
2476 d->update_label_nuses = false;
2477 for_each_rtx (&new_c, replace_label, data);
2478 d->update_label_nuses = update_label_nuses;
2480 /* Add the new constant NEW_C to constant pool and replace
2481 the old reference to constant by new reference. */
2482 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2483 *x = replace_rtx (l, l, new_l);
2485 return 0;
2488 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2489 field. This is not handled by for_each_rtx because it doesn't
2490 handle unprinted ('0') fields. */
2491 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2492 JUMP_LABEL (l) = new_label;
2494 if ((GET_CODE (l) == LABEL_REF
2495 || GET_CODE (l) == INSN_LIST)
2496 && XEXP (l, 0) == old_label)
2498 XEXP (l, 0) = new_label;
2499 if (update_label_nuses)
2501 ++LABEL_NUSES (new_label);
2502 --LABEL_NUSES (old_label);
2504 return 0;
2507 return 0;
2510 /* Return nonzero when *BODY is equal to X or X is directly referenced by
2511 *BODY, in which case FOR_EACH_RTX stops traversing and returns nonzero
2512 too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2514 static int
2515 rtx_referenced_p_1 (rtx *body, void *x)
2517 rtx y = (rtx) x;
2519 if (*body == NULL_RTX)
2520 return y == NULL_RTX;
2522 /* Return true if a label_ref *BODY refers to label Y. */
2523 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2524 return XEXP (*body, 0) == y;
2526 /* If *BODY is a reference to pool constant traverse the constant. */
2527 if (GET_CODE (*body) == SYMBOL_REF
2528 && CONSTANT_POOL_ADDRESS_P (*body))
2529 return rtx_referenced_p (y, get_pool_constant (*body));
2531 /* By default, compare the RTL expressions. */
2532 return rtx_equal_p (*body, y);
2535 /* Return true if X is referenced in BODY. */
2537 int
2538 rtx_referenced_p (rtx x, rtx body)
2540 return for_each_rtx (&body, rtx_referenced_p_1, x);
2543 /* If INSN is a tablejump, return true and store the label preceding the
2544 jump table in *LABELP and the jump table itself in *TABLEP. LABELP and TABLEP may be NULL. */
2546 bool
2547 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2549 rtx label, table;
2551 if (JUMP_P (insn)
2552 && (label = JUMP_LABEL (insn)) != NULL_RTX
2553 && (table = next_active_insn (label)) != NULL_RTX
2554 && JUMP_P (table)
2555 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2556 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2558 if (labelp)
2559 *labelp = label;
2560 if (tablep)
2561 *tablep = table;
2562 return true;
2564 return false;
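/* Illustrative usage sketch (not part of the original source), where
   analyze_dispatch_table is a hypothetical consumer:

     rtx label, table;
     if (tablejump_p (insn, &label, &table))
       analyze_dispatch_table (PATTERN (table));

   Either pointer argument may be NULL when only the predicate result
   is needed. */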
2567 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2568 constant that is not in the constant pool and not in the condition
2569 of an IF_THEN_ELSE. */
2571 static int
2572 computed_jump_p_1 (rtx x)
2574 enum rtx_code code = GET_CODE (x);
2575 int i, j;
2576 const char *fmt;
2578 switch (code)
2580 case LABEL_REF:
2581 case PC:
2582 return 0;
2584 case CONST:
2585 case CONST_INT:
2586 case CONST_DOUBLE:
2587 case CONST_VECTOR:
2588 case SYMBOL_REF:
2589 case REG:
2590 return 1;
2592 case MEM:
2593 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2594 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2596 case IF_THEN_ELSE:
2597 return (computed_jump_p_1 (XEXP (x, 1))
2598 || computed_jump_p_1 (XEXP (x, 2)));
2600 default:
2601 break;
2604 fmt = GET_RTX_FORMAT (code);
2605 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2607 if (fmt[i] == 'e'
2608 && computed_jump_p_1 (XEXP (x, i)))
2609 return 1;
2611 else if (fmt[i] == 'E')
2612 for (j = 0; j < XVECLEN (x, i); j++)
2613 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2614 return 1;
2617 return 0;
2620 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2622 Tablejumps and casesi insns are not considered indirect jumps;
2623 we can recognize them by a (use (label_ref)). */
2625 int
2626 computed_jump_p (rtx insn)
2628 int i;
2629 if (JUMP_P (insn))
2631 rtx pat = PATTERN (insn);
2633 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2634 return 0;
2635 else if (GET_CODE (pat) == PARALLEL)
2637 int len = XVECLEN (pat, 0);
2638 int has_use_labelref = 0;
2640 for (i = len - 1; i >= 0; i--)
2641 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2642 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2643 == LABEL_REF))
2644 has_use_labelref = 1;
2646 if (! has_use_labelref)
2647 for (i = len - 1; i >= 0; i--)
2648 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2649 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2650 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2651 return 1;
2653 else if (GET_CODE (pat) == SET
2654 && SET_DEST (pat) == pc_rtx
2655 && computed_jump_p_1 (SET_SRC (pat)))
2656 return 1;
2658 return 0;
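/* Illustrative examples (not part of the original source): assuming no
   REG_LABEL note, a register-indirect jump such as

     (set (pc) (reg:SI 3))

   is a computed jump, while a tablejump's PARALLEL carries a
   (use (label_ref ...)) and is therefore rejected above. */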
2661 /* Traverse X via depth-first search, calling F for each
2662 sub-expression (including X itself). F is also passed the DATA.
2663 If F returns -1, do not traverse sub-expressions, but continue
2664 traversing the rest of the tree. If F ever returns any other
2665 nonzero value, stop the traversal, and return the value returned
2666 by F. Otherwise, return 0. This function does not traverse inside
2667 tree structure that contains RTX_EXPRs, or into sub-expressions
2668 whose format code is `0' since it is not known whether or not those
2669 codes are actually RTL.
2671 This routine is very general, and could (should?) be used to
2672 implement many of the other routines in this file. */
2674 int
2675 for_each_rtx (rtx *x, rtx_function f, void *data)
2677 int result;
2678 int length;
2679 const char *format;
2680 int i;
2682 /* Call F on X. */
2683 result = (*f) (x, data);
2684 if (result == -1)
2685 /* Do not traverse sub-expressions. */
2686 return 0;
2687 else if (result != 0)
2688 /* Stop the traversal. */
2689 return result;
2691 if (*x == NULL_RTX)
2692 /* There are no sub-expressions. */
2693 return 0;
2695 length = GET_RTX_LENGTH (GET_CODE (*x));
2696 format = GET_RTX_FORMAT (GET_CODE (*x));
2698 for (i = 0; i < length; ++i)
2700 switch (format[i])
2702 case 'e':
2703 result = for_each_rtx (&XEXP (*x, i), f, data);
2704 if (result != 0)
2705 return result;
2706 break;
2708 case 'V':
2709 case 'E':
2710 if (XVEC (*x, i) != 0)
2712 int j;
2713 for (j = 0; j < XVECLEN (*x, i); ++j)
2715 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2716 if (result != 0)
2717 return result;
2720 break;
2722 default:
2723 /* Nothing to do. */
2724 break;
2729 return 0;
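/* Illustrative callback sketch (not part of the original source):
   counting the MEMs below X using the protocol above.  Returning 0
   keeps walking; returning -1 would skip the current sub-rtxes.

     static int
     count_mems_1 (rtx *loc, void *data)
     {
       if (*loc && MEM_P (*loc))
         ++*(int *) data;
       return 0;
     }

     int n = 0;
     for_each_rtx (&x, count_mems_1, &n);  */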
2732 /* Searches X for any reference to REGNO, returning the rtx of the
2733 reference found if any. Otherwise, returns NULL_RTX. */
2735 rtx
2736 regno_use_in (unsigned int regno, rtx x)
2738 const char *fmt;
2739 int i, j;
2740 rtx tem;
2742 if (REG_P (x) && REGNO (x) == regno)
2743 return x;
2745 fmt = GET_RTX_FORMAT (GET_CODE (x));
2746 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2748 if (fmt[i] == 'e')
2750 if ((tem = regno_use_in (regno, XEXP (x, i))))
2751 return tem;
2753 else if (fmt[i] == 'E')
2754 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2755 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2756 return tem;
2759 return NULL_RTX;
2762 /* Return a value indicating whether OP, an operand of a commutative
2763 operation, is preferred as the first or second operand. The higher
2764 the value, the stronger the preference for being the first operand.
2765 We use negative values to indicate a preference for the first operand
2766 and positive values for the second operand. */
2768 int
2769 commutative_operand_precedence (rtx op)
2771 enum rtx_code code = GET_CODE (op);
2773 /* Constants always become the second operand. Prefer "nice" constants. */
2774 if (code == CONST_INT)
2775 return -7;
2776 if (code == CONST_DOUBLE)
2777 return -6;
2778 op = avoid_constant_pool_reference (op);
2779 code = GET_CODE (op);
2781 switch (GET_RTX_CLASS (code))
2783 case RTX_CONST_OBJ:
2784 if (code == CONST_INT)
2785 return -5;
2786 if (code == CONST_DOUBLE)
2787 return -4;
2788 return -3;
2790 case RTX_EXTRA:
2791 /* SUBREGs of objects should come second. */
2792 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2793 return -2;
2795 if (!CONSTANT_P (op))
2796 return 0;
2797 else
2798 /* As for RTX_CONST_OBJ. */
2799 return -3;
2801 case RTX_OBJ:
2802 /* Complex expressions should come first, so decrease the priority of
2803 objects. */
2804 return -1;
2806 case RTX_COMM_ARITH:
2807 /* Prefer operands that are themselves commutative to be first.
2808 This helps to make things linear. In particular,
2809 (and (and (reg) (reg)) (not (reg))) is canonical. */
2810 return 4;
2812 case RTX_BIN_ARITH:
2813 /* If only one operand is a binary expression, it will be the first
2814 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2815 is canonical, although it will usually be further simplified. */
2816 return 2;
2818 case RTX_UNARY:
2819 /* Then prefer NEG and NOT. */
2820 if (code == NEG || code == NOT)
2821 return 1;
2823 default:
2824 return 0;
2828 /* Return 1 iff it is necessary to swap operands of commutative operation
2829 in order to canonicalize expression. */
2831 int
2832 swap_commutative_operands_p (rtx x, rtx y)
2834 return (commutative_operand_precedence (x)
2835 < commutative_operand_precedence (y));
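/* Worked example (not part of the original source): for
   (plus (const_int 4) (reg)), the CONST_INT has precedence -7 while the
   REG, an RTX_OBJ, has precedence -1, so swap_commutative_operands_p
   returns 1 and canonicalization yields (plus (reg) (const_int 4)). */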
2838 /* Return 1 if X is an autoincrement side effect and the register is
2839 not the stack pointer. */
2840 int
2841 auto_inc_p (rtx x)
2843 switch (GET_CODE (x))
2845 case PRE_INC:
2846 case POST_INC:
2847 case PRE_DEC:
2848 case POST_DEC:
2849 case PRE_MODIFY:
2850 case POST_MODIFY:
2851 /* There are no REG_INC notes for SP. */
2852 if (XEXP (x, 0) != stack_pointer_rtx)
2853 return 1;
2854 default:
2855 break;
2857 return 0;
2860 /* Return 1 if the sequence of instructions beginning with FROM and up
2861 to and including TO is safe to move. If NEW_TO is non-NULL, and
2862 the sequence is not already safe to move, but can be easily
2863 extended to a sequence which is safe, then NEW_TO will point to the
2864 end of the extended sequence.
2866 For now, this function only checks that the region contains whole
2867 exception regions, but it could be extended to check additional
2868 conditions as well. */
2870 int
2871 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
2873 int eh_region_count = 0;
2874 int past_to_p = 0;
2875 rtx r = from;
2877 /* By default, assume the end of the region will be what was
2878 suggested. */
2879 if (new_to)
2880 *new_to = to;
2882 while (r)
2884 if (NOTE_P (r))
2886 switch (NOTE_LINE_NUMBER (r))
2888 case NOTE_INSN_EH_REGION_BEG:
2889 ++eh_region_count;
2890 break;
2892 case NOTE_INSN_EH_REGION_END:
2893 if (eh_region_count == 0)
2894 /* This sequence of instructions contains the end of
2895 an exception region, but not the beginning. Moving
2896 it will cause chaos. */
2897 return 0;
2899 --eh_region_count;
2900 break;
2902 default:
2903 break;
2906 else if (past_to_p)
2907 /* If we've passed TO, and we see a non-note instruction, we
2908 can't extend the sequence to a movable sequence. */
2909 return 0;
2911 if (r == to)
2913 if (!new_to)
2914 /* It's OK to move the sequence if there were matched sets of
2915 exception region notes. */
2916 return eh_region_count == 0;
2918 past_to_p = 1;
2921 /* It's OK to move the sequence if there were matched sets of
2922 exception region notes. */
2923 if (past_to_p && eh_region_count == 0)
2925 *new_to = r;
2926 return 1;
2929 /* Go to the next instruction. */
2930 r = NEXT_INSN (r);
2933 return 0;
2936 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2937 int
2938 loc_mentioned_in_p (rtx *loc, rtx in)
2940 enum rtx_code code = GET_CODE (in);
2941 const char *fmt = GET_RTX_FORMAT (code);
2942 int i, j;
2944 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2946 if (loc == &in->u.fld[i].rt_rtx)
2947 return 1;
2948 if (fmt[i] == 'e')
2950 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2951 return 1;
2953 else if (fmt[i] == 'E')
2954 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2955 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2956 return 1;
2958 return 0;
2961 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2962 and SUBREG_BYTE, return the bit offset where the subreg begins
2963 (counting from the least significant bit of the operand). */
2965 unsigned int
2966 subreg_lsb_1 (enum machine_mode outer_mode,
2967 enum machine_mode inner_mode,
2968 unsigned int subreg_byte)
2970 unsigned int bitpos;
2971 unsigned int byte;
2972 unsigned int word;
2974 /* A paradoxical subreg begins at bit position 0. */
2975 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2976 return 0;
2978 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2979 /* If the subreg crosses a word boundary ensure that
2980 it also begins and ends on a word boundary. */
2981 gcc_assert (!((subreg_byte % UNITS_PER_WORD
2982 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2983 && (subreg_byte % UNITS_PER_WORD
2984 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
2986 if (WORDS_BIG_ENDIAN)
2987 word = (GET_MODE_SIZE (inner_mode)
2988 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
2989 else
2990 word = subreg_byte / UNITS_PER_WORD;
2991 bitpos = word * BITS_PER_WORD;
2993 if (BYTES_BIG_ENDIAN)
2994 byte = (GET_MODE_SIZE (inner_mode)
2995 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
2996 else
2997 byte = subreg_byte % UNITS_PER_WORD;
2998 bitpos += byte * BITS_PER_UNIT;
3000 return bitpos;
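/* Worked example (not part of the original source): on a little-endian
   target, (subreg:SI (reg:DI ...) 4) selects the high word, and

     subreg_lsb_1 (SImode, DImode, 4) == 32

   whatever UNITS_PER_WORD is, since the word and byte contributions
   together add up to 32 bits. */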
3003 /* Given a subreg X, return the bit offset where the subreg begins
3004 (counting from the least significant bit of the reg). */
3006 unsigned int
3007 subreg_lsb (rtx x)
3009 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3010 SUBREG_BYTE (x));
3013 /* This function returns the regno offset of a subreg expression.
3014 xregno - A regno of an inner hard subreg_reg (or what will become one).
3015 xmode - The mode of xregno.
3016 offset - The byte offset.
3017 ymode - The mode of a top level SUBREG (or what may become one).
3018 RETURN - The regno offset which would be used. */
3019 unsigned int
3020 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3021 unsigned int offset, enum machine_mode ymode)
3023 int nregs_xmode, nregs_ymode;
3024 int mode_multiple, nregs_multiple;
3025 int y_offset;
3027 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3029 nregs_xmode = hard_regno_nregs[xregno][xmode];
3030 nregs_ymode = hard_regno_nregs[xregno][ymode];
3032 /* If this is a big endian paradoxical subreg, which uses more actual
3033 hard registers than the original register, we must return a negative
3034 offset so that we find the proper highpart of the register. */
3035 if (offset == 0
3036 && nregs_ymode > nregs_xmode
3037 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3038 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3039 return nregs_xmode - nregs_ymode;
3041 if (offset == 0 || nregs_xmode == nregs_ymode)
3042 return 0;
3044 /* The size of YMODE must not be greater than the size of XMODE. */
3045 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3046 gcc_assert (mode_multiple != 0);
3048 y_offset = offset / GET_MODE_SIZE (ymode);
3049 nregs_multiple = nregs_xmode / nregs_ymode;
3050 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
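/* Worked example (not part of the original source): assume a 32-bit
   little-endian target where DImode needs two hard registers and
   SImode one. For (subreg:SI (reg:DI 0) 4),

     subreg_regno_offset (0, DImode, 4, SImode) == 1

   so the subreg names hard register 1, which holds the high half of
   the pair. */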
3053 /* This function returns true when the offset is representable via
3054 subreg_offset in the given regno.
3055 xregno - A regno of an inner hard subreg_reg (or what will become one).
3056 xmode - The mode of xregno.
3057 offset - The byte offset.
3058 ymode - The mode of a top level SUBREG (or what may become one).
3059 RETURN - Whether the offset is representable. */
3060 bool
3061 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3062 unsigned int offset, enum machine_mode ymode)
3064 int nregs_xmode, nregs_ymode;
3065 int mode_multiple, nregs_multiple;
3066 int y_offset;
3068 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3070 nregs_xmode = hard_regno_nregs[xregno][xmode];
3071 nregs_ymode = hard_regno_nregs[xregno][ymode];
3073 /* Paradoxical subregs are always valid. */
3074 if (offset == 0
3075 && nregs_ymode > nregs_xmode
3076 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3077 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3078 return true;
3080 /* Lowpart subregs are always valid. */
3081 if (offset == subreg_lowpart_offset (ymode, xmode))
3082 return true;
3084 /* This should always pass, otherwise we don't know how to verify the
3085 constraint. These conditions may be relaxed but subreg_offset would
3086 need to be redesigned. */
3087 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3088 gcc_assert ((GET_MODE_SIZE (ymode) % nregs_ymode) == 0);
3089 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3091 /* The XMODE value can be seen as a vector of NREGS_XMODE
3092 values. The subreg must represent a lowpart of a given field.
3093 Compute which field it is. */
3094 offset -= subreg_lowpart_offset (ymode,
3095 mode_for_size (GET_MODE_BITSIZE (xmode)
3096 / nregs_xmode,
3097 MODE_INT, 0));
3099 /* The size of YMODE must not be greater than the size of XMODE. */
3100 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3101 gcc_assert (mode_multiple != 0);
3103 y_offset = offset / GET_MODE_SIZE (ymode);
3104 nregs_multiple = nregs_xmode / nregs_ymode;
3106 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3107 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3109 return (!(y_offset % (mode_multiple / nregs_multiple)));
3112 /* Return the final regno that a subreg expression refers to. */
3113 unsigned int
3114 subreg_regno (rtx x)
3116 unsigned int ret;
3117 rtx subreg = SUBREG_REG (x);
3118 int regno = REGNO (subreg);
3120 ret = regno + subreg_regno_offset (regno,
3121 GET_MODE (subreg),
3122 SUBREG_BYTE (x),
3123 GET_MODE (x));
3124 return ret;
3127 struct parms_set_data
3129 int nregs;
3130 HARD_REG_SET regs;
3133 /* Helper function for noticing stores to parameter registers. */
3134 static void
3135 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3137 struct parms_set_data *d = data;
3138 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3139 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3141 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3142 d->nregs--;
3146 /* Look backward for the first parameter to be loaded.
3147 Do not scan past BOUNDARY. */
3148 rtx
3149 find_first_parameter_load (rtx call_insn, rtx boundary)
3151 struct parms_set_data parm;
3152 rtx p, before;
3154 /* Since different machines initialize their parameter registers
3155 in different orders, assume nothing. Collect the set of all
3156 parameter registers. */
3157 CLEAR_HARD_REG_SET (parm.regs);
3158 parm.nregs = 0;
3159 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3160 if (GET_CODE (XEXP (p, 0)) == USE
3161 && REG_P (XEXP (XEXP (p, 0), 0)))
3163 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3165 /* We only care about registers which can hold function
3166 arguments. */
3167 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3168 continue;
3170 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3171 parm.nregs++;
3173 before = call_insn;
3175 /* Search backward for the first set of a register in this set. */
3176 while (parm.nregs && before != boundary)
3178 before = PREV_INSN (before);
3180 /* It is possible that some loads got CSEed from one call to
3181 another. Stop in that case. */
3182 if (CALL_P (before))
3183 break;
3185 /* Our caller must either ensure that we will find all sets (in case
3186 the code has not been optimized yet), or guard against possible
3187 intervening labels by setting BOUNDARY to the preceding
3188 CODE_LABEL. */
3189 if (LABEL_P (before))
3191 gcc_assert (before == boundary);
3192 break;
3195 if (INSN_P (before))
3196 note_stores (PATTERN (before), parms_set, &parm);
3198 return before;
3201 /* Return true if we should avoid inserting code between INSN and preceding
3202 call instruction. */
3204 bool
3205 keep_with_call_p (rtx insn)
3207 rtx set;
3209 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3211 if (REG_P (SET_DEST (set))
3212 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3213 && fixed_regs[REGNO (SET_DEST (set))]
3214 && general_operand (SET_SRC (set), VOIDmode))
3215 return true;
3216 if (REG_P (SET_SRC (set))
3217 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3218 && REG_P (SET_DEST (set))
3219 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3220 return true;
3221 /* There may be a stack pop just after the call and before the store
3222 of the return register. Search for the actual store when deciding
3223 if we can break or not. */
3224 if (SET_DEST (set) == stack_pointer_rtx)
3226 rtx i2 = next_nonnote_insn (insn);
3227 if (i2 && keep_with_call_p (i2))
3228 return true;
3231 return false;
3234 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3235 to non-complex jumps. That is, direct unconditional, conditional,
3236 and tablejumps, but not computed jumps or returns. It also does
3237 not apply to the fallthru case of a conditional jump. */
3239 bool
3240 label_is_jump_target_p (rtx label, rtx jump_insn)
3242 rtx tmp = JUMP_LABEL (jump_insn);
3244 if (label == tmp)
3245 return true;
3247 if (tablejump_p (jump_insn, NULL, &tmp))
3249 rtvec vec = XVEC (PATTERN (tmp),
3250 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3251 int i, veclen = GET_NUM_ELEM (vec);
3253 for (i = 0; i < veclen; ++i)
3254 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3255 return true;
3258 return false;
3262 /* Return an estimate of the cost of computing rtx X.
3263 One use is in cse, to decide which expression to keep in the hash table.
3264 Another is in rtl generation, to pick the cheapest way to multiply.
3265 Other uses like the latter are expected in the future. */
3267 int
3268 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3270 int i, j;
3271 enum rtx_code code;
3272 const char *fmt;
3273 int total;
3275 if (x == 0)
3276 return 0;
3278 /* Compute the default costs of certain things.
3279 Note that targetm.rtx_costs can override the defaults. */
3281 code = GET_CODE (x);
3282 switch (code)
3284 case MULT:
3285 total = COSTS_N_INSNS (5);
3286 break;
3287 case DIV:
3288 case UDIV:
3289 case MOD:
3290 case UMOD:
3291 total = COSTS_N_INSNS (7);
3292 break;
3293 case USE:
3294 /* Used in loop.c and combine.c as a marker. */
3295 total = 0;
3296 break;
3297 default:
3298 total = COSTS_N_INSNS (1);
3301 switch (code)
3303 case REG:
3304 return 0;
3306 case SUBREG:
3307 /* If we can't tie these modes, make this expensive. The larger
3308 the mode, the more expensive it is. */
3309 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3310 return COSTS_N_INSNS (2
3311 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3312 break;
3314 default:
3315 if (targetm.rtx_costs (x, code, outer_code, &total))
3316 return total;
3317 break;
3320 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3321 which is already in total. */
3323 fmt = GET_RTX_FORMAT (code);
3324 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3325 if (fmt[i] == 'e')
3326 total += rtx_cost (XEXP (x, i), code);
3327 else if (fmt[i] == 'E')
3328 for (j = 0; j < XVECLEN (x, i); j++)
3329 total += rtx_cost (XVECEXP (x, i, j), code);
3331 return total;
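/* Worked example (not part of the original source): with the default
   costs above and no target override, (plus (mult (reg) (reg)) (reg))
   costs COSTS_N_INSNS (1) for the PLUS plus COSTS_N_INSNS (5) for the
   MULT, for a total of COSTS_N_INSNS (6); the REG operands are free. */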
3334 /* Return cost of address expression X.
3335 X is expected to be a properly formed address reference. */
3337 int
3338 address_cost (rtx x, enum machine_mode mode)
3340 /* We may be asked for the cost of various unusual addresses, such as the
3341 operands of a push instruction. It is not worthwhile to complicate
3342 the target hook for such cases. */
3344 if (!memory_address_p (mode, x))
3345 return 1000;
3347 return targetm.address_cost (x);
3350 /* If the target doesn't override, compute the cost as with arithmetic. */
3352 int
3353 default_address_cost (rtx x)
3355 return rtx_cost (x, MEM);
3359 unsigned HOST_WIDE_INT
3360 nonzero_bits (rtx x, enum machine_mode mode)
3362 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3365 unsigned int
3366 num_sign_bit_copies (rtx x, enum machine_mode mode)
3368 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
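/* Worked example (not part of the original source): on a 64-bit
   target, for a hypothetical SImode register SIREG,

     num_sign_bit_copies (gen_rtx_SIGN_EXTEND (DImode, sireg), DImode)

   is at least 33, since the extension contributes 64 - 32 bits on top
   of however many copies the SImode operand already had (at least 1). */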
3371 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3372 It avoids exponential behavior in nonzero_bits1 when X has
3373 identical subexpressions on the first or the second level. */
3375 static unsigned HOST_WIDE_INT
3376 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3377 enum machine_mode known_mode,
3378 unsigned HOST_WIDE_INT known_ret)
3380 if (x == known_x && mode == known_mode)
3381 return known_ret;
3383 /* Try to find identical subexpressions. If found call
3384 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3385 precomputed value for the subexpression as KNOWN_RET. */
3387 if (ARITHMETIC_P (x))
3389 rtx x0 = XEXP (x, 0);
3390 rtx x1 = XEXP (x, 1);
3392 /* Check the first level. */
3393 if (x0 == x1)
3394 return nonzero_bits1 (x, mode, x0, mode,
3395 cached_nonzero_bits (x0, mode, known_x,
3396 known_mode, known_ret));
3398 /* Check the second level. */
3399 if (ARITHMETIC_P (x0)
3400 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3401 return nonzero_bits1 (x, mode, x1, mode,
3402 cached_nonzero_bits (x1, mode, known_x,
3403 known_mode, known_ret));
3405 if (ARITHMETIC_P (x1)
3406 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3407 return nonzero_bits1 (x, mode, x0, mode,
3408 cached_nonzero_bits (x0, mode, known_x,
3409 known_mode, known_ret));
3412 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3415 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3416 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3417 is less useful. We can't allow both, because that results in exponential
3418 run time recursion. There is a nullstone testcase that triggered
3419 this. This macro avoids accidental uses of num_sign_bit_copies. */
3420 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3422 /* Given an expression, X, compute which bits in X can be nonzero.
3423 We don't care about bits outside of those defined in MODE.
3425 For most X this is simply GET_MODE_MASK (MODE), but if X is
3426 an arithmetic operation, we can do better. */
3428 static unsigned HOST_WIDE_INT
3429 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3430 enum machine_mode known_mode,
3431 unsigned HOST_WIDE_INT known_ret)
3433 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3434 unsigned HOST_WIDE_INT inner_nz;
3435 enum rtx_code code;
3436 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3438 /* For floating-point values, assume all bits are needed. */
3439 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3440 return nonzero;
3442 /* If X is wider than MODE, use its mode instead. */
3443 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3445 mode = GET_MODE (x);
3446 nonzero = GET_MODE_MASK (mode);
3447 mode_width = GET_MODE_BITSIZE (mode);
3450 if (mode_width > HOST_BITS_PER_WIDE_INT)
3451 /* Our only callers in this case look for single bit values. So
3452 just return the mode mask. Those tests will then be false. */
3453 return nonzero;
3455 #ifndef WORD_REGISTER_OPERATIONS
3456 /* If MODE is wider than X, but both are a single word for both the host
3457 and target machines, we can compute this from which bits of the
3458 object might be nonzero in its own mode, taking into account the fact
3459 that on many CISC machines, accessing an object in a wider mode
3460 causes the high-order bits to become undefined. So they are
3461 not known to be zero. */
3463 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3464 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3465 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3466 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3468 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3469 known_x, known_mode, known_ret);
3470 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3471 return nonzero;
3473 #endif
3475 code = GET_CODE (x);
3476 switch (code)
3478 case REG:
3479 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3480 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3481 all the bits above ptr_mode are known to be zero. */
3482 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3483 && REG_POINTER (x))
3484 nonzero &= GET_MODE_MASK (ptr_mode);
3485 #endif
3487 /* Include declared information about alignment of pointers. */
3488 /* ??? We don't properly preserve REG_POINTER changes across
3489 pointer-to-integer casts, so we can't trust it except for
3490 things that we know must be pointers. See execute/960116-1.c. */
3491 if ((x == stack_pointer_rtx
3492 || x == frame_pointer_rtx
3493 || x == arg_pointer_rtx)
3494 && REGNO_POINTER_ALIGN (REGNO (x)))
3496 unsigned HOST_WIDE_INT alignment
3497 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3499 #ifdef PUSH_ROUNDING
3500 /* If PUSH_ROUNDING is defined, it is possible for the
3501 stack to be momentarily aligned only to that amount,
3502 so we pick the least alignment. */
3503 if (x == stack_pointer_rtx && PUSH_ARGS)
3504 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3505 alignment);
3506 #endif
3508 nonzero &= ~(alignment - 1);
3512 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3513 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3514 known_mode, known_ret,
3515 &nonzero_for_hook);
3517 if (new)
3518 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3519 known_mode, known_ret);
3521 return nonzero_for_hook;
3524 case CONST_INT:
3525 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3526 /* If X is negative in MODE, sign-extend the value. */
3527 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3528 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3529 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3530 #endif
3532 return INTVAL (x);
3534 case MEM:
3535 #ifdef LOAD_EXTEND_OP
3536 /* On many, if not most, RISC machines, reading a byte from memory
3537 zeros the rest of the register. Noticing that fact saves a lot
3538 of extra zero-extends. */
3539 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3540 nonzero &= GET_MODE_MASK (GET_MODE (x));
3541 #endif
3542 break;
3544 case EQ: case NE:
3545 case UNEQ: case LTGT:
3546 case GT: case GTU: case UNGT:
3547 case LT: case LTU: case UNLT:
3548 case GE: case GEU: case UNGE:
3549 case LE: case LEU: case UNLE:
3550 case UNORDERED: case ORDERED:
3552 /* If this produces an integer result, we know which bits are set.
3553 Code here used to clear bits outside the mode of X, but that is
3554 now done above. */
3556 if (GET_MODE_CLASS (mode) == MODE_INT
3557 && mode_width <= HOST_BITS_PER_WIDE_INT)
3558 nonzero = STORE_FLAG_VALUE;
3559 break;
3561 case NEG:
3562 #if 0
3563 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3564 and num_sign_bit_copies. */
3565 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3566 == GET_MODE_BITSIZE (GET_MODE (x)))
3567 nonzero = 1;
3568 #endif
3570 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3571 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3572 break;
3574 case ABS:
3575 #if 0
3576 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3577 and num_sign_bit_copies. */
3578 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3579 == GET_MODE_BITSIZE (GET_MODE (x)))
3580 nonzero = 1;
3581 #endif
3582 break;
3584 case TRUNCATE:
3585 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3586 known_x, known_mode, known_ret)
3587 & GET_MODE_MASK (mode));
3588 break;
3590 case ZERO_EXTEND:
3591 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3592 known_x, known_mode, known_ret);
3593 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3594 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3595 break;
3597 case SIGN_EXTEND:
3598 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3599 Otherwise, show all the bits in the outer mode but not the inner
3600 may be nonzero. */
3601 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3602 known_x, known_mode, known_ret);
3603 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3605 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3606 if (inner_nz
3607 & (((HOST_WIDE_INT) 1
3608 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3609 inner_nz |= (GET_MODE_MASK (mode)
3610 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3613 nonzero &= inner_nz;
3614 break;
3616 case AND:
3617 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3618 known_x, known_mode, known_ret)
3619 & cached_nonzero_bits (XEXP (x, 1), mode,
3620 known_x, known_mode, known_ret);
3621 break;
3623 case XOR: case IOR:
3624 case UMIN: case UMAX: case SMIN: case SMAX:
3626 unsigned HOST_WIDE_INT nonzero0 =
3627 cached_nonzero_bits (XEXP (x, 0), mode,
3628 known_x, known_mode, known_ret);
3630 /* Don't call nonzero_bits for the second time if it cannot change
3631 anything. */
3632 if ((nonzero & nonzero0) != nonzero)
3633 nonzero &= nonzero0
3634 | cached_nonzero_bits (XEXP (x, 1), mode,
3635 known_x, known_mode, known_ret);
3637 break;
3639 case PLUS: case MINUS:
3640 case MULT:
3641 case DIV: case UDIV:
3642 case MOD: case UMOD:
3643 /* We can apply the rules of arithmetic to compute the number of
3644 high- and low-order zero bits of these operations. We start by
3645 computing the width (position of the highest-order nonzero bit)
3646 and the number of low-order zero bits for each value. */
3648 unsigned HOST_WIDE_INT nz0 =
3649 cached_nonzero_bits (XEXP (x, 0), mode,
3650 known_x, known_mode, known_ret);
3651 unsigned HOST_WIDE_INT nz1 =
3652 cached_nonzero_bits (XEXP (x, 1), mode,
3653 known_x, known_mode, known_ret);
3654 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3655 int width0 = floor_log2 (nz0) + 1;
3656 int width1 = floor_log2 (nz1) + 1;
3657 int low0 = floor_log2 (nz0 & -nz0);
3658 int low1 = floor_log2 (nz1 & -nz1);
3659 HOST_WIDE_INT op0_maybe_minusp
3660 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3661 HOST_WIDE_INT op1_maybe_minusp
3662 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3663 unsigned int result_width = mode_width;
3664 int result_low = 0;
3666 switch (code)
3668 case PLUS:
3669 result_width = MAX (width0, width1) + 1;
3670 result_low = MIN (low0, low1);
3671 break;
3672 case MINUS:
3673 result_low = MIN (low0, low1);
3674 break;
3675 case MULT:
3676 result_width = width0 + width1;
3677 result_low = low0 + low1;
3678 break;
3679 case DIV:
3680 if (width1 == 0)
3681 break;
3682 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3683 result_width = width0;
3684 break;
3685 case UDIV:
3686 if (width1 == 0)
3687 break;
3688 result_width = width0;
3689 break;
3690 case MOD:
3691 if (width1 == 0)
3692 break;
3693 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3694 result_width = MIN (width0, width1);
3695 result_low = MIN (low0, low1);
3696 break;
3697 case UMOD:
3698 if (width1 == 0)
3699 break;
3700 result_width = MIN (width0, width1);
3701 result_low = MIN (low0, low1);
3702 break;
3703 default:
3704 gcc_unreachable ();
3707 if (result_width < mode_width)
3708 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3710 if (result_low > 0)
3711 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3713 #ifdef POINTERS_EXTEND_UNSIGNED
3714 /* If pointers extend unsigned and this is an addition or subtraction
3715 to a pointer in Pmode, all the bits above ptr_mode are known to be
3716 zero. */
3717 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3718 && (code == PLUS || code == MINUS)
3719 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3720 nonzero &= GET_MODE_MASK (ptr_mode);
3721 #endif
3723 break;
3725 case ZERO_EXTRACT:
3726 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3727 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3728 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3729 break;
3731 case SUBREG:
3732 /* If this is a SUBREG formed for a promoted variable that has
3733 been zero-extended, we know that at least the high-order bits
3734 are zero, though others might be too. */
3736 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3737 nonzero = GET_MODE_MASK (GET_MODE (x))
3738 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3739 known_x, known_mode, known_ret);
3741 /* If the inner mode is a single word for both the host and target
3742 machines, we can compute this from which bits of the inner
3743 object might be nonzero. */
3744 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3745 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3746 <= HOST_BITS_PER_WIDE_INT))
3748 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3749 known_x, known_mode, known_ret);
3751 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3752 /* If this is a typical RISC machine, we only have to worry
3753 about the way loads are extended. */
3754 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3755 ? (((nonzero
3756 & (((unsigned HOST_WIDE_INT) 1
3757 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3758 != 0))
3759 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3760 || !MEM_P (SUBREG_REG (x)))
3761 #endif
3763 /* On many CISC machines, accessing an object in a wider mode
3764 causes the high-order bits to become undefined. So they are
3765 not known to be zero. */
3766 if (GET_MODE_SIZE (GET_MODE (x))
3767 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3768 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3769 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3772 break;
3774 case ASHIFTRT:
3775 case LSHIFTRT:
3776 case ASHIFT:
3777 case ROTATE:
3778 /* The nonzero bits are in two classes: any bits within MODE
3779 that aren't in GET_MODE (x) are always significant. The rest of the
3780 nonzero bits are those that are significant in the operand of
3781 the shift when shifted the appropriate number of bits. This
3782 shows that high-order bits are cleared by the right shift and
3783 low-order bits by left shifts. */
3784 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3785 && INTVAL (XEXP (x, 1)) >= 0
3786 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3788 enum machine_mode inner_mode = GET_MODE (x);
3789 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3790 int count = INTVAL (XEXP (x, 1));
3791 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3792 unsigned HOST_WIDE_INT op_nonzero =
3793 cached_nonzero_bits (XEXP (x, 0), mode,
3794 known_x, known_mode, known_ret);
3795 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3796 unsigned HOST_WIDE_INT outer = 0;
3798 if (mode_width > width)
3799 outer = (op_nonzero & nonzero & ~mode_mask);
3801 if (code == LSHIFTRT)
3802 inner >>= count;
3803 else if (code == ASHIFTRT)
3805 inner >>= count;
3807 /* If the sign bit may have been nonzero before the shift, we
3808 need to mark all the places it could have been copied to
3809 by the shift as possibly nonzero. */
3810 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3811 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3813 else if (code == ASHIFT)
3814 inner <<= count;
3815 else
3816 inner = ((inner << (count % width)
3817 | (inner >> (width - (count % width)))) & mode_mask);
3819 nonzero &= (outer | inner);
3821 break;
3823 case FFS:
3824 case POPCOUNT:
3825 /* This is at most the number of bits in the mode. */
3826 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3827 break;
3829 case CLZ:
3830 /* If CLZ has a known value at zero, then the nonzero bits are
3831 that value, plus the number of bits in the mode minus one. */
3832 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3833 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3834 else
3835 nonzero = -1;
3836 break;
3838 case CTZ:
3839 /* If CTZ has a known value at zero, then the nonzero bits are
3840 that value, plus the number of bits in the mode minus one. */
3841 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3842 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3843 else
3844 nonzero = -1;
3845 break;
3847 case PARITY:
3848 nonzero = 1;
3849 break;
3851 case IF_THEN_ELSE:
3853 unsigned HOST_WIDE_INT nonzero_true =
3854 cached_nonzero_bits (XEXP (x, 1), mode,
3855 known_x, known_mode, known_ret);
3857 /* Don't call nonzero_bits for the second time if it cannot change
3858 anything. */
3859 if ((nonzero & nonzero_true) != nonzero)
3860 nonzero &= nonzero_true
3861 | cached_nonzero_bits (XEXP (x, 2), mode,
3862 known_x, known_mode, known_ret);
3864 break;
3866 default:
3867 break;
3870 return nonzero;
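/* Worked example (not part of the original source): by the AND and
   CONST_INT cases above, for a hypothetical SImode register REG,

     nonzero_bits (gen_rtx_AND (SImode, reg, GEN_INT (0xff)), SImode)

   is a submask of 0xff, so the value is known to fit in one byte. */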
3873 /* See the macro definition above. */
3874 #undef cached_num_sign_bit_copies
3877 /* The function cached_num_sign_bit_copies is a wrapper around
3878 num_sign_bit_copies1. It avoids exponential behavior in
3879 num_sign_bit_copies1 when X has identical subexpressions on the
3880 first or the second level. */
3882 static unsigned int
3883 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3884 enum machine_mode known_mode,
3885 unsigned int known_ret)
3887 if (x == known_x && mode == known_mode)
3888 return known_ret;
3890 /* Try to find identical subexpressions. If found call
3891 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3892 the precomputed value for the subexpression as KNOWN_RET. */
3894 if (ARITHMETIC_P (x))
3896 rtx x0 = XEXP (x, 0);
3897 rtx x1 = XEXP (x, 1);
3899 /* Check the first level. */
3900 if (x0 == x1)
3901 return
3902 num_sign_bit_copies1 (x, mode, x0, mode,
3903 cached_num_sign_bit_copies (x0, mode, known_x,
3904 known_mode,
3905 known_ret));
3907 /* Check the second level. */
3908 if (ARITHMETIC_P (x0)
3909 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3910 return
3911 num_sign_bit_copies1 (x, mode, x1, mode,
3912 cached_num_sign_bit_copies (x1, mode, known_x,
3913 known_mode,
3914 known_ret));
3916 if (ARITHMETIC_P (x1)
3917 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3918 return
3919 num_sign_bit_copies1 (x, mode, x0, mode,
3920 cached_num_sign_bit_copies (x0, mode, known_x,
3921 known_mode,
3922 known_ret));
3925 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
3928 /* Return the number of bits at the high-order end of X that are known to
3929 be equal to the sign bit. X will be used in mode MODE; if MODE is
3930 VOIDmode, X will be used in its own mode. The returned value will always
3931 be between 1 and the number of bits in MODE. */
3933 static unsigned int
3934 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
3935 enum machine_mode known_mode,
3936 unsigned int known_ret)
3938 enum rtx_code code = GET_CODE (x);
3939 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
3940 int num0, num1, result;
3941 unsigned HOST_WIDE_INT nonzero;
3943 /* If we weren't given a mode, use the mode of X. If the mode is still
3944 VOIDmode, we don't know anything. Likewise if one of the modes is
3945 floating-point. */
3947 if (mode == VOIDmode)
3948 mode = GET_MODE (x);
3950 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
3951 return 1;
3953 /* For a smaller object, just ignore the high bits. */
3954 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
3956 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
3957 known_x, known_mode, known_ret);
3958 return MAX (1,
3959 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
3962 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
3964 #ifndef WORD_REGISTER_OPERATIONS
3965 /* If this machine does not do all register operations on the entire
3966 register and MODE is wider than the mode of X, we can say nothing
3967 at all about the high-order bits. */
3968 return 1;
3969 #else
3970 /* Likewise on machines that do, if the mode of the object is smaller
3971 than a word and loads of that size don't sign extend, we can say
3972 nothing about the high order bits. */
3973 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
3974 #ifdef LOAD_EXTEND_OP
3975 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
3976 #endif
3978 return 1;
3979 #endif
3982 switch (code)
3984 case REG:
3986 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3987 /* If pointers extend signed and this is a pointer in Pmode, say that
3988 all the bits above ptr_mode are known to be sign bit copies. */
3989 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
3990 && REG_POINTER (x))
3991 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
3992 #endif
3995 unsigned int copies_for_hook = 1, copies = 1;
3996 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
3997 known_mode, known_ret,
3998 &copies_for_hook);
4000 if (new)
4001 copies = cached_num_sign_bit_copies (new, mode, known_x,
4002 known_mode, known_ret);
4004 if (copies > 1 || copies_for_hook > 1)
4005 return MAX (copies, copies_for_hook);
4007 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4009 break;
4011 case MEM:
4012 #ifdef LOAD_EXTEND_OP
4013 /* Some RISC machines sign-extend all loads of smaller than a word. */
4014 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4015 return MAX (1, ((int) bitwidth
4016 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4017 #endif
4018 break;
4020 case CONST_INT:
4021 /* If the constant is negative, take its 1's complement and remask.
4022 Then see how many zero bits we have. */
4023 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4024 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4025 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4026 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4028 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4030 case SUBREG:
4031 /* If this is a SUBREG for a promoted object that is sign-extended
4032 and we are looking at it in a wider mode, we know that at least the
4033 high-order bits are known to be sign bit copies. */
4035 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4037 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4038 known_x, known_mode, known_ret);
4039 return MAX ((int) bitwidth
4040 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4041 num0);
4044 /* For a smaller object, just ignore the high bits. */
4045 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4047 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4048 known_x, known_mode, known_ret);
4049 return MAX (1, (num0
4050 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4051 - bitwidth)));
4054 #ifdef WORD_REGISTER_OPERATIONS
4055 #ifdef LOAD_EXTEND_OP
4056 /* For paradoxical SUBREGs on machines where all register operations
4057 affect the entire register, just look inside. Note that we are
4058 passing MODE to the recursive call, so the number of sign bit copies
4059 will remain relative to that mode, not the inner mode. */
4061 /* This works only if loads sign extend. Otherwise, if we get a
4062 reload for the inner part, it may be loaded from the stack, and
4063 then we lose all sign bit copies that existed before the store
4064 to the stack. */
4066 if ((GET_MODE_SIZE (GET_MODE (x))
4067 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4068 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4069 && MEM_P (SUBREG_REG (x)))
4070 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4071 known_x, known_mode, known_ret);
4072 #endif
4073 #endif
4074 break;
4076 case SIGN_EXTRACT:
4077 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4078 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4079 break;
4081 case SIGN_EXTEND:
4082 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4083 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4084 known_x, known_mode, known_ret));
4086 case TRUNCATE:
4087 /* For a smaller object, just ignore the high bits. */
4088 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4089 known_x, known_mode, known_ret);
4090 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4091 - bitwidth)));
4093 case NOT:
4094 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4095 known_x, known_mode, known_ret);
4097 case ROTATE: case ROTATERT:
4098 /* If we are rotating left by a number of bits less than the number
4099 of sign bit copies, we can just subtract that amount from the
4100 number. */
4101 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4102 && INTVAL (XEXP (x, 1)) >= 0
4103 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4105 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4106 known_x, known_mode, known_ret);
4107 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4108 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4110 break;
4112 case NEG:
4113 /* In general, this subtracts one sign bit copy. But if the value
4114 is known to be positive, the number of sign bit copies is the
4115 same as that of the input. Finally, if the input has just one bit
4116 that might be nonzero, all the bits are copies of the sign bit. */
4117 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4118 known_x, known_mode, known_ret);
4119 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4120 return num0 > 1 ? num0 - 1 : 1;
4122 nonzero = nonzero_bits (XEXP (x, 0), mode);
4123 if (nonzero == 1)
4124 return bitwidth;
4126 if (num0 > 1
4127 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4128 num0--;
4130 return num0;
4132 case IOR: case AND: case XOR:
4133 case SMIN: case SMAX: case UMIN: case UMAX:
4134 /* Logical operations will preserve the number of sign-bit copies.
4135 MIN and MAX operations always return one of the operands. */
4136 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4137 known_x, known_mode, known_ret);
4138 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4139 known_x, known_mode, known_ret);
4140 return MIN (num0, num1);
4142 case PLUS: case MINUS:
4143 /* For addition and subtraction, we can have a 1-bit carry. However,
4144 if we are subtracting 1 from a positive number, there will not
4145 be such a carry. Furthermore, if the positive number is known to
4146 be 0 or 1, we know the result is either -1 or 0. */
4148 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4149 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4151 nonzero = nonzero_bits (XEXP (x, 0), mode);
4152 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4153 return (nonzero == 1 || nonzero == 0 ? bitwidth
4154 : bitwidth - floor_log2 (nonzero) - 1);
4157 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4158 known_x, known_mode, known_ret);
4159 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4160 known_x, known_mode, known_ret);
4161 result = MAX (1, MIN (num0, num1) - 1);
4163 #ifdef POINTERS_EXTEND_UNSIGNED
4164 /* If pointers extend signed and this is an addition or subtraction
4165 to a pointer in Pmode, all the bits above ptr_mode are known to be
4166 sign bit copies. */
4167 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4168 && (code == PLUS || code == MINUS)
4169 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4170 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4171 - GET_MODE_BITSIZE (ptr_mode) + 1),
4172 result);
4173 #endif
4174 return result;
4176 case MULT:
4177 /* The number of bits of the product is the sum of the number of
4178 bits of both terms. However, unless one of the terms is known
4179 to be positive, we must allow for an additional bit since negating
4180 a negative number can remove one sign bit copy. */
4182 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4183 known_x, known_mode, known_ret);
4184 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4185 known_x, known_mode, known_ret);
4187 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4188 if (result > 0
4189 && (bitwidth > HOST_BITS_PER_WIDE_INT
4190 || (((nonzero_bits (XEXP (x, 0), mode)
4191 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4192 && ((nonzero_bits (XEXP (x, 1), mode)
4193 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4194 result--;
4196 return MAX (1, result);
4198 case UDIV:
4199 /* The result must be <= the first operand. If the first operand
4200 has the high bit set, we know nothing about the number of sign
4201 bit copies. */
4202 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4203 return 1;
4204 else if ((nonzero_bits (XEXP (x, 0), mode)
4205 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4206 return 1;
4207 else
4208 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4209 known_x, known_mode, known_ret);
4211 case UMOD:
4212 /* The result must be <= the second operand. */
4213 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4214 known_x, known_mode, known_ret);
4216 case DIV:
4217 /* Similar to unsigned division, except that we have to worry about
4218 the case where the divisor is negative, in which case the result
4219 may need one more bit, so we drop one sign bit copy. */
4220 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4221 known_x, known_mode, known_ret);
4222 if (result > 1
4223 && (bitwidth > HOST_BITS_PER_WIDE_INT
4224 || (nonzero_bits (XEXP (x, 1), mode)
4225 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4226 result--;
4228 return result;
4230 case MOD:
4231 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4232 known_x, known_mode, known_ret);
4233 if (result > 1
4234 && (bitwidth > HOST_BITS_PER_WIDE_INT
4235 || (nonzero_bits (XEXP (x, 1), mode)
4236 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4237 result--;
4239 return result;
4241 case ASHIFTRT:
4242 /* Shifts by a constant add to the number of bits equal to the
4243 sign bit. */
4244 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4245 known_x, known_mode, known_ret);
4246 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4247 && INTVAL (XEXP (x, 1)) > 0)
4248 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4250 return num0;
4252 case ASHIFT:
4253 /* Left shifts destroy copies. */
4254 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4255 || INTVAL (XEXP (x, 1)) < 0
4256 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4257 return 1;
4259 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4260 known_x, known_mode, known_ret);
4261 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4263 case IF_THEN_ELSE:
4264 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4265 known_x, known_mode, known_ret);
4266 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4267 known_x, known_mode, known_ret);
4268 return MIN (num0, num1);
4270 case EQ: case NE: case GE: case GT: case LE: case LT:
4271 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4272 case GEU: case GTU: case LEU: case LTU:
4273 case UNORDERED: case ORDERED:
4274 /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
4275 Then see how many zero bits we have. */
4276 nonzero = STORE_FLAG_VALUE;
4277 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4278 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4279 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4281 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4283 default:
4284 break;
4287 /* If we haven't been able to figure it out by one of the above rules,
4288 see if some of the high-order bits are known to be zero. If so,
4289 count those bits and return one less than that amount. If we can't
4290 safely compute the mask for this mode, return 1, the conservative answer. */
4292 bitwidth = GET_MODE_BITSIZE (mode);
4293 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4294 return 1;
4296 nonzero = nonzero_bits (x, mode);
4297 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4298 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
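/* Worked example (not part of the original source): by the ASHIFTRT
   case above, (ashiftrt:SI (reg:SI ...) (const_int 24)) has at least
   1 + 24 = 25 sign bit copies, so only the low 8 bits are significant
   and the value is known to lie in [-128, 127]. */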
4301 /* Calculate the rtx_cost of a single instruction. A return value of
4302 zero indicates an instruction pattern without a known cost. */
4304 int
4305 insn_rtx_cost (rtx pat)
4307 int i, cost;
4308 rtx set;
4310 /* Extract the single set rtx from the instruction pattern.
4311 We can't use single_set since we only have the pattern. */
4312 if (GET_CODE (pat) == SET)
4313 set = pat;
4314 else if (GET_CODE (pat) == PARALLEL)
4316 set = NULL_RTX;
4317 for (i = 0; i < XVECLEN (pat, 0); i++)
4319 rtx x = XVECEXP (pat, 0, i);
4320 if (GET_CODE (x) == SET)
4322 if (set)
4323 return 0;
4324 set = x;
4327 if (!set)
4328 return 0;
4330 else
4331 return 0;
4333 cost = rtx_cost (SET_SRC (set), SET);
4334 return cost > 0 ? cost : COSTS_N_INSNS (1);
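/* Illustrative usage sketch (not part of the original source), with a
   hypothetical cost bound MAX_COST as used by passes such as
   if-conversion:

     if (insn_rtx_cost (PATTERN (insn)) <= max_cost)
       ... the transformation is deemed cheap enough ...

   A return value of zero means the pattern's cost is unknown. */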