/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "basic-block.h"
#include "real.h"
#include "regs.h"
#include "function.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static void insn_dependent_p_1 (rtx, rtx, void *);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);
static bool hoist_test_store (rtx, rtx, regset);
static void hoist_update_store (rtx, rtx *, rtx, rtx);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
          || RTX_UNCHANGING_P (x))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
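
/* Editor's illustrative sketch, not part of the original rtlanal.c: a
   code-motion pass might consult rtx_unstable_p before reusing a loaded
   value at a different program point.  The name example_can_reuse_load_p
   is hypothetical; kept under #if 0 so it is not compiled in.  */
#if 0
static int
example_can_reuse_load_p (rtx mem)
{
  /* A value that may differ elsewhere in the function must be
     reloaded rather than reused.  */
  return !rtx_unstable_p (mem);
}
#endif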
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
/* Return 0 if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    case PLUS:
      /* An address is assumed not to trap if it is an address that can't
         trap plus a constant integer or it is the pic register plus a
         constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
                 && GET_CODE (XEXP (x, 1)) == CONST_INT)
                || (XEXP (x, 0) == pic_offset_table_rtx
                    && CONSTANT_P (XEXP (x, 1))));

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 1));

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))
            return true;

          return nonzero_address_p (XEXP (x, 0));
        }
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
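
/* Editor's illustrative sketch, not part of the original rtlanal.c: a
   simplifier could use nonzero_address_p to fold a pointer comparison
   against zero.  The name example_fold_null_check is hypothetical; kept
   under #if 0 so it is not compiled in.  */
#if 0
static rtx
example_fold_null_check (rtx addr)
{
  /* (eq ADDR (const_int 0)) is known false when ADDR is provably
     nonzero; return const0_rtx for "false", or NULL_RTX if unknown.  */
  if (nonzero_address_p (addr))
    return const0_rtx;
  return NULL_RTX;
}
#endif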
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}
/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
/* Given a tablejump insn INSN, return the RTL expression for the offset
   into the jump table.  If the offset cannot be determined, then return
   NULL_RTX.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the offset was found.  */

rtx
get_jump_table_offset (rtx insn, rtx *earliest)
{
  rtx label = NULL;
  rtx table = NULL;
  rtx set;
  rtx old_insn;
  rtx x;
  rtx old_x;
  rtx y;
  rtx old_y;
  int i;

  if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
    return NULL_RTX;

  x = SET_SRC (set);

  /* Some targets (eg, ARM) emit a tablejump that also
     contains the out-of-range target.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    x = XEXP (x, 1);

  /* Search backwards and locate the expression stored in X.  */
  for (old_x = NULL_RTX; REG_P (x) && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
    ;

  /* If X is an expression using a relative address then strip
     off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
     or the jump table label.  */
  if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
    {
      for (i = 0; i < 2; i++)
        {
          old_insn = insn;
          y = XEXP (x, i);

          if (y == pc_rtx || y == pic_offset_table_rtx)
            break;

          for (old_y = NULL_RTX; REG_P (y) && y != old_y;
               old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
            ;

          if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
            break;
        }

      if (i >= 2)
        return NULL_RTX;

      x = XEXP (x, 1 - i);

      for (old_x = NULL_RTX; REG_P (x) && x != old_x;
           old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
        ;
    }

  /* Strip off any sign or zero extension.  */
  if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
    {
      x = XEXP (x, 0);

      for (old_x = NULL_RTX; REG_P (x) && x != old_x;
           old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
        ;
    }

  /* If X isn't a MEM then this isn't a tablejump we understand.  */
  if (!MEM_P (x))
    return NULL_RTX;

  /* Strip off the MEM.  */
  x = XEXP (x, 0);

  for (old_x = NULL_RTX; REG_P (x) && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
    ;

  /* If X isn't a PLUS then this isn't a tablejump we understand.  */
  if (GET_CODE (x) != PLUS)
    return NULL_RTX;

  /* At this point we should have an expression representing the jump table
     plus an offset.  Examine each operand in order to determine which one
     represents the jump table.  Knowing that tells us that the other operand
     must represent the offset.  */
  for (i = 0; i < 2; i++)
    {
      old_insn = insn;
      y = XEXP (x, i);

      for (old_y = NULL_RTX; REG_P (y) && y != old_y;
           old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
        ;

      if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
          && reg_mentioned_p (label, y))
        break;
    }

  if (i >= 2)
    return NULL_RTX;

  x = XEXP (x, 1 - i);

  /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM.  */
  if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
    for (i = 0; i < 2; i++)
      if (XEXP (x, i) == pic_offset_table_rtx)
        {
          x = XEXP (x, 1 - i);
          break;
        }

  if (earliest)
    *earliest = insn;

  /* Return the RTL expression representing the offset.  */
  return x;
}
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
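
/* Editor's illustrative sketch, not part of the original rtlanal.c:
   counting how often a register is read by an insn, ignoring SET
   destinations.  The name example_reg_use_count is hypothetical; kept
   under #if 0 so it is not compiled in.  */
#if 0
static int
example_reg_use_count (rtx insn, rtx reg)
{
  /* COUNT_DEST == 0: occurrences inside SET destinations do not count.  */
  return count_occurrences (PATTERN (insn), reg, 0);
}
#endif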
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }

  return 0;
}
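
/* Editor's illustrative sketch, not part of the original rtlanal.c: the
   common idiom of asking whether an insn's pattern mentions a register at
   all.  The name example_insn_mentions_reg_p is hypothetical; kept under
   #if 0 so it is not compiled in.  */
#if 0
static int
example_insn_mentions_reg_p (rtx insn, rtx reg)
{
  /* REG_NOTES are deliberately not searched here; only the pattern.  */
  return INSN_P (insn) && reg_mentioned_p (reg, PATTERN (insn));
}
#endif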
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no JUMP_INSN insn.  */

int
no_jumps_between_p (rtx beg, rtx end)
{
  rtx p;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (JUMP_P (p))
      return 0;
  return 1;
}
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn)
                && (find_reg_fusage (insn, USE, reg)
                    || find_reg_fusage (insn, CLOBBER, reg)))))
      return 1;
  return 0;
}
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
/* Nonzero if register REG is referenced in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  Sets of REG do
   not count.  */

int
reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_referenced_p (reg, PATTERN (insn))
            || (CALL_P (insn)
                && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}
/* Internals of reg_set_between_p.  */

int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              /* We'd like to test call_used_regs here, but rtlanal.c can't
                 reference that variable due to its use in genattrtab.  So
                 we'll just be more conservative.

                 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
                 information holds all clobbered registers.  */
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER)
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
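
/* Editor's illustrative sketch, not part of the original rtlanal.c: a pass
   that wants to forward a register value from one insn to another must
   check that nothing in between redefines it.  The name
   example_value_survives_p is hypothetical; kept under #if 0 so it is not
   compiled in.  */
#if 0
static int
example_value_survives_p (rtx reg, rtx from_insn, rtx to_insn)
{
  /* REG holds the same value at TO_INSN as at FROM_INSN only if no
     intervening insn sets or clobbers it.  */
  return !reg_set_between_p (reg, from_insn, to_insn);
}
#endif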
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Do not
   consider non-registers one way or the other.  */

int
regs_set_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case PC:
    case CC0:
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (regs_set_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (RTX_UNCHANGING_P (x))
        return 0;
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (RTX_UNCHANGING_P (x))
        return 0;
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
/* Return true if anything in insn X is (anti,output,true) dependent on
   anything in insn Y.  */

int
insn_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    abort ();

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return 1;

  tmp = PATTERN (x);
  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return 1;

  return 0;
}
/* A helper routine for insn_dependent_p called through note_stores.  */

static void
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx *pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}
/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead as single set insns.  In common case
                 only single set is present in the pattern so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach set first time, we just expect this is
                 the single set we are looking for and only when more
                 sets are found in the insn, we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
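
/* Editor's illustrative sketch, not part of the original rtlanal.c: the
   usual way to treat an insn as a simple register copy via the single_set
   macro (which dispatches to single_set_2 for PARALLEL patterns).  The
   name example_copy_source is hypothetical; kept under #if 0 so it is not
   compiled in.  */
#if 0
static rtx
example_copy_source (rtx insn)
{
  rtx set = single_set (insn);

  /* INSN is a plain register copy only if its one live SET moves a
     register into a register.  */
  if (set && REG_P (SET_DEST (set)) && REG_P (SET_SRC (set)))
    return SET_SRC (set);
  return NULL_RTX;
}
#endif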
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == SIGN_EXTRACT
      || GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
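
/* Editor's illustrative sketch, not part of the original rtlanal.c: a
   cleanup loop that removes no-op moves, assuming the usual delete_insn
   helper from the RTL middle end is available.  The name
   example_delete_noop_moves is hypothetical; kept under #if 0 so it is
   not compiled in.  */
#if 0
static void
example_delete_noop_moves (rtx first)
{
  rtx insn;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && noop_move_p (insn))
      delete_insn (insn);
}
#endif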
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
            return 1;

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
#ifdef ENABLE_CHECKING
      if (!CONSTANT_P (x))
        abort ();
#endif

      return 0;
    }
}
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == SIGN_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
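
/* Editor's illustrative sketch, not part of the original rtlanal.c: the
   canonical note_stores callback pattern, here detecting whether an insn
   stores into any hard register.  The names example_note_hard_reg_store
   and example_sets_hard_reg_p are hypothetical; kept under #if 0 so it is
   not compiled in.  */
#if 0
/* Callback: record in *(int *) DATA that a hard register was stored.  */
static void
example_note_hard_reg_store (rtx dest, rtx set ATTRIBUTE_UNUSED, void *data)
{
  if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    *(int *) data = 1;
}

static int
example_sets_hard_reg_p (rtx insn)
{
  int found = 0;

  note_stores (PATTERN (insn), example_note_hard_reg_store, &found);
  return found;
}
#endif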
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG,
   ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  if (!REG_P (x))
    abort ();

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  unsigned int regno, endregno;
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    {
      rtx dest = SET_DEST (pattern);

      /* A value is totally replaced if it is the destination or the
         destination is a SUBREG of REGNO that does not change the number of
         words in it.  */
      if (GET_CODE (dest) == SUBREG
          && (((GET_MODE_SIZE (GET_MODE (dest))
                + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
              == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
        dest = SUBREG_REG (dest);

      if (!REG_P (dest))
        return 0;

      regno = REGNO (dest);
      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
                  : regno + hard_regno_nregs[regno][GET_MODE (dest)]);

      return (test_regno >= regno && test_regno < endregno);
    }
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
            {
              rtx dest = SET_DEST (body);

              if (GET_CODE (dest) == SUBREG
                  && (((GET_MODE_SIZE (GET_MODE (dest))
                        + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                           + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
                dest = SUBREG_REG (dest);

              if (!REG_P (dest))
                continue;

              regno = REGNO (dest);
              endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
                          : regno + hard_regno_nregs[regno][GET_MODE (dest)]);

              if (test_regno >= regno && test_regno < endregno)
                return 1;
            }
        }
    }

  return 0;
}
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
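
/* Editor's illustrative sketch, not part of the original rtlanal.c:
   extracting the known value attached to an insn by a REG_EQUAL note.
   The name example_known_equal_value is hypothetical; kept under #if 0
   so it is not compiled in.  */
#if 0
static rtx
example_known_equal_value (rtx insn)
{
  /* A REG_EQUAL note, when present, records the value the SET
     destination is known to hold after INSN executes.  */
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  return note ? XEXP (note, 0) : NULL_RTX;
}
#endif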
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  if (! datum)
    abort ();

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}
/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  abort ();
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no instructions
   should be moved or combined across them.  This includes only volatile
   asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
2305 /* Return nonzero if evaluating rtx X might cause a trap. */
2308 may_trap_p (rtx x)
2310 int i;
2311 enum rtx_code code;
2312 const char *fmt;
2314 if (x == 0)
2315 return 0;
2316 code = GET_CODE (x);
2317 switch (code)
2319 /* Handle these cases quickly. */
2320 case CONST_INT:
2321 case CONST_DOUBLE:
2322 case CONST_VECTOR:
2323 case SYMBOL_REF:
2324 case LABEL_REF:
2325 case CONST:
2326 case PC:
2327 case CC0:
2328 case REG:
2329 case SCRATCH:
2330 return 0;
2332 case ASM_INPUT:
2333 case UNSPEC_VOLATILE:
2334 case TRAP_IF:
2335 return 1;
2337 case ASM_OPERANDS:
2338 return MEM_VOLATILE_P (x);
2340 /* Memory ref can trap unless it's a static var or a stack slot. */
2341 case MEM:
2342 if (MEM_NOTRAP_P (x))
2343 return 0;
2344 return rtx_addr_can_trap_p (XEXP (x, 0));
2346 /* Division by a non-constant might trap. */
2347 case DIV:
2348 case MOD:
2349 case UDIV:
2350 case UMOD:
2351 if (HONOR_SNANS (GET_MODE (x)))
2352 return 1;
2353 if (! CONSTANT_P (XEXP (x, 1))
2354 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2355 && flag_trapping_math))
2356 return 1;
2357 if (XEXP (x, 1) == const0_rtx)
2358 return 1;
2359 break;
2361 case EXPR_LIST:
2362 /* An EXPR_LIST is used to represent a function call. This
2363 certainly may trap. */
2364 return 1;
2366 case GE:
2367 case GT:
2368 case LE:
2369 case LT:
2370 case LTGT:
2371 case COMPARE:
2372 /* Some floating point comparisons may trap. */
2373 if (!flag_trapping_math)
2374 break;
2375 /* ??? There is no machine independent way to check for tests that trap
2376 when COMPARE is used, though many targets do make this distinction.
2377 For instance, sparc uses CCFPE for compares which generate exceptions
2378 and CCFP for compares which do not generate exceptions. */
2379 if (HONOR_NANS (GET_MODE (x)))
2380 return 1;
2381 /* But often the compare has some CC mode, so check operand
2382 modes as well. */
2383 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2384 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2385 return 1;
2386 break;
2388 case EQ:
2389 case NE:
2390 if (HONOR_SNANS (GET_MODE (x)))
2391 return 1;
2392 /* Often comparison is CC mode, so check operand modes. */
2393 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2394 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2395 return 1;
2396 break;
2398 case FIX:
2399 /* Conversion of a floating-point value might trap. */
2400 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2401 return 1;
2402 break;
2404 case NEG:
2405 case ABS:
2406 /* These operations don't trap even with floating point. */
2407 break;
2409 default:
2410 /* Any floating arithmetic may trap. */
2411 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2412 && flag_trapping_math)
2413 return 1;
2416 fmt = GET_RTX_FORMAT (code);
2417 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2419 if (fmt[i] == 'e')
2421 if (may_trap_p (XEXP (x, i)))
2422 return 1;
2424 else if (fmt[i] == 'E')
2426 int j;
2427 for (j = 0; j < XVECLEN (x, i); j++)
2428 if (may_trap_p (XVECEXP (x, i, j)))
2429 return 1;
2432 return 0;
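/* Worked example for may_trap_p, an illustrative sketch that is not part
   of the original source.  Division by the constant zero is reported as
   trapping, while integer addition of two registers is not; the register
   number is arbitrary.  */
#if 0
static void
may_trap_example (void)
{
  rtx reg = gen_rtx_REG (SImode, 1);
  rtx div0 = gen_rtx_DIV (SImode, reg, const0_rtx);
  rtx sum = gen_rtx_PLUS (SImode, reg, reg);

  /* Expected: may_trap_p (div0) == 1 and may_trap_p (sum) == 0.  */
  if (may_trap_p (div0) && ! may_trap_p (sum))
    ;
}
#endif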
2435 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2436 i.e., an inequality. */
2439 inequality_comparisons_p (rtx x)
2441 const char *fmt;
2442 int len, i;
2443 enum rtx_code code = GET_CODE (x);
2445 switch (code)
2447 case REG:
2448 case SCRATCH:
2449 case PC:
2450 case CC0:
2451 case CONST_INT:
2452 case CONST_DOUBLE:
2453 case CONST_VECTOR:
2454 case CONST:
2455 case LABEL_REF:
2456 case SYMBOL_REF:
2457 return 0;
2459 case LT:
2460 case LTU:
2461 case GT:
2462 case GTU:
2463 case LE:
2464 case LEU:
2465 case GE:
2466 case GEU:
2467 return 1;
2469 default:
2470 break;
2473 len = GET_RTX_LENGTH (code);
2474 fmt = GET_RTX_FORMAT (code);
2476 for (i = 0; i < len; i++)
2478 if (fmt[i] == 'e')
2480 if (inequality_comparisons_p (XEXP (x, i)))
2481 return 1;
2483 else if (fmt[i] == 'E')
2485 int j;
2486 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2487 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2488 return 1;
2492 return 0;
2495 /* Replace any occurrence of FROM in X with TO. The function does
2496 not descend into CONST_DOUBLEs when replacing.
2498 Note that copying is not done, so X must not be shared unless all copies
2499 are to be modified. */
2502 replace_rtx (rtx x, rtx from, rtx to)
2504 int i, j;
2505 const char *fmt;
2507 /* The following prevents loops from occurring when we change a MEM
2508 inside a CONST_DOUBLE into the same CONST_DOUBLE. */
2509 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2510 return x;
2512 if (x == from)
2513 return to;
2515 /* Allow this function to make replacements in EXPR_LISTs. */
2516 if (x == 0)
2517 return 0;
2519 if (GET_CODE (x) == SUBREG)
2521 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2523 if (GET_CODE (new) == CONST_INT)
2525 x = simplify_subreg (GET_MODE (x), new,
2526 GET_MODE (SUBREG_REG (x)),
2527 SUBREG_BYTE (x));
2528 if (! x)
2529 abort ();
2531 else
2532 SUBREG_REG (x) = new;
2534 return x;
2536 else if (GET_CODE (x) == ZERO_EXTEND)
2538 rtx new = replace_rtx (XEXP (x, 0), from, to);
2540 if (GET_CODE (new) == CONST_INT)
2542 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2543 new, GET_MODE (XEXP (x, 0)));
2544 if (! x)
2545 abort ();
2547 else
2548 XEXP (x, 0) = new;
2550 return x;
2553 fmt = GET_RTX_FORMAT (GET_CODE (x));
2554 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2556 if (fmt[i] == 'e')
2557 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2558 else if (fmt[i] == 'E')
2559 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2560 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2563 return x;
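/* Usage sketch for replace_rtx, not part of the original source.  Since
   replacement is done in place, X must be unshared; the register numbers
   are arbitrary.  */
#if 0
static rtx
replace_rtx_example (void)
{
  rtx old_reg = gen_rtx_REG (SImode, 1);
  rtx new_reg = gen_rtx_REG (SImode, 2);
  rtx x = gen_rtx_PLUS (SImode, old_reg, GEN_INT (4));

  /* Yields (plus:SI (reg:SI 2) (const_int 4)).  */
  return replace_rtx (x, old_reg, new_reg);
}
#endif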
2566 /* Throughout the rtx X, replace many registers according to REG_MAP.
2567 Return the replacement for X (which may be X with altered contents).
2568 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2569 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2571 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2572 should not be mapped to pseudos or vice versa since validate_change
2573 is not called.
2575 If REPLACE_DEST is 1, replacements are also done in destinations;
2576 otherwise, only sources are replaced. */
2579 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2581 enum rtx_code code;
2582 int i;
2583 const char *fmt;
2585 if (x == 0)
2586 return x;
2588 code = GET_CODE (x);
2589 switch (code)
2591 case SCRATCH:
2592 case PC:
2593 case CC0:
2594 case CONST_INT:
2595 case CONST_DOUBLE:
2596 case CONST_VECTOR:
2597 case CONST:
2598 case SYMBOL_REF:
2599 case LABEL_REF:
2600 return x;
2602 case REG:
2603 /* Verify that the register has an entry before trying to access it. */
2604 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2606 /* SUBREGs can't be shared. Always return a copy to ensure that if
2607 this replacement occurs more than once then each instance will
2608 get distinct rtx. */
2609 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2610 return copy_rtx (reg_map[REGNO (x)]);
2611 return reg_map[REGNO (x)];
2613 return x;
2615 case SUBREG:
2616 /* Prevent making nested SUBREGs. */
2617 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2618 && reg_map[REGNO (SUBREG_REG (x))] != 0
2619 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2621 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2622 return simplify_gen_subreg (GET_MODE (x), map_val,
2623 GET_MODE (SUBREG_REG (x)),
2624 SUBREG_BYTE (x));
2626 break;
2628 case SET:
2629 if (replace_dest)
2630 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2632 else if (MEM_P (SET_DEST (x))
2633 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2634 /* Even if we are not to replace destinations, replace a register if
2635 it is CONTAINED in the destination (i.e., the destination is memory
2636 or a STRICT_LOW_PART). */
2637 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2638 reg_map, nregs, 0);
2639 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2640 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2641 break;
2643 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2644 return x;
2646 default:
2647 break;
2650 fmt = GET_RTX_FORMAT (code);
2651 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2653 if (fmt[i] == 'e')
2654 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2655 else if (fmt[i] == 'E')
2657 int j;
2658 for (j = 0; j < XVECLEN (x, i); j++)
2659 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2660 nregs, replace_dest);
2663 return x;
2666 /* Replace occurrences of the old label in *X with the new one.
2667 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2670 replace_label (rtx *x, void *data)
2672 rtx l = *x;
2673 rtx old_label = ((replace_label_data *) data)->r1;
2674 rtx new_label = ((replace_label_data *) data)->r2;
2675 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2677 if (l == NULL_RTX)
2678 return 0;
2680 if (GET_CODE (l) == SYMBOL_REF
2681 && CONSTANT_POOL_ADDRESS_P (l))
2683 rtx c = get_pool_constant (l);
2684 if (rtx_referenced_p (old_label, c))
2686 rtx new_c, new_l;
2687 replace_label_data *d = (replace_label_data *) data;
2689 /* Create a copy of constant C; replace the label inside
2690 but do not update LABEL_NUSES because uses in the constant pool
2691 are not counted. */
2692 new_c = copy_rtx (c);
2693 d->update_label_nuses = false;
2694 for_each_rtx (&new_c, replace_label, data);
2695 d->update_label_nuses = update_label_nuses;
2697 /* Add the new constant NEW_C to the constant pool and replace
2698 the old reference to the constant with the new reference. */
2699 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2700 *x = replace_rtx (l, l, new_l);
2702 return 0;
2705 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2706 field. This is not handled by for_each_rtx because it doesn't
2707 handle unprinted ('0') fields. */
2708 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2709 JUMP_LABEL (l) = new_label;
2711 if ((GET_CODE (l) == LABEL_REF
2712 || GET_CODE (l) == INSN_LIST)
2713 && XEXP (l, 0) == old_label)
2715 XEXP (l, 0) = new_label;
2716 if (update_label_nuses)
2718 ++LABEL_NUSES (new_label);
2719 --LABEL_NUSES (old_label);
2721 return 0;
2724 return 0;
2727 /* Return nonzero when *BODY is equal to X or when X is directly
2728 referenced by *BODY; FOR_EACH_RTX then stops traversing and returns
2729 nonzero too. Otherwise FOR_EACH_RTX continues traversing *BODY. */
2731 static int
2732 rtx_referenced_p_1 (rtx *body, void *x)
2734 rtx y = (rtx) x;
2736 if (*body == NULL_RTX)
2737 return y == NULL_RTX;
2739 /* Return true if a label_ref *BODY refers to label Y. */
2740 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2741 return XEXP (*body, 0) == y;
2743 /* If *BODY is a reference to a pool constant, traverse the constant. */
2744 if (GET_CODE (*body) == SYMBOL_REF
2745 && CONSTANT_POOL_ADDRESS_P (*body))
2746 return rtx_referenced_p (y, get_pool_constant (*body));
2748 /* By default, compare the RTL expressions. */
2749 return rtx_equal_p (*body, y);
2752 /* Return true if X is referenced in BODY. */
2755 rtx_referenced_p (rtx x, rtx body)
2757 return for_each_rtx (&body, rtx_referenced_p_1, x);
2760 /* If INSN is a tablejump, return true and store the label (preceding the
2761 jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2763 bool
2764 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2766 rtx label, table;
2768 if (JUMP_P (insn)
2769 && (label = JUMP_LABEL (insn)) != NULL_RTX
2770 && (table = next_active_insn (label)) != NULL_RTX
2771 && JUMP_P (table)
2772 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2773 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2775 if (labelp)
2776 *labelp = label;
2777 if (tablep)
2778 *tablep = table;
2779 return true;
2781 return false;
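/* Illustrative shape of the insns recognized by tablejump_p above (a
   sketch, not from the original source; label numbers are arbitrary):

     (jump_insn ... (set (pc) ...))   ;; JUMP_LABEL points to L1
     (code_label L1)                  ;; stored in *LABELP
     (jump_insn (addr_vec [(label_ref L2) (label_ref L3)]))
                                      ;; stored in *TABLEP  */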
2784 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2785 constant that is not in the constant pool and not in the condition
2786 of an IF_THEN_ELSE. */
2788 static int
2789 computed_jump_p_1 (rtx x)
2791 enum rtx_code code = GET_CODE (x);
2792 int i, j;
2793 const char *fmt;
2795 switch (code)
2797 case LABEL_REF:
2798 case PC:
2799 return 0;
2801 case CONST:
2802 case CONST_INT:
2803 case CONST_DOUBLE:
2804 case CONST_VECTOR:
2805 case SYMBOL_REF:
2806 case REG:
2807 return 1;
2809 case MEM:
2810 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2811 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2813 case IF_THEN_ELSE:
2814 return (computed_jump_p_1 (XEXP (x, 1))
2815 || computed_jump_p_1 (XEXP (x, 2)));
2817 default:
2818 break;
2821 fmt = GET_RTX_FORMAT (code);
2822 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2824 if (fmt[i] == 'e'
2825 && computed_jump_p_1 (XEXP (x, i)))
2826 return 1;
2828 else if (fmt[i] == 'E')
2829 for (j = 0; j < XVECLEN (x, i); j++)
2830 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2831 return 1;
2834 return 0;
2837 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2839 Tablejumps and casesi insns are not considered indirect jumps;
2840 we can recognize them by a (use (label_ref)). */
2843 computed_jump_p (rtx insn)
2845 int i;
2846 if (JUMP_P (insn))
2848 rtx pat = PATTERN (insn);
2850 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2851 return 0;
2852 else if (GET_CODE (pat) == PARALLEL)
2854 int len = XVECLEN (pat, 0);
2855 int has_use_labelref = 0;
2857 for (i = len - 1; i >= 0; i--)
2858 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2859 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2860 == LABEL_REF))
2861 has_use_labelref = 1;
2863 if (! has_use_labelref)
2864 for (i = len - 1; i >= 0; i--)
2865 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2866 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2867 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2868 return 1;
2870 else if (GET_CODE (pat) == SET
2871 && SET_DEST (pat) == pc_rtx
2872 && computed_jump_p_1 (SET_SRC (pat)))
2873 return 1;
2875 return 0;
2878 /* Traverse X via depth-first search, calling F for each
2879 sub-expression (including X itself). F is also passed the DATA.
2880 If F returns -1, do not traverse sub-expressions, but continue
2881 traversing the rest of the tree. If F ever returns any other
2882 nonzero value, stop the traversal, and return the value returned
2883 by F. Otherwise, return 0. This function does not traverse inside
2884 tree structures that contain RTX_EXPRs, or into sub-expressions
2885 whose format code is `0' since it is not known whether or not those
2886 codes are actually RTL.
2888 This routine is very general, and could (should?) be used to
2889 implement many of the other routines in this file. */
2892 for_each_rtx (rtx *x, rtx_function f, void *data)
2894 int result;
2895 int length;
2896 const char *format;
2897 int i;
2899 /* Call F on X. */
2900 result = (*f) (x, data);
2901 if (result == -1)
2902 /* Do not traverse sub-expressions. */
2903 return 0;
2904 else if (result != 0)
2905 /* Stop the traversal. */
2906 return result;
2908 if (*x == NULL_RTX)
2909 /* There are no sub-expressions. */
2910 return 0;
2912 length = GET_RTX_LENGTH (GET_CODE (*x));
2913 format = GET_RTX_FORMAT (GET_CODE (*x));
2915 for (i = 0; i < length; ++i)
2917 switch (format[i])
2919 case 'e':
2920 result = for_each_rtx (&XEXP (*x, i), f, data);
2921 if (result != 0)
2922 return result;
2923 break;
2925 case 'V':
2926 case 'E':
2927 if (XVEC (*x, i) != 0)
2929 int j;
2930 for (j = 0; j < XVECLEN (*x, i); ++j)
2932 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2933 if (result != 0)
2934 return result;
2937 break;
2939 default:
2940 /* Nothing to do. */
2941 break;
2946 return 0;
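/* Usage sketch for for_each_rtx, not part of the original source: a
   callback that counts the MEM subexpressions of an rtx.  */
#if 0
static int
count_mems_1 (rtx *body, void *data)
{
  if (*body != NULL_RTX && MEM_P (*body))
    ++*(int *) data;
  return 0;  /* Zero means: keep traversing.  */
}

static int
count_mems (rtx x)
{
  int count = 0;
  for_each_rtx (&x, count_mems_1, &count);
  return count;
}
#endif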
2949 /* Searches X for any reference to REGNO, returning the rtx of the
2950 reference found if any. Otherwise, returns NULL_RTX. */
2953 regno_use_in (unsigned int regno, rtx x)
2955 const char *fmt;
2956 int i, j;
2957 rtx tem;
2959 if (REG_P (x) && REGNO (x) == regno)
2960 return x;
2962 fmt = GET_RTX_FORMAT (GET_CODE (x));
2963 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2965 if (fmt[i] == 'e')
2967 if ((tem = regno_use_in (regno, XEXP (x, i))))
2968 return tem;
2970 else if (fmt[i] == 'E')
2971 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2972 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2973 return tem;
2976 return NULL_RTX;
2979 /* Return a value indicating whether OP, an operand of a commutative
2980 operation, is preferred as the first or second operand. The higher
2981 the value, the stronger the preference for being the first operand.
2982 We use negative values to indicate a preference for the second operand
2983 and positive values for the first. */
2986 commutative_operand_precedence (rtx op)
2988 enum rtx_code code = GET_CODE (op);
2990 /* Constants always come second. Prefer "nice" constants. */
2991 if (code == CONST_INT)
2992 return -7;
2993 if (code == CONST_DOUBLE)
2994 return -6;
2995 op = avoid_constant_pool_reference (op);
2997 switch (GET_RTX_CLASS (code))
2999 case RTX_CONST_OBJ:
3000 if (code == CONST_INT)
3001 return -5;
3002 if (code == CONST_DOUBLE)
3003 return -4;
3004 return -3;
3006 case RTX_EXTRA:
3007 /* SUBREGs of objects should come second. */
3008 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3009 return -2;
3011 if (!CONSTANT_P (op))
3012 return 0;
3013 else
3014 /* As for RTX_CONST_OBJ. */
3015 return -3;
3017 case RTX_OBJ:
3018 /* Complex expressions should come first, so decrease the priority
3019 of objects. */
3020 return -1;
3022 case RTX_COMM_ARITH:
3023 /* Prefer operands that are themselves commutative to be first.
3024 This helps to make things linear. In particular,
3025 (and (and (reg) (reg)) (not (reg))) is canonical. */
3026 return 4;
3028 case RTX_BIN_ARITH:
3029 /* If only one operand is a binary expression, it will be the first
3030 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3031 is canonical, although it will usually be further simplified. */
3032 return 2;
3034 case RTX_UNARY:
3035 /* Then prefer NEG and NOT. */
3036 if (code == NEG || code == NOT)
3037 return 1;
3039 default:
3040 return 0;
3044 /* Return 1 iff it is necessary to swap the operands of a commutative
3045 operation in order to canonicalize the expression. */
3048 swap_commutative_operands_p (rtx x, rtx y)
3050 return (commutative_operand_precedence (x)
3051 < commutative_operand_precedence (y));
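/* Worked example (illustrative): in (plus (const_int 4) (reg)), the
   CONST_INT has precedence -7 and the REG, an RTX_OBJ, has precedence -1,
   so swap_commutative_operands_p returns 1 and canonicalization puts the
   register first: (plus (reg) (const_int 4)).  */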
3054 /* Return 1 if X is an autoincrement side effect and the register is
3055 not the stack pointer. */
3057 auto_inc_p (rtx x)
3059 switch (GET_CODE (x))
3061 case PRE_INC:
3062 case POST_INC:
3063 case PRE_DEC:
3064 case POST_DEC:
3065 case PRE_MODIFY:
3066 case POST_MODIFY:
3067 /* There are no REG_INC notes for SP. */
3068 if (XEXP (x, 0) != stack_pointer_rtx)
3069 return 1;
3070 default:
3071 break;
3073 return 0;
3076 /* Return 1 if the sequence of instructions beginning with FROM and up
3077 to and including TO is safe to move. If NEW_TO is non-NULL, and
3078 the sequence is not already safe to move, but can be easily
3079 extended to a sequence which is safe, then NEW_TO will point to the
3080 end of the extended sequence.
3082 For now, this function only checks that the region contains whole
3083 exception regions, but it could be extended to check additional
3084 conditions as well. */
3087 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
3089 int eh_region_count = 0;
3090 int past_to_p = 0;
3091 rtx r = from;
3093 /* By default, assume the end of the region will be what was
3094 suggested. */
3095 if (new_to)
3096 *new_to = to;
3098 while (r)
3100 if (NOTE_P (r))
3102 switch (NOTE_LINE_NUMBER (r))
3104 case NOTE_INSN_EH_REGION_BEG:
3105 ++eh_region_count;
3106 break;
3108 case NOTE_INSN_EH_REGION_END:
3109 if (eh_region_count == 0)
3110 /* This sequence of instructions contains the end of
3111 an exception region, but not the beginning. Moving
3112 it will cause chaos. */
3113 return 0;
3115 --eh_region_count;
3116 break;
3118 default:
3119 break;
3122 else if (past_to_p)
3123 /* If we've passed TO, and we see a non-note instruction, we
3124 can't extend the sequence to a movable one.
3125 return 0;
3127 if (r == to)
3129 if (!new_to)
3130 /* It's OK to move the sequence if there were matched sets of
3131 exception region notes. */
3132 return eh_region_count == 0;
3134 past_to_p = 1;
3137 /* It's OK to move the sequence if there were matched sets of
3138 exception region notes. */
3139 if (past_to_p && eh_region_count == 0)
3141 *new_to = r;
3142 return 1;
3145 /* Go to the next instruction. */
3146 r = NEXT_INSN (r);
3149 return 0;
3152 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3154 loc_mentioned_in_p (rtx *loc, rtx in)
3156 enum rtx_code code = GET_CODE (in);
3157 const char *fmt = GET_RTX_FORMAT (code);
3158 int i, j;
3160 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3162 if (loc == &in->u.fld[i].rtx)
3163 return 1;
3164 if (fmt[i] == 'e')
3166 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3167 return 1;
3169 else if (fmt[i] == 'E')
3170 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3171 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3172 return 1;
3174 return 0;
3177 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3178 and SUBREG_BYTE, return the bit offset where the subreg begins
3179 (counting from the least significant bit of the operand). */
3181 unsigned int
3182 subreg_lsb_1 (enum machine_mode outer_mode,
3183 enum machine_mode inner_mode,
3184 unsigned int subreg_byte)
3186 unsigned int bitpos;
3187 unsigned int byte;
3188 unsigned int word;
3190 /* A paradoxical subreg begins at bit position 0. */
3191 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3192 return 0;
3194 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3195 /* If the subreg crosses a word boundary, ensure that
3196 it also begins and ends on a word boundary. */
3197 if ((subreg_byte % UNITS_PER_WORD
3198 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3199 && (subreg_byte % UNITS_PER_WORD
3200 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))
3201 abort ();
3203 if (WORDS_BIG_ENDIAN)
3204 word = (GET_MODE_SIZE (inner_mode)
3205 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3206 else
3207 word = subreg_byte / UNITS_PER_WORD;
3208 bitpos = word * BITS_PER_WORD;
3210 if (BYTES_BIG_ENDIAN)
3211 byte = (GET_MODE_SIZE (inner_mode)
3212 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3213 else
3214 byte = subreg_byte % UNITS_PER_WORD;
3215 bitpos += byte * BITS_PER_UNIT;
3217 return bitpos;
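/* Worked example (illustrative): on a little-endian target with 32-bit
   words (UNITS_PER_WORD == 4, neither WORDS_BIG_ENDIAN nor
   BYTES_BIG_ENDIAN), (subreg:SI (reg:DI) 4) gives word = 4 / 4 = 1 and
   byte = 4 % 4 = 0, so subreg_lsb_1 returns 1 * BITS_PER_WORD = 32:
   the subreg is the high half of the DImode value.  */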
3220 /* Given a subreg X, return the bit offset where the subreg begins
3221 (counting from the least significant bit of the reg). */
3223 unsigned int
3224 subreg_lsb (rtx x)
3226 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3227 SUBREG_BYTE (x));
3230 /* This function returns the regno offset of a subreg expression.
3231 xregno - A regno of an inner hard subreg_reg (or what will become one).
3232 xmode - The mode of xregno.
3233 offset - The byte offset.
3234 ymode - The mode of a top level SUBREG (or what may become one).
3235 RETURN - The regno offset which would be used. */
3236 unsigned int
3237 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3238 unsigned int offset, enum machine_mode ymode)
3240 int nregs_xmode, nregs_ymode;
3241 int mode_multiple, nregs_multiple;
3242 int y_offset;
3244 if (xregno >= FIRST_PSEUDO_REGISTER)
3245 abort ();
3247 nregs_xmode = hard_regno_nregs[xregno][xmode];
3248 nregs_ymode = hard_regno_nregs[xregno][ymode];
3250 /* If this is a big-endian paradoxical subreg, which uses more actual
3251 hard registers than the original register, we must return a negative
3252 offset so that we find the proper highpart of the register. */
3253 if (offset == 0
3254 && nregs_ymode > nregs_xmode
3255 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3256 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3257 return nregs_xmode - nregs_ymode;
3259 if (offset == 0 || nregs_xmode == nregs_ymode)
3260 return 0;
3262 /* The size of YMODE must not be greater than the size of XMODE. */
3263 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3264 if (mode_multiple == 0)
3265 abort ();
3267 y_offset = offset / GET_MODE_SIZE (ymode);
3268 nregs_multiple = nregs_xmode / nregs_ymode;
3269 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
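/* Worked example (illustrative): assume a 32-bit target where DImode
   occupies two hard registers and SImode one.  For
   (subreg:SI (reg:DI 10) 4), nregs_xmode = 2, nregs_ymode = 1,
   mode_multiple = 8 / 4 = 2, y_offset = 4 / 4 = 1, nregs_multiple = 2,
   so the offset is (1 / (2 / 2)) * 1 = 1 and the subreg refers to hard
   register 11.  */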
3272 /* This function returns true when the offset is representable via
3273 subreg_offset in the given regno.
3274 xregno - A regno of an inner hard subreg_reg (or what will become one).
3275 xmode - The mode of xregno.
3276 offset - The byte offset.
3277 ymode - The mode of a top level SUBREG (or what may become one).
3278 RETURN - Whether the offset is representable. */
3279 bool
3280 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3281 unsigned int offset, enum machine_mode ymode)
3283 int nregs_xmode, nregs_ymode;
3284 int mode_multiple, nregs_multiple;
3285 int y_offset;
3287 if (xregno >= FIRST_PSEUDO_REGISTER)
3288 abort ();
3290 nregs_xmode = hard_regno_nregs[xregno][xmode];
3291 nregs_ymode = hard_regno_nregs[xregno][ymode];
3293 /* Paradoxical subregs are always valid. */
3294 if (offset == 0
3295 && nregs_ymode > nregs_xmode
3296 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3297 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3298 return true;
3300 /* Lowpart subregs are always valid. */
3301 if (offset == subreg_lowpart_offset (ymode, xmode))
3302 return true;
3304 #ifdef ENABLE_CHECKING
3305 /* This should always pass, otherwise we don't know how to verify the
3306 constraint. These conditions may be relaxed but subreg_offset would
3307 need to be redesigned. */
3308 if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3309 || GET_MODE_SIZE (ymode) % nregs_ymode
3310 || nregs_xmode % nregs_ymode)
3311 abort ();
3312 #endif
3314 /* The XMODE value can be seen as a vector of NREGS_XMODE
3315 values. The subreg must represent a lowpart of a given field.
3316 Compute which field it is. */
3317 offset -= subreg_lowpart_offset (ymode,
3318 mode_for_size (GET_MODE_BITSIZE (xmode)
3319 / nregs_xmode,
3320 MODE_INT, 0));
3322 /* The size of YMODE must not be greater than the size of XMODE. */
3323 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3324 if (mode_multiple == 0)
3325 abort ();
3327 y_offset = offset / GET_MODE_SIZE (ymode);
3328 nregs_multiple = nregs_xmode / nregs_ymode;
3329 #ifdef ENABLE_CHECKING
3330 if (offset % GET_MODE_SIZE (ymode)
3331 || mode_multiple % nregs_multiple)
3332 abort ();
3333 #endif
3334 return (!(y_offset % (mode_multiple / nregs_multiple)));
3337 /* Return the final regno that a subreg expression refers to. */
3338 unsigned int
3339 subreg_regno (rtx x)
3341 unsigned int ret;
3342 rtx subreg = SUBREG_REG (x);
3343 int regno = REGNO (subreg);
3345 ret = regno + subreg_regno_offset (regno,
3346 GET_MODE (subreg),
3347 SUBREG_BYTE (x),
3348 GET_MODE (x));
3349 return ret;
3352 struct parms_set_data
3354 int nregs;
3355 HARD_REG_SET regs;
3358 /* Helper function for noticing stores to parameter registers. */
3359 static void
3360 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3362 struct parms_set_data *d = data;
3363 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3364 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3366 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3367 d->nregs--;
3371 /* Look backward for the first parameter to be loaded.
3372 Do not skip BOUNDARY. */
3374 find_first_parameter_load (rtx call_insn, rtx boundary)
3376 struct parms_set_data parm;
3377 rtx p, before;
3379 /* Since different machines initialize their parameter registers
3380 in different orders, assume nothing. Collect the set of all
3381 parameter registers. */
3382 CLEAR_HARD_REG_SET (parm.regs);
3383 parm.nregs = 0;
3384 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3385 if (GET_CODE (XEXP (p, 0)) == USE
3386 && REG_P (XEXP (XEXP (p, 0), 0)))
3388 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3389 abort ();
3391 /* We only care about registers which can hold function
3392 arguments. */
3393 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3394 continue;
3396 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3397 parm.nregs++;
3399 before = call_insn;
3401 /* Search backward for the first set of a register in this set. */
3402 while (parm.nregs && before != boundary)
3404 before = PREV_INSN (before);
3406 /* It is possible that some loads got CSEed from one call to
3407 another. Stop in that case. */
3408 if (CALL_P (before))
3409 break;
3411 /* Our caller must either ensure that we will find all sets
3412 (in case the code has not been optimized yet), or guard against
3413 possible labels by setting BOUNDARY to the preceding
3414 CODE_LABEL. */
3415 if (LABEL_P (before))
3417 if (before != boundary)
3418 abort ();
3419 break;
3422 if (INSN_P (before))
3423 note_stores (PATTERN (before), parms_set, &parm);
3425 return before;
3428 /* Return true if we should avoid inserting code between INSN and the
3429 preceding call instruction. */
3431 bool
3432 keep_with_call_p (rtx insn)
3434 rtx set;
3436 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3438 if (REG_P (SET_DEST (set))
3439 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3440 && fixed_regs[REGNO (SET_DEST (set))]
3441 && general_operand (SET_SRC (set), VOIDmode))
3442 return true;
3443 if (REG_P (SET_SRC (set))
3444 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3445 && REG_P (SET_DEST (set))
3446 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3447 return true;
3448 /* There may be a stack pop just after the call and before the store
3449 of the return register. Search for the actual store when deciding
3450 if we can break or not. */
3451 if (SET_DEST (set) == stack_pointer_rtx)
3453 rtx i2 = next_nonnote_insn (insn);
3454 if (i2 && keep_with_call_p (i2))
3455 return true;
3458 return false;
3461 /* Return true when a store to register X can be hoisted to a place
3462 where the registers in LIVE are live (LIVE may be NULL). VAL is the
3463 destination whose value will be used. */
3465 static bool
3466 hoist_test_store (rtx x, rtx val, regset live)
3468 if (GET_CODE (x) == SCRATCH)
3469 return true;
3471 if (rtx_equal_p (x, val))
3472 return true;
3474 /* Allow a SUBREG of X if it does not write just part of a multireg pseudo
3475 (then all users would need updating to account for the hoisted store).
3476 The caller may represent that by specifying the whole SUBREG as VAL. */
3478 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3480 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3481 && GET_MODE_BITSIZE (GET_MODE (x)) <
3482 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3483 return false;
3484 return true;
3486 if (GET_CODE (x) == SUBREG)
3487 x = SUBREG_REG (x);
3489 /* Anything except a register store is not hoistable. This includes
3490 partial stores to registers. */
3492 if (!REG_P (x))
3493 return false;
3495 /* A pseudo register can always be replaced by another pseudo to avoid
3496 the side effect; for a hard register we must ensure that it is dead.
3497 Eventually we may want to add code that tries to turn pseudos into hard
3498 registers, but that is unlikely to be useful. */
3500 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3502 int regno = REGNO (x);
3503 int n = hard_regno_nregs[regno][GET_MODE (x)];
3505 if (!live)
3506 return false;
3507 if (REGNO_REG_SET_P (live, regno))
3508 return false;
3509 while (--n > 0)
3510 if (REGNO_REG_SET_P (live, regno + n))
3511 return false;
3513 return true;
3517 /* Return true if INSN can be hoisted to a place with LIVE hard registers
3518 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3519 and used by the hoisting pass. */
3521 bool
3522 can_hoist_insn_p (rtx insn, rtx val, regset live)
3524 rtx pat = PATTERN (insn);
3525 int i;
3527 /* It is probably not worth the complexity to handle multiple-set
3528 stores. */
3529 if (!single_set (insn))
3530 return false;
3531 /* We could move a CALL_INSN, but we would need to check that all
3532 caller-clobbered regs are dead, so give up. */
3533 if (CALL_P (insn))
3534 return false;
3535 /* In the future we may handle hoisting of libcall sequences, but
3536 for now, give up. */
3537 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3538 return false;
3539 switch (GET_CODE (pat))
3541 case SET:
3542 if (!hoist_test_store (SET_DEST (pat), val, live))
3543 return false;
3544 break;
3545 case USE:
3546 /* USEs have tricky semantics, so do not move them. */
3547 return false;
3548 break;
3549 case CLOBBER:
3550 if (!hoist_test_store (XEXP (pat, 0), val, live))
3551 return false;
3552 break;
3553 case PARALLEL:
3554 for (i = 0; i < XVECLEN (pat, 0); i++)
3556 rtx x = XVECEXP (pat, 0, i);
3557 switch (GET_CODE (x))
3559 case SET:
3560 if (!hoist_test_store (SET_DEST (x), val, live))
3561 return false;
3562 break;
3563 case USE:
3564 /* We would need to fix callers to really ensure availability
3565 of all values the insn uses, but for now it is safe to prohibit
3566 hoisting of any insn having such hidden uses. */
3567 return false;
3568 break;
3569 case CLOBBER:
3570 if (!hoist_test_store (SET_DEST (x), val, live))
3571 return false;
3572 break;
3573 default:
3574 break;
3577 break;
3578 default:
3579 abort ();
3581 return true;
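/* Illustrative examples for can_hoist_insn_p, not from the original
   source: an insn whose pattern is
     (parallel [(set (reg:SI 100) ...) (clobber (scratch:SI))])
   is hoistable when VAL is (reg:SI 100), since hoist_test_store accepts
   both the SET destination and the SCRATCH clobber, whereas any insn
   containing a USE is always rejected.  */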
3584 /* Update a store after hoisting: replace all stores to pseudo registers
3585 with new ones to avoid clobbering values, except for the store to VAL,
3586 which will be updated to NEW. */
3588 static void
3589 hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
3591 rtx x = *xp;
3593 if (GET_CODE (x) == SCRATCH)
3594 return;
3596 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3597 validate_change (insn, xp,
3598 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3599 SUBREG_BYTE (x)), 1);
3600 if (rtx_equal_p (x, val))
3602 validate_change (insn, xp, new, 1);
3603 return;
3605 if (GET_CODE (x) == SUBREG)
3607 xp = &SUBREG_REG (x);
3608 x = *xp;
3611 if (!REG_P (x))
3612 abort ();
3614 /* We've verified that hard registers are dead, so we may keep the side
3615 effect. Otherwise replace it with a new pseudo. */
3616 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3617 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3618 REG_NOTES (insn)
3619 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3622 /* Create a copy of INSN after AFTER, replacing the store to VAL with a
3623 store to NEW and every other side effect on a pseudo with a new pseudo. */
3626 hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
3628 rtx pat;
3629 int i;
3630 rtx note;
3632 insn = emit_copy_of_insn_after (insn, after);
3633 pat = PATTERN (insn);
3635 /* Remove REG_UNUSED notes as we will re-emit them. */
3636 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3637 remove_note (insn, note);
3639 /* To get this working, callers must also move everything referenced
3640 by REG_EQUAL/REG_EQUIV notes. Let's remove them instead; it is probably
3641 easier. */
3642 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3643 remove_note (insn, note);
3644 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3645 remove_note (insn, note);
3647 /* Remove REG_DEAD notes as they might not be valid anymore in case
3648 we create a redundancy. */
3649 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3650 remove_note (insn, note);
3651 switch (GET_CODE (pat))
3653 case SET:
3654 hoist_update_store (insn, &SET_DEST (pat), val, new);
3655 break;
3656 case USE:
3657 break;
3658 case CLOBBER:
3659 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3660 break;
3661 case PARALLEL:
3662 for (i = 0; i < XVECLEN (pat, 0); i++)
3664 rtx x = XVECEXP (pat, 0, i);
3665 switch (GET_CODE (x))
3667 case SET:
3668 hoist_update_store (insn, &SET_DEST (x), val, new);
3669 break;
3670 case USE:
3671 break;
3672 case CLOBBER:
3673 hoist_update_store (insn, &SET_DEST (x), val, new);
3674 break;
3675 default:
3676 break;
3679 break;
3680 default:
3681 abort ();
3683 if (!apply_change_group ())
3684 abort ();
3686 return insn;
3690 hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
3692 rtx new_insn;
3694 /* We cannot insert instructions on an abnormal critical edge.
3695 It will be easier to find the culprit if we die now. */
3696 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3697 abort ();
3699 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3700 things. We also emit CALL_INSNs and friends. */
3701 if (e->insns.r == NULL_RTX)
3703 start_sequence ();
3704 emit_note (NOTE_INSN_DELETED);
3706 else
3707 push_to_sequence (e->insns.r);
3709 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3711 e->insns.r = get_insns ();
3712 end_sequence ();
3713 return new_insn;
3716 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3717 to non-complex jumps. That is, direct unconditional, conditional,
3718 and tablejumps, but not computed jumps or returns. It also does
3719 not apply to the fallthru case of a conditional jump. */
3721 bool
3722 label_is_jump_target_p (rtx label, rtx jump_insn)
3724 rtx tmp = JUMP_LABEL (jump_insn);
3726 if (label == tmp)
3727 return true;
3729 if (tablejump_p (jump_insn, NULL, &tmp))
3731 rtvec vec = XVEC (PATTERN (tmp),
3732 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3733 int i, veclen = GET_NUM_ELEM (vec);
3735 for (i = 0; i < veclen; ++i)
3736 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3737 return true;
3740 return false;
3744 /* Return an estimate of the cost of computing rtx X.
3745 One use is in cse, to decide which expression to keep in the hash table.
3746 Another is in rtl generation, to pick the cheapest way to multiply.
3747 Other uses like the latter are expected in the future. */
3750 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3752 int i, j;
3753 enum rtx_code code;
3754 const char *fmt;
3755 int total;
3757 if (x == 0)
3758 return 0;
3760 /* Compute the default costs of certain things.
3761 Note that targetm.rtx_costs can override the defaults. */
3763 code = GET_CODE (x);
3764 switch (code)
3766 case MULT:
3767 total = COSTS_N_INSNS (5);
3768 break;
3769 case DIV:
3770 case UDIV:
3771 case MOD:
3772 case UMOD:
3773 total = COSTS_N_INSNS (7);
3774 break;
3775 case USE:
3776 /* Used in loop.c and combine.c as a marker. */
3777 total = 0;
3778 break;
3779 default:
3780 total = COSTS_N_INSNS (1);
3783 switch (code)
3785 case REG:
3786 return 0;
3788 case SUBREG:
3789 /* If we can't tie these modes, make this expensive. The larger
3790 the mode, the more expensive it is. */
3791 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3792 return COSTS_N_INSNS (2
3793 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3794 break;
3796 default:
3797 if (targetm.rtx_costs (x, code, outer_code, &total))
3798 return total;
3799 break;
3802 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3803 which is already in total. */
3805 fmt = GET_RTX_FORMAT (code);
3806 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3807 if (fmt[i] == 'e')
3808 total += rtx_cost (XEXP (x, i), code);
3809 else if (fmt[i] == 'E')
3810 for (j = 0; j < XVECLEN (x, i); j++)
3811 total += rtx_cost (XVECEXP (x, i, j), code);
3813 return total;
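/* Worked example (illustrative): if the target's rtx_costs hook declines
   to override the defaults, (mult:SI (reg:SI 1) (reg:SI 2)) costs
   COSTS_N_INSNS (5): each REG contributes 0 and the MULT supplies the
   default.  The same expression with PLUS would cost COSTS_N_INSNS (1).  */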
3816 /* Return the cost of address expression X.
3817 X is expected to be a properly formed address reference. */
3820 address_cost (rtx x, enum machine_mode mode)
3822 /* We may be asked for the cost of various unusual addresses, such as the
3823 operands of a push instruction. It is not worthwhile to complicate the
3824 target hook to handle such cases. */
3826 if (!memory_address_p (mode, x))
3827 return 1000;
3829 return targetm.address_cost (x);
3832 /* If the target doesn't override, compute the cost as with arithmetic. */
3835 default_address_cost (rtx x)
3837 return rtx_cost (x, MEM);
3841 unsigned HOST_WIDE_INT
3842 nonzero_bits (rtx x, enum machine_mode mode)
3844 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3847 unsigned int
3848 num_sign_bit_copies (rtx x, enum machine_mode mode)
3850 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3853 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3854 It avoids exponential behavior in nonzero_bits1 when X has
3855 identical subexpressions on the first or the second level. */
3857 static unsigned HOST_WIDE_INT
3858 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3859 enum machine_mode known_mode,
3860 unsigned HOST_WIDE_INT known_ret)
3862 if (x == known_x && mode == known_mode)
3863 return known_ret;
3865 /* Try to find identical subexpressions. If found call
3866 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3867 precomputed value for the subexpression as KNOWN_RET. */
3869 if (ARITHMETIC_P (x))
3871 rtx x0 = XEXP (x, 0);
3872 rtx x1 = XEXP (x, 1);
3874 /* Check the first level. */
3875 if (x0 == x1)
3876 return nonzero_bits1 (x, mode, x0, mode,
3877 cached_nonzero_bits (x0, mode, known_x,
3878 known_mode, known_ret));
3880 /* Check the second level. */
3881 if (ARITHMETIC_P (x0)
3882 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3883 return nonzero_bits1 (x, mode, x1, mode,
3884 cached_nonzero_bits (x1, mode, known_x,
3885 known_mode, known_ret));
3887 if (ARITHMETIC_P (x1)
3888 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3889 return nonzero_bits1 (x, mode, x0, mode,
3890 cached_nonzero_bits (x0, mode, known_x,
3891 known_mode, known_ret));
3894 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3897 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3898 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3899 is less useful. We can't allow both, because that results in exponential
3900 run time recursion. There is a nullstone testcase that triggered
3901 this. This macro avoids accidental uses of num_sign_bit_copies. */
3902 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3904 /* Given an expression, X, compute which bits in X can be nonzero.
3905 We don't care about bits outside of those defined in MODE.
3907 For most X this is simply GET_MODE_MASK (MODE), but if X is
3908 an arithmetic operation, we can do better. */
3910 static unsigned HOST_WIDE_INT
3911 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3912 enum machine_mode known_mode,
3913 unsigned HOST_WIDE_INT known_ret)
3915 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3916 unsigned HOST_WIDE_INT inner_nz;
3917 enum rtx_code code;
3918 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3920 /* For floating-point values, assume all bits are needed. */
3921 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3922 return nonzero;
3924 /* If X is wider than MODE, use its mode instead. */
3925 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3927 mode = GET_MODE (x);
3928 nonzero = GET_MODE_MASK (mode);
3929 mode_width = GET_MODE_BITSIZE (mode);
3932 if (mode_width > HOST_BITS_PER_WIDE_INT)
3933 /* Our only callers in this case look for single bit values. So
3934 just return the mode mask. Those tests will then be false. */
3935 return nonzero;
3937 #ifndef WORD_REGISTER_OPERATIONS
3938 /* If MODE is wider than X, but both are a single word for both the host
3939 and target machines, we can compute this from which bits of the
3940 object might be nonzero in its own mode, taking into account the fact
3941 that on many CISC machines, accessing an object in a wider mode
3942 causes the high-order bits to become undefined. So they are
3943 not known to be zero. */
3945 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3946 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3947 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3948 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3950 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3951 known_x, known_mode, known_ret);
3952 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3953 return nonzero;
3955 #endif
3957 code = GET_CODE (x);
3958 switch (code)
3960 case REG:
3961 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3962 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3963 all the bits above ptr_mode are known to be zero. */
3964 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3965 && REG_POINTER (x))
3966 nonzero &= GET_MODE_MASK (ptr_mode);
3967 #endif
3969 /* Include declared information about alignment of pointers. */
3970 /* ??? We don't properly preserve REG_POINTER changes across
3971 pointer-to-integer casts, so we can't trust it except for
3972 things that we know must be pointers. See execute/960116-1.c. */
3973 if ((x == stack_pointer_rtx
3974 || x == frame_pointer_rtx
3975 || x == arg_pointer_rtx)
3976 && REGNO_POINTER_ALIGN (REGNO (x)))
3978 unsigned HOST_WIDE_INT alignment
3979 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3981 #ifdef PUSH_ROUNDING
3982 /* If PUSH_ROUNDING is defined, it is possible for the
3983 stack to be momentarily aligned only to that amount,
3984 so we pick the least alignment. */
3985 if (x == stack_pointer_rtx && PUSH_ARGS)
3986 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3987 alignment);
3988 #endif
3990 nonzero &= ~(alignment - 1);
3994 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3995 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3996 known_mode, known_ret,
3997 &nonzero_for_hook);
3999 if (new)
4000 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
4001 known_mode, known_ret);
4003 return nonzero_for_hook;
4006 case CONST_INT:
4007 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4008 /* If X is negative in MODE, sign-extend the value. */
4009 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
4010 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
4011 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
4012 #endif
4014 return INTVAL (x);
4016 case MEM:
4017 #ifdef LOAD_EXTEND_OP
4018 /* In many, if not most, RISC machines, reading a byte from memory
4019 zeros the rest of the register. Noticing that fact saves a lot
4020 of extra zero-extends. */
4021 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4022 nonzero &= GET_MODE_MASK (GET_MODE (x));
4023 #endif
4024 break;
4026 case EQ: case NE:
4027 case UNEQ: case LTGT:
4028 case GT: case GTU: case UNGT:
4029 case LT: case LTU: case UNLT:
4030 case GE: case GEU: case UNGE:
4031 case LE: case LEU: case UNLE:
4032 case UNORDERED: case ORDERED:
4034 /* If this produces an integer result, we know which bits are set.
4035 Code here used to clear bits outside the mode of X, but that is
4036 now done above. */
4038 if (GET_MODE_CLASS (mode) == MODE_INT
4039 && mode_width <= HOST_BITS_PER_WIDE_INT)
4040 nonzero = STORE_FLAG_VALUE;
4041 break;
4043 case NEG:
4044 #if 0
4045 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4046 and num_sign_bit_copies. */
4047 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4048 == GET_MODE_BITSIZE (GET_MODE (x)))
4049 nonzero = 1;
4050 #endif
4052 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
4053 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4054 break;
4056 case ABS:
4057 #if 0
4058 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4059 and num_sign_bit_copies. */
4060 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4061 == GET_MODE_BITSIZE (GET_MODE (x)))
4062 nonzero = 1;
4063 #endif
4064 break;
4066 case TRUNCATE:
4067 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4068 known_x, known_mode, known_ret)
4069 & GET_MODE_MASK (mode));
4070 break;
4072 case ZERO_EXTEND:
4073 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4074 known_x, known_mode, known_ret);
4075 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4076 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4077 break;
4079 case SIGN_EXTEND:
4080 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4081 Otherwise, show that all the bits in the outer mode but not in the
4082 inner one may be nonzero. */
4083 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4084 known_x, known_mode, known_ret);
4085 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4087 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4088 if (inner_nz
4089 & (((HOST_WIDE_INT) 1
4090 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
4091 inner_nz |= (GET_MODE_MASK (mode)
4092 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4095 nonzero &= inner_nz;
4096 break;
4098 case AND:
4099 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4100 known_x, known_mode, known_ret)
4101 & cached_nonzero_bits (XEXP (x, 1), mode,
4102 known_x, known_mode, known_ret);
4103 break;
4105 case XOR: case IOR:
4106 case UMIN: case UMAX: case SMIN: case SMAX:
4108 unsigned HOST_WIDE_INT nonzero0 =
4109 cached_nonzero_bits (XEXP (x, 0), mode,
4110 known_x, known_mode, known_ret);
4112 /* Don't call nonzero_bits a second time if it cannot change
4113 anything. */
4114 if ((nonzero & nonzero0) != nonzero)
4115 nonzero &= nonzero0
4116 | cached_nonzero_bits (XEXP (x, 1), mode,
4117 known_x, known_mode, known_ret);
4119 break;
4121 case PLUS: case MINUS:
4122 case MULT:
4123 case DIV: case UDIV:
4124 case MOD: case UMOD:
4125 /* We can apply the rules of arithmetic to compute the number of
4126 high- and low-order zero bits of these operations. We start by
4127 computing the width (position of the highest-order nonzero bit)
4128 and the number of low-order zero bits for each value. */
4130 unsigned HOST_WIDE_INT nz0 =
4131 cached_nonzero_bits (XEXP (x, 0), mode,
4132 known_x, known_mode, known_ret);
4133 unsigned HOST_WIDE_INT nz1 =
4134 cached_nonzero_bits (XEXP (x, 1), mode,
4135 known_x, known_mode, known_ret);
4136 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
4137 int width0 = floor_log2 (nz0) + 1;
4138 int width1 = floor_log2 (nz1) + 1;
4139 int low0 = floor_log2 (nz0 & -nz0);
4140 int low1 = floor_log2 (nz1 & -nz1);
4141 HOST_WIDE_INT op0_maybe_minusp
4142 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
4143 HOST_WIDE_INT op1_maybe_minusp
4144 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
4145 unsigned int result_width = mode_width;
4146 int result_low = 0;
4148 switch (code)
4150 case PLUS:
4151 result_width = MAX (width0, width1) + 1;
4152 result_low = MIN (low0, low1);
4153 break;
4154 case MINUS:
4155 result_low = MIN (low0, low1);
4156 break;
4157 case MULT:
4158 result_width = width0 + width1;
4159 result_low = low0 + low1;
4160 break;
4161 case DIV:
4162 if (width1 == 0)
4163 break;
4164 if (! op0_maybe_minusp && ! op1_maybe_minusp)
4165 result_width = width0;
4166 break;
4167 case UDIV:
4168 if (width1 == 0)
4169 break;
4170 result_width = width0;
4171 break;
4172 case MOD:
4173 if (width1 == 0)
4174 break;
4175 if (! op0_maybe_minusp && ! op1_maybe_minusp)
4176 result_width = MIN (width0, width1);
4177 result_low = MIN (low0, low1);
4178 break;
4179 case UMOD:
4180 if (width1 == 0)
4181 break;
4182 result_width = MIN (width0, width1);
4183 result_low = MIN (low0, low1);
4184 break;
4185 default:
4186 abort ();
4189 if (result_width < mode_width)
4190 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
4192 if (result_low > 0)
4193 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
4195 #ifdef POINTERS_EXTEND_UNSIGNED
4196 /* If pointers extend unsigned and this is an addition or subtraction
4197 to a pointer in Pmode, all the bits above ptr_mode are known to be
4198 zero. */
4199 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
4200 && (code == PLUS || code == MINUS)
4201 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4202 nonzero &= GET_MODE_MASK (ptr_mode);
4203 #endif
4205 break;
4207 case ZERO_EXTRACT:
4208 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4209 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4210 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4211 break;
4213 case SUBREG:
4214 /* If this is a SUBREG formed for a promoted variable that has
4215 been zero-extended, we know that at least the high-order bits
4216 are zero, though others might be too. */
4218 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4219 nonzero = GET_MODE_MASK (GET_MODE (x))
4220 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4221 known_x, known_mode, known_ret);
4223 /* If the inner mode is a single word for both the host and target
4224 machines, we can compute this from which bits of the inner
4225 object might be nonzero. */
4226 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
4227 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4228 <= HOST_BITS_PER_WIDE_INT))
4230 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4231 known_x, known_mode, known_ret);
4233 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4234 /* If this is a typical RISC machine, we only have to worry
4235 about the way loads are extended. */
4236 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4237 ? (((nonzero
4238 & (((unsigned HOST_WIDE_INT) 1
4239 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
4240 != 0))
4241 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
4242 || !MEM_P (SUBREG_REG (x)))
4243 #endif
4245 /* On many CISC machines, accessing an object in a wider mode
4246 causes the high-order bits to become undefined. So they are
4247 not known to be zero. */
4248 if (GET_MODE_SIZE (GET_MODE (x))
4249 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4250 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4251 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
4254 break;
4256 case ASHIFTRT:
4257 case LSHIFTRT:
4258 case ASHIFT:
4259 case ROTATE:
4260 /* The nonzero bits are in two classes: any bits within MODE
4261 that aren't in GET_MODE (x) are always significant. The rest of the
4262 nonzero bits are those that are significant in the operand of
4263 the shift when shifted the appropriate number of bits. This
4264 shows that high-order bits are cleared by the right shift and
4265 low-order bits by left shifts. */
4266 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4267 && INTVAL (XEXP (x, 1)) >= 0
4268 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4270 enum machine_mode inner_mode = GET_MODE (x);
4271 unsigned int width = GET_MODE_BITSIZE (inner_mode);
4272 int count = INTVAL (XEXP (x, 1));
4273 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4274 unsigned HOST_WIDE_INT op_nonzero =
4275 cached_nonzero_bits (XEXP (x, 0), mode,
4276 known_x, known_mode, known_ret);
4277 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4278 unsigned HOST_WIDE_INT outer = 0;
4280 if (mode_width > width)
4281 outer = (op_nonzero & nonzero & ~mode_mask);
4283 if (code == LSHIFTRT)
4284 inner >>= count;
4285 else if (code == ASHIFTRT)
4287 inner >>= count;
4289 /* If the sign bit may have been nonzero before the shift, we
4290 need to mark all the places it could have been copied to
4291 by the shift as possibly nonzero. */
4292 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4293 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4295 else if (code == ASHIFT)
4296 inner <<= count;
4297 else
4298 inner = ((inner << (count % width)
4299 | (inner >> (width - (count % width)))) & mode_mask);
4301 nonzero &= (outer | inner);
4303 break;
4305 case FFS:
4306 case POPCOUNT:
4307 /* This is at most the number of bits in the mode. */
4308 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4309 break;
4311 case CLZ:
4312 /* If CLZ has a known value at zero, then the nonzero bits are
4313 that value, plus the number of bits in the mode minus one. */
4314 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4315 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4316 else
4317 nonzero = -1;
4318 break;
4320 case CTZ:
4321 /* If CTZ has a known value at zero, then the nonzero bits are
4322 that value, plus the number of bits in the mode minus one. */
4323 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4324 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4325 else
4326 nonzero = -1;
4327 break;
4329 case PARITY:
4330 nonzero = 1;
4331 break;
4333 case IF_THEN_ELSE:
4335 unsigned HOST_WIDE_INT nonzero_true =
4336 cached_nonzero_bits (XEXP (x, 1), mode,
4337 known_x, known_mode, known_ret);
4339 /* Don't call nonzero_bits a second time if it cannot change
4340 anything. */
4341 if ((nonzero & nonzero_true) != nonzero)
4342 nonzero &= nonzero_true
4343 | cached_nonzero_bits (XEXP (x, 2), mode,
4344 known_x, known_mode, known_ret);
4346 break;
4348 default:
4349 break;
4352 return nonzero;
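/* Worked example for the PLUS case above (illustrative): if nz0 == 0xf
   (width0 = 4) and nz1 == 0x3 (width1 = 2), then result_width
   = MAX (4, 2) + 1 = 5, so the sum is known to fit in the low five bits
   and nonzero is masked down to 0x1f.  */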
4355 /* See the macro definition above. */
4356 #undef cached_num_sign_bit_copies
4359 /* The function cached_num_sign_bit_copies is a wrapper around
4360 num_sign_bit_copies1. It avoids exponential behavior in
4361 num_sign_bit_copies1 when X has identical subexpressions on the
4362 first or the second level. */
4364 static unsigned int
4365 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
4366 enum machine_mode known_mode,
4367 unsigned int known_ret)
4369 if (x == known_x && mode == known_mode)
4370 return known_ret;
4372 /* Try to find identical subexpressions. If found call
4373 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4374 the precomputed value for the subexpression as KNOWN_RET. */
4376 if (ARITHMETIC_P (x))
4378 rtx x0 = XEXP (x, 0);
4379 rtx x1 = XEXP (x, 1);
4381 /* Check the first level. */
4382 if (x0 == x1)
4383 return
4384 num_sign_bit_copies1 (x, mode, x0, mode,
4385 cached_num_sign_bit_copies (x0, mode, known_x,
4386 known_mode,
4387 known_ret));
4389 /* Check the second level. */
4390 if (ARITHMETIC_P (x0)
4391 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4392 return
4393 num_sign_bit_copies1 (x, mode, x1, mode,
4394 cached_num_sign_bit_copies (x1, mode, known_x,
4395 known_mode,
4396 known_ret));
4398 if (ARITHMETIC_P (x1)
4399 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4400 return
4401 num_sign_bit_copies1 (x, mode, x0, mode,
4402 cached_num_sign_bit_copies (x0, mode, known_x,
4403 known_mode,
4404 known_ret));
4407 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
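/* Worked example for the CONST_INT case below (illustrative): in a
   32-bit mode, (const_int -1) has its sign bit set, so NONZERO becomes 0
   after complementing and all 32 bits count as sign-bit copies; for
   (const_int 3), floor_log2 (3) == 1, giving 32 - 1 - 1 = 30 copies.  */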

/* Return the number of bits at the high-order end of X that are known to
   be equal to the sign bit.  X will be used in mode MODE; if MODE is
   VOIDmode, X will be used in its own mode.  The returned value will always
   be between 1 and the number of bits in MODE.  */

static unsigned int
num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
                      enum machine_mode known_mode,
                      unsigned int known_ret)
{
  enum rtx_code code = GET_CODE (x);
  unsigned int bitwidth = GET_MODE_BITSIZE (mode);
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
    return 1;

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
    {
      num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
                                         known_x, known_mode, known_ret);
      return MAX (1,
                  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
    }
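
  /* Worked example (editorial annotation): a DImode value with 40 sign
     bit copies, viewed in SImode, keeps 40 - (64 - 32) = 8 of them.  */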

  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* If this machine does not do all register operations on the entire
         register and MODE is wider than the mode of X, we can say nothing
         at all about the high-order bits.  */
      return 1;
#else
      /* Likewise on machines that do, if the mode of the object is smaller
         than a word and loads of that size don't sign extend, we can say
         nothing about the high order bits.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP
          && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif
          )
        return 1;
#endif
    }

  switch (code)
    {
    case REG:

#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend signed and this is a pointer in Pmode, say that
         all the bits above ptr_mode are known to be sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
          && REG_POINTER (x))
        return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
#endif

      {
        unsigned int copies_for_hook = 1, copies = 1;
        rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
                                                     known_mode, known_ret,
                                                     &copies_for_hook);

        if (new)
          copies = cached_num_sign_bit_copies (new, mode, known_x,
                                               known_mode, known_ret);

        if (copies > 1 || copies_for_hook > 1)
          return MAX (copies, copies_for_hook);

        /* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
      }
      break;

    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
        return MAX (1, ((int) bitwidth
                        - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
#endif
      break;

    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
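
      /* Worked example (editorial annotation): (const_int -4) in a
         32-bit mode masks to 0xfffffffc; the sign bit is set, so the
         complement is 3, and 32 - floor_log2 (3) - 1 = 30 matches the
         30 leading one bits of -4.  */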

    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
         and we are looking at it in a wider mode, we know that at least the
         high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                             known_x, known_mode, known_ret);
          return MAX ((int) bitwidth
                      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
                      num0);
        }

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
                                             known_x, known_mode, known_ret);
          return MAX (1, (num0
                          - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
                                   - bitwidth)));
        }

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
         affect the entire register, just look inside.  Note that we are
         passing MODE to the recursive call, so the number of sign bit copies
         will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
         reload for the inner part, it may be loaded from the stack, and
         then we lose all sign bit copies that existed before the store
         to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
           > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
          && MEM_P (SUBREG_REG (x)))
        return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                           known_x, known_mode, known_ret);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                            known_x, known_mode, known_ret));
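
      /* Worked example (editorial annotation): (sign_extend:SI (reg:QI ...))
         yields 32 - 8 + copies of the inner value, i.e. at least 25,
         since the QImode operand always has at least one copy.  */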

    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                         known_x, known_mode, known_ret);
      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
                                    - bitwidth)));

    case NOT:
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);

    case ROTATE: case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
         of sign bit copies, we can just subtract that amount from the
         number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < (int) bitwidth)
        {
          num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                             known_x, known_mode, known_ret);
          return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
                                 : (int) bitwidth - INTVAL (XEXP (x, 1))));
        }
      break;
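
      /* Worked example (editorial annotation): rotating a value that
         has 10 sign bit copies left by 3 leaves 10 - 3 = 7; ROTATERT
         by N is treated as ROTATE by BITWIDTH - N, hence the
         conversion above.  */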

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
         is known to be positive, the number of sign bit copies is the
         same as that of the input.  Finally, if the input has just one bit
         that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
        return bitwidth;

      if (num0 > 1
          && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
        num0--;

      return num0;
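
      /* Worked example (editorial annotation): if nonzero_bits is 0x7f
         in a 32-bit mode, the operand is 0..127 and has at least 25
         sign bit copies; the sign bit is not in the mask, so no copy
         is lost: -127 = 0xffffff81 still has 25 leading one bits.  */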

    case IOR: case AND: case XOR:
    case SMIN: case SMAX: case UMIN: case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
         MIN and MAX operations always return one of the operands.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case PLUS: case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
         if we are subtracting 1 from a positive number, there will not
         be such a carry.  Furthermore, if the positive number is known to
         be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
          && bitwidth <= HOST_BITS_PER_WIDE_INT)
        {
          nonzero = nonzero_bits (XEXP (x, 0), mode);
          if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
            return (nonzero == 1 || nonzero == 0 ? bitwidth
                    : bitwidth - floor_log2 (nonzero) - 1);
        }

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      result = MAX (1, MIN (num0, num1) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is an addition or subtraction
         to a pointer in Pmode, all the bits above ptr_mode are known to be
         sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && (code == PLUS || code == MINUS)
          && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
        result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
                             - GET_MODE_BITSIZE (ptr_mode) + 1),
                      result);
#endif
      return result;
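
      /* Worked example (editorial annotation): adding two 32-bit values
         that each have 10 sign bit copies, i.e. lie in [-2^22, 2^22 - 1],
         gives a sum in [-2^23, 2^23 - 2], which guarantees only
         MIN (10, 10) - 1 = 9 copies because of the possible carry.  */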

    case MULT:
      /* The number of bits of the product is the sum of the number of
         bits of both terms.  However, unless one of the terms is known
         to be positive, we must allow for an additional bit since negating
         a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (((nonzero_bits (XEXP (x, 0), mode)
                    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
                  && ((nonzero_bits (XEXP (x, 1), mode)
                       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
        result--;

      return MAX (1, result);
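
      /* Worked example (editorial annotation): num0 = num1 = 20 in a
         32-bit mode gives result = 32 - 12 - 12 = 8; if both operands
         may be negative the product can reach +2^24, which has only 7
         leading zero bits, hence the extra decrement.  */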

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
         has the high bit set, we know nothing about the number of sign
         bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
                & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        return 1;
      else
        return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  */
      return cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
         the case where the divisor is negative, in which case the
         quotient is negated and we can lose one sign bit copy.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case MOD:
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
         sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;
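
      /* Worked example (editorial annotation): (ashiftrt:SI x (const_int 5))
         where x has 3 sign bit copies yields MIN (32, 3 + 5) = 8.  */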

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) < 0
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
        return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ: case NE: case GE: case GT: case LE: case LT:
    case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* A comparison yields either STORE_FLAG_VALUE or zero.  If that
         value is negative, take its 1's complement and remask.  Then
         see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
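
      /* Worked example (editorial annotation): with STORE_FLAG_VALUE == 1
         the result is 32 - floor_log2 (1) - 1 = 31 copies in a 32-bit
         mode; with STORE_FLAG_VALUE == -1 the complement is 0 and the
         whole bitwidth is returned.  */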

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return 1.  */

  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}

/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}
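
/* Usage sketch (editorial annotation; DEST_REG and SRC_PLUS are
   hypothetical operands): for a pattern built as
       gen_rtx_SET (VOIDmode, dest_reg, src_plus)
   the cost is rtx_cost of SRC_PLUS, with a floor of COSTS_N_INSNS (1);
   a PARALLEL containing more than one SET returns 0 (unknown cost).  */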