/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "basic-block.h"
#include "real.h"
#include "regs.h"
#include "function.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static void insn_dependent_p_1 (rtx, rtx, void *);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);
static bool hoist_test_store (rtx, rtx, regset);
static void hoist_update_store (rtx, rtx *, rtx, rtx);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));

    case QUEUED:
      return 1;

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
          || RTX_UNCHANGING_P (x))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
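
/* Editor's illustrative sketch, not part of the original file: a typical
   use of rtx_unstable_p.  A pass that wants to reuse the value of an
   expression at a different program point must first check stability.
   The helper name below is hypothetical.  */
#if 0
static int
example_safe_to_reuse_p (rtx x)
{
  /* An unstable rtx (e.g. a MEM that is not `unchanging', or an
     ordinary register) may hold a different value elsewhere, so only
     stable rtxes can be naively reused at another location.  */
  return ! rtx_unstable_p (x);
}
#endif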
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case QUEUED:
      return 1;

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
/* Return 0 if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    case PLUS:
      /* An address is assumed not to trap if it is an address that can't
         trap plus a constant integer or it is the pic register plus a
         constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
                 && GET_CODE (XEXP (x, 1)) == CONST_INT)
                || (XEXP (x, 0) == pic_offset_table_rtx
                    && CONSTANT_P (XEXP (x, 1))));

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 1));

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}
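
/* Editor's sketch (hypothetical helper, not in the original): deciding
   whether a load may be executed speculatively.  A MEM is safe to
   speculate only if its address provably cannot fault; compare the MEM
   case of may_trap_p below, which applies the same test.  */
#if 0
static int
example_speculatable_load_p (rtx mem)
{
  return MEM_P (mem) && ! rtx_addr_can_trap_p (XEXP (mem, 0));
}
#endif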
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))
            return true;

          return nonzero_address_p (XEXP (x, 0));
        }
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
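
/* Editor's sketch (hypothetical, not in the original): nonzero_address_p
   lets a simplifier fold a pointer test such as (ne ADDR (const_int 0))
   to true.  The helper name is assumed for illustration.  */
#if 0
static rtx
example_fold_null_check (rtx addr)
{
  /* If the address is provably nonzero, ADDR != 0 is always true.  */
  if (nonzero_address_p (addr))
    return const1_rtx;
  return NULL_RTX;   /* Unknown; leave the comparison alone.  */
}
#endif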
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}
/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
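
/* Editor's sketch (assumed usage, not in the original): per the comment on
   get_integer_term, cse.c splits a constant such as
   (const (plus (symbol_ref "a") (const_int 8))) into a base and an integer
   term so related addresses can share one table entry.  The two calls
   below recover the pieces; the helper itself is hypothetical.  */
#if 0
static void
example_split_constant (rtx x, rtx *base, HOST_WIDE_INT *offset)
{
  *base = get_related_value (x);    /* (symbol_ref "a"), or 0.  */
  *offset = get_integer_term (x);   /* 8 in the example above.  */
}
#endif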
/* Given a tablejump insn INSN, return the RTL expression for the offset
   into the jump table.  If the offset cannot be determined, then return
   NULL_RTX.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the offset was found.  */

rtx
get_jump_table_offset (rtx insn, rtx *earliest)
{
  rtx label = NULL;
  rtx table = NULL;
  rtx set;
  rtx old_insn;
  rtx x;
  rtx old_x;
  rtx y;
  rtx old_y;
  int i;

  if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
    return NULL_RTX;

  x = SET_SRC (set);

  /* Some targets (eg, ARM) emit a tablejump that also
     contains the out-of-range target.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    x = XEXP (x, 1);

  /* Search backwards and locate the expression stored in X.  */
  for (old_x = NULL_RTX; REG_P (x) && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
    ;

  /* If X is an expression using a relative address then strip
     off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
     or the jump table label.  */
  if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
    {
      for (i = 0; i < 2; i++)
        {
          old_insn = insn;
          y = XEXP (x, i);

          if (y == pc_rtx || y == pic_offset_table_rtx)
            break;

          for (old_y = NULL_RTX; REG_P (y) && y != old_y;
               old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
            ;

          if (GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label)
            break;
        }

      if (i >= 2)
        return NULL_RTX;

      x = XEXP (x, 1 - i);

      for (old_x = NULL_RTX; REG_P (x) && x != old_x;
           old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
        ;
    }

  /* Strip off any sign or zero extension.  */
  if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
    {
      x = XEXP (x, 0);

      for (old_x = NULL_RTX; REG_P (x) && x != old_x;
           old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
        ;
    }

  /* If X isn't a MEM then this isn't a tablejump we understand.  */
  if (!MEM_P (x))
    return NULL_RTX;

  /* Strip off the MEM.  */
  x = XEXP (x, 0);

  for (old_x = NULL_RTX; REG_P (x) && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
    ;

  /* If X isn't a PLUS then this isn't a tablejump we understand.  */
  if (GET_CODE (x) != PLUS)
    return NULL_RTX;

  /* At this point we should have an expression representing the jump table
     plus an offset.  Examine each operand in order to determine which one
     represents the jump table.  Knowing that tells us that the other operand
     must represent the offset.  */
  for (i = 0; i < 2; i++)
    {
      old_insn = insn;
      y = XEXP (x, i);

      for (old_y = NULL_RTX; REG_P (y) && y != old_y;
           old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
        ;

      if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
          && reg_mentioned_p (label, y))
        break;
    }

  if (i >= 2)
    return NULL_RTX;

  x = XEXP (x, 1 - i);

  /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM.  */
  if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
    for (i = 0; i < 2; i++)
      if (XEXP (x, i) == pic_offset_table_rtx)
        {
          x = XEXP (x, 1 - i);
          break;
        }

  if (earliest)
    *earliest = insn;

  /* Return the RTL expression representing the offset.  */
  return x;
}
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (GET_CODE (x) == CALL_INSN)
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }

  return 0;
}
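
/* Editor's sketch (hypothetical helper, not in the original): a common
   idiom is to test whether an insn mentions a particular register before
   reordering or rewriting it.  */
#if 0
static int
example_insn_touches_reg_p (rtx insn, rtx reg)
{
  return INSN_P (insn) && reg_mentioned_p (reg, PATTERN (insn));
}
#endif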
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == CODE_LABEL)
      return 0;
  return 1;
}
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no JUMP_INSN insn.  */

int
no_jumps_between_p (rtx beg, rtx end)
{
  rtx p;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (GET_CODE (p) == JUMP_INSN)
      return 0;
  return 1;
}
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (GET_CODE (insn) == CALL_INSN
                && (find_reg_fusage (insn, USE, reg)
                    || find_reg_fusage (insn, CLOBBER, reg)))))
      return 1;
  return 0;
}
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
/* Nonzero if register REG is referenced in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  Sets of REG do
   not count.  */

int
reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_referenced_p (reg, PATTERN (insn))
            || (GET_CODE (insn) == CALL_INSN
                && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}
/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (GET_CODE (insn) == CALL_INSN
              /* We'd like to test call_used_regs here, but rtlanal.c can't
                 reference that variable due to its use in genattrtab.  So
                 we'll just be more conservative.

                 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
                 information holds all clobbered registers.  */
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER)
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
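
/* Editor's sketch (assumed example, not in the original): reg_set_p and
   reg_set_between_p compose naturally when checking whether REG still
   holds the same value at a later insn.  */
#if 0
static int
example_reg_unchanged_p (rtx reg, rtx from_insn, rtx to_insn)
{
  /* True if no insn strictly between FROM_INSN and TO_INSN sets or
     clobbers REG, and neither endpoint does either.  */
  return ! reg_set_p (reg, from_insn)
         && ! reg_set_between_p (reg, from_insn, to_insn)
         && ! reg_set_p (reg, to_insn);
}
#endif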
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Do not
   consider non-registers one way or the other.  */

int
regs_set_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case PC:
    case CC0:
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (regs_set_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (RTX_UNCHANGING_P (x))
        return 0;
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (RTX_UNCHANGING_P (x))
        return 0;
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
/* Return true if anything in insn X is (anti,output,true) dependent on
   anything in insn Y.  */

int
insn_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    abort ();

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return 1;

  tmp = PATTERN (x);
  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return 1;

  return 0;
}
/* A helper routine for insn_dependent_p called through note_stores.  */

static void
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx *pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}
/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common
                 case only a single set is present in the pattern, so we
                 want to avoid checking for REG_UNUSED notes unless
                 necessary.

                 When we reach a set for the first time, we just expect
                 it is the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }

  return set;
}
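
/* Editor's sketch (not in the original): the usual entry point is the
   single_set macro from rtl.h, which handles a plain SET directly and
   falls back to single_set_2 for other patterns.  A pass typically does
   something like the hypothetical helper below.  */
#if 0
static rtx
example_copy_source (rtx insn)
{
  rtx set = single_set (insn);

  /* If INSN is, ignoring dead stores, just one SET, return its source.  */
  return set ? SET_SRC (set) : NULL_RTX;
}
#endif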
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == SIGN_EXTRACT
      || GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
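
/* Editor's sketch (hypothetical pass fragment, not in the original):
   flow.c-style cleanups delete insns that noop_move_p identifies, since a
   register-to-itself move has no effect.  The use of delete_insn here is
   an assumption about the surrounding infrastructure.  */
#if 0
static void
example_delete_noop_moves (rtx first)
{
  rtx insn;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && noop_move_p (insn))
      delete_insn (insn);
}
#endif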
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
            return 1;

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
#ifdef ENABLE_CHECKING
      if (!CONSTANT_P (x))
        abort ();
#endif

      return 0;
    }
}
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == SIGN_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
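
/* Editor's sketch (assumed names, not in the original): the callback
   protocol for note_stores.  DATA here is a regset the caller threads
   through; the callback records every hard register stored by the insn.  */
#if 0
static void
example_record_store (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  regset stored = (regset) data;

  if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    SET_REGNO_REG_SET (stored, REGNO (dest));
}

/* ... note_stores (PATTERN (insn), example_record_store, stored); ...  */
#endif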
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we call FUN on everything in the source, plus registers
           in the memory expression of a store destination, and the operands
           of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
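
/* Editor's sketch (hypothetical, not in the original): note_uses passes a
   pointer to each used expression, which makes in-place replacement easy;
   a real pass would normally route the store through validate_change
   rather than writing directly.  */
#if 0
struct example_replacement { rtx from, to; };

static void
example_replace_use (rtx *loc, void *data)
{
  struct example_replacement *r = (struct example_replacement *) data;

  if (rtx_equal_p (*loc, r->from))
    *loc = r->to;
}
#endif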
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0), or if X is a register and
   either X dies in INSN or INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG,
   ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  if (!REG_P (x))
    abort ();

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  unsigned int regno, endregno;
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (GET_CODE (insn) == CALL_INSN
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    {
      rtx dest = SET_DEST (pattern);

      /* A value is totally replaced if it is the destination or the
         destination is a SUBREG of REGNO that does not change the number of
         words in it.  */
      if (GET_CODE (dest) == SUBREG
          && (((GET_MODE_SIZE (GET_MODE (dest))
                + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
              == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
        dest = SUBREG_REG (dest);

      if (!REG_P (dest))
        return 0;

      regno = REGNO (dest);
      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
                  : regno + hard_regno_nregs[regno][GET_MODE (dest)]);

      return (test_regno >= regno && test_regno < endregno);
    }
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
            {
              rtx dest = SET_DEST (body);

              if (GET_CODE (dest) == SUBREG
                  && (((GET_MODE_SIZE (GET_MODE (dest))
                        + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                           + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
                dest = SUBREG_REG (dest);

              if (!REG_P (dest))
                continue;

              regno = REGNO (dest);
              endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
                          : regno + hard_regno_nregs[regno][GET_MODE (dest)]);

              if (test_regno >= regno && test_regno < endregno)
                return 1;
            }
        }
    }

  return 0;
}
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
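
/* Editor's sketch (not in the original): a typical REG_EQUAL lookup.
   The note's datum, when present, is a simpler expression known to be
   equal to the value computed by the insn; the helper name below is
   hypothetical.  */
#if 0
static rtx
example_known_value (rtx insn)
{
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  return note ? XEXP (note, 0) : NULL_RTX;
}
#endif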
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (GET_CODE (insn) != CALL_INSN)
    return 0;

  if (! datum)
    abort ();

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || GET_CODE (insn) != CALL_INSN)
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}
/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  abort ();
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no other
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
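
/* Editor's sketch (assumed helper, not in the original): the three
   predicates above form a ladder: volatile_insn_p implies
   volatile_refs_p, which implies side_effects_p.  Dead-code removal
   must use the strongest applicable one.  */
#if 0
static int
example_safe_to_delete_set_p (rtx set)
{
  /* A SET may be removed as dead only if evaluating either side has no
     side effects (no volatile access, no auto-increment, no call).  */
  return ! side_effects_p (SET_SRC (set))
         && ! side_effects_p (SET_DEST (set));
}
#endif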
2311 /* Return nonzero if evaluating rtx X might cause a trap. */
2314 may_trap_p (rtx x)
2316 int i;
2317 enum rtx_code code;
2318 const char *fmt;
2320 if (x == 0)
2321 return 0;
2322 code = GET_CODE (x);
2323 switch (code)
2325 /* Handle these cases quickly. */
2326 case CONST_INT:
2327 case CONST_DOUBLE:
2328 case CONST_VECTOR:
2329 case SYMBOL_REF:
2330 case LABEL_REF:
2331 case CONST:
2332 case PC:
2333 case CC0:
2334 case REG:
2335 case SCRATCH:
2336 return 0;
2338 case ASM_INPUT:
2339 case UNSPEC_VOLATILE:
2340 case TRAP_IF:
2341 return 1;
2343 case ASM_OPERANDS:
2344 return MEM_VOLATILE_P (x);
2346 /* Memory ref can trap unless it's a static var or a stack slot. */
2347 case MEM:
2348 if (MEM_NOTRAP_P (x))
2349 return 0;
2350 return rtx_addr_can_trap_p (XEXP (x, 0));
2352 /* Division by a non-constant might trap. */
2353 case DIV:
2354 case MOD:
2355 case UDIV:
2356 case UMOD:
2357 if (HONOR_SNANS (GET_MODE (x)))
2358 return 1;
2359 if (! CONSTANT_P (XEXP (x, 1))
2360 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2361 && flag_trapping_math))
2362 return 1;
2363 if (XEXP (x, 1) == const0_rtx)
2364 return 1;
2365 break;
2367 case EXPR_LIST:
2368 /* An EXPR_LIST is used to represent a function call. This
2369 certainly may trap. */
2370 return 1;
2372 case GE:
2373 case GT:
2374 case LE:
2375 case LT:
2376 case LTGT:
2377 case COMPARE:
2378 /* Some floating point comparisons may trap. */
2379 if (!flag_trapping_math)
2380 break;
2381 /* ??? There is no machine independent way to check for tests that trap
2382 when COMPARE is used, though many targets do make this distinction.
2383 For instance, sparc uses CCFPE for compares which generate exceptions
2384 and CCFP for compares which do not generate exceptions. */
2385 if (HONOR_NANS (GET_MODE (x)))
2386 return 1;
2387 /* But often the compare has some CC mode, so check operand
2388 modes as well. */
2389 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2390 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2391 return 1;
2392 break;
2394 case EQ:
2395 case NE:
2396 if (HONOR_SNANS (GET_MODE (x)))
2397 return 1;
2398 /* Often comparison is CC mode, so check operand modes. */
2399 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2400 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2401 return 1;
2402 break;
2404 case FIX:
2405 /* Conversion of floating point might trap. */
2406 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2407 return 1;
2408 break;
2410 case NEG:
2411 case ABS:
2412 /* These operations don't trap even with floating point. */
2413 break;
2415 default:
2416 /* Any floating arithmetic may trap. */
2417 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2418 && flag_trapping_math)
2419 return 1;
2422 fmt = GET_RTX_FORMAT (code);
2423 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2425 if (fmt[i] == 'e')
2427 if (may_trap_p (XEXP (x, i)))
2428 return 1;
2430 else if (fmt[i] == 'E')
2432 int j;
2433 for (j = 0; j < XVECLEN (x, i); j++)
2434 if (may_trap_p (XVECEXP (x, i, j)))
2435 return 1;
2438 return 0;
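/* A minimal illustrative sketch of the DIV handling above, assuming a
   non-float mode: division by a nonzero constant cannot trap, while
   division by a register can.  The function name and pseudo number are
   hypothetical.  */
#if 0
static void
example_may_trap (void)
{
  rtx reg = gen_rtx_REG (SImode, 100);
  rtx by_const = gen_rtx_DIV (SImode, reg, GEN_INT (4));
  rtx by_reg = gen_rtx_DIV (SImode, reg, reg);

  /* may_trap_p (by_const) == 0: the divisor is a nonzero constant.
     may_trap_p (by_reg) == 1: the divisor might be zero.  */
}
#endif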
2441 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2442 i.e., an inequality. */
2445 inequality_comparisons_p (rtx x)
2447 const char *fmt;
2448 int len, i;
2449 enum rtx_code code = GET_CODE (x);
2451 switch (code)
2453 case REG:
2454 case SCRATCH:
2455 case PC:
2456 case CC0:
2457 case CONST_INT:
2458 case CONST_DOUBLE:
2459 case CONST_VECTOR:
2460 case CONST:
2461 case LABEL_REF:
2462 case SYMBOL_REF:
2463 return 0;
2465 case LT:
2466 case LTU:
2467 case GT:
2468 case GTU:
2469 case LE:
2470 case LEU:
2471 case GE:
2472 case GEU:
2473 return 1;
2475 default:
2476 break;
2479 len = GET_RTX_LENGTH (code);
2480 fmt = GET_RTX_FORMAT (code);
2482 for (i = 0; i < len; i++)
2484 if (fmt[i] == 'e')
2486 if (inequality_comparisons_p (XEXP (x, i)))
2487 return 1;
2489 else if (fmt[i] == 'E')
2491 int j;
2492 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2493 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2494 return 1;
2498 return 0;
2501 /* Replace any occurrence of FROM in X with TO. The function does
2502 not descend into CONST_DOUBLEs when doing the replacement.
2504 Note that copying is not done so X must not be shared unless all copies
2505 are to be modified. */
2508 replace_rtx (rtx x, rtx from, rtx to)
2510 int i, j;
2511 const char *fmt;
2513 /* The following prevents infinite loops when we replace a MEM inside
2514 a CONST_DOUBLE with the same CONST_DOUBLE. */
2515 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2516 return x;
2518 if (x == from)
2519 return to;
2521 /* Allow this function to make replacements in EXPR_LISTs. */
2522 if (x == 0)
2523 return 0;
2525 if (GET_CODE (x) == SUBREG)
2527 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2529 if (GET_CODE (new) == CONST_INT)
2531 x = simplify_subreg (GET_MODE (x), new,
2532 GET_MODE (SUBREG_REG (x)),
2533 SUBREG_BYTE (x));
2534 if (! x)
2535 abort ();
2537 else
2538 SUBREG_REG (x) = new;
2540 return x;
2542 else if (GET_CODE (x) == ZERO_EXTEND)
2544 rtx new = replace_rtx (XEXP (x, 0), from, to);
2546 if (GET_CODE (new) == CONST_INT)
2548 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2549 new, GET_MODE (XEXP (x, 0)));
2550 if (! x)
2551 abort ();
2553 else
2554 XEXP (x, 0) = new;
2556 return x;
2559 fmt = GET_RTX_FORMAT (GET_CODE (x));
2560 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2562 if (fmt[i] == 'e')
2563 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2564 else if (fmt[i] == 'E')
2565 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2566 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2569 return x;
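/* A minimal illustrative sketch: since replace_rtx modifies X in
   place, callers working on possibly-shared rtl copy it first.  The
   function name is hypothetical.  */
#if 0
static rtx
example_replace (rtx insn, rtx old_reg, rtx new_reg)
{
  /* Copy first; the copy may then be modified freely.  */
  rtx pattern = copy_rtx (PATTERN (insn));
  return replace_rtx (pattern, old_reg, new_reg);
}
#endif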
2572 /* Throughout the rtx X, replace many registers according to REG_MAP.
2573 Return the replacement for X (which may be X with altered contents).
2574 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2575 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2577 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2578 should not be mapped to pseudos or vice versa since validate_change
2579 is not called.
2581 If REPLACE_DEST is 1, replacements are also done in destinations;
2582 otherwise, only sources are replaced. */
2585 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2587 enum rtx_code code;
2588 int i;
2589 const char *fmt;
2591 if (x == 0)
2592 return x;
2594 code = GET_CODE (x);
2595 switch (code)
2597 case SCRATCH:
2598 case PC:
2599 case CC0:
2600 case CONST_INT:
2601 case CONST_DOUBLE:
2602 case CONST_VECTOR:
2603 case CONST:
2604 case SYMBOL_REF:
2605 case LABEL_REF:
2606 return x;
2608 case REG:
2609 /* Verify that the register has an entry before trying to access it. */
2610 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2612 /* SUBREGs can't be shared. Always return a copy to ensure that if
2613 this replacement occurs more than once then each instance will
2614 get a distinct rtx. */
2615 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2616 return copy_rtx (reg_map[REGNO (x)]);
2617 return reg_map[REGNO (x)];
2619 return x;
2621 case SUBREG:
2622 /* Prevent making nested SUBREGs. */
2623 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2624 && reg_map[REGNO (SUBREG_REG (x))] != 0
2625 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2627 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2628 return simplify_gen_subreg (GET_MODE (x), map_val,
2629 GET_MODE (SUBREG_REG (x)),
2630 SUBREG_BYTE (x));
2632 break;
2634 case SET:
2635 if (replace_dest)
2636 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2638 else if (MEM_P (SET_DEST (x))
2639 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2640 /* Even if we are not to replace destinations, replace a register if it
2641 is CONTAINED in the destination (i.e. the destination is memory or
2642 a STRICT_LOW_PART). */
2643 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2644 reg_map, nregs, 0);
2645 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2646 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2647 break;
2649 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2650 return x;
2652 default:
2653 break;
2656 fmt = GET_RTX_FORMAT (code);
2657 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2659 if (fmt[i] == 'e')
2660 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2661 else if (fmt[i] == 'E')
2663 int j;
2664 for (j = 0; j < XVECLEN (x, i); j++)
2665 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2666 nregs, replace_dest);
2669 return x;
2672 /* Replace occurrences of the old label in *X with the new one.
2673 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2676 replace_label (rtx *x, void *data)
2678 rtx l = *x;
2679 rtx old_label = ((replace_label_data *) data)->r1;
2680 rtx new_label = ((replace_label_data *) data)->r2;
2681 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2683 if (l == NULL_RTX)
2684 return 0;
2686 if (GET_CODE (l) == SYMBOL_REF
2687 && CONSTANT_POOL_ADDRESS_P (l))
2689 rtx c = get_pool_constant (l);
2690 if (rtx_referenced_p (old_label, c))
2692 rtx new_c, new_l;
2693 replace_label_data *d = (replace_label_data *) data;
2695 /* Create a copy of constant C; replace the label inside
2696 but do not update LABEL_NUSES because uses in the constant pool
2697 are not counted. */
2698 new_c = copy_rtx (c);
2699 d->update_label_nuses = false;
2700 for_each_rtx (&new_c, replace_label, data);
2701 d->update_label_nuses = update_label_nuses;
2703 /* Add the new constant NEW_C to the constant pool and replace
2704 the old reference to the constant with the new one. */
2705 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2706 *x = replace_rtx (l, l, new_l);
2708 return 0;
2711 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2712 field. This is not handled by for_each_rtx because it doesn't
2713 handle unprinted ('0') fields. */
2714 if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
2715 JUMP_LABEL (l) = new_label;
2717 if ((GET_CODE (l) == LABEL_REF
2718 || GET_CODE (l) == INSN_LIST)
2719 && XEXP (l, 0) == old_label)
2721 XEXP (l, 0) = new_label;
2722 if (update_label_nuses)
2724 ++LABEL_NUSES (new_label);
2725 --LABEL_NUSES (old_label);
2727 return 0;
2730 return 0;
2733 /* When *BODY is equal to X or X is directly referenced by *BODY,
2734 return nonzero, so that FOR_EACH_RTX stops traversing and returns
2735 nonzero too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2737 static int
2738 rtx_referenced_p_1 (rtx *body, void *x)
2740 rtx y = (rtx) x;
2742 if (*body == NULL_RTX)
2743 return y == NULL_RTX;
2745 /* Return true if a label_ref *BODY refers to label Y. */
2746 if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
2747 return XEXP (*body, 0) == y;
2749 /* If *BODY is a reference to a pool constant, traverse the constant. */
2750 if (GET_CODE (*body) == SYMBOL_REF
2751 && CONSTANT_POOL_ADDRESS_P (*body))
2752 return rtx_referenced_p (y, get_pool_constant (*body));
2754 /* By default, compare the RTL expressions. */
2755 return rtx_equal_p (*body, y);
2758 /* Return true if X is referenced in BODY. */
2761 rtx_referenced_p (rtx x, rtx body)
2763 return for_each_rtx (&body, rtx_referenced_p_1, x);
2766 /* If INSN is a tablejump, return true and store the label (which precedes the
2767 jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2769 bool
2770 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2772 rtx label, table;
2774 if (GET_CODE (insn) == JUMP_INSN
2775 && (label = JUMP_LABEL (insn)) != NULL_RTX
2776 && (table = next_active_insn (label)) != NULL_RTX
2777 && GET_CODE (table) == JUMP_INSN
2778 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2779 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2781 if (labelp)
2782 *labelp = label;
2783 if (tablep)
2784 *tablep = table;
2785 return true;
2787 return false;
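/* A minimal illustrative sketch of using tablejump_p to reach the
   dispatch table, much as label_is_jump_target_p later in this file
   does.  The function name is hypothetical.  */
#if 0
static int
example_count_table_entries (rtx insn)
{
  rtx table;

  if (!tablejump_p (insn, NULL, &table))
    return 0;
  /* The labels live in vector element 0 of an ADDR_VEC and element 1
     of an ADDR_DIFF_VEC.  */
  return XVECLEN (PATTERN (table),
                  GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
}
#endif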
2790 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2791 constant that is not in the constant pool and not in the condition
2792 of an IF_THEN_ELSE. */
2794 static int
2795 computed_jump_p_1 (rtx x)
2797 enum rtx_code code = GET_CODE (x);
2798 int i, j;
2799 const char *fmt;
2801 switch (code)
2803 case LABEL_REF:
2804 case PC:
2805 return 0;
2807 case CONST:
2808 case CONST_INT:
2809 case CONST_DOUBLE:
2810 case CONST_VECTOR:
2811 case SYMBOL_REF:
2812 case REG:
2813 return 1;
2815 case MEM:
2816 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2817 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2819 case IF_THEN_ELSE:
2820 return (computed_jump_p_1 (XEXP (x, 1))
2821 || computed_jump_p_1 (XEXP (x, 2)));
2823 default:
2824 break;
2827 fmt = GET_RTX_FORMAT (code);
2828 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2830 if (fmt[i] == 'e'
2831 && computed_jump_p_1 (XEXP (x, i)))
2832 return 1;
2834 else if (fmt[i] == 'E')
2835 for (j = 0; j < XVECLEN (x, i); j++)
2836 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2837 return 1;
2840 return 0;
2843 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2845 Tablejumps and casesi insns are not considered indirect jumps;
2846 we can recognize them by a (use (label_ref)). */
2849 computed_jump_p (rtx insn)
2851 int i;
2852 if (GET_CODE (insn) == JUMP_INSN)
2854 rtx pat = PATTERN (insn);
2856 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2857 return 0;
2858 else if (GET_CODE (pat) == PARALLEL)
2860 int len = XVECLEN (pat, 0);
2861 int has_use_labelref = 0;
2863 for (i = len - 1; i >= 0; i--)
2864 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2865 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2866 == LABEL_REF))
2867 has_use_labelref = 1;
2869 if (! has_use_labelref)
2870 for (i = len - 1; i >= 0; i--)
2871 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2872 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2873 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2874 return 1;
2876 else if (GET_CODE (pat) == SET
2877 && SET_DEST (pat) == pc_rtx
2878 && computed_jump_p_1 (SET_SRC (pat)))
2879 return 1;
2881 return 0;
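/* A minimal illustrative sketch: the simplest pattern recognized as a
   computed jump is a jump through a register.  The function name and
   register number are hypothetical.  */
#if 0
static rtx
example_computed_jump_pattern (void)
{
  /* A JUMP_INSN with this pattern is a computed jump; a tablejump is
     not, because its PARALLEL contains a (use (label_ref ...)).  */
  return gen_rtx_SET (VOIDmode, pc_rtx, gen_rtx_REG (Pmode, 3));
}
#endif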
2884 /* Traverse X via depth-first search, calling F for each
2885 sub-expression (including X itself). F is also passed the DATA.
2886 If F returns -1, do not traverse sub-expressions, but continue
2887 traversing the rest of the tree. If F ever returns any other
2888 nonzero value, stop the traversal, and return the value returned
2889 by F. Otherwise, return 0. This function does not traverse inside
2890 tree structure that contains RTX_EXPRs, or into sub-expressions
2891 whose format code is `0' since it is not known whether or not those
2892 codes are actually RTL.
2894 This routine is very general, and could (should?) be used to
2895 implement many of the other routines in this file. */
2898 for_each_rtx (rtx *x, rtx_function f, void *data)
2900 int result;
2901 int length;
2902 const char *format;
2903 int i;
2905 /* Call F on X. */
2906 result = (*f) (x, data);
2907 if (result == -1)
2908 /* Do not traverse sub-expressions. */
2909 return 0;
2910 else if (result != 0)
2911 /* Stop the traversal. */
2912 return result;
2914 if (*x == NULL_RTX)
2915 /* There are no sub-expressions. */
2916 return 0;
2918 length = GET_RTX_LENGTH (GET_CODE (*x));
2919 format = GET_RTX_FORMAT (GET_CODE (*x));
2921 for (i = 0; i < length; ++i)
2923 switch (format[i])
2925 case 'e':
2926 result = for_each_rtx (&XEXP (*x, i), f, data);
2927 if (result != 0)
2928 return result;
2929 break;
2931 case 'V':
2932 case 'E':
2933 if (XVEC (*x, i) != 0)
2935 int j;
2936 for (j = 0; j < XVECLEN (*x, i); ++j)
2938 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2939 if (result != 0)
2940 return result;
2943 break;
2945 default:
2946 /* Nothing to do. */
2947 break;
2952 return 0;
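/* A minimal illustrative sketch of a for_each_rtx callback, counting
   the MEMs in an expression.  The function names are hypothetical.  */
#if 0
static int
count_mems_1 (rtx *x, void *data)
{
  if (*x != NULL_RTX && MEM_P (*x))
    ++*(int *) data;
  return 0;   /* Keep traversing.  */
}

static int
count_mems (rtx x)
{
  int n = 0;
  for_each_rtx (&x, count_mems_1, &n);
  return n;
}
#endif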
2955 /* Searches X for any reference to REGNO, returning the rtx of the
2956 reference found if any. Otherwise, returns NULL_RTX. */
2959 regno_use_in (unsigned int regno, rtx x)
2961 const char *fmt;
2962 int i, j;
2963 rtx tem;
2965 if (REG_P (x) && REGNO (x) == regno)
2966 return x;
2968 fmt = GET_RTX_FORMAT (GET_CODE (x));
2969 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2971 if (fmt[i] == 'e')
2973 if ((tem = regno_use_in (regno, XEXP (x, i))))
2974 return tem;
2976 else if (fmt[i] == 'E')
2977 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2978 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2979 return tem;
2982 return NULL_RTX;
2985 /* Return a value indicating whether OP, an operand of a commutative
2986 operation, is preferred as the first or second operand. The higher
2987 the value, the stronger the preference for being the first operand.
2988 We use negative values to indicate a preference for the second operand
2989 and positive values for the first operand. */
2992 commutative_operand_precedence (rtx op)
2994 enum rtx_code code = GET_CODE (op);
2996 /* Constants always become the second operand. Prefer "nice" constants. */
2997 if (code == CONST_INT)
2998 return -7;
2999 if (code == CONST_DOUBLE)
3000 return -6;
3001 op = avoid_constant_pool_reference (op);
3003 switch (GET_RTX_CLASS (code))
3005 case RTX_CONST_OBJ:
3006 if (code == CONST_INT)
3007 return -5;
3008 if (code == CONST_DOUBLE)
3009 return -4;
3010 return -3;
3012 case RTX_EXTRA:
3013 /* SUBREGs of objects should come second. */
3014 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3015 return -2;
3017 if (!CONSTANT_P (op))
3018 return 0;
3019 else
3020 /* As for RTX_CONST_OBJ. */
3021 return -3;
3023 case RTX_OBJ:
3024 /* Complex expressions should come first, so decrease the priority
3025 of objects. */
3026 return -1;
3028 case RTX_COMM_ARITH:
3029 /* Prefer operands that are themselves commutative to be first.
3030 This helps to make things linear. In particular,
3031 (and (and (reg) (reg)) (not (reg))) is canonical. */
3032 return 4;
3034 case RTX_BIN_ARITH:
3035 /* If only one operand is a binary expression, it will be the first
3036 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3037 is canonical, although it will usually be further simplified. */
3038 return 2;
3040 case RTX_UNARY:
3041 /* Then prefer NEG and NOT. */
3042 if (code == NEG || code == NOT)
3043 return 1;
3045 default:
3046 return 0;
3050 /* Return 1 iff it is necessary to swap the operands of a commutative
3051 operation in order to canonicalize the expression. */
3054 swap_commutative_operands_p (rtx x, rtx y)
3056 return (commutative_operand_precedence (x)
3057 < commutative_operand_precedence (y));
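/* A minimal illustrative sketch of how callers use the predicate above
   when canonicalizing a commutative operation.  The function name is
   hypothetical.  */
#if 0
static void
example_canonicalize (rtx *op0, rtx *op1)
{
  if (swap_commutative_operands_p (*op0, *op1))
    {
      rtx tem = *op0;
      *op0 = *op1;
      *op1 = tem;
    }
}
#endif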
3060 /* Return 1 if X is an autoincrement side effect and the register is
3061 not the stack pointer. */
3063 auto_inc_p (rtx x)
3065 switch (GET_CODE (x))
3067 case PRE_INC:
3068 case POST_INC:
3069 case PRE_DEC:
3070 case POST_DEC:
3071 case PRE_MODIFY:
3072 case POST_MODIFY:
3073 /* There are no REG_INC notes for SP. */
3074 if (XEXP (x, 0) != stack_pointer_rtx)
3075 return 1;
3076 default:
3077 break;
3079 return 0;
3082 /* Return 1 if the sequence of instructions beginning with FROM and up
3083 to and including TO is safe to move. If NEW_TO is non-NULL, and
3084 the sequence is not already safe to move, but can be easily
3085 extended to a sequence which is safe, then NEW_TO will point to the
3086 end of the extended sequence.
3088 For now, this function only checks that the region contains whole
3089 exception regions, but it could be extended to check additional
3090 conditions as well. */
3093 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
3095 int eh_region_count = 0;
3096 int past_to_p = 0;
3097 rtx r = from;
3099 /* By default, assume the end of the region will be what was
3100 suggested. */
3101 if (new_to)
3102 *new_to = to;
3104 while (r)
3106 if (GET_CODE (r) == NOTE)
3108 switch (NOTE_LINE_NUMBER (r))
3110 case NOTE_INSN_EH_REGION_BEG:
3111 ++eh_region_count;
3112 break;
3114 case NOTE_INSN_EH_REGION_END:
3115 if (eh_region_count == 0)
3116 /* This sequence of instructions contains the end of
3117 an exception region, but not the beginning. Moving
3118 it will cause chaos. */
3119 return 0;
3121 --eh_region_count;
3122 break;
3124 default:
3125 break;
3128 else if (past_to_p)
3129 /* If we've passed TO, and we see a non-note instruction, we
3130 can't extend the sequence to a movable sequence. */
3131 return 0;
3133 if (r == to)
3135 if (!new_to)
3136 /* It's OK to move the sequence if there were matched sets of
3137 exception region notes. */
3138 return eh_region_count == 0;
3140 past_to_p = 1;
3143 /* It's OK to move the sequence if there were matched sets of
3144 exception region notes. */
3145 if (past_to_p && eh_region_count == 0)
3147 *new_to = r;
3148 return 1;
3151 /* Go to the next instruction. */
3152 r = NEXT_INSN (r);
3155 return 0;
3158 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3160 loc_mentioned_in_p (rtx *loc, rtx in)
3162 enum rtx_code code = GET_CODE (in);
3163 const char *fmt = GET_RTX_FORMAT (code);
3164 int i, j;
3166 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3168 if (loc == &in->u.fld[i].rtx)
3169 return 1;
3170 if (fmt[i] == 'e')
3172 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3173 return 1;
3175 else if (fmt[i] == 'E')
3176 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3177 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3178 return 1;
3180 return 0;
3183 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3184 and SUBREG_BYTE, return the bit offset where the subreg begins
3185 (counting from the least significant bit of the operand). */
3187 unsigned int
3188 subreg_lsb_1 (enum machine_mode outer_mode,
3189 enum machine_mode inner_mode,
3190 unsigned int subreg_byte)
3192 unsigned int bitpos;
3193 unsigned int byte;
3194 unsigned int word;
3196 /* A paradoxical subreg begins at bit position 0. */
3197 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3198 return 0;
3200 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3201 /* If the subreg crosses a word boundary ensure that
3202 it also begins and ends on a word boundary. */
3203 if ((subreg_byte % UNITS_PER_WORD
3204 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3205 && (subreg_byte % UNITS_PER_WORD
3206 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))
3207 abort ();
3209 if (WORDS_BIG_ENDIAN)
3210 word = (GET_MODE_SIZE (inner_mode)
3211 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3212 else
3213 word = subreg_byte / UNITS_PER_WORD;
3214 bitpos = word * BITS_PER_WORD;
3216 if (BYTES_BIG_ENDIAN)
3217 byte = (GET_MODE_SIZE (inner_mode)
3218 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3219 else
3220 byte = subreg_byte % UNITS_PER_WORD;
3221 bitpos += byte * BITS_PER_UNIT;
3223 return bitpos;
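/* A worked example of the computation above, assuming a hypothetical
   little-endian 32-bit target (UNITS_PER_WORD == 4, neither
   WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN):

     subreg_lsb_1 (SImode, DImode, 4)
       word = 4 / 4 = 1   -> bitpos = 32
       byte = 4 % 4 = 0   -> bitpos = 32 + 0 = 32

   so (subreg:SI (reg:DI N) 4) selects bits 32..63 of the DImode
   value.  */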
3226 /* Given a subreg X, return the bit offset where the subreg begins
3227 (counting from the least significant bit of the reg). */
3229 unsigned int
3230 subreg_lsb (rtx x)
3232 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3233 SUBREG_BYTE (x));
3236 /* This function returns the regno offset of a subreg expression.
3237 xregno - A regno of an inner hard subreg_reg (or what will become one).
3238 xmode - The mode of xregno.
3239 offset - The byte offset.
3240 ymode - The mode of a top level SUBREG (or what may become one).
3241 RETURN - The regno offset which would be used. */
3242 unsigned int
3243 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3244 unsigned int offset, enum machine_mode ymode)
3246 int nregs_xmode, nregs_ymode;
3247 int mode_multiple, nregs_multiple;
3248 int y_offset;
3250 if (xregno >= FIRST_PSEUDO_REGISTER)
3251 abort ();
3253 nregs_xmode = hard_regno_nregs[xregno][xmode];
3254 nregs_ymode = hard_regno_nregs[xregno][ymode];
3256 /* If this is a big endian paradoxical subreg, which uses more actual
3257 hard registers than the original register, we must return a negative
3258 offset so that we find the proper highpart of the register. */
3259 if (offset == 0
3260 && nregs_ymode > nregs_xmode
3261 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3262 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3263 return nregs_xmode - nregs_ymode;
3265 if (offset == 0 || nregs_xmode == nregs_ymode)
3266 return 0;
3268 /* size of ymode must not be greater than the size of xmode. */
3269 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3270 if (mode_multiple == 0)
3271 abort ();
3273 y_offset = offset / GET_MODE_SIZE (ymode);
3274 nregs_multiple = nregs_xmode / nregs_ymode;
3275 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
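/* A worked example of the computation above, on a hypothetical 32-bit
   target where DImode needs two hard registers and SImode one
   (nregs_xmode == 2, nregs_ymode == 1):

     subreg_regno_offset (10, DImode, 4, SImode)
       mode_multiple  = 8 / 4 = 2
       y_offset       = 4 / 4 = 1
       nregs_multiple = 2 / 1 = 2
       result         = (1 / (2 / 2)) * 1 = 1

   so (subreg:SI (reg:DI 10) 4) refers to hard register 11.  */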
3278 /* This function returns true when the offset is representable via
3279 subreg_offset in the given regno.
3280 xregno - A regno of an inner hard subreg_reg (or what will become one).
3281 xmode - The mode of xregno.
3282 offset - The byte offset.
3283 ymode - The mode of a top level SUBREG (or what may become one).
3284 RETURN - Whether the offset is representable. */
3285 bool
3286 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3287 unsigned int offset, enum machine_mode ymode)
3289 int nregs_xmode, nregs_ymode;
3290 int mode_multiple, nregs_multiple;
3291 int y_offset;
3293 if (xregno >= FIRST_PSEUDO_REGISTER)
3294 abort ();
3296 nregs_xmode = hard_regno_nregs[xregno][xmode];
3297 nregs_ymode = hard_regno_nregs[xregno][ymode];
3299 /* Paradoxical subregs are always valid. */
3300 if (offset == 0
3301 && nregs_ymode > nregs_xmode
3302 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3303 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3304 return true;
3306 /* Lowpart subregs are always valid. */
3307 if (offset == subreg_lowpart_offset (ymode, xmode))
3308 return true;
3310 #ifdef ENABLE_CHECKING
3311 /* This should always pass, otherwise we don't know how to verify the
3312 constraint. These conditions may be relaxed but subreg_offset would
3313 need to be redesigned. */
3314 if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3315 || GET_MODE_SIZE (ymode) % nregs_ymode
3316 || nregs_xmode % nregs_ymode)
3317 abort ();
3318 #endif
3320 /* The XMODE value can be seen as a vector of NREGS_XMODE
3321 values. The subreg must represent a lowpart of a given field.
3322 Compute which field it is. */
3323 offset -= subreg_lowpart_offset (ymode,
3324 mode_for_size (GET_MODE_BITSIZE (xmode)
3325 / nregs_xmode,
3326 MODE_INT, 0));
3328 /* size of ymode must not be greater than the size of xmode. */
3329 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3330 if (mode_multiple == 0)
3331 abort ();
3333 y_offset = offset / GET_MODE_SIZE (ymode);
3334 nregs_multiple = nregs_xmode / nregs_ymode;
3335 #ifdef ENABLE_CHECKING
3336 if (offset % GET_MODE_SIZE (ymode)
3337 || mode_multiple % nregs_multiple)
3338 abort ();
3339 #endif
3340 return (!(y_offset % (mode_multiple / nregs_multiple)));
3343 /* Return the final regno that a subreg expression refers to. */
3344 unsigned int
3345 subreg_regno (rtx x)
3347 unsigned int ret;
3348 rtx subreg = SUBREG_REG (x);
3349 int regno = REGNO (subreg);
3351 ret = regno + subreg_regno_offset (regno,
3352 GET_MODE (subreg),
3353 SUBREG_BYTE (x),
3354 GET_MODE (x));
3355 return ret;
3358 struct parms_set_data
3360 int nregs;
3361 HARD_REG_SET regs;
3364 /* Helper function for noticing stores to parameter registers. */
3365 static void
3366 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3368 struct parms_set_data *d = data;
3369 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3370 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3372 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3373 d->nregs--;
3377 /* Look backward for the first parameter to be loaded.
3378 Do not search past BOUNDARY. */
3380 find_first_parameter_load (rtx call_insn, rtx boundary)
3382 struct parms_set_data parm;
3383 rtx p, before;
3385 /* Since different machines initialize their parameter registers
3386 in different orders, assume nothing. Collect the set of all
3387 parameter registers. */
3388 CLEAR_HARD_REG_SET (parm.regs);
3389 parm.nregs = 0;
3390 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3391 if (GET_CODE (XEXP (p, 0)) == USE
3392 && REG_P (XEXP (XEXP (p, 0), 0)))
3394 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3395 abort ();
3397 /* We only care about registers which can hold function
3398 arguments. */
3399 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3400 continue;
3402 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3403 parm.nregs++;
3405 before = call_insn;
3407 /* Search backward for the first set of a register in this set. */
3408 while (parm.nregs && before != boundary)
3410 before = PREV_INSN (before);
3412 /* It is possible that some loads got CSEed from one call to
3413 another. Stop in that case. */
3414 if (GET_CODE (before) == CALL_INSN)
3415 break;
3417 /* Our caller must either ensure that we will find all sets
3418 (in case the code has not been optimized yet), or guard
3419 against possible labels by setting BOUNDARY to the preceding
3420 CODE_LABEL. */
3421 if (GET_CODE (before) == CODE_LABEL)
3423 if (before != boundary)
3424 abort ();
3425 break;
3428 if (INSN_P (before))
3429 note_stores (PATTERN (before), parms_set, &parm);
3431 return before;
3434 /* Return true if we should avoid inserting code between INSN and the
3435 preceding call instruction. */
3437 bool
3438 keep_with_call_p (rtx insn)
3440 rtx set;
3442 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3444 if (REG_P (SET_DEST (set))
3445 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3446 && fixed_regs[REGNO (SET_DEST (set))]
3447 && general_operand (SET_SRC (set), VOIDmode))
3448 return true;
3449 if (REG_P (SET_SRC (set))
3450 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3451 && REG_P (SET_DEST (set))
3452 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3453 return true;
3454 /* There may be a stack pop just after the call and before the store
3455 of the return register. Search for the actual store when deciding
3456 if we can break or not. */
3457 if (SET_DEST (set) == stack_pointer_rtx)
3459 rtx i2 = next_nonnote_insn (insn);
3460 if (i2 && keep_with_call_p (i2))
3461 return true;
3464 return false;
3467 /* Return true when a store to register X can be hoisted to a place
3468 where the registers in LIVE are live (LIVE may be NULL). VAL is the
3469 destination whose value will be used. */
3471 static bool
3472 hoist_test_store (rtx x, rtx val, regset live)
3474 if (GET_CODE (x) == SCRATCH)
3475 return true;
3477 if (rtx_equal_p (x, val))
3478 return true;
3480 /* Allow a SUBREG of X unless it writes just part of a multi-register pseudo;
3481 then we would need to update all users to take the hoisted store into account.
3482 The caller may represent that by specifying the whole SUBREG as VAL. */
3484 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3486 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3487 && GET_MODE_BITSIZE (GET_MODE (x)) <
3488 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3489 return false;
3490 return true;
3492 if (GET_CODE (x) == SUBREG)
3493 x = SUBREG_REG (x);
3495 /* Anything except a register store is not hoistable. This includes
3496 partial stores to registers. */
3498 if (!REG_P (x))
3499 return false;
3501 /* A pseudo register can always be replaced by another pseudo to avoid
3502 the side effect; for a hard register we must ensure that it is dead.
3503 Eventually we may want to add code that tries to turn pseudos into
3504 hard registers, but that is unlikely to be useful. */
3506 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3508 int regno = REGNO (x);
3509 int n = hard_regno_nregs[regno][GET_MODE (x)];
3511 if (!live)
3512 return false;
3513 if (REGNO_REG_SET_P (live, regno))
3514 return false;
3515 while (--n > 0)
3516 if (REGNO_REG_SET_P (live, regno + n))
3517 return false;
3519 return true;
3523 /* Return true if INSN can be hoisted to a place where the hard registers
3524 in LIVE are live (LIVE can be NULL when unknown). VAL is expected to be
3525 stored by the insn and used by the hoisting pass. */
3527 bool
3528 can_hoist_insn_p (rtx insn, rtx val, regset live)
3530 rtx pat = PATTERN (insn);
3531 int i;
3533 /* It is probably not worth the complexity to handle insns with
3534 multiple sets. */
3535 if (!single_set (insn))
3536 return false;
3537 /* We could move a CALL_INSN, but we would need to check that all
3538 caller-clobbered regs are dead first, so give up. */
3539 if (GET_CODE (insn) == CALL_INSN)
3540 return false;
3541 /* In the future we may handle hoisting of libcall sequences, but
3542 give up for now. */
3543 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3544 return false;
3545 switch (GET_CODE (pat))
3547 case SET:
3548 if (!hoist_test_store (SET_DEST (pat), val, live))
3549 return false;
3550 break;
3551 case USE:
3552 /* USES do have sick semantics, so do not move them. */
3553 return false;
3554 break;
3555 case CLOBBER:
3556 if (!hoist_test_store (XEXP (pat, 0), val, live))
3557 return false;
3558 break;
3559 case PARALLEL:
3560 for (i = 0; i < XVECLEN (pat, 0); i++)
3562 rtx x = XVECEXP (pat, 0, i);
3563 switch (GET_CODE (x))
3565 case SET:
3566 if (!hoist_test_store (SET_DEST (x), val, live))
3567 return false;
3568 break;
3569 case USE:
3570 /* We need to fix callers to really ensure availability
3571 of all values the insn uses, but for now it is safe to prohibit
3572 hoisting of any insn having such hidden uses. */
3573 return false;
3574 break;
3575 case CLOBBER:
3576 if (!hoist_test_store (SET_DEST (x), val, live))
3577 return false;
3578 break;
3579 default:
3580 break;
3583 break;
3584 default:
3585 abort ();
3587 return true;
3590 /* Update a store after hoisting: replace all stores to pseudo registers
3591 with new ones to avoid clobbering values, except for the store to VAL,
3592 which is updated to NEW. */
3594 static void
3595 hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
3597 rtx x = *xp;
3599 if (GET_CODE (x) == SCRATCH)
3600 return;
3602 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3603 validate_change (insn, xp,
3604 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3605 SUBREG_BYTE (x)), 1);
3606 if (rtx_equal_p (x, val))
3608 validate_change (insn, xp, new, 1);
3609 return;
3611 if (GET_CODE (x) == SUBREG)
3613 xp = &SUBREG_REG (x);
3614 x = *xp;
3617 if (!REG_P (x))
3618 abort ();
3620 /* We've verified that hard registers are dead, so we may keep the side
3621 effect. Otherwise replace it with a new pseudo. */
3622 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3623 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3624 REG_NOTES (insn)
3625 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3628 /* Create a copy of INSN after AFTER, replacing the store to VAL with a
3629 store to NEW and every other side effect to a pseudo register with a new pseudo. */
3632 hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
3634 rtx pat;
3635 int i;
3636 rtx note;
3638 insn = emit_copy_of_insn_after (insn, after);
3639 pat = PATTERN (insn);
3641 /* Remove REG_UNUSED notes as we will re-emit them. */
3642 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3643 remove_note (insn, note);
3645 /* To get this working, callers must ensure that everything referenced
3646 by REG_EQUAL/REG_EQUIV notes is moved too. Let's just remove them;
3647 it is probably easier. */
3648 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3649 remove_note (insn, note);
3650 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3651 remove_note (insn, note);
3653 /* Remove REG_DEAD notes as they might not be valid anymore in case
3654 we create redundancy. */
3655 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3656 remove_note (insn, note);
3657 switch (GET_CODE (pat))
3659 case SET:
3660 hoist_update_store (insn, &SET_DEST (pat), val, new);
3661 break;
3662 case USE:
3663 break;
3664 case CLOBBER:
3665 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3666 break;
3667 case PARALLEL:
3668 for (i = 0; i < XVECLEN (pat, 0); i++)
3670 rtx x = XVECEXP (pat, 0, i);
3671 switch (GET_CODE (x))
3673 case SET:
3674 hoist_update_store (insn, &SET_DEST (x), val, new);
3675 break;
3676 case USE:
3677 break;
3678 case CLOBBER:
3679 hoist_update_store (insn, &SET_DEST (x), val, new);
3680 break;
3681 default:
3682 break;
3685 break;
3686 default:
3687 abort ();
3689 if (!apply_change_group ())
3690 abort ();
3692 return insn;
3696 hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
3698 rtx new_insn;
3700 /* We cannot insert instructions on an abnormal critical edge.
3701 It will be easier to find the culprit if we die now. */
3702 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3703 abort ();
3705 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3706 stuff. We also emit CALL_INSNs and friends. */
3707 if (e->insns.r == NULL_RTX)
3709 start_sequence ();
3710 emit_note (NOTE_INSN_DELETED);
3712 else
3713 push_to_sequence (e->insns.r);
3715 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3717 e->insns.r = get_insns ();
3718 end_sequence ();
3719 return new_insn;
3722 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3723 to non-complex jumps. That is, direct unconditional, conditional,
3724 and tablejumps, but not computed jumps or returns. It also does
3725 not apply to the fallthru case of a conditional jump. */
3727 bool
3728 label_is_jump_target_p (rtx label, rtx jump_insn)
3730 rtx tmp = JUMP_LABEL (jump_insn);
3732 if (label == tmp)
3733 return true;
3735 if (tablejump_p (jump_insn, NULL, &tmp))
3737 rtvec vec = XVEC (PATTERN (tmp),
3738 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3739 int i, veclen = GET_NUM_ELEM (vec);
3741 for (i = 0; i < veclen; ++i)
3742 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3743 return true;
3746 return false;
3750 /* Return an estimate of the cost of computing rtx X.
3751 One use is in cse, to decide which expression to keep in the hash table.
3752 Another is in rtl generation, to pick the cheapest way to multiply.
3753 Other uses like the latter are expected in the future. */
3756 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3758 int i, j;
3759 enum rtx_code code;
3760 const char *fmt;
3761 int total;
3763 if (x == 0)
3764 return 0;
3766 /* Compute the default costs of certain things.
3767 Note that targetm.rtx_costs can override the defaults. */
3769 code = GET_CODE (x);
3770 switch (code)
3772 case MULT:
3773 total = COSTS_N_INSNS (5);
3774 break;
3775 case DIV:
3776 case UDIV:
3777 case MOD:
3778 case UMOD:
3779 total = COSTS_N_INSNS (7);
3780 break;
3781 case USE:
3782 /* Used in loop.c and combine.c as a marker. */
3783 total = 0;
3784 break;
3785 default:
3786 total = COSTS_N_INSNS (1);
3789 switch (code)
3791 case REG:
3792 return 0;
3794 case SUBREG:
3795 /* If we can't tie these modes, make this expensive. The larger
3796 the mode, the more expensive it is. */
3797 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3798 return COSTS_N_INSNS (2
3799 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3800 break;
3802 default:
3803 if (targetm.rtx_costs (x, code, outer_code, &total))
3804 return total;
3805 break;
3808 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3809 which is already in total. */
3811 fmt = GET_RTX_FORMAT (code);
3812 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3813 if (fmt[i] == 'e')
3814 total += rtx_cost (XEXP (x, i), code);
3815 else if (fmt[i] == 'E')
3816 for (j = 0; j < XVECLEN (x, i); j++)
3817 total += rtx_cost (XVECEXP (x, i, j), code);
3819 return total;
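/* A minimal illustrative sketch: with the default costs above and no
   target override, a multiply by a register costs roughly
   COSTS_N_INSNS (5) plus the recursive cost of its operands (a REG
   costs 0; the CONST_INT cost depends on the target hook).  The
   function name and pseudo number are hypothetical.  */
#if 0
static int
example_mult_cost (void)
{
  rtx reg = gen_rtx_REG (SImode, 100);
  rtx mul = gen_rtx_MULT (SImode, reg, GEN_INT (10));
  return rtx_cost (mul, SET);
}
#endif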
3822 /* Return the cost of address expression X.
3823 X is expected to be a properly formed address reference. */
3826 address_cost (rtx x, enum machine_mode mode)
3828 /* We may be asked for the cost of various unusual addresses, such as
3829 operands of a push instruction. It is not worthwhile to complicate
3830 the target hook for such cases. */
3832 if (!memory_address_p (mode, x))
3833 return 1000;
3835 return targetm.address_cost (x);
3838 /* If the target doesn't override, compute the cost as with arithmetic. */
3841 default_address_cost (rtx x)
3843 return rtx_cost (x, MEM);
3847 unsigned HOST_WIDE_INT
3848 nonzero_bits (rtx x, enum machine_mode mode)
3850 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3853 unsigned int
3854 num_sign_bit_copies (rtx x, enum machine_mode mode)
3856 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3859 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3860 It avoids exponential behavior in nonzero_bits1 when X has
3861 identical subexpressions on the first or the second level. */
3863 static unsigned HOST_WIDE_INT
3864 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3865 enum machine_mode known_mode,
3866 unsigned HOST_WIDE_INT known_ret)
3868 if (x == known_x && mode == known_mode)
3869 return known_ret;
3871 /* Try to find identical subexpressions. If found call
3872 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3873 precomputed value for the subexpression as KNOWN_RET. */
3875 if (ARITHMETIC_P (x))
3877 rtx x0 = XEXP (x, 0);
3878 rtx x1 = XEXP (x, 1);
3880 /* Check the first level. */
3881 if (x0 == x1)
3882 return nonzero_bits1 (x, mode, x0, mode,
3883 cached_nonzero_bits (x0, mode, known_x,
3884 known_mode, known_ret));
3886 /* Check the second level. */
3887 if (ARITHMETIC_P (x0)
3888 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3889 return nonzero_bits1 (x, mode, x1, mode,
3890 cached_nonzero_bits (x1, mode, known_x,
3891 known_mode, known_ret));
3893 if (ARITHMETIC_P (x1)
3894 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3895 return nonzero_bits1 (x, mode, x0, mode,
3896 cached_nonzero_bits (x0, mode, known_x,
3897 known_mode, known_ret));
3900 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3903 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3904 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3905 is less useful. We can't allow both, because that results in exponential
3906 run time recursion. There is a nullstone testcase that triggered
3907 this. This macro avoids accidental uses of num_sign_bit_copies. */
3908 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3910 /* Given an expression, X, compute which bits in X can be nonzero.
3911 We don't care about bits outside of those defined in MODE.
3913 For most X this is simply GET_MODE_MASK (GET_MODE (X)), but if X is
3914 an arithmetic operation, we can do better. */
3916 static unsigned HOST_WIDE_INT
3917 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3918 enum machine_mode known_mode,
3919 unsigned HOST_WIDE_INT known_ret)
3921 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3922 unsigned HOST_WIDE_INT inner_nz;
3923 enum rtx_code code;
3924 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3926 /* For floating-point values, assume all bits are needed. */
3927 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3928 return nonzero;
3930 /* If X is wider than MODE, use its mode instead. */
3931 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3933 mode = GET_MODE (x);
3934 nonzero = GET_MODE_MASK (mode);
3935 mode_width = GET_MODE_BITSIZE (mode);
3938 if (mode_width > HOST_BITS_PER_WIDE_INT)
3939 /* Our only callers in this case look for single bit values. So
3940 just return the mode mask. Those tests will then be false. */
3941 return nonzero;
3943 #ifndef WORD_REGISTER_OPERATIONS
3944 /* If MODE is wider than X, but both are a single word for both the host
3945 and target machines, we can compute this from which bits of the
3946 object might be nonzero in its own mode, taking into account the fact
3947 that on many CISC machines, accessing an object in a wider mode
3948 causes the high-order bits to become undefined. So they are
3949 not known to be zero. */
3951 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3952 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3953 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3954 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3956 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3957 known_x, known_mode, known_ret);
3958 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3959 return nonzero;
3961 #endif
3963 code = GET_CODE (x);
3964 switch (code)
3966 case REG:
3967 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3968 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3969 all the bits above ptr_mode are known to be zero. */
3970 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3971 && REG_POINTER (x))
3972 nonzero &= GET_MODE_MASK (ptr_mode);
3973 #endif
3975 /* Include declared information about alignment of pointers. */
3976 /* ??? We don't properly preserve REG_POINTER changes across
3977 pointer-to-integer casts, so we can't trust it except for
3978 things that we know must be pointers. See execute/960116-1.c. */
3979 if ((x == stack_pointer_rtx
3980 || x == frame_pointer_rtx
3981 || x == arg_pointer_rtx)
3982 && REGNO_POINTER_ALIGN (REGNO (x)))
3984 unsigned HOST_WIDE_INT alignment
3985 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3987 #ifdef PUSH_ROUNDING
3988 /* If PUSH_ROUNDING is defined, it is possible for the
3989 stack to be momentarily aligned only to that amount,
3990 so we pick the least alignment. */
3991 if (x == stack_pointer_rtx && PUSH_ARGS)
3992 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3993 alignment);
3994 #endif
3996 nonzero &= ~(alignment - 1);
4000 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
4001 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
4002 known_mode, known_ret,
4003 &nonzero_for_hook);
4005 if (new)
4006 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
4007 known_mode, known_ret);
4009 return nonzero_for_hook;
4012 case CONST_INT:
4013 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4014 /* If X is negative in MODE, sign-extend the value. */
4015 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
4016 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
4017 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
4018 #endif
4020 return INTVAL (x);
4022 case MEM:
4023 #ifdef LOAD_EXTEND_OP
4024 /* In many, if not most, RISC machines, reading a byte from memory
4025 zeros the rest of the register. Noticing that fact saves a lot
4026 of extra zero-extends. */
4027 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4028 nonzero &= GET_MODE_MASK (GET_MODE (x));
4029 #endif
4030 break;
4032 case EQ: case NE:
4033 case UNEQ: case LTGT:
4034 case GT: case GTU: case UNGT:
4035 case LT: case LTU: case UNLT:
4036 case GE: case GEU: case UNGE:
4037 case LE: case LEU: case UNLE:
4038 case UNORDERED: case ORDERED:
4040 /* If this produces an integer result, we know which bits are set.
4041 Code here used to clear bits outside the mode of X, but that is
4042 now done above. */
4044 if (GET_MODE_CLASS (mode) == MODE_INT
4045 && mode_width <= HOST_BITS_PER_WIDE_INT)
4046 nonzero = STORE_FLAG_VALUE;
4047 break;
4049 case NEG:
4050 #if 0
4051 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4052 and num_sign_bit_copies. */
4053 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4054 == GET_MODE_BITSIZE (GET_MODE (x)))
4055 nonzero = 1;
4056 #endif
4058 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
4059 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4060 break;
4062 case ABS:
4063 #if 0
4064 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4065 and num_sign_bit_copies. */
4066 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4067 == GET_MODE_BITSIZE (GET_MODE (x)))
4068 nonzero = 1;
4069 #endif
4070 break;
4072 case TRUNCATE:
4073 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4074 known_x, known_mode, known_ret)
4075 & GET_MODE_MASK (mode));
4076 break;
4078 case ZERO_EXTEND:
4079 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4080 known_x, known_mode, known_ret);
4081 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4082 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4083 break;
4085 case SIGN_EXTEND:
4086 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4087 Otherwise, show that all the bits in the outer mode but not in the
4088 inner mode may be nonzero. */
4089 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4090 known_x, known_mode, known_ret);
4091 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4093 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4094 if (inner_nz
4095 & (((HOST_WIDE_INT) 1
4096 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
4097 inner_nz |= (GET_MODE_MASK (mode)
4098 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4101 nonzero &= inner_nz;
4102 break;
4104 case AND:
4105 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4106 known_x, known_mode, known_ret)
4107 & cached_nonzero_bits (XEXP (x, 1), mode,
4108 known_x, known_mode, known_ret);
4109 break;
4111 case XOR: case IOR:
4112 case UMIN: case UMAX: case SMIN: case SMAX:
4114 unsigned HOST_WIDE_INT nonzero0 =
4115 cached_nonzero_bits (XEXP (x, 0), mode,
4116 known_x, known_mode, known_ret);
4118 /* Don't call nonzero_bits a second time if it cannot change
4119 anything. */
4120 if ((nonzero & nonzero0) != nonzero)
4121 nonzero &= nonzero0
4122 | cached_nonzero_bits (XEXP (x, 1), mode,
4123 known_x, known_mode, known_ret);
4125 break;
4127 case PLUS: case MINUS:
4128 case MULT:
4129 case DIV: case UDIV:
4130 case MOD: case UMOD:
4131 /* We can apply the rules of arithmetic to compute the number of
4132 high- and low-order zero bits of these operations. We start by
4133 computing the width (position of the highest-order nonzero bit)
4134 and the number of low-order zero bits for each value. */
4136 unsigned HOST_WIDE_INT nz0 =
4137 cached_nonzero_bits (XEXP (x, 0), mode,
4138 known_x, known_mode, known_ret);
4139 unsigned HOST_WIDE_INT nz1 =
4140 cached_nonzero_bits (XEXP (x, 1), mode,
4141 known_x, known_mode, known_ret);
4142 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
4143 int width0 = floor_log2 (nz0) + 1;
4144 int width1 = floor_log2 (nz1) + 1;
4145 int low0 = floor_log2 (nz0 & -nz0);
4146 int low1 = floor_log2 (nz1 & -nz1);
4147 HOST_WIDE_INT op0_maybe_minusp
4148 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
4149 HOST_WIDE_INT op1_maybe_minusp
4150 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
4151 unsigned int result_width = mode_width;
4152 int result_low = 0;
4154 switch (code)
4156 case PLUS:
4157 result_width = MAX (width0, width1) + 1;
4158 result_low = MIN (low0, low1);
4159 break;
4160 case MINUS:
4161 result_low = MIN (low0, low1);
4162 break;
4163 case MULT:
4164 result_width = width0 + width1;
4165 result_low = low0 + low1;
4166 break;
4167 case DIV:
4168 if (width1 == 0)
4169 break;
4170 if (! op0_maybe_minusp && ! op1_maybe_minusp)
4171 result_width = width0;
4172 break;
4173 case UDIV:
4174 if (width1 == 0)
4175 break;
4176 result_width = width0;
4177 break;
4178 case MOD:
4179 if (width1 == 0)
4180 break;
4181 if (! op0_maybe_minusp && ! op1_maybe_minusp)
4182 result_width = MIN (width0, width1);
4183 result_low = MIN (low0, low1);
4184 break;
4185 case UMOD:
4186 if (width1 == 0)
4187 break;
4188 result_width = MIN (width0, width1);
4189 result_low = MIN (low0, low1);
4190 break;
4191 default:
4192 abort ();
4195 if (result_width < mode_width)
4196 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
4198 if (result_low > 0)
4199 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
4201 #ifdef POINTERS_EXTEND_UNSIGNED
4202 /* If pointers extend unsigned and this is an addition or subtraction
4203 to a pointer in Pmode, all the bits above ptr_mode are known to be
4204 zero. */
4205 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
4206 && (code == PLUS || code == MINUS)
4207 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4208 nonzero &= GET_MODE_MASK (ptr_mode);
4209 #endif
4211 break;
4213 case ZERO_EXTRACT:
4214 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4215 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4216 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4217 break;
4219 case SUBREG:
4220 /* If this is a SUBREG formed for a promoted variable that has
4221 been zero-extended, we know that at least the high-order bits
4222 are zero, though others might be too. */
4224 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4225 nonzero = GET_MODE_MASK (GET_MODE (x))
4226 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4227 known_x, known_mode, known_ret);
4229 /* If the inner mode is a single word for both the host and target
4230 machines, we can compute this from which bits of the inner
4231 object might be nonzero. */
4232 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
4233 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4234 <= HOST_BITS_PER_WIDE_INT))
4236 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4237 known_x, known_mode, known_ret);
4239 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4240 /* If this is a typical RISC machine, we only have to worry
4241 about the way loads are extended. */
4242 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4243 ? (((nonzero
4244 & (((unsigned HOST_WIDE_INT) 1
4245 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
4246 != 0))
4247 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
4248 || !MEM_P (SUBREG_REG (x)))
4249 #endif
4251 /* On many CISC machines, accessing an object in a wider mode
4252 causes the high-order bits to become undefined. So they are
4253 not known to be zero. */
4254 if (GET_MODE_SIZE (GET_MODE (x))
4255 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4256 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4257 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
4260 break;
4262 case ASHIFTRT:
4263 case LSHIFTRT:
4264 case ASHIFT:
4265 case ROTATE:
4266 /* The nonzero bits are in two classes: any bits within MODE
4267 that aren't in GET_MODE (x) are always significant. The rest of the
4268 nonzero bits are those that are significant in the operand of
4269 the shift when shifted the appropriate number of bits. This
4270 shows that high-order bits are cleared by the right shift and
4271 low-order bits by left shifts. */
4272 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4273 && INTVAL (XEXP (x, 1)) >= 0
4274 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4276 enum machine_mode inner_mode = GET_MODE (x);
4277 unsigned int width = GET_MODE_BITSIZE (inner_mode);
4278 int count = INTVAL (XEXP (x, 1));
4279 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4280 unsigned HOST_WIDE_INT op_nonzero =
4281 cached_nonzero_bits (XEXP (x, 0), mode,
4282 known_x, known_mode, known_ret);
4283 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4284 unsigned HOST_WIDE_INT outer = 0;
4286 if (mode_width > width)
4287 outer = (op_nonzero & nonzero & ~mode_mask);
4289 if (code == LSHIFTRT)
4290 inner >>= count;
4291 else if (code == ASHIFTRT)
4293 inner >>= count;
4295 /* If the sign bit may have been nonzero before the shift, we
4296 need to mark all the places it could have been copied to
4297 by the shift as possibly nonzero. */
4298 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4299 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4301 else if (code == ASHIFT)
4302 inner <<= count;
4303 else
4304 inner = ((inner << (count % width)
4305 | (inner >> (width - (count % width)))) & mode_mask);
4307 nonzero &= (outer | inner);
4309 break;
4311 case FFS:
4312 case POPCOUNT:
4313 /* This is at most the number of bits in the mode. */
4314 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4315 break;
4317 case CLZ:
4318 /* If CLZ has a known value at zero, then the nonzero bits are
4319 that value, plus the number of bits in the mode minus one. */
4320 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4321 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4322 else
4323 nonzero = -1;
4324 break;
4326 case CTZ:
4327 /* If CTZ has a known value at zero, then the nonzero bits are
4328 that value, plus the number of bits in the mode minus one. */
4329 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4330 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4331 else
4332 nonzero = -1;
4333 break;
4335 case PARITY:
4336 nonzero = 1;
4337 break;
4339 case IF_THEN_ELSE:
4341 unsigned HOST_WIDE_INT nonzero_true =
4342 cached_nonzero_bits (XEXP (x, 1), mode,
4343 known_x, known_mode, known_ret);
4345 /* Don't call nonzero_bits a second time if it cannot change
4346 anything. */
4347 if ((nonzero & nonzero_true) != nonzero)
4348 nonzero &= nonzero_true
4349 | cached_nonzero_bits (XEXP (x, 2), mode,
4350 known_x, known_mode, known_ret);
4352 break;
4354 default:
4355 break;
4358 return nonzero;
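/* Worked examples of the cases above: for (const_int 6) the CONST_INT
   case returns exactly 6; for (and:SI (reg:SI N) (const_int 15)) the
   AND case intersects the nonzero bits of both operands, so
   nonzero_bits reports at most the low four bits (15), regardless of
   what is known about the register.  */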
4361 /* See the macro definition above. */
4362 #undef cached_num_sign_bit_copies
4365 /* The function cached_num_sign_bit_copies is a wrapper around
4366 num_sign_bit_copies1. It avoids exponential behavior in
4367 num_sign_bit_copies1 when X has identical subexpressions on the
4368 first or the second level. */
4370 static unsigned int
4371 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
4372 enum machine_mode known_mode,
4373 unsigned int known_ret)
4375 if (x == known_x && mode == known_mode)
4376 return known_ret;
4378 /* Try to find identical subexpressions. If found call
4379 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4380 the precomputed value for the subexpression as KNOWN_RET. */
4382 if (ARITHMETIC_P (x))
4384 rtx x0 = XEXP (x, 0);
4385 rtx x1 = XEXP (x, 1);
4387 /* Check the first level. */
4388 if (x0 == x1)
4389 return
4390 num_sign_bit_copies1 (x, mode, x0, mode,
4391 cached_num_sign_bit_copies (x0, mode, known_x,
4392 known_mode,
4393 known_ret));
4395 /* Check the second level. */
4396 if (ARITHMETIC_P (x0)
4397 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4398 return
4399 num_sign_bit_copies1 (x, mode, x1, mode,
4400 cached_num_sign_bit_copies (x1, mode, known_x,
4401 known_mode,
4402 known_ret));
4404 if (ARITHMETIC_P (x1)
4405 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4406 return
4407 num_sign_bit_copies1 (x, mode, x0, mode,
4408 cached_num_sign_bit_copies (x0, mode, known_x,
4409 known_mode,
4410 known_ret));
4413 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4416 /* Return the number of bits at the high-order end of X that are known to
4417 be equal to the sign bit. X will be used in mode MODE; if MODE is
4418 VOIDmode, X will be used in its own mode. The returned value will always
4419 be between 1 and the number of bits in MODE. */
4421 static unsigned int
4422 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
4423 enum machine_mode known_mode,
4424 unsigned int known_ret)
4425 {
4426 enum rtx_code code = GET_CODE (x);
4427 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4428 int num0, num1, result;
4429 unsigned HOST_WIDE_INT nonzero;
4431 /* If we weren't given a mode, use the mode of X. If the mode is still
4432 VOIDmode, we don't know anything. Likewise if one of the modes is
4433 floating-point. */
4435 if (mode == VOIDmode)
4436 mode = GET_MODE (x);
4438 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4439 return 1;
4441 /* For a smaller object, just ignore the high bits. */
4442 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4443 {
4444 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4445 known_x, known_mode, known_ret);
4446 return MAX (1,
4447 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4448 }
4450 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4451 {
4452 #ifndef WORD_REGISTER_OPERATIONS
4453 /* If this machine does not do all register operations on the entire
4454 register and MODE is wider than the mode of X, we can say nothing
4455 at all about the high-order bits. */
4456 return 1;
4457 #else
4458 /* Likewise on machines that do, if the mode of the object is smaller
4459 than a word and loads of that size don't sign extend, we can say
4460 nothing about the high order bits. */
4461 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4462 #ifdef LOAD_EXTEND_OP
4463 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4464 #endif
4465 )
4466 return 1;
4467 #endif
4468 }
4470 switch (code)
4471 {
4472 case REG:
4474 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4475 /* If pointers extend signed and this is a pointer in Pmode, say that
4476 all the bits above ptr_mode are known to be sign bit copies. */
4477 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4478 && REG_POINTER (x))
4479 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4480 #endif
4482 {
4483 unsigned int copies_for_hook = 1, copies = 1;
4484 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4485 known_mode, known_ret,
4486 &copies_for_hook);
4488 if (new)
4489 copies = cached_num_sign_bit_copies (new, mode, known_x,
4490 known_mode, known_ret);
4492 if (copies > 1 || copies_for_hook > 1)
4493 return MAX (copies, copies_for_hook);
4495 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4496 }
4497 break;
4499 case MEM:
4500 #ifdef LOAD_EXTEND_OP
4501 /* Some RISC machines sign-extend all loads of smaller than a word. */
4502 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4503 return MAX (1, ((int) bitwidth
4504 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4505 #endif
4506 break;
4508 case CONST_INT:
4509 /* If the constant is negative, take its 1's complement and remask.
4510 Then see how many zero bits we have. */
4511 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4512 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4513 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4514 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4516 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
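/* Editor's illustration (not in the original source): for x == -4 in an
   8-bit mode, nonzero == 0xfc; the sign bit is set, so nonzero becomes
   ~0xfc & 0xff == 0x03 and the result is 8 - floor_log2 (3) - 1 == 6,
   matching the six leading ones of 0b11111100. */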
4518 case SUBREG:
4519 /* If this is a SUBREG for a promoted object that is sign-extended
4520 and we are looking at it in a wider mode, at least the
4521 high-order bits are known to be sign bit copies. */
4523 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4524 {
4525 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4526 known_x, known_mode, known_ret);
4527 return MAX ((int) bitwidth
4528 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4529 num0);
4530 }
4532 /* For a smaller object, just ignore the high bits. */
4533 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4534 {
4535 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4536 known_x, known_mode, known_ret);
4537 return MAX (1, (num0
4538 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4539 - bitwidth)));
4540 }
4542 #ifdef WORD_REGISTER_OPERATIONS
4543 #ifdef LOAD_EXTEND_OP
4544 /* For paradoxical SUBREGs on machines where all register operations
4545 affect the entire register, just look inside. Note that we are
4546 passing MODE to the recursive call, so the number of sign bit copies
4547 will remain relative to that mode, not the inner mode. */
4549 /* This works only if loads sign extend. Otherwise, if we get a
4550 reload for the inner part, it may be loaded from the stack, and
4551 then we lose all sign bit copies that existed before the store
4552 to the stack. */
4554 if ((GET_MODE_SIZE (GET_MODE (x))
4555 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4556 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4557 && MEM_P (SUBREG_REG (x)))
4558 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4559 known_x, known_mode, known_ret);
4560 #endif
4561 #endif
4562 break;
4564 case SIGN_EXTRACT:
4565 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4566 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4567 break;
4569 case SIGN_EXTEND:
4570 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4571 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4572 known_x, known_mode, known_ret));
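/* Editor's illustration (not in the original source): for
   (sign_extend:SI (reg:QI r)), bitwidth == 32 and the inner mode has 8
   bits, so even if r contributes only 1 copy the result has at least
   32 - 8 + 1 == 25 sign bit copies. */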
4574 case TRUNCATE:
4575 /* For a smaller object, just ignore the high bits. */
4576 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4577 known_x, known_mode, known_ret);
4578 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4579 - bitwidth)));
4581 case NOT:
4582 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4583 known_x, known_mode, known_ret);
4585 case ROTATE: case ROTATERT:
4586 /* If we are rotating left by a number of bits less than the number
4587 of sign bit copies, we can just subtract that amount from the
4588 number. */
4589 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4590 && INTVAL (XEXP (x, 1)) >= 0
4591 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4592 {
4593 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4594 known_x, known_mode, known_ret);
4595 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4596 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4597 }
4598 break;
4600 case NEG:
4601 /* In general, this subtracts one sign bit copy. But if the value
4602 is known to be positive, the number of sign bit copies is the
4603 same as that of the input. Finally, if the input has just one bit
4604 that might be nonzero, all the bits are copies of the sign bit. */
4605 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4606 known_x, known_mode, known_ret);
4607 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4608 return num0 > 1 ? num0 - 1 : 1;
4610 nonzero = nonzero_bits (XEXP (x, 0), mode);
4611 if (nonzero == 1)
4612 return bitwidth;
4614 if (num0 > 1
4615 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4616 num0--;
4618 return num0;
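/* Editor's illustration (not in the original source): negating the
   SImode range [-2^27, 2^27 - 1] (5 sign bit copies) can yield 2^27,
   which has only 4 copies -- hence the num0-- above when the sign bit
   may be set. */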
4620 case IOR: case AND: case XOR:
4621 case SMIN: case SMAX: case UMIN: case UMAX:
4622 /* Logical operations will preserve the number of sign-bit copies.
4623 MIN and MAX operations always return one of the operands. */
4624 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4625 known_x, known_mode, known_ret);
4626 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4627 known_x, known_mode, known_ret);
4628 return MIN (num0, num1);
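/* Editor's illustration (not in the original source): if one operand
   has 10 sign bit copies and the other 7, the top 7 bits of each are
   all-zero or all-one, so any bitwise result still has at least
   MIN (10, 7) == 7 copies. */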
4630 case PLUS: case MINUS:
4631 /* For addition and subtraction, we can have a 1-bit carry. However,
4632 if we are subtracting 1 from a positive number, there will not
4633 be such a carry. Furthermore, if the positive number is known to
4634 be 0 or 1, we know the result is either -1 or 0. */
4636 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4637 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4638 {
4639 nonzero = nonzero_bits (XEXP (x, 0), mode);
4640 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4641 return (nonzero == 1 || nonzero == 0 ? bitwidth
4642 : bitwidth - floor_log2 (nonzero) - 1);
4643 }
4645 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4646 known_x, known_mode, known_ret);
4647 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4648 known_x, known_mode, known_ret);
4649 result = MAX (1, MIN (num0, num1) - 1);
4651 #ifdef POINTERS_EXTEND_UNSIGNED
4652 /* If pointers extend signed and this is an addition or subtraction
4653 to a pointer in Pmode, all the bits above ptr_mode are known to be
4654 sign bit copies. */
4655 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4656 && (code == PLUS || code == MINUS)
4657 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4658 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4659 - GET_MODE_BITSIZE (ptr_mode) + 1),
4660 result);
4661 #endif
4662 return result;
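/* Editor's illustration (not in the original source): two SImode
   addends with 16 copies each lie in [-2^16, 2^16 - 1]; their sum fits
   in 18 signed bits, leaving 32 - 18 + 1 == 15 == MIN (16, 16) - 1
   copies. */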
4664 case MULT:
4665 /* The number of bits of the product is the sum of the number of
4666 bits of both terms. However, unless one of the terms is known
4667 to be positive, we must allow for an additional bit since negating
4668 a negative number can remove one sign bit copy. */
4670 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4671 known_x, known_mode, known_ret);
4672 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4673 known_x, known_mode, known_ret);
4675 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4676 if (result > 0
4677 && (bitwidth > HOST_BITS_PER_WIDE_INT
4678 || (((nonzero_bits (XEXP (x, 0), mode)
4679 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4680 && ((nonzero_bits (XEXP (x, 1), mode)
4681 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4682 result--;
4684 return MAX (1, result);
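/* Editor's illustration (not in the original source): in SImode, two
   factors with 20 copies each fit in 13 signed bits, so result starts
   at 20 + 20 - 32 == 8; since (-2^12) * (-2^12) == 2^24 needs a 26-bit
   signed representation, one more copy is dropped, giving 7. */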
4686 case UDIV:
4687 /* The result must be <= the first operand. If the first operand
4688 has the high bit set, we know nothing about the number of sign
4689 bit copies. */
4690 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4691 return 1;
4692 else if ((nonzero_bits (XEXP (x, 0), mode)
4693 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4694 return 1;
4695 else
4696 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4697 known_x, known_mode, known_ret);
4699 case UMOD:
4700 /* The result must be <= the second operand. */
4701 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4702 known_x, known_mode, known_ret);
4704 case DIV:
4705 /* Similar to unsigned division, except that we have to worry about
4706 the case where the divisor is negative, in which case we have
4707 to add 1. */
4708 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4709 known_x, known_mode, known_ret);
4710 if (result > 1
4711 && (bitwidth > HOST_BITS_PER_WIDE_INT
4712 || (nonzero_bits (XEXP (x, 1), mode)
4713 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4714 result--;
4716 return result;
4718 case MOD:
4719 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4720 known_x, known_mode, known_ret);
4721 if (result > 1
4722 && (bitwidth > HOST_BITS_PER_WIDE_INT
4723 || (nonzero_bits (XEXP (x, 1), mode)
4724 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4725 result--;
4727 return result;
4729 case ASHIFTRT:
4730 /* Shifts by a constant add to the number of bits equal to the
4731 sign bit. */
4732 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4733 known_x, known_mode, known_ret);
4734 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4735 && INTVAL (XEXP (x, 1)) > 0)
4736 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4738 return num0;
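/* Editor's illustration (not in the original source): for
   (ashiftrt:SI x 8) where x has 5 sign bit copies, the result has
   MIN (32, 5 + 8) == 13 copies. */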
4740 case ASHIFT:
4741 /* Left shifts destroy copies. */
4742 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4743 || INTVAL (XEXP (x, 1)) < 0
4744 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4745 return 1;
4747 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4748 known_x, known_mode, known_ret);
4749 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
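/* Editor's illustration (not in the original source): for
   (ashift:SI x 3) where x has 10 sign bit copies, only
   MAX (1, 10 - 3) == 7 survive the left shift. */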
4751 case IF_THEN_ELSE:
4752 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4753 known_x, known_mode, known_ret);
4754 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4755 known_x, known_mode, known_ret);
4756 return MIN (num0, num1);
4758 case EQ: case NE: case GE: case GT: case LE: case LT:
4759 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4760 case GEU: case GTU: case LEU: case LTU:
4761 case UNORDERED: case ORDERED:
4762 /* If STORE_FLAG_VALUE is negative, take its 1's complement and
4763 remask. Then see how many zero bits we have. */
4764 nonzero = STORE_FLAG_VALUE;
4765 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4766 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4767 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4769 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
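/* Editor's illustration (not in the original source): with
   STORE_FLAG_VALUE == 1 the result is 0 or 1, giving
   bitwidth - floor_log2 (1) - 1 == bitwidth - 1 copies; with
   STORE_FLAG_VALUE == -1 the complement-and-remask step yields 0, so
   all BITWIDTH bits are copies. */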
4771 default:
4772 break;
4773 }
4775 /* If we haven't been able to figure it out by one of the above rules,
4776 see if some of the high-order bits are known to be zero. If so,
4777 the count of those bits gives the number of sign bit copies. If we
4778 can't safely compute the mask for this mode, return 1. */
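/* Editor's illustration (not in the original source): if nonzero_bits
   returns 0x0000ffff in a 32-bit mode, floor_log2 == 15 and the result
   is 32 - 15 - 1 == 16, one copy per known-zero high-order bit. */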
4780 bitwidth = GET_MODE_BITSIZE (mode);
4781 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4782 return 1;
4784 nonzero = nonzero_bits (x, mode);
4785 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4786 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4787 }