/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "basic-block.h"
#include "real.h"
#include "regs.h"
#include "function.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static void insn_dependent_p_1 (rtx, rtx, void *);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);
static bool hoist_test_store (rtx, rtx, regset);
static void hoist_update_store (rtx, rtx *, rtx, rtx);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);
/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
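
/* Usage sketch (illustrative, not part of the original file): a pass
   that wants to reuse a computed address at a later program point
   might guard the reuse with rtx_unstable_p.  ADDRESS_REUSABLE_P and
   MEM_RTX are hypothetical.

     static int
     address_reusable_p (rtx mem_rtx)
     {
       return !rtx_unstable_p (XEXP (mem_rtx, 0));
     }

   Expressions built only from constants and the stable pointers listed
   above come back 0 and are safe to re-evaluate later in the function.  */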
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
/* Return 0 if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    case PLUS:
      /* An address is assumed not to trap if it is an address that can't
         trap plus a constant integer, or if it is the pic register plus a
         constant.  */
      return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
                 && GET_CODE (XEXP (x, 1)) == CONST_INT)
                || (XEXP (x, 0) == pic_offset_table_rtx
                    && CONSTANT_P (XEXP (x, 1))));

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 1));

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p (XEXP (x, 0));

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}
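
/* Worked example of the PLUS rule above (illustrative): for a PLUS of
   the frame pointer and (const_int 8), the REG case reports the base
   as non-trapping and the offset is a CONST_INT, so the whole address
   is reported as safe.  For (plus (reg 100) (reg 101)) neither clause
   of the PLUS test holds and the function returns 1.  */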
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))
            return true;

          return nonzero_address_p (XEXP (x, 0));
        }
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
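
/* For example (illustrative): (plus (reg 100) (const_int 16)) is known
   nonzero by the positive-offset rule above when reg 100 has
   REG_POINTER set; with a negative offset the function instead just
   recurses into the base register.  */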
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
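
/* Worked example (illustrative): given
     (const (plus (symbol_ref "x") (const_int 12)))
   get_integer_term returns 12 and get_related_value returns the
   (symbol_ref "x"); this pair is how cse.c relates two addresses that
   differ only by a constant offset.  */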
/* Given a tablejump insn INSN, return the RTL expression for the offset
   into the jump table.  If the offset cannot be determined, then return
   NULL_RTX.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the offset was found.  */

rtx
get_jump_table_offset (rtx insn, rtx *earliest)
{
  rtx label = NULL;
  rtx table = NULL;
  rtx set;
  rtx old_insn;
  rtx x;
  rtx old_x;
  rtx y;
  rtx old_y;
  int i;

  if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
    return NULL_RTX;

  x = SET_SRC (set);

  /* Some targets (e.g., ARM) emit a tablejump that also
     contains the out-of-range target.  */
  if (GET_CODE (x) == IF_THEN_ELSE
      && GET_CODE (XEXP (x, 2)) == LABEL_REF)
    x = XEXP (x, 1);

  /* Search backwards and locate the expression stored in X.  */
  for (old_x = NULL_RTX; REG_P (x) && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
    ;

  /* If X is an expression using a relative address then strip
     off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
     or the jump table label.  */
  if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
      && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
    {
      for (i = 0; i < 2; i++)
        {
          old_insn = insn;
          y = XEXP (x, i);

          if (y == pc_rtx || y == pic_offset_table_rtx)
            break;

          for (old_y = NULL_RTX; REG_P (y) && y != old_y;
               old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
            ;

          if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
            break;
        }

      if (i >= 2)
        return NULL_RTX;

      x = XEXP (x, 1 - i);

      for (old_x = NULL_RTX; REG_P (x) && x != old_x;
           old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
        ;
    }

  /* Strip off any sign or zero extension.  */
  if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
    {
      x = XEXP (x, 0);

      for (old_x = NULL_RTX; REG_P (x) && x != old_x;
           old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
        ;
    }

  /* If X isn't a MEM then this isn't a tablejump we understand.  */
  if (!MEM_P (x))
    return NULL_RTX;

  /* Strip off the MEM.  */
  x = XEXP (x, 0);

  for (old_x = NULL_RTX; REG_P (x) && x != old_x;
       old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
    ;

  /* If X isn't a PLUS then this isn't a tablejump we understand.  */
  if (GET_CODE (x) != PLUS)
    return NULL_RTX;

  /* At this point we should have an expression representing the jump table
     plus an offset.  Examine each operand in order to determine which one
     represents the jump table.  Knowing that tells us that the other operand
     must represent the offset.  */
  for (i = 0; i < 2; i++)
    {
      old_insn = insn;
      y = XEXP (x, i);

      for (old_y = NULL_RTX; REG_P (y) && y != old_y;
           old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
        ;

      if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
          && reg_mentioned_p (label, y))
        break;
    }

  if (i >= 2)
    return NULL_RTX;

  x = XEXP (x, 1 - i);

  /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM.  */
  if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
    for (i = 0; i < 2; i++)
      if (XEXP (x, i) == pic_offset_table_rtx)
        {
          x = XEXP (x, 1 - i);
          break;
        }

  if (earliest)
    *earliest = insn;

  /* Return the RTL expression representing the offset.  */
  return x;
}
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
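
/* Illustrative call (INSN and REG are hypothetical): to count how many
   times a register is read by a pattern without counting its pure
   definition, pass COUNT_DEST == 0:

     int uses = count_occurrences (PATTERN (insn), reg, 0);

   A SET whose SET_DEST is exactly REG then contributes only the
   occurrences inside its SET_SRC.  */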
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }

  return 0;
}
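
/* Because of the "Lisp equal" behavior above, REG need not actually be
   a REG rtx.  E.g. (illustrative; A, B and the helper are hypothetical):

     static int
     mentions_sum_p (rtx a, rtx b, rtx in)
     {
       return reg_mentioned_p (gen_rtx_PLUS (SImode, a, b), in);
     }

   This matches any subexpression of IN that is rtx_equal_p to the
   constructed PLUS.  */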
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no JUMP_INSN insn.  */

int
no_jumps_between_p (rtx beg, rtx end)
{
  rtx p;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (JUMP_P (p))
      return 0;
  return 1;
}
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn)
                && (find_reg_fusage (insn, USE, reg)
                    || find_reg_fusage (insn, CLOBBER, reg)))))
      return 1;
  return 0;
}
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
/* Nonzero if register REG is referenced in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  Sets of REG do
   not count.  */

int
reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_referenced_p (reg, PATTERN (insn))
            || (CALL_P (insn)
                && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}
/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                         REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Do not
   consider non-registers one way or the other.  */

int
regs_set_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
    case PC:
    case CC0:
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (regs_set_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (MEM_READONLY_P (x))
        return 0;
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (MEM_READONLY_P (x))
        return 0;
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
/* Return true if anything in insn X is (anti,output,true) dependent on
   anything in insn Y.  */

int
insn_dependent_p (rtx x, rtx y)
{
  rtx tmp;

  if (! INSN_P (x) || ! INSN_P (y))
    abort ();

  tmp = PATTERN (y);
  note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return 1;

  tmp = PATTERN (x);
  note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
  if (tmp == NULL_RTX)
    return 1;

  return 0;
}

/* A helper routine for insn_dependent_p called through note_stores.  */

static void
insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
{
  rtx * pinsn = (rtx *) data;

  if (*pinsn && reg_mentioned_p (x, *pinsn))
    *pinsn = NULL_RTX;
}
/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
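
/* Usage sketch (illustrative): unlike reg_set_p, set_of also hands
   back the SET or CLOBBER rtx itself, so a caller can inspect the new
   value.  NOTE_THE_NEW_VALUE is a hypothetical consumer.

     rtx setter = set_of (reg, insn);
     if (setter != NULL_RTX && GET_CODE (setter) == SET)
       note_the_new_value (SET_SRC (setter));
 */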
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect this is
                 the single set we are looking for and only when more
                 sets are found in the insn, we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
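
/* single_set_2 is the slow path behind the single_set wrapper in
   rtl.h, which handles the plain one-SET pattern without calling here.
   Typical use (illustrative; RECORD_EQUIVALENCE is hypothetical):

     rtx set = single_set (insn);
     if (set != NULL_RTX)
       record_equivalence (SET_DEST (set), SET_SRC (set));

   The point is that CLOBBERs, USEs, and provably dead SETs in a
   PARALLEL do not defeat the match.  */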
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == SIGN_EXTRACT
      || GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note are always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN; we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
            return 1;

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
#ifdef ENABLE_CHECKING
      if (!CONSTANT_P (x))
        abort ();
#endif

      return 0;
    }
}
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == SIGN_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
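
/* Callback sketch (illustrative): collecting every register an insn
   pattern sets or clobbers into a caller-owned regset.  Both helper
   names are hypothetical.

     static void
     record_stored_reg (rtx x, rtx setter ATTRIBUTE_UNUSED, void *data)
     {
       if (REG_P (x))
         SET_REGNO_REG_SET ((regset) data, REGNO (x));
     }

     static void
     record_insn_stores (rtx insn, regset stored)
     {
       note_stores (PATTERN (insn), record_stored_reg, stored);
     }
 */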
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we visit everything in the source, plus the registers
           mentioned in a memory expression being stored into, plus the
           operands of a ZERO_EXTRACT destination.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG,
   ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  if (!REG_P (x))
    abort ();

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  unsigned int regno, endregno;
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    {
      rtx dest = SET_DEST (pattern);

      /* A value is totally replaced if it is the destination or the
         destination is a SUBREG of REGNO that does not change the number of
         words in it.  */
      if (GET_CODE (dest) == SUBREG
          && (((GET_MODE_SIZE (GET_MODE (dest))
                + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
              == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                   + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
        dest = SUBREG_REG (dest);

      if (!REG_P (dest))
        return 0;

      regno = REGNO (dest);
      endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
                  : regno + hard_regno_nregs[regno][GET_MODE (dest)]);

      return (test_regno >= regno && test_regno < endregno);
    }
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
            {
              rtx dest = SET_DEST (body);

              if (GET_CODE (dest) == SUBREG
                  && (((GET_MODE_SIZE (GET_MODE (dest))
                        + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
                      == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
                           + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
                dest = SUBREG_REG (dest);

              if (!REG_P (dest))
                continue;

              regno = REGNO (dest);
              endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
                          : regno + hard_regno_nregs[regno][GET_MODE (dest)]);

              if (test_regno >= regno && test_regno < endregno)
                return 1;
            }
        }
    }

  return 0;
}
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  if (! datum)
    abort ();

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}
/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  abort ();
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no other
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
      /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
2300 /* Return nonzero if evaluating rtx X might cause a trap. */
2303 may_trap_p (rtx x)
2305 int i;
2306 enum rtx_code code;
2307 const char *fmt;
2309 if (x == 0)
2310 return 0;
2311 code = GET_CODE (x);
2312 switch (code)
2314 /* Handle these cases quickly. */
2315 case CONST_INT:
2316 case CONST_DOUBLE:
2317 case CONST_VECTOR:
2318 case SYMBOL_REF:
2319 case LABEL_REF:
2320 case CONST:
2321 case PC:
2322 case CC0:
2323 case REG:
2324 case SCRATCH:
2325 return 0;
2327 case ASM_INPUT:
2328 case UNSPEC_VOLATILE:
2329 case TRAP_IF:
2330 return 1;
2332 case ASM_OPERANDS:
2333 return MEM_VOLATILE_P (x);
2335 /* Memory ref can trap unless it's a static var or a stack slot. */
2336 case MEM:
2337 if (MEM_NOTRAP_P (x))
2338 return 0;
2339 return rtx_addr_can_trap_p (XEXP (x, 0));
2341 /* Division by a non-constant might trap. */
2342 case DIV:
2343 case MOD:
2344 case UDIV:
2345 case UMOD:
2346 if (HONOR_SNANS (GET_MODE (x)))
2347 return 1;
2348 if (! CONSTANT_P (XEXP (x, 1))
2349 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2350 && flag_trapping_math))
2351 return 1;
2352 if (XEXP (x, 1) == const0_rtx)
2353 return 1;
2354 break;
2356 case EXPR_LIST:
2357 /* An EXPR_LIST is used to represent a function call. This
2358 certainly may trap. */
2359 return 1;
2361 case GE:
2362 case GT:
2363 case LE:
2364 case LT:
2365 case LTGT:
2366 case COMPARE:
2367 /* Some floating point comparisons may trap. */
2368 if (!flag_trapping_math)
2369 break;
2370 /* ??? There is no machine independent way to check for tests that trap
2371 when COMPARE is used, though many targets do make this distinction.
2372 For instance, sparc uses CCFPE for compares which generate exceptions
2373 and CCFP for compares which do not generate exceptions. */
2374 if (HONOR_NANS (GET_MODE (x)))
2375 return 1;
2376 /* But often the compare has some CC mode, so check operand
2377 modes as well. */
2378 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2379 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2380 return 1;
2381 break;
2383 case EQ:
2384 case NE:
2385 if (HONOR_SNANS (GET_MODE (x)))
2386 return 1;
2387 /* Often comparison is CC mode, so check operand modes. */
2388 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2389 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2390 return 1;
2391 break;
2393 case FIX:
2394 /* Conversion of floating point might trap. */
2395 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2396 return 1;
2397 break;
2399 case NEG:
2400 case ABS:
2401 /* These operations don't trap even with floating point. */
2402 break;
2404 default:
2405 /* Any floating arithmetic may trap. */
2406 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2407 && flag_trapping_math)
2408 return 1;
2411 fmt = GET_RTX_FORMAT (code);
2412 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2414 if (fmt[i] == 'e')
2416 if (may_trap_p (XEXP (x, i)))
2417 return 1;
2419 else if (fmt[i] == 'E')
2421 int j;
2422 for (j = 0; j < XVECLEN (x, i); j++)
2423 if (may_trap_p (XVECEXP (x, i, j)))
2424 return 1;
2427 return 0;
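/* Editorial example: assuming pseudo registers OP0 and REG,

     may_trap_p (gen_rtx_DIV (SImode, op0, reg))

   is 1, since the non-constant divisor might be zero; with a nonzero
   CONST_INT divisor the DIV itself is trap-free and only the operands
   are scanned.  */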
2430 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2431 i.e., an inequality. */
2434 inequality_comparisons_p (rtx x)
2436 const char *fmt;
2437 int len, i;
2438 enum rtx_code code = GET_CODE (x);
2440 switch (code)
2442 case REG:
2443 case SCRATCH:
2444 case PC:
2445 case CC0:
2446 case CONST_INT:
2447 case CONST_DOUBLE:
2448 case CONST_VECTOR:
2449 case CONST:
2450 case LABEL_REF:
2451 case SYMBOL_REF:
2452 return 0;
2454 case LT:
2455 case LTU:
2456 case GT:
2457 case GTU:
2458 case LE:
2459 case LEU:
2460 case GE:
2461 case GEU:
2462 return 1;
2464 default:
2465 break;
2468 len = GET_RTX_LENGTH (code);
2469 fmt = GET_RTX_FORMAT (code);
2471 for (i = 0; i < len; i++)
2473 if (fmt[i] == 'e')
2475 if (inequality_comparisons_p (XEXP (x, i)))
2476 return 1;
2478 else if (fmt[i] == 'E')
2480 int j;
2481 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2482 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2483 return 1;
2487 return 0;
2490 /* Replace any occurrence of FROM in X with TO.  The function does
2491    not recurse into CONST_DOUBLE expressions when replacing.
2493 Note that copying is not done so X must not be shared unless all copies
2494 are to be modified. */
2497 replace_rtx (rtx x, rtx from, rtx to)
2499 int i, j;
2500 const char *fmt;
2502 /* The following prevents infinite loops when we replace a MEM inside
2503    a CONST_DOUBLE with the same CONST_DOUBLE.  */
2504 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2505 return x;
2507 if (x == from)
2508 return to;
2510 /* Allow this function to make replacements in EXPR_LISTs. */
2511 if (x == 0)
2512 return 0;
2514 if (GET_CODE (x) == SUBREG)
2516 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2518 if (GET_CODE (new) == CONST_INT)
2520 x = simplify_subreg (GET_MODE (x), new,
2521 GET_MODE (SUBREG_REG (x)),
2522 SUBREG_BYTE (x));
2523 if (! x)
2524 abort ();
2526 else
2527 SUBREG_REG (x) = new;
2529 return x;
2531 else if (GET_CODE (x) == ZERO_EXTEND)
2533 rtx new = replace_rtx (XEXP (x, 0), from, to);
2535 if (GET_CODE (new) == CONST_INT)
2537 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2538 new, GET_MODE (XEXP (x, 0)));
2539 if (! x)
2540 abort ();
2542 else
2543 XEXP (x, 0) = new;
2545 return x;
2548 fmt = GET_RTX_FORMAT (GET_CODE (x));
2549 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2551 if (fmt[i] == 'e')
2552 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2553 else if (fmt[i] == 'E')
2554 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2555 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2558 return x;
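/* Editorial usage sketch; replace_reg_in_pattern is a hypothetical
   helper, not part of GCC.  Since replace_rtx modifies X in place,
   unshare the pattern first when other insns may still reference it.  */

static rtx
replace_reg_in_pattern (rtx insn, rtx old_reg, rtx new_reg)
{
  /* Copy so that insns sharing this pattern are left untouched.  */
  rtx pat = copy_rtx (PATTERN (insn));
  return replace_rtx (pat, old_reg, new_reg);
}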
2561 /* Throughout the rtx X, replace many registers according to REG_MAP.
2562 Return the replacement for X (which may be X with altered contents).
2563 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2564 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2566 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2567 should not be mapped to pseudos or vice versa since validate_change
2568 is not called.
2570 If REPLACE_DEST is 1, replacements are also done in destinations;
2571 otherwise, only sources are replaced. */
2574 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2576 enum rtx_code code;
2577 int i;
2578 const char *fmt;
2580 if (x == 0)
2581 return x;
2583 code = GET_CODE (x);
2584 switch (code)
2586 case SCRATCH:
2587 case PC:
2588 case CC0:
2589 case CONST_INT:
2590 case CONST_DOUBLE:
2591 case CONST_VECTOR:
2592 case CONST:
2593 case SYMBOL_REF:
2594 case LABEL_REF:
2595 return x;
2597 case REG:
2598 /* Verify that the register has an entry before trying to access it. */
2599 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2601 /* SUBREGs can't be shared. Always return a copy to ensure that if
2602 this replacement occurs more than once then each instance will
2603 get a distinct rtx.  */
2604 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2605 return copy_rtx (reg_map[REGNO (x)]);
2606 return reg_map[REGNO (x)];
2608 return x;
2610 case SUBREG:
2611 /* Prevent making nested SUBREGs. */
2612 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2613 && reg_map[REGNO (SUBREG_REG (x))] != 0
2614 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2616 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2617 return simplify_gen_subreg (GET_MODE (x), map_val,
2618 GET_MODE (SUBREG_REG (x)),
2619 SUBREG_BYTE (x));
2621 break;
2623 case SET:
2624 if (replace_dest)
2625 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2627 else if (MEM_P (SET_DEST (x))
2628 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2629 /* Even if we are not replacing destinations, replace a register if it
2630    is CONTAINED in the destination (when the destination is memory or
2631    a STRICT_LOW_PART).  */
2632 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2633 reg_map, nregs, 0);
2634 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2635 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2636 break;
2638 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2639 return x;
2641 default:
2642 break;
2645 fmt = GET_RTX_FORMAT (code);
2646 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2648 if (fmt[i] == 'e')
2649 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2650 else if (fmt[i] == 'E')
2652 int j;
2653 for (j = 0; j < XVECLEN (x, i); j++)
2654 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2655 nregs, replace_dest);
2658 return x;
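/* Editorial usage sketch; substitute_one_reg is a hypothetical helper.
   REG_MAP is indexed by register number, so entries left at zero are
   kept as-is.  */

static rtx
substitute_one_reg (rtx body, rtx old_reg, rtx new_reg)
{
  /* One slot per register; only OLD_REG's slot is filled in.  */
  rtx *reg_map = xcalloc (max_reg_num (), sizeof (rtx));
  rtx result;

  reg_map[REGNO (old_reg)] = new_reg;
  result = replace_regs (body, reg_map, max_reg_num (), 1);
  free (reg_map);
  return result;
}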
2661 /* Replace occurrences of the old label in *X with the new one.
2662 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2665 replace_label (rtx *x, void *data)
2667 rtx l = *x;
2668 rtx old_label = ((replace_label_data *) data)->r1;
2669 rtx new_label = ((replace_label_data *) data)->r2;
2670 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2672 if (l == NULL_RTX)
2673 return 0;
2675 if (GET_CODE (l) == SYMBOL_REF
2676 && CONSTANT_POOL_ADDRESS_P (l))
2678 rtx c = get_pool_constant (l);
2679 if (rtx_referenced_p (old_label, c))
2681 rtx new_c, new_l;
2682 replace_label_data *d = (replace_label_data *) data;
2684 /* Create a copy of constant C; replace the label inside
2685 but do not update LABEL_NUSES because uses in constant pool
2686 are not counted. */
2687 new_c = copy_rtx (c);
2688 d->update_label_nuses = false;
2689 for_each_rtx (&new_c, replace_label, data);
2690 d->update_label_nuses = update_label_nuses;
2692 /* Add the new constant NEW_C to the constant pool and replace
2693    the old reference to the constant with the new one.  */
2694 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2695 *x = replace_rtx (l, l, new_l);
2697 return 0;
2700 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2701 field. This is not handled by for_each_rtx because it doesn't
2702 handle unprinted ('0') fields. */
2703 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2704 JUMP_LABEL (l) = new_label;
2706 if ((GET_CODE (l) == LABEL_REF
2707 || GET_CODE (l) == INSN_LIST)
2708 && XEXP (l, 0) == old_label)
2710 XEXP (l, 0) = new_label;
2711 if (update_label_nuses)
2713 ++LABEL_NUSES (new_label);
2714 --LABEL_NUSES (old_label);
2716 return 0;
2719 return 0;
2722 /* When *BODY is equal to X or X is directly referenced by *BODY,
2723    return nonzero, so that FOR_EACH_RTX stops traversing and returns
2724    nonzero too; otherwise FOR_EACH_RTX continues traversing *BODY.  */
2726 static int
2727 rtx_referenced_p_1 (rtx *body, void *x)
2729 rtx y = (rtx) x;
2731 if (*body == NULL_RTX)
2732 return y == NULL_RTX;
2734 /* Return true if a label_ref *BODY refers to label Y. */
2735 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2736 return XEXP (*body, 0) == y;
2738 /* If *BODY is a reference to a pool constant, traverse the constant.  */
2739 if (GET_CODE (*body) == SYMBOL_REF
2740 && CONSTANT_POOL_ADDRESS_P (*body))
2741 return rtx_referenced_p (y, get_pool_constant (*body));
2743 /* By default, compare the RTL expressions. */
2744 return rtx_equal_p (*body, y);
2747 /* Return true if X is referenced in BODY. */
2750 rtx_referenced_p (rtx x, rtx body)
2752 return for_each_rtx (&body, rtx_referenced_p_1, x);
2755 /* If INSN is a tablejump, return true and store the label (which precedes the
2756    jump table) in *LABELP and the jump table in *TABLEP.  Either may be NULL.  */
2758 bool
2759 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2761 rtx label, table;
2763 if (JUMP_P (insn)
2764 && (label = JUMP_LABEL (insn)) != NULL_RTX
2765 && (table = next_active_insn (label)) != NULL_RTX
2766 && JUMP_P (table)
2767 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2768 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2770 if (labelp)
2771 *labelp = label;
2772 if (tablep)
2773 *tablep = table;
2774 return true;
2776 return false;
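/* Editorial example: a pass that must keep a dispatch table adjacent
   to its jump can test

     rtx label, table;
     if (tablejump_p (insn, &label, &table))
       ...

   and then move LABEL and TABLE together with INSN.  Passing NULL for
   either pointer simply skips that output.  */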
2779 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2780 constant that is not in the constant pool and not in the condition
2781 of an IF_THEN_ELSE. */
2783 static int
2784 computed_jump_p_1 (rtx x)
2786 enum rtx_code code = GET_CODE (x);
2787 int i, j;
2788 const char *fmt;
2790 switch (code)
2792 case LABEL_REF:
2793 case PC:
2794 return 0;
2796 case CONST:
2797 case CONST_INT:
2798 case CONST_DOUBLE:
2799 case CONST_VECTOR:
2800 case SYMBOL_REF:
2801 case REG:
2802 return 1;
2804 case MEM:
2805 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2806 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2808 case IF_THEN_ELSE:
2809 return (computed_jump_p_1 (XEXP (x, 1))
2810 || computed_jump_p_1 (XEXP (x, 2)));
2812 default:
2813 break;
2816 fmt = GET_RTX_FORMAT (code);
2817 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2819 if (fmt[i] == 'e'
2820 && computed_jump_p_1 (XEXP (x, i)))
2821 return 1;
2823 else if (fmt[i] == 'E')
2824 for (j = 0; j < XVECLEN (x, i); j++)
2825 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2826 return 1;
2829 return 0;
2832 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2834 Tablejumps and casesi insns are not considered indirect jumps;
2835 we can recognize them by a (use (label_ref)). */
2838 computed_jump_p (rtx insn)
2840 int i;
2841 if (JUMP_P (insn))
2843 rtx pat = PATTERN (insn);
2845 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2846 return 0;
2847 else if (GET_CODE (pat) == PARALLEL)
2849 int len = XVECLEN (pat, 0);
2850 int has_use_labelref = 0;
2852 for (i = len - 1; i >= 0; i--)
2853 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2854 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2855 == LABEL_REF))
2856 has_use_labelref = 1;
2858 if (! has_use_labelref)
2859 for (i = len - 1; i >= 0; i--)
2860 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2861 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2862 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2863 return 1;
2865 else if (GET_CODE (pat) == SET
2866 && SET_DEST (pat) == pc_rtx
2867 && computed_jump_p_1 (SET_SRC (pat)))
2868 return 1;
2870 return 0;
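/* Editorial example: an indirect jump such as

     (jump_insn (set (pc) (reg:SI 100)))

   satisfies computed_jump_p, whereas a direct jump whose source is a
   (label_ref ...) does not, since computed_jump_p_1 returns 0 for
   LABEL_REF.  */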
2873 /* Traverse X via depth-first search, calling F for each
2874 sub-expression (including X itself). F is also passed the DATA.
2875 If F returns -1, do not traverse sub-expressions, but continue
2876 traversing the rest of the tree. If F ever returns any other
2877 nonzero value, stop the traversal, and return the value returned
2878 by F. Otherwise, return 0. This function does not traverse inside
2879 tree structures that contain RTX_EXPRs, or into sub-expressions
2880 whose format code is `0', since it is not known whether those
2881 fields actually contain RTL.
2883 This routine is very general, and could (should?) be used to
2884 implement many of the other routines in this file. */
2887 for_each_rtx (rtx *x, rtx_function f, void *data)
2889 int result;
2890 int length;
2891 const char *format;
2892 int i;
2894 /* Call F on X. */
2895 result = (*f) (x, data);
2896 if (result == -1)
2897 /* Do not traverse sub-expressions. */
2898 return 0;
2899 else if (result != 0)
2900 /* Stop the traversal. */
2901 return result;
2903 if (*x == NULL_RTX)
2904 /* There are no sub-expressions. */
2905 return 0;
2907 length = GET_RTX_LENGTH (GET_CODE (*x));
2908 format = GET_RTX_FORMAT (GET_CODE (*x));
2910 for (i = 0; i < length; ++i)
2912 switch (format[i])
2914 case 'e':
2915 result = for_each_rtx (&XEXP (*x, i), f, data);
2916 if (result != 0)
2917 return result;
2918 break;
2920 case 'V':
2921 case 'E':
2922 if (XVEC (*x, i) != 0)
2924 int j;
2925 for (j = 0; j < XVECLEN (*x, i); ++j)
2927 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2928 if (result != 0)
2929 return result;
2932 break;
2934 default:
2935 /* Nothing to do. */
2936 break;
2941 return 0;
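/* Editorial example of a for_each_rtx callback; count_regs_1 is a
   hypothetical name.  Returning 0 continues the walk; compare
   rtx_referenced_p_1 above, which stops the walk by returning
   nonzero.  */

static int
count_regs_1 (rtx *loc, void *data)
{
  /* DATA points to an int accumulator supplied by the caller.  */
  if (*loc != NULL_RTX && REG_P (*loc))
    ++*(int *) data;
  return 0;
}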
2944 /* Searches X for any reference to REGNO, returning the rtx of the
2945 reference found if any. Otherwise, returns NULL_RTX. */
2948 regno_use_in (unsigned int regno, rtx x)
2950 const char *fmt;
2951 int i, j;
2952 rtx tem;
2954 if (REG_P (x) && REGNO (x) == regno)
2955 return x;
2957 fmt = GET_RTX_FORMAT (GET_CODE (x));
2958 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2960 if (fmt[i] == 'e')
2962 if ((tem = regno_use_in (regno, XEXP (x, i))))
2963 return tem;
2965 else if (fmt[i] == 'E')
2966 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2967 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2968 return tem;
2971 return NULL_RTX;
2974 /* Return a value indicating whether OP, an operand of a commutative
2975    operation, is preferred as the first or second operand.  The higher
2976    the value, the stronger the preference for being the first operand;
2977    negative values indicate a preference for being the second operand
2978    and positive values for being the first.  */
2981 commutative_operand_precedence (rtx op)
2983 enum rtx_code code = GET_CODE (op);
2985 /* Constants always come second.  Prefer "nice" constants.  */
2986 if (code == CONST_INT)
2987 return -7;
2988 if (code == CONST_DOUBLE)
2989 return -6;
2990 op = avoid_constant_pool_reference (op);
2992 switch (GET_RTX_CLASS (code))
2994 case RTX_CONST_OBJ:
2995 if (code == CONST_INT)
2996 return -5;
2997 if (code == CONST_DOUBLE)
2998 return -4;
2999 return -3;
3001 case RTX_EXTRA:
3002 /* SUBREGs of objects should come second. */
3003 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3004 return -2;
3006 if (!CONSTANT_P (op))
3007 return 0;
3008 else
3009 /* As for RTX_CONST_OBJ. */
3010 return -3;
3012 case RTX_OBJ:
3013 /* Complex expressions should come first, so decrease the priority
3014    of objects.  */
3015 return -1;
3017 case RTX_COMM_ARITH:
3018 /* Prefer operands that are themselves commutative to be first.
3019 This helps to make things linear. In particular,
3020 (and (and (reg) (reg)) (not (reg))) is canonical. */
3021 return 4;
3023 case RTX_BIN_ARITH:
3024 /* If only one operand is a binary expression, it will be the first
3025 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3026 is canonical, although it will usually be further simplified. */
3027 return 2;
3029 case RTX_UNARY:
3030 /* Then prefer NEG and NOT. */
3031 if (code == NEG || code == NOT)
3032 return 1;
3034 default:
3035 return 0;
3039 /* Return 1 iff it is necessary to swap the operands of a commutative
3040    operation in order to canonicalize the expression.  */
3043 swap_commutative_operands_p (rtx x, rtx y)
3045 return (commutative_operand_precedence (x)
3046 < commutative_operand_precedence (y));
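/* Editorial example: for an assumed pseudo register REG,

     swap_commutative_operands_p (GEN_INT (4), reg)

   returns 1, since the CONST_INT's precedence (-7) is lower than the
   register's (-1); canonicalization therefore places the constant
   second, as in (plus (reg) (const_int 4)).  */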
3049 /* Return 1 if X is an autoincrement side effect and the register is
3050 not the stack pointer. */
3052 auto_inc_p (rtx x)
3054 switch (GET_CODE (x))
3056 case PRE_INC:
3057 case POST_INC:
3058 case PRE_DEC:
3059 case POST_DEC:
3060 case PRE_MODIFY:
3061 case POST_MODIFY:
3062 /* There are no REG_INC notes for SP. */
3063 if (XEXP (x, 0) != stack_pointer_rtx)
3064 return 1;
3065 default:
3066 break;
3068 return 0;
3071 /* Return 1 if the sequence of instructions beginning with FROM and up
3072 to and including TO is safe to move. If NEW_TO is non-NULL, and
3073 the sequence is not already safe to move, but can be easily
3074 extended to a sequence which is safe, then NEW_TO will point to the
3075 end of the extended sequence.
3077 For now, this function only checks that the region contains whole
3078 exception regions, but it could be extended to check additional
3079 conditions as well. */
3082 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
3084 int eh_region_count = 0;
3085 int past_to_p = 0;
3086 rtx r = from;
3088 /* By default, assume the end of the region will be what was
3089 suggested. */
3090 if (new_to)
3091 *new_to = to;
3093 while (r)
3095 if (NOTE_P (r))
3097 switch (NOTE_LINE_NUMBER (r))
3099 case NOTE_INSN_EH_REGION_BEG:
3100 ++eh_region_count;
3101 break;
3103 case NOTE_INSN_EH_REGION_END:
3104 if (eh_region_count == 0)
3105 /* This sequence of instructions contains the end of
3106 an exception region, but not the beginning.  Moving
3107 it will cause chaos. */
3108 return 0;
3110 --eh_region_count;
3111 break;
3113 default:
3114 break;
3117 else if (past_to_p)
3118 /* If we've passed TO, and we see a non-note instruction, we
3119 can't extend the sequence to a movable sequence. */
3120 return 0;
3122 if (r == to)
3124 if (!new_to)
3125 /* It's OK to move the sequence if there were matched sets of
3126 exception region notes. */
3127 return eh_region_count == 0;
3129 past_to_p = 1;
3132 /* It's OK to move the sequence if there were matched sets of
3133 exception region notes. */
3134 if (past_to_p && eh_region_count == 0)
3136 *new_to = r;
3137 return 1;
3140 /* Go to the next instruction. */
3141 r = NEXT_INSN (r);
3144 return 0;
3147 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3149 loc_mentioned_in_p (rtx *loc, rtx in)
3151 enum rtx_code code = GET_CODE (in);
3152 const char *fmt = GET_RTX_FORMAT (code);
3153 int i, j;
3155 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3157 if (loc == &in->u.fld[i].rt_rtx)
3158 return 1;
3159 if (fmt[i] == 'e')
3161 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3162 return 1;
3164 else if (fmt[i] == 'E')
3165 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3166 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3167 return 1;
3169 return 0;
3172 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3173 and SUBREG_BYTE, return the bit offset where the subreg begins
3174 (counting from the least significant bit of the operand). */
3176 unsigned int
3177 subreg_lsb_1 (enum machine_mode outer_mode,
3178 enum machine_mode inner_mode,
3179 unsigned int subreg_byte)
3181 unsigned int bitpos;
3182 unsigned int byte;
3183 unsigned int word;
3185 /* A paradoxical subreg begins at bit position 0. */
3186 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3187 return 0;
3189 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3190 /* If the subreg crosses a word boundary ensure that
3191 it also begins and ends on a word boundary. */
3192 if ((subreg_byte % UNITS_PER_WORD
3193 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3194 && (subreg_byte % UNITS_PER_WORD
3195 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))
3196 abort ();
3198 if (WORDS_BIG_ENDIAN)
3199 word = (GET_MODE_SIZE (inner_mode)
3200 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3201 else
3202 word = subreg_byte / UNITS_PER_WORD;
3203 bitpos = word * BITS_PER_WORD;
3205 if (BYTES_BIG_ENDIAN)
3206 byte = (GET_MODE_SIZE (inner_mode)
3207 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3208 else
3209 byte = subreg_byte % UNITS_PER_WORD;
3210 bitpos += byte * BITS_PER_UNIT;
3212 return bitpos;
3215 /* Given a subreg X, return the bit offset where the subreg begins
3216 (counting from the least significant bit of the reg). */
3218 unsigned int
3219 subreg_lsb (rtx x)
3221 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3222 SUBREG_BYTE (x));
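/* Editorial worked example: with 8-bit units and 4-byte words, the
   high word of a DImode value on a little-endian target is
   (subreg:SI (reg:DI) 4), and

     subreg_lsb_1 (SImode, DImode, 4)

   returns 32: the subreg is in word 1 (32 bits) at byte 0 within the
   word.  On a big-endian target the high word has SUBREG_BYTE 0 and
   likewise starts at bit 32.  */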
3225 /* This function returns the regno offset of a subreg expression.
3226 xregno - A regno of an inner hard subreg_reg (or what will become one).
3227 xmode - The mode of xregno.
3228 offset - The byte offset.
3229 ymode - The mode of a top level SUBREG (or what may become one).
3230 RETURN - The regno offset which would be used. */
3231 unsigned int
3232 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3233 unsigned int offset, enum machine_mode ymode)
3235 int nregs_xmode, nregs_ymode;
3236 int mode_multiple, nregs_multiple;
3237 int y_offset;
3239 if (xregno >= FIRST_PSEUDO_REGISTER)
3240 abort ();
3242 nregs_xmode = hard_regno_nregs[xregno][xmode];
3243 nregs_ymode = hard_regno_nregs[xregno][ymode];
3245 /* If this is a big endian paradoxical subreg, which uses more actual
3246 hard registers than the original register, we must return a negative
3247 offset so that we find the proper highpart of the register. */
3248 if (offset == 0
3249 && nregs_ymode > nregs_xmode
3250 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3251 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3252 return nregs_xmode - nregs_ymode;
3254 if (offset == 0 || nregs_xmode == nregs_ymode)
3255 return 0;
3257 /* The size of YMODE must not be greater than the size of XMODE.  */
3258 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3259 if (mode_multiple == 0)
3260 abort ();
3262 y_offset = offset / GET_MODE_SIZE (ymode);
3263 nregs_multiple = nregs_xmode / nregs_ymode;
3264 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
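/* Editorial worked example: if DImode occupies two hard registers
   where SImode occupies one (hard_regno_nregs of 2 and 1), then

     subreg_regno_offset (0, DImode, 4, SImode)

   returns 1: y_offset = 4/4 = 1, mode_multiple = nregs_multiple = 2,
   so the subreg lives one hard register above XREGNO.  */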
3267 /* This function returns true when the offset is representable via
3268 subreg_offset in the given regno.
3269 xregno - A regno of an inner hard subreg_reg (or what will become one).
3270 xmode - The mode of xregno.
3271 offset - The byte offset.
3272 ymode - The mode of a top level SUBREG (or what may become one).
3273 RETURN - Whether the offset is representable.  */
3274 bool
3275 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3276 unsigned int offset, enum machine_mode ymode)
3278 int nregs_xmode, nregs_ymode;
3279 int mode_multiple, nregs_multiple;
3280 int y_offset;
3282 if (xregno >= FIRST_PSEUDO_REGISTER)
3283 abort ();
3285 nregs_xmode = hard_regno_nregs[xregno][xmode];
3286 nregs_ymode = hard_regno_nregs[xregno][ymode];
3288 /* Paradoxical subregs are always valid. */
3289 if (offset == 0
3290 && nregs_ymode > nregs_xmode
3291 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3292 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3293 return true;
3295 /* Lowpart subregs are always valid. */
3296 if (offset == subreg_lowpart_offset (ymode, xmode))
3297 return true;
3299 #ifdef ENABLE_CHECKING
3300 /* This should always pass, otherwise we don't know how to verify the
3301 constraint. These conditions may be relaxed but subreg_offset would
3302 need to be redesigned. */
3303 if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3304 || GET_MODE_SIZE (ymode) % nregs_ymode
3305 || nregs_xmode % nregs_ymode)
3306 abort ();
3307 #endif
3309 /* The XMODE value can be seen as a vector of NREGS_XMODE
3310 values.  The subreg must represent a lowpart of the given field.
3311 Compute which field it is.  */
3312 offset -= subreg_lowpart_offset (ymode,
3313 mode_for_size (GET_MODE_BITSIZE (xmode)
3314 / nregs_xmode,
3315 MODE_INT, 0));
3317 /* The size of YMODE must not be greater than the size of XMODE.  */
3318 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3319 if (mode_multiple == 0)
3320 abort ();
3322 y_offset = offset / GET_MODE_SIZE (ymode);
3323 nregs_multiple = nregs_xmode / nregs_ymode;
3324 #ifdef ENABLE_CHECKING
3325 if (offset % GET_MODE_SIZE (ymode)
3326 || mode_multiple % nregs_multiple)
3327 abort ();
3328 #endif
3329 return (!(y_offset % (mode_multiple / nregs_multiple)));
3332 /* Return the final regno that a subreg expression refers to. */
3333 unsigned int
3334 subreg_regno (rtx x)
3336 unsigned int ret;
3337 rtx subreg = SUBREG_REG (x);
3338 int regno = REGNO (subreg);
3340 ret = regno + subreg_regno_offset (regno,
3341 GET_MODE (subreg),
3342 SUBREG_BYTE (x),
3343 GET_MODE (x));
3344 return ret;
3347 struct parms_set_data
3349 int nregs;
3350 HARD_REG_SET regs;
3353 /* Helper function for noticing stores to parameter registers. */
3354 static void
3355 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3357 struct parms_set_data *d = data;
3358 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3359 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3361 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3362 d->nregs--;
3366 /* Look backward for the first parameter to be loaded.
3367    Do not skip past BOUNDARY.  */
3369 find_first_parameter_load (rtx call_insn, rtx boundary)
3371 struct parms_set_data parm;
3372 rtx p, before;
3374 /* Since different machines initialize their parameter registers
3375 in different orders, assume nothing. Collect the set of all
3376 parameter registers. */
3377 CLEAR_HARD_REG_SET (parm.regs);
3378 parm.nregs = 0;
3379 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3380 if (GET_CODE (XEXP (p, 0)) == USE
3381 && REG_P (XEXP (XEXP (p, 0), 0)))
3383 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3384 abort ();
3386 /* We only care about registers which can hold function
3387 arguments. */
3388 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3389 continue;
3391 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3392 parm.nregs++;
3394 before = call_insn;
3396 /* Search backward for the first set of a register in this set. */
3397 while (parm.nregs && before != boundary)
3399 before = PREV_INSN (before);
3401 /* It is possible that some loads got CSEed from one call to
3402 another. Stop in that case. */
3403 if (CALL_P (before))
3404 break;
3406 /* Our caller must either ensure that we will find all sets
3407    (in case the code has not been optimized yet), or guard
3408    against possible labels by setting BOUNDARY to the preceding
3409    CODE_LABEL.  */
3410 if (LABEL_P (before))
3412 if (before != boundary)
3413 abort ();
3414 break;
3417 if (INSN_P (before))
3418 note_stores (PATTERN (before), parms_set, &parm);
3420 return before;
3423 /* Return true if we should avoid inserting code between INSN and preceding
3424 call instruction. */
3426 bool
3427 keep_with_call_p (rtx insn)
3429 rtx set;
3431 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3433 if (REG_P (SET_DEST (set))
3434 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3435 && fixed_regs[REGNO (SET_DEST (set))]
3436 && general_operand (SET_SRC (set), VOIDmode))
3437 return true;
3438 if (REG_P (SET_SRC (set))
3439 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3440 && REG_P (SET_DEST (set))
3441 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3442 return true;
3443 /* There may be a stack pop just after the call and before the store
3444 of the return register. Search for the actual store when deciding
3445 if we can break or not. */
3446 if (SET_DEST (set) == stack_pointer_rtx)
3448 rtx i2 = next_nonnote_insn (insn);
3449 if (i2 && keep_with_call_p (i2))
3450 return true;
3453 return false;
3456 /* Return true when a store to register X can be hoisted to a place
3457    with LIVE registers (LIVE can be NULL).  VAL contains the destination
3458    whose value will be used.  */
3460 static bool
3461 hoist_test_store (rtx x, rtx val, regset live)
3463 if (GET_CODE (x) == SCRATCH)
3464 return true;
3466 if (rtx_equal_p (x, val))
3467 return true;
3469 /* Allow a SUBREG of X provided it does not write just part of a multireg
3470    pseudo; otherwise we would need to update all users to allow for the
3471    hoisted store too.  The caller may represent that by passing the whole SUBREG as VAL.  */
3473 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3475 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3476 && GET_MODE_BITSIZE (GET_MODE (x)) <
3477 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3478 return false;
3479 return true;
3481 if (GET_CODE (x) == SUBREG)
3482 x = SUBREG_REG (x);
3484 /* Anything except a register store is not hoistable, including
3485    partial stores to registers.  */
3487 if (!REG_P (x))
3488 return false;
3490 /* Pseudo registers can always be replaced by another pseudo to avoid
3491    the side effect; for hard registers we must ensure that they are dead.
3492    Eventually we may want to add code that tries to turn pseudos into hard
3493    registers, but that is unlikely to be useful.  */
3495 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3497 int regno = REGNO (x);
3498 int n = hard_regno_nregs[regno][GET_MODE (x)];
3500 if (!live)
3501 return false;
3502 if (REGNO_REG_SET_P (live, regno))
3503 return false;
3504 while (--n > 0)
3505 if (REGNO_REG_SET_P (live, regno + n))
3506 return false;
3508 return true;
3512 /* Return true if INSN can be hoisted to a place with LIVE hard registers
3513 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3514 and used by the hoisting pass. */
3516 bool
3517 can_hoist_insn_p (rtx insn, rtx val, regset live)
3519 rtx pat = PATTERN (insn);
3520 int i;
3522 /* It is probably not worth the complexity to handle multiple-set
3523    stores.  */
3524 if (!single_set (insn))
3525 return false;
3526 /* We could move a CALL_INSN, but we would need to check that all
3527    call-clobbered regs are dead first, so give up.  */
3528 if (CALL_P (insn))
3529 return false;
3530 /* In future we will handle hoisting of libcall sequences, but
3531 give up for now. */
3532 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3533 return false;
3534 switch (GET_CODE (pat))
3536 case SET:
3537 if (!hoist_test_store (SET_DEST (pat), val, live))
3538 return false;
3539 break;
3540 case USE:
3541 /* USEs have tricky semantics, so do not move them.  */
3542 return false;
3543 break;
3544 case CLOBBER:
3545 if (!hoist_test_store (XEXP (pat, 0), val, live))
3546 return false;
3547 break;
3548 case PARALLEL:
3549 for (i = 0; i < XVECLEN (pat, 0); i++)
3551 rtx x = XVECEXP (pat, 0, i);
3552 switch (GET_CODE (x))
3554 case SET:
3555 if (!hoist_test_store (SET_DEST (x), val, live))
3556 return false;
3557 break;
3558 case USE:
3559 /* We need to fix callers to really ensure the availability
3560    of all the values the insn uses, but for now it is safe to prohibit
3561    hoisting any insn that has such hidden uses.  */
3562 return false;
3563 break;
3564 case CLOBBER:
3565 if (!hoist_test_store (SET_DEST (x), val, live))
3566 return false;
3567 break;
3568 default:
3569 break;
3572 break;
3573 default:
3574 abort ();
3576 return true;
3579 /* Update the store after hoisting: replace all stores to pseudo registers
3580    with new ones to avoid clobbering values, except for the store to VAL,
3581    which is updated to NEW.  */
3583 static void
3584 hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
3586 rtx x = *xp;
3588 if (GET_CODE (x) == SCRATCH)
3589 return;
3591 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3592 validate_change (insn, xp,
3593 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3594 SUBREG_BYTE (x)), 1);
3595 if (rtx_equal_p (x, val))
3597 validate_change (insn, xp, new, 1);
3598 return;
3600 if (GET_CODE (x) == SUBREG)
3602 xp = &SUBREG_REG (x);
3603 x = *xp;
3606 if (!REG_P (x))
3607 abort ();
3609 /* We've verified that hard registers are dead, so we may keep the side
3610 effect.  Otherwise replace it with a new pseudo.  */
3611 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3612 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3613 REG_NOTES (insn)
3614 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3617 /* Create a copy of INSN after AFTER replacing store of VAL to NEW
3618 and each other side effect to pseudo register by new pseudo register. */
3621 hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
3623 rtx pat;
3624 int i;
3625 rtx note;
3627 insn = emit_copy_of_insn_after (insn, after);
3628 pat = PATTERN (insn);
3630 /* Remove REG_UNUSED notes as we will re-emit them. */
3631 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3632 remove_note (insn, note);
3634 /* To make this work, callers must also move everything referenced
3635    by REG_EQUAL/REG_EQUIV notes.  It is probably easier to just remove
3636    them.  */
3637 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3638 remove_note (insn, note);
3639 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3640 remove_note (insn, note);
3642 /* Remove REG_DEAD notes, as they might not be valid anymore in case
3643    we create a redundancy.  */
3644 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3645 remove_note (insn, note);
3646 switch (GET_CODE (pat))
3648 case SET:
3649 hoist_update_store (insn, &SET_DEST (pat), val, new);
3650 break;
3651 case USE:
3652 break;
3653 case CLOBBER:
3654 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3655 break;
3656 case PARALLEL:
3657 for (i = 0; i < XVECLEN (pat, 0); i++)
3659 rtx x = XVECEXP (pat, 0, i);
3660 switch (GET_CODE (x))
3662 case SET:
3663 hoist_update_store (insn, &SET_DEST (x), val, new);
3664 break;
3665 case USE:
3666 break;
3667 case CLOBBER:
3668 hoist_update_store (insn, &SET_DEST (x), val, new);
3669 break;
3670 default:
3671 break;
3674 break;
3675 default:
3676 abort ();
3678 if (!apply_change_group ())
3679 abort ();
3681 return insn;
3685 hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
3687 rtx new_insn;
3689 /* We cannot insert instructions on an abnormal critical edge.
3690 It will be easier to find the culprit if we die now. */
3691 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3692 abort ();
3694 /* Do not use emit_insn_on_edge, as we want to preserve notes and similar
3695    things.  We also emit CALL_INSNs and friends.  */
3696 if (e->insns.r == NULL_RTX)
3698 start_sequence ();
3699 emit_note (NOTE_INSN_DELETED);
3701 else
3702 push_to_sequence (e->insns.r);
3704 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3706 e->insns.r = get_insns ();
3707 end_sequence ();
3708 return new_insn;
3711 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3712 to non-complex jumps. That is, direct unconditional, conditional,
3713 and tablejumps, but not computed jumps or returns. It also does
3714 not apply to the fallthru case of a conditional jump. */
3716 bool
3717 label_is_jump_target_p (rtx label, rtx jump_insn)
3719 rtx tmp = JUMP_LABEL (jump_insn);
3721 if (label == tmp)
3722 return true;
3724 if (tablejump_p (jump_insn, NULL, &tmp))
3726 rtvec vec = XVEC (PATTERN (tmp),
3727 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3728 int i, veclen = GET_NUM_ELEM (vec);
3730 for (i = 0; i < veclen; ++i)
3731 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3732 return true;
3735 return false;
3739 /* Return an estimate of the cost of computing rtx X.
3740 One use is in cse, to decide which expression to keep in the hash table.
3741 Another is in rtl generation, to pick the cheapest way to multiply.
3742 Other uses like the latter are expected in the future. */
3745 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3747 int i, j;
3748 enum rtx_code code;
3749 const char *fmt;
3750 int total;
3752 if (x == 0)
3753 return 0;
3755 /* Compute the default costs of certain things.
3756 Note that targetm.rtx_costs can override the defaults. */
3758 code = GET_CODE (x);
3759 switch (code)
3761 case MULT:
3762 total = COSTS_N_INSNS (5);
3763 break;
3764 case DIV:
3765 case UDIV:
3766 case MOD:
3767 case UMOD:
3768 total = COSTS_N_INSNS (7);
3769 break;
3770 case USE:
3771 /* Used in loop.c and combine.c as a marker. */
3772 total = 0;
3773 break;
3774 default:
3775 total = COSTS_N_INSNS (1);
3778 switch (code)
3780 case REG:
3781 return 0;
3783 case SUBREG:
3784 /* If we can't tie these modes, make this expensive. The larger
3785 the mode, the more expensive it is. */
3786 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3787 return COSTS_N_INSNS (2
3788 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3789 break;
3791 default:
3792 if (targetm.rtx_costs (x, code, outer_code, &total))
3793 return total;
3794 break;
3797 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3798 which is already in total. */
3800 fmt = GET_RTX_FORMAT (code);
3801 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3802 if (fmt[i] == 'e')
3803 total += rtx_cost (XEXP (x, i), code);
3804 else if (fmt[i] == 'E')
3805 for (j = 0; j < XVECLEN (x, i); j++)
3806 total += rtx_cost (XVECEXP (x, i, j), code);
3808 return total;
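/* Editorial example: for an assumed pseudo register REG,

     rtx_cost (gen_rtx_MULT (SImode, reg, GEN_INT (5)), SET)

   starts from the default COSTS_N_INSNS (5) for MULT; if
   targetm.rtx_costs handles the expression that value is returned
   directly, otherwise the operand costs are added (0 for the REG).  */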
3811 /* Return the cost of address expression X.
3812    X is expected to be a properly formed address reference.  */
3815 address_cost (rtx x, enum machine_mode mode)
3817 /* We may be asked for the cost of various unusual addresses, such as the
3818    operands of a push instruction.  It is not worthwhile to complicate the
3819    target hook for such cases.  */
3821 if (!memory_address_p (mode, x))
3822 return 1000;
3824 return targetm.address_cost (x);
3827 /* If the target doesn't override, compute the cost as with arithmetic. */
3830 default_address_cost (rtx x)
3832 return rtx_cost (x, MEM);
3836 unsigned HOST_WIDE_INT
3837 nonzero_bits (rtx x, enum machine_mode mode)
3839 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
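/* Editorial example: for an assumed pseudo register REG,

     nonzero_bits (gen_rtx_AND (SImode, reg, GEN_INT (0xff)), SImode)

   is at most 0xff: the AND case intersects the nonzero bits of both
   operands, and the CONST_INT contributes exactly 0xff.  */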
3842 unsigned int
3843 num_sign_bit_copies (rtx x, enum machine_mode mode)
3845 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
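/* Editorial example: by the CONST_INT case of num_sign_bit_copies1,

     num_sign_bit_copies (GEN_INT (-1), SImode)

   returns 32 on a target with 32-bit SImode: the complement masks to
   zero, so every bit copies the sign bit.  GEN_INT (3) instead gives
   32 - floor_log2 (3) - 1 = 30.  */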
3848 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3849 It avoids exponential behavior in nonzero_bits1 when X has
3850 identical subexpressions on the first or the second level. */
3852 static unsigned HOST_WIDE_INT
3853 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3854 enum machine_mode known_mode,
3855 unsigned HOST_WIDE_INT known_ret)
3857 if (x == known_x && mode == known_mode)
3858 return known_ret;
3860 /* Try to find identical subexpressions. If found call
3861 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3862 precomputed value for the subexpression as KNOWN_RET. */
3864 if (ARITHMETIC_P (x))
3866 rtx x0 = XEXP (x, 0);
3867 rtx x1 = XEXP (x, 1);
3869 /* Check the first level. */
3870 if (x0 == x1)
3871 return nonzero_bits1 (x, mode, x0, mode,
3872 cached_nonzero_bits (x0, mode, known_x,
3873 known_mode, known_ret));
3875 /* Check the second level. */
3876 if (ARITHMETIC_P (x0)
3877 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3878 return nonzero_bits1 (x, mode, x1, mode,
3879 cached_nonzero_bits (x1, mode, known_x,
3880 known_mode, known_ret));
3882 if (ARITHMETIC_P (x1)
3883 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3884 return nonzero_bits1 (x, mode, x0, mode,
3885 cached_nonzero_bits (x0, mode, known_x,
3886 known_mode, known_ret));
3889 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3892 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3893 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3894 is less useful. We can't allow both, because that results in exponential
3895 run time recursion. There is a nullstone testcase that triggered
3896 this. This macro avoids accidental uses of num_sign_bit_copies. */
3897 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3899 /* Given an expression, X, compute which bits in X can be nonzero.
3900 We don't care about bits outside of those defined in MODE.
3902 For most X this is simply GET_MODE_MASK (MODE), but if X is
3903 an arithmetic operation, we can do better. */
3905 static unsigned HOST_WIDE_INT
3906 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3907 enum machine_mode known_mode,
3908 unsigned HOST_WIDE_INT known_ret)
3910 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3911 unsigned HOST_WIDE_INT inner_nz;
3912 enum rtx_code code;
3913 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3915 /* For floating-point values, assume all bits are needed. */
3916 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3917 return nonzero;
3919 /* If X is wider than MODE, use its mode instead. */
3920 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3922 mode = GET_MODE (x);
3923 nonzero = GET_MODE_MASK (mode);
3924 mode_width = GET_MODE_BITSIZE (mode);
3927 if (mode_width > HOST_BITS_PER_WIDE_INT)
3928 /* Our only callers in this case look for single bit values. So
3929 just return the mode mask. Those tests will then be false. */
3930 return nonzero;
3932 #ifndef WORD_REGISTER_OPERATIONS
3933 /* If MODE is wider than X, but both are a single word for both the host
3934 and target machines, we can compute this from which bits of the
3935 object might be nonzero in its own mode, taking into account the fact
3936 that on many CISC machines, accessing an object in a wider mode
3937 causes the high-order bits to become undefined. So they are
3938 not known to be zero. */
3940 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3941 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3942 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3943 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3945 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3946 known_x, known_mode, known_ret);
3947 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3948 return nonzero;
3950 #endif
3952 code = GET_CODE (x);
3953 switch (code)
3955 case REG:
3956 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3957 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3958 all the bits above ptr_mode are known to be zero. */
3959 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3960 && REG_POINTER (x))
3961 nonzero &= GET_MODE_MASK (ptr_mode);
3962 #endif
3964 /* Include declared information about alignment of pointers. */
3965 /* ??? We don't properly preserve REG_POINTER changes across
3966 pointer-to-integer casts, so we can't trust it except for
3967 things that we know must be pointers. See execute/960116-1.c. */
3968 if ((x == stack_pointer_rtx
3969 || x == frame_pointer_rtx
3970 || x == arg_pointer_rtx)
3971 && REGNO_POINTER_ALIGN (REGNO (x)))
3973 unsigned HOST_WIDE_INT alignment
3974 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3976 #ifdef PUSH_ROUNDING
3977 /* If PUSH_ROUNDING is defined, it is possible for the
3978 stack to be momentarily aligned only to that amount,
3979 so we pick the least alignment. */
3980 if (x == stack_pointer_rtx && PUSH_ARGS)
3981 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3982 alignment);
3983 #endif
3985 nonzero &= ~(alignment - 1);
3989 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3990 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3991 known_mode, known_ret,
3992 &nonzero_for_hook);
3994 if (new)
3995 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3996 known_mode, known_ret);
3998 return nonzero_for_hook;
4001 case CONST_INT:
4002 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4003 /* If X is negative in MODE, sign-extend the value. */
4004 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
4005 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
4006 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
4007 #endif
4009 return INTVAL (x);
4011 case MEM:
4012 #ifdef LOAD_EXTEND_OP
4013 /* On many, if not most, RISC machines, reading a byte from memory
4014 zeros the rest of the register. Noticing that fact saves a lot
4015 of extra zero-extends. */
4016 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4017 nonzero &= GET_MODE_MASK (GET_MODE (x));
4018 #endif
4019 break;
4021 case EQ: case NE:
4022 case UNEQ: case LTGT:
4023 case GT: case GTU: case UNGT:
4024 case LT: case LTU: case UNLT:
4025 case GE: case GEU: case UNGE:
4026 case LE: case LEU: case UNLE:
4027 case UNORDERED: case ORDERED:
4029 /* If this produces an integer result, we know which bits are set.
4030 Code here used to clear bits outside the mode of X, but that is
4031 now done above. */
4033 if (GET_MODE_CLASS (mode) == MODE_INT
4034 && mode_width <= HOST_BITS_PER_WIDE_INT)
4035 nonzero = STORE_FLAG_VALUE;
4036 break;
4038 case NEG:
4039 #if 0
4040 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4041 and num_sign_bit_copies. */
4042 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4043 == GET_MODE_BITSIZE (GET_MODE (x)))
4044 nonzero = 1;
4045 #endif
4047 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
4048 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4049 break;
4051 case ABS:
4052 #if 0
4053 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4054 and num_sign_bit_copies. */
4055 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4056 == GET_MODE_BITSIZE (GET_MODE (x)))
4057 nonzero = 1;
4058 #endif
4059 break;
4061 case TRUNCATE:
4062 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4063 known_x, known_mode, known_ret)
4064 & GET_MODE_MASK (mode));
4065 break;
4067 case ZERO_EXTEND:
4068 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4069 known_x, known_mode, known_ret);
4070 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4071 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4072 break;
4074 case SIGN_EXTEND:
4075 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4076 Otherwise, show that all the bits in the outer mode but not in the
4077 inner mode may be nonzero.  */
4078 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4079 known_x, known_mode, known_ret);
4080 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4082 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4083 if (inner_nz
4084 & (((HOST_WIDE_INT) 1
4085 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
4086 inner_nz |= (GET_MODE_MASK (mode)
4087 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4090 nonzero &= inner_nz;
4091 break;
4093 case AND:
4094 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4095 known_x, known_mode, known_ret)
4096 & cached_nonzero_bits (XEXP (x, 1), mode,
4097 known_x, known_mode, known_ret);
4098 break;
4100 case XOR: case IOR:
4101 case UMIN: case UMAX: case SMIN: case SMAX:
4103 unsigned HOST_WIDE_INT nonzero0 =
4104 cached_nonzero_bits (XEXP (x, 0), mode,
4105 known_x, known_mode, known_ret);
4107 /* Don't call nonzero_bits for the second time if it cannot change
4108 anything. */
4109 if ((nonzero & nonzero0) != nonzero)
4110 nonzero &= nonzero0
4111 | cached_nonzero_bits (XEXP (x, 1), mode,
4112 known_x, known_mode, known_ret);
4114 break;
4116 case PLUS: case MINUS:
4117 case MULT:
4118 case DIV: case UDIV:
4119 case MOD: case UMOD:
4120 /* We can apply the rules of arithmetic to compute the number of
4121 high- and low-order zero bits of these operations. We start by
4122 computing the width (position of the highest-order nonzero bit)
4123 and the number of low-order zero bits for each value. */
4125 unsigned HOST_WIDE_INT nz0 =
4126 cached_nonzero_bits (XEXP (x, 0), mode,
4127 known_x, known_mode, known_ret);
4128 unsigned HOST_WIDE_INT nz1 =
4129 cached_nonzero_bits (XEXP (x, 1), mode,
4130 known_x, known_mode, known_ret);
4131 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
4132 int width0 = floor_log2 (nz0) + 1;
4133 int width1 = floor_log2 (nz1) + 1;
4134 int low0 = floor_log2 (nz0 & -nz0);
4135 int low1 = floor_log2 (nz1 & -nz1);
4136 HOST_WIDE_INT op0_maybe_minusp
4137 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
4138 HOST_WIDE_INT op1_maybe_minusp
4139 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
4140 unsigned int result_width = mode_width;
4141 int result_low = 0;
4143 switch (code)
4145 case PLUS:
4146 result_width = MAX (width0, width1) + 1;
4147 result_low = MIN (low0, low1);
4148 break;
4149 case MINUS:
4150 result_low = MIN (low0, low1);
4151 break;
4152 case MULT:
4153 result_width = width0 + width1;
4154 result_low = low0 + low1;
4155 break;
4156 case DIV:
4157 if (width1 == 0)
4158 break;
4159 if (! op0_maybe_minusp && ! op1_maybe_minusp)
4160 result_width = width0;
4161 break;
4162 case UDIV:
4163 if (width1 == 0)
4164 break;
4165 result_width = width0;
4166 break;
4167 case MOD:
4168 if (width1 == 0)
4169 break;
4170 if (! op0_maybe_minusp && ! op1_maybe_minusp)
4171 result_width = MIN (width0, width1);
4172 result_low = MIN (low0, low1);
4173 break;
4174 case UMOD:
4175 if (width1 == 0)
4176 break;
4177 result_width = MIN (width0, width1);
4178 result_low = MIN (low0, low1);
4179 break;
4180 default:
4181 abort ();
4184 if (result_width < mode_width)
4185 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
4187 if (result_low > 0)
4188 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
4190 #ifdef POINTERS_EXTEND_UNSIGNED
4191 /* If pointers extend unsigned and this is an addition or subtraction
4192 to a pointer in Pmode, all the bits above ptr_mode are known to be
4193 zero. */
4194 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
4195 && (code == PLUS || code == MINUS)
4196 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4197 nonzero &= GET_MODE_MASK (ptr_mode);
4198 #endif
4200 break;
4202 case ZERO_EXTRACT:
4203 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4204 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4205 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4206 break;
4208 case SUBREG:
4209 /* If this is a SUBREG formed for a promoted variable that has
4210 been zero-extended, we know that at least the high-order bits
4211 are zero, though others might be too. */
4213 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4214 nonzero = GET_MODE_MASK (GET_MODE (x))
4215 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4216 known_x, known_mode, known_ret);
4218 /* If the inner mode is a single word for both the host and target
4219 machines, we can compute this from which bits of the inner
4220 object might be nonzero. */
4221 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
4222 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4223 <= HOST_BITS_PER_WIDE_INT))
4225 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4226 known_x, known_mode, known_ret);
4228 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4229 /* If this is a typical RISC machine, we only have to worry
4230 about the way loads are extended. */
4231 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4232 ? (((nonzero
4233 & (((unsigned HOST_WIDE_INT) 1
4234 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
4235 != 0))
4236 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
4237 || !MEM_P (SUBREG_REG (x)))
4238 #endif
4240 /* On many CISC machines, accessing an object in a wider mode
4241 causes the high-order bits to become undefined. So they are
4242 not known to be zero. */
4243 if (GET_MODE_SIZE (GET_MODE (x))
4244 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4245 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4246 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
4249 break;
4251 case ASHIFTRT:
4252 case LSHIFTRT:
4253 case ASHIFT:
4254 case ROTATE:
4255 /* The nonzero bits are in two classes: any bits within MODE
4256 that aren't in GET_MODE (x) are always significant. The rest of the
4257 nonzero bits are those that are significant in the operand of
4258 the shift when shifted the appropriate number of bits. This
4259 shows that high-order bits are cleared by the right shift and
4260 low-order bits by left shifts. */
4261 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4262 && INTVAL (XEXP (x, 1)) >= 0
4263 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4265 enum machine_mode inner_mode = GET_MODE (x);
4266 unsigned int width = GET_MODE_BITSIZE (inner_mode);
4267 int count = INTVAL (XEXP (x, 1));
4268 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4269 unsigned HOST_WIDE_INT op_nonzero =
4270 cached_nonzero_bits (XEXP (x, 0), mode,
4271 known_x, known_mode, known_ret);
4272 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4273 unsigned HOST_WIDE_INT outer = 0;
4275 if (mode_width > width)
4276 outer = (op_nonzero & nonzero & ~mode_mask);
4278 if (code == LSHIFTRT)
4279 inner >>= count;
4280 else if (code == ASHIFTRT)
4282 inner >>= count;
4284 /* If the sign bit may have been nonzero before the shift, we
4285 need to mark all the places it could have been copied to
4286 by the shift as possibly nonzero. */
4287 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4288 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4290 else if (code == ASHIFT)
4291 inner <<= count;
4292 else
4293 inner = ((inner << (count % width)
4294 | (inner >> (width - (count % width)))) & mode_mask);
4296 nonzero &= (outer | inner);
4298 break;
4300 case FFS:
4301 case POPCOUNT:
4302 /* This is at most the number of bits in the mode. */
4303 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4304 break;
4306 case CLZ:
4307 /* If CLZ has a known value at zero, then the nonzero bits are
4308 that value, plus the number of bits in the mode minus one. */
4309 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4310 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4311 else
4312 nonzero = -1;
4313 break;
4315 case CTZ:
4316 /* If CTZ has a known value at zero, then the nonzero bits are
4317 that value, plus the number of bits in the mode minus one. */
4318 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4319 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4320 else
4321 nonzero = -1;
4322 break;
4324 case PARITY:
4325 nonzero = 1;
4326 break;
4328 case IF_THEN_ELSE:
4330 unsigned HOST_WIDE_INT nonzero_true =
4331 cached_nonzero_bits (XEXP (x, 1), mode,
4332 known_x, known_mode, known_ret);
4334 /* Don't call nonzero_bits for the second time if it cannot change
4335 anything. */
4336 if ((nonzero & nonzero_true) != nonzero)
4337 nonzero &= nonzero_true
4338 | cached_nonzero_bits (XEXP (x, 2), mode,
4339 known_x, known_mode, known_ret);
4341 break;
4343 default:
4344 break;
4347 return nonzero;
4350 /* See the macro definition above. */
4351 #undef cached_num_sign_bit_copies
4354 /* The function cached_num_sign_bit_copies is a wrapper around
4355 num_sign_bit_copies1. It avoids exponential behavior in
4356 num_sign_bit_copies1 when X has identical subexpressions on the
4357 first or the second level. */
4359 static unsigned int
4360 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
4361 enum machine_mode known_mode,
4362 unsigned int known_ret)
4364 if (x == known_x && mode == known_mode)
4365 return known_ret;
4367 /* Try to find identical subexpressions. If found call
4368 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4369 the precomputed value for the subexpression as KNOWN_RET. */
4371 if (ARITHMETIC_P (x))
4373 rtx x0 = XEXP (x, 0);
4374 rtx x1 = XEXP (x, 1);
4376 /* Check the first level. */
4377 if (x0 == x1)
4378 return
4379 num_sign_bit_copies1 (x, mode, x0, mode,
4380 cached_num_sign_bit_copies (x0, mode, known_x,
4381 known_mode,
4382 known_ret));
4384 /* Check the second level. */
4385 if (ARITHMETIC_P (x0)
4386 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4387 return
4388 num_sign_bit_copies1 (x, mode, x1, mode,
4389 cached_num_sign_bit_copies (x1, mode, known_x,
4390 known_mode,
4391 known_ret));
4393 if (ARITHMETIC_P (x1)
4394 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4395 return
4396 num_sign_bit_copies1 (x, mode, x0, mode,
4397 cached_num_sign_bit_copies (x0, mode, known_x,
4398 known_mode,
4399 known_ret));
4402 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4405 /* Return the number of bits at the high-order end of X that are known to
4406 be equal to the sign bit. X will be used in mode MODE; if MODE is
4407 VOIDmode, X will be used in its own mode. The returned value will always
4408 be between 1 and the number of bits in MODE. */
static unsigned int
num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
                      enum machine_mode known_mode,
                      unsigned int known_ret)
{
  enum rtx_code code = GET_CODE (x);
  unsigned int bitwidth = GET_MODE_BITSIZE (mode);
  int num0, num1, result;
  unsigned HOST_WIDE_INT nonzero;

  /* If we weren't given a mode, use the mode of X.  If the mode is still
     VOIDmode, we don't know anything.  Likewise if one of the modes is
     floating-point.  */

  if (mode == VOIDmode)
    mode = GET_MODE (x);

  if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
    return 1;

  /* For a smaller object, just ignore the high bits.  */
  if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
    {
      num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
                                         known_x, known_mode, known_ret);
      return MAX (1,
                  num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
    }

  if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
    {
#ifndef WORD_REGISTER_OPERATIONS
      /* If this machine does not do all register operations on the entire
         register and MODE is wider than the mode of X, we can say nothing
         at all about the high-order bits.  */
      return 1;
#else
      /* Likewise on machines that do, if the mode of the object is smaller
         than a word and loads of that size don't sign extend, we can say
         nothing about the high order bits.  */
      if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
#ifdef LOAD_EXTEND_OP
          && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
#endif
          )
        return 1;
#endif
    }
  switch (code)
    {
    case REG:

#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      /* If pointers extend signed and this is a pointer in Pmode, say that
         all the bits above ptr_mode are known to be sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
          && REG_POINTER (x))
        return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
#endif

      {
        unsigned int copies_for_hook = 1, copies = 1;
        rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
                                                     known_mode, known_ret,
                                                     &copies_for_hook);

        if (new)
          copies = cached_num_sign_bit_copies (new, mode, known_x,
                                               known_mode, known_ret);

        if (copies > 1 || copies_for_hook > 1)
          return MAX (copies, copies_for_hook);

        /* Else, use nonzero_bits to guess num_sign_bit_copies (see below).  */
      }
      break;
    case MEM:
#ifdef LOAD_EXTEND_OP
      /* Some RISC machines sign-extend all loads of smaller than a word.  */
      if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
        return MAX (1, ((int) bitwidth
                        - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
#endif
      break;
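
      /* Worked example (added for illustration): on a target where
         LOAD_EXTEND_OP (QImode) == SIGN_EXTEND, a QImode MEM used in
         SImode is known to have at least 32 - 8 + 1 == 25 sign bit
         copies.  */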
    case CONST_INT:
      /* If the constant is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
      nonzero = INTVAL (x) & GET_MODE_MASK (mode);
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
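
      /* Worked example (added for illustration): in SImode, (const_int -4)
         masks to 0xfffffffc; the sign bit is set, so we complement to 0x3
         and return 32 - floor_log2 (3) - 1 == 30, matching the 30 leading
         one bits of the constant.  */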
    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
         and we are looking at it in a wider mode, at least the
         high-order bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                             known_x, known_mode, known_ret);
          return MAX ((int) bitwidth
                      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
                      num0);
        }

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
                                             known_x, known_mode, known_ret);
          return MAX (1, (num0
                          - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
                                   - bitwidth)));
        }

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
         affect the entire register, just look inside.  Note that we are
         passing MODE to the recursive call, so the number of sign bit copies
         will remain relative to that mode, not the inner mode.  */

      /* This works only if loads sign extend.  Otherwise, if we get a
         reload for the inner part, it may be loaded from the stack, and
         then we lose all sign bit copies that existed before the store
         to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
           > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
          && MEM_P (SUBREG_REG (x)))
        return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                           known_x, known_mode, known_ret);
#endif
#endif
      break;
    case SIGN_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;
    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                            known_x, known_mode, known_ret));
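
      /* Worked example (added for illustration): (sign_extend:SI (reg:QI R))
         gains 32 - 8 == 24 copies from the extension plus at least the one
         copy every QImode value has, so the result is at least 25.  */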
    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                         known_x, known_mode, known_ret);
      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
                                    - bitwidth)));
    case NOT:
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
    case ROTATE:       case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
         of sign bit copies, we can just subtract that amount from the
         number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < (int) bitwidth)
        {
          num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                             known_x, known_mode, known_ret);
          return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
                                 : (int) bitwidth - INTVAL (XEXP (x, 1))));
        }
      break;
    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
         is known to be positive, the number of sign bit copies is the
         same as that of the input.  Finally, if the input has just one bit
         that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
        return bitwidth;

      if (num0 > 1
          && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
        num0--;

      return num0;
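
      /* Worked example (added for illustration): if nonzero_bits shows the
         operand is 0 or 1 in SImode, the negation is 0 or -1, and all 32
         bits are copies of the sign bit.  */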
    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
         MIN and MAX operations always return one of the operands.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);
    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
         if we are subtracting 1 from a positive number, there will not
         be such a carry.  Furthermore, if the positive number is known to
         be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
          && bitwidth <= HOST_BITS_PER_WIDE_INT)
        {
          nonzero = nonzero_bits (XEXP (x, 0), mode);
          if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
            return (nonzero == 1 || nonzero == 0 ? bitwidth
                    : bitwidth - floor_log2 (nonzero) - 1);
        }

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      result = MAX (1, MIN (num0, num1) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is an addition or subtraction
         to a pointer in Pmode, all the bits above ptr_mode are known to be
         sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && (code == PLUS || code == MINUS)
          && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
        result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
                             - GET_MODE_BITSIZE (ptr_mode) + 1),
                      result);
#endif
      return result;
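
      /* Worked example (added for illustration): if both SImode operands
         have 20 sign bit copies, MIN (num0, num1) - 1 == 19 of the top
         bits of the sum are still sign copies; the carry can disturb at
         most one of them.  */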
    case MULT:
      /* The number of bits of the product is the sum of the number of
         bits of both terms.  However, unless one of the terms is known
         to be positive, we must allow for an additional bit since negating
         a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (((nonzero_bits (XEXP (x, 0), mode)
                    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
                  && ((nonzero_bits (XEXP (x, 1), mode)
                       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
        result--;

      return MAX (1, result);
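
      /* Worked example (added for illustration): two SImode terms with 20
         copies each act like 13-bit signed values, so result == 20 + 20
         - 32 == 8; it drops to 7 unless a term is known nonnegative,
         covering the corner case (-2**12) * (-2**12) == 2**24.  */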
    case UDIV:
      /* The result must be <= the first operand.  If the first operand
         has the high bit set, we know nothing about the number of sign
         bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
                & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        return 1;
      else
        return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);
    case UMOD:
      /* The result must be <= the second operand.  */
      return cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
    case DIV:
      /* Similar to unsigned division, except that we have to worry about
         the case where the divisor is negative, in which case we have
         to allow for one fewer sign bit copy.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;
    case MOD:
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;
    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
         sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;
    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) < 0
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
        return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));
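
      /* Worked example (added for illustration): (ashiftrt:SI X (const_int 3))
         adds 3 sign bit copies, capped at 32, while (ashift:SI X (const_int 3))
         removes 3, never reporting fewer than 1.  */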
    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);
    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* A comparison result is either 0 or STORE_FLAG_VALUE.  If
         STORE_FLAG_VALUE is negative, take its 1's complement and remask.
         Then see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return 1.  */

  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
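
/* Illustrative sketch (not part of the original file): how a caller
   might use the public num_sign_bit_copies wrapper to ask whether an
   SImode value is really a sign-extended byte.  The helper name is
   hypothetical.  */
#if 0
static bool
si_value_is_sign_extended_byte_p (rtx x)
{
  /* 32 - 8 + 1 == 25 copies means bits 31..7 all equal bit 7,
     so the value fits in 8 signed bits.  */
  return num_sign_bit_copies (x, SImode) >= 25;
}
#endif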
/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */
int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}
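
/* Illustrative sketch (not part of the original file): a conversion
   pass might use insn_rtx_cost to decide whether an instruction is
   cheap enough to speculate past a branch.  The helper and its cost
   budget parameter are hypothetical.  */
#if 0
static bool
cheap_enough_to_speculate_p (rtx insn, int max_speculation_cost)
{
  int cost = insn_rtx_cost (PATTERN (insn));

  /* A return value of zero means the cost is unknown, so be
     conservative and refuse to speculate.  */
  return cost != 0 && cost <= COSTS_N_INSNS (max_speculation_cost);
}
#endif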