/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"

/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;

/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}

/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
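
/* Illustrative sketch (not part of the original source): under the two
   predicates above, a frame-pointer-relative address is stable while the
   value loaded through it is not.  Assuming the usual emit-rtl helpers
   plus_constant and gen_rtx_MEM:

     rtx addr = plus_constant (frame_pointer_rtx, -4);
     rtx mem = gen_rtx_MEM (SImode, addr);

     rtx_varies_p (addr, 0);   // 0: fp plus a constant compares reliably
     rtx_varies_p (mem, 0);    // 1: the stored value may change
     rtx_unstable_p (mem);     // 1 as well, unless MEM_READONLY_P is set  */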

/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

static int
rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || !unaligned_mems
              || GET_MODE_SIZE (mode) == 0)
            return 0;

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;
#endif

          return offset % GET_MODE_SIZE (mode) != 0;
        }

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}

/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
}

/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))
            return true;

          return nonzero_address_p (XEXP (x, 0));
        }
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
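
/* Illustrative sketch (not part of the original source): a symbol is a
   nonzero address unless it is weak, since a weak symbol may remain
   undefined and resolve to zero.  Assuming a symbol_ref built with
   gen_rtx_SYMBOL_REF:

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "some_var");
     nonzero_address_p (sym);               // true while !SYMBOL_REF_WEAK
     nonzero_address_p (stack_pointer_rtx); // true: sp is a stack reference  */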

/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}

/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
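
/* Illustrative example (not part of the original source): for the
   canonical form of a symbol plus offset,

     (const (plus (symbol_ref "foo") (const_int 4)))

   get_integer_term returns 4 and get_related_value returns the
   (symbol_ref "foo") operand, which is how cse.c links together
   constants that differ only by an offset.  */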

/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}

/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}

/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
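
/* Illustrative sketch (not part of the original source): given the
   pattern of a hypothetical increment insn

     (set (reg 100) (plus (reg 100) (const_int 1)))

   and assuming the pseudo's rtx is shared (as regno_reg_rtx guarantees),
   count_occurrences (pat, reg, 0) yields 1, counting only the use in the
   source, while count_occurrences (pat, reg, 1) yields 2, counting the
   destination as well.  */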

/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }

  return 0;
}
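
/* Illustrative sketch (not part of the original source): because
   reg_mentioned_p falls back on rtx_equal_p, it can look for any
   subexpression, not just a REG.  A typical use is checking whether an
   insn touches a pseudo at all:

     if (reg_mentioned_p (reg, PATTERN (insn)))
       ...  // REG appears as a use or a destination somewhere  */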

/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}

/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}

/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */
int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                         REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}

/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}

/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}

/* Helper function for set_of.  */
struct set_of_data
  {
    rtx found;
    rtx pat;
  };

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
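
/* Illustrative example (not part of the original source): for an insn
   whose pattern is

     (parallel [(set (reg 1) (reg 2))
                (clobber (reg 3))])

   set_of (reg3, insn) returns the (clobber (reg 3)) rtx, while
   set_of (reg2, insn) returns NULL_RTX since reg 2 is only used.  */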

/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead as single set insns.  In common case
                 only single set is present in the pattern so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach set first time, we just expect this is
                 the single set we are looking for and only when more
                 sets are found in the insn, we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
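
/* Illustrative sketch (not part of the original source): single_set_2 is
   the out-of-line part of the single_set macro from rtl.h, which is the
   usual entry point.  A common idiom:

     rtx set = single_set (insn);
     if (set)
       emit_move_insn (SET_DEST (set), SET_SRC (set));  // e.g. rematerialize

   A PARALLEL such as [(set ...) (clobber (reg:CC ...))] still counts as a
   single set; two live SETs make it return NULL_RTX.  */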

/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}

/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
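
/* Illustrative example (not part of the original source): the canonical
   no-op move is a register copied onto itself,

     (set (reg 5) (reg 5))

   for which set_noop_p returns 1.  A volatile memory "copy" such as
   (set (mem/v X) (mem/v X)) fails the side_effects_p test and is kept.  */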

/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a
     special insn which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}

/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}

/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}

/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
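
/* Illustrative example (not part of the original source): on a target
   where (reg:DI 0) occupies hard registers 0 and 1,

     reg_overlap_mentioned_p (gen_rtx_REG (DImode, 0),
                              gen_rtx_REG (SImode, 1))

   returns 1 even though the register numbers differ, because the
   refers_to_regno_p range [0, 2) covers register 1.  */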

/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }
  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
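
/* Illustrative sketch (not part of the original source): a minimal
   note_stores callback that counts register stores in an insn; the
   helper name is hypothetical.

     static void
     count_reg_stores (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
     {
       if (REG_P (x))
         (*(int *) data)++;
     }

     int n = 0;
     note_stores (PATTERN (insn), count_reg_stores, &n);

   set_of_1 above is a real in-file example of the same protocol.  */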

/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}

/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}

/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
              : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}

/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}

/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}

/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}
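
/* Illustrative sketch (not part of the original source): a typical
   liveness query using the note accessors above,

     if (find_regno_note (insn, REG_DEAD, REGNO (reg)))
       ...  // the (possibly wider) register dies here

   which, per the comment above, may match a note whose register merely
   overlaps REGNO when hard registers span several words.  */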

/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}

/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}

/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn) )
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}

/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}

/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  gcc_unreachable ();
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}

/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}

/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
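
/* Illustrative example (not part of the original source) of how the three
   predicates above differ on an auto-increment address:

     (post_inc (reg 4))

   volatile_insn_p and volatile_refs_p both return 0 for it, but
   side_effects_p returns 1, since evaluating it modifies the register.  */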

/* Return nonzero if evaluating rtx X might cause a trap.  UNALIGNED_MEMS
   controls whether nonzero is returned for unaligned memory accesses on
   strict alignment machines.  */

static int
may_trap_p_1 (rtx x, bool unaligned_mems)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (MEM_NOTRAP_P (x)
          && (!STRICT_ALIGNMENT || !unaligned_mems))
        return 0;
      return
        rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
        return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
        break;
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
        return 1;
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case EQ:
    case NE:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
        return 1;
      break;

    case NEG:
    case ABS:
    case SUBREG:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
          && flag_trapping_math)
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (may_trap_p_1 (XEXP (x, i), unaligned_mems))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), unaligned_mems))
              return 1;
        }
    }
  return 0;
}

/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  return may_trap_p_1 (x, false);
}

/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)
      {
        if (s->d == 1)
          return *s->ip;
        else
          return *s->cp;
      }

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   verify may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      if ((reg:SI) == 1)
        (set (reg:SI) (mem:SI (%fp - 7)))
      else
        (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

int
may_trap_or_fault_p (rtx x)
{
  return may_trap_p_1 (x, true);
}
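
/* Illustrative examples (not part of the original source):

     may_trap_p (gen_rtx_MEM (SImode, ptr_reg));  // 1: address may be bad

   whereas an integer division by a nonzero constant is safe:

     (udiv:SI (reg 100) (const_int 16))           // may_trap_p gives 0

   and only may_trap_or_fault_p additionally flags unaligned accesses on
   STRICT_ALIGNMENT targets.  ptr_reg here is a hypothetical pseudo.  */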

/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 0;

    case LT:
    case LTU:
    case GT:
    case GTU:
    case LE:
    case LEU:
    case GE:
    case GEU:
      return 1;

    default:
      break;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          if (inequality_comparisons_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}
2353 /* Replace any occurrence of FROM in X with TO. The function does
2354 not descend into CONST_DOUBLE expressions when replacing.
2356 Note that copying is not done so X must not be shared unless all copies
2357 are to be modified. */
2360 replace_rtx (rtx x, rtx from, rtx to)
2362 int i, j;
2363 const char *fmt;
2365 /* The following prevents looping when we would change a MEM inside a
2366 CONST_DOUBLE into the same CONST_DOUBLE. */
2367 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2368 return x;
2370 if (x == from)
2371 return to;
2373 /* Allow this function to make replacements in EXPR_LISTs. */
2374 if (x == 0)
2375 return 0;
2377 if (GET_CODE (x) == SUBREG)
2379 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2381 if (GET_CODE (new) == CONST_INT)
2383 x = simplify_subreg (GET_MODE (x), new,
2384 GET_MODE (SUBREG_REG (x)),
2385 SUBREG_BYTE (x));
2386 gcc_assert (x);
2388 else
2389 SUBREG_REG (x) = new;
2391 return x;
2393 else if (GET_CODE (x) == ZERO_EXTEND)
2395 rtx new = replace_rtx (XEXP (x, 0), from, to);
2397 if (GET_CODE (new) == CONST_INT)
2399 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2400 new, GET_MODE (XEXP (x, 0)));
2401 gcc_assert (x);
2403 else
2404 XEXP (x, 0) = new;
2406 return x;
2409 fmt = GET_RTX_FORMAT (GET_CODE (x));
2410 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2412 if (fmt[i] == 'e')
2413 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2414 else if (fmt[i] == 'E')
2415 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2416 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2419 return x;
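/* Usage sketch (hypothetical caller; mind the sharing caveat above):
   substitute a known constant for a pseudo in an unshared copy of an
   insn pattern.

     rtx pat = copy_rtx (PATTERN (insn));
     pat = replace_rtx (pat, pseudo_reg, GEN_INT (42));

   The SUBREG and ZERO_EXTEND cases above may fold the result into a
   simpler rtx, which is why the return value must be used rather than
   assuming a pure in-place update.  */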
2422 /* Throughout the rtx X, replace many registers according to REG_MAP.
2423 Return the replacement for X (which may be X with altered contents).
2424 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2425 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2427 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2428 should not be mapped to pseudos or vice versa since validate_change
2429 is not called.
2431 If REPLACE_DEST is 1, replacements are also done in destinations;
2432 otherwise, only sources are replaced. */
2435 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2437 enum rtx_code code;
2438 int i;
2439 const char *fmt;
2441 if (x == 0)
2442 return x;
2444 code = GET_CODE (x);
2445 switch (code)
2447 case SCRATCH:
2448 case PC:
2449 case CC0:
2450 case CONST_INT:
2451 case CONST_DOUBLE:
2452 case CONST_VECTOR:
2453 case CONST:
2454 case SYMBOL_REF:
2455 case LABEL_REF:
2456 return x;
2458 case REG:
2459 /* Verify that the register has an entry before trying to access it. */
2460 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2462 /* SUBREGs can't be shared. Always return a copy to ensure that if
2463 this replacement occurs more than once then each instance will
2464 get distinct rtx. */
2465 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2466 return copy_rtx (reg_map[REGNO (x)]);
2467 return reg_map[REGNO (x)];
2469 return x;
2471 case SUBREG:
2472 /* Prevent making nested SUBREGs. */
2473 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2474 && reg_map[REGNO (SUBREG_REG (x))] != 0
2475 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2477 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2478 return simplify_gen_subreg (GET_MODE (x), map_val,
2479 GET_MODE (SUBREG_REG (x)),
2480 SUBREG_BYTE (x));
2482 break;
2484 case SET:
2485 if (replace_dest)
2486 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2488 else if (MEM_P (SET_DEST (x))
2489 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2490 /* Even if we are not to replace destinations, replace a register if it
2491 is CONTAINED in the destination (i.e., the destination is memory or
2492 a STRICT_LOW_PART). */
2493 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2494 reg_map, nregs, 0);
2495 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2496 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2497 break;
2499 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2500 return x;
2502 default:
2503 break;
2506 fmt = GET_RTX_FORMAT (code);
2507 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2509 if (fmt[i] == 'e')
2510 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2511 else if (fmt[i] == 'E')
2513 int j;
2514 for (j = 0; j < XVECLEN (x, i); j++)
2515 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2516 nregs, replace_dest);
2519 return x;
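/* Usage sketch (hypothetical caller): renumber one pseudo throughout an
   insn body, in sources and destinations alike.  Entries left at 0 in
   the map are not replaced.

     unsigned int nregs = max_reg_num ();
     rtx *map = xcalloc (nregs, sizeof (rtx));
     map[REGNO (old_reg)] = new_reg;
     PATTERN (insn) = replace_regs (PATTERN (insn), map, nregs, 1);
     free (map);  */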
2522 /* Replace occurrences of the old label in *X with the new one.
2523 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2526 replace_label (rtx *x, void *data)
2528 rtx l = *x;
2529 rtx old_label = ((replace_label_data *) data)->r1;
2530 rtx new_label = ((replace_label_data *) data)->r2;
2531 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2533 if (l == NULL_RTX)
2534 return 0;
2536 if (GET_CODE (l) == SYMBOL_REF
2537 && CONSTANT_POOL_ADDRESS_P (l))
2539 rtx c = get_pool_constant (l);
2540 if (rtx_referenced_p (old_label, c))
2542 rtx new_c, new_l;
2543 replace_label_data *d = (replace_label_data *) data;
2545 /* Create a copy of constant C; replace the label inside
2546 but do not update LABEL_NUSES because uses in the constant pool
2547 are not counted. */
2548 new_c = copy_rtx (c);
2549 d->update_label_nuses = false;
2550 for_each_rtx (&new_c, replace_label, data);
2551 d->update_label_nuses = update_label_nuses;
2553 /* Add the new constant NEW_C to the constant pool and replace
2554 the old reference to the constant with the new reference. */
2555 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2556 *x = replace_rtx (l, l, new_l);
2558 return 0;
2561 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2562 field. This is not handled by for_each_rtx because it doesn't
2563 handle unprinted ('0') fields. */
2564 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2565 JUMP_LABEL (l) = new_label;
2567 if ((GET_CODE (l) == LABEL_REF
2568 || GET_CODE (l) == INSN_LIST)
2569 && XEXP (l, 0) == old_label)
2571 XEXP (l, 0) = new_label;
2572 if (update_label_nuses)
2574 ++LABEL_NUSES (new_label);
2575 --LABEL_NUSES (old_label);
2577 return 0;
2580 return 0;
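/* Usage sketch: replace_label is written as a for_each_rtx callback, so
   redirecting every reference from OLD_LABEL to NEW_LABEL inside an
   insn looks like this (hypothetical caller):

     replace_label_data data;
     data.r1 = old_label;
     data.r2 = new_label;
     data.update_label_nuses = true;
     for_each_rtx (&PATTERN (insn), replace_label, &data);

   With update_label_nuses set, the LABEL_NUSES counts of both labels
   are adjusted as references move from one to the other.  */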
2583 /* When *BODY is equal to X or X is directly referenced by *BODY
2584 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2585 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2587 static int
2588 rtx_referenced_p_1 (rtx *body, void *x)
2590 rtx y = (rtx) x;
2592 if (*body == NULL_RTX)
2593 return y == NULL_RTX;
2595 /* Return true if a label_ref *BODY refers to label Y. */
2596 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2597 return XEXP (*body, 0) == y;
2599 /* If *BODY is a reference to a pool constant, traverse the constant. */
2600 if (GET_CODE (*body) == SYMBOL_REF
2601 && CONSTANT_POOL_ADDRESS_P (*body))
2602 return rtx_referenced_p (y, get_pool_constant (*body));
2604 /* By default, compare the RTL expressions. */
2605 return rtx_equal_p (*body, y);
2608 /* Return true if X is referenced in BODY. */
2611 rtx_referenced_p (rtx x, rtx body)
2613 return for_each_rtx (&body, rtx_referenced_p_1, x);
2616 /* If INSN is a tablejump, return true and store the label (before the jump table) to
2617 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2619 bool
2620 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2622 rtx label, table;
2624 if (JUMP_P (insn)
2625 && (label = JUMP_LABEL (insn)) != NULL_RTX
2626 && (table = next_active_insn (label)) != NULL_RTX
2627 && JUMP_P (table)
2628 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2629 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2631 if (labelp)
2632 *labelp = label;
2633 if (tablep)
2634 *tablep = table;
2635 return true;
2637 return false;
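/* Usage sketch (hypothetical caller): walk the targets of a dispatch
   table.  For an ADDR_DIFF_VEC operand 0 is the base label and the
   element vector is operand 1; for an ADDR_VEC it is operand 0.

     rtx label, table;
     int i;
     if (tablejump_p (insn, &label, &table))
       {
         rtvec vec = XVEC (PATTERN (table),
                           GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
         for (i = 0; i < GET_NUM_ELEM (vec); i++)
           note_target (XEXP (RTVEC_ELT (vec, i), 0));
       }

   where note_target stands in for whatever the caller does with each
   CODE_LABEL.  label_is_jump_target_p below uses the same idiom.  */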
2640 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2641 constant that is not in the constant pool and not in the condition
2642 of an IF_THEN_ELSE. */
2644 static int
2645 computed_jump_p_1 (rtx x)
2647 enum rtx_code code = GET_CODE (x);
2648 int i, j;
2649 const char *fmt;
2651 switch (code)
2653 case LABEL_REF:
2654 case PC:
2655 return 0;
2657 case CONST:
2658 case CONST_INT:
2659 case CONST_DOUBLE:
2660 case CONST_VECTOR:
2661 case SYMBOL_REF:
2662 case REG:
2663 return 1;
2665 case MEM:
2666 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2667 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2669 case IF_THEN_ELSE:
2670 return (computed_jump_p_1 (XEXP (x, 1))
2671 || computed_jump_p_1 (XEXP (x, 2)));
2673 default:
2674 break;
2677 fmt = GET_RTX_FORMAT (code);
2678 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2680 if (fmt[i] == 'e'
2681 && computed_jump_p_1 (XEXP (x, i)))
2682 return 1;
2684 else if (fmt[i] == 'E')
2685 for (j = 0; j < XVECLEN (x, i); j++)
2686 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2687 return 1;
2690 return 0;
2693 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2695 Tablejumps and casesi insns are not considered indirect jumps;
2696 we can recognize them by a (use (label_ref)). */
2699 computed_jump_p (rtx insn)
2701 int i;
2702 if (JUMP_P (insn))
2704 rtx pat = PATTERN (insn);
2706 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2707 return 0;
2708 else if (GET_CODE (pat) == PARALLEL)
2710 int len = XVECLEN (pat, 0);
2711 int has_use_labelref = 0;
2713 for (i = len - 1; i >= 0; i--)
2714 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2715 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2716 == LABEL_REF))
2717 has_use_labelref = 1;
2719 if (! has_use_labelref)
2720 for (i = len - 1; i >= 0; i--)
2721 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2722 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2723 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2724 return 1;
2726 else if (GET_CODE (pat) == SET
2727 && SET_DEST (pat) == pc_rtx
2728 && computed_jump_p_1 (SET_SRC (pat)))
2729 return 1;
2731 return 0;
2734 /* The optimized inner loop of for_each_rtx, written to avoid useless recursive
2735 calls. Processes the subexpressions of EXP and passes them to F. */
2736 static int
2737 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2739 int result, i, j;
2740 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2741 rtx *x;
2743 for (; format[n] != '\0'; n++)
2745 switch (format[n])
2747 case 'e':
2748 /* Call F on X. */
2749 x = &XEXP (exp, n);
2750 result = (*f) (x, data);
2751 if (result == -1)
2752 /* Do not traverse sub-expressions. */
2753 continue;
2754 else if (result != 0)
2755 /* Stop the traversal. */
2756 return result;
2758 if (*x == NULL_RTX)
2759 /* There are no sub-expressions. */
2760 continue;
2762 i = non_rtx_starting_operands[GET_CODE (*x)];
2763 if (i >= 0)
2765 result = for_each_rtx_1 (*x, i, f, data);
2766 if (result != 0)
2767 return result;
2769 break;
2771 case 'V':
2772 case 'E':
2773 if (XVEC (exp, n) == 0)
2774 continue;
2775 for (j = 0; j < XVECLEN (exp, n); ++j)
2777 /* Call F on X. */
2778 x = &XVECEXP (exp, n, j);
2779 result = (*f) (x, data);
2780 if (result == -1)
2781 /* Do not traverse sub-expressions. */
2782 continue;
2783 else if (result != 0)
2784 /* Stop the traversal. */
2785 return result;
2787 if (*x == NULL_RTX)
2788 /* There are no sub-expressions. */
2789 continue;
2791 i = non_rtx_starting_operands[GET_CODE (*x)];
2792 if (i >= 0)
2794 result = for_each_rtx_1 (*x, i, f, data);
2795 if (result != 0)
2796 return result;
2799 break;
2801 default:
2802 /* Nothing to do. */
2803 break;
2807 return 0;
2810 /* Traverse X via depth-first search, calling F for each
2811 sub-expression (including X itself). F is also passed the DATA.
2812 If F returns -1, do not traverse sub-expressions, but continue
2813 traversing the rest of the tree. If F ever returns any other
2814 nonzero value, stop the traversal, and return the value returned
2815 by F. Otherwise, return 0. This function does not traverse inside
2816 tree structure that contains RTX_EXPRs, or into sub-expressions
2817 whose format code is `0' since it is not known whether or not those
2818 codes are actually RTL.
2820 This routine is very general, and could (should?) be used to
2821 implement many of the other routines in this file. */
2824 for_each_rtx (rtx *x, rtx_function f, void *data)
2826 int result;
2827 int i;
2829 /* Call F on X. */
2830 result = (*f) (x, data);
2831 if (result == -1)
2832 /* Do not traverse sub-expressions. */
2833 return 0;
2834 else if (result != 0)
2835 /* Stop the traversal. */
2836 return result;
2838 if (*x == NULL_RTX)
2839 /* There are no sub-expressions. */
2840 return 0;
2842 i = non_rtx_starting_operands[GET_CODE (*x)];
2843 if (i < 0)
2844 return 0;
2846 return for_each_rtx_1 (*x, i, f, data);
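/* Usage sketch (hypothetical caller): count the MEMs in an insn with a
   callback.  Returning 0 continues the walk, -1 skips the children of
   the current subexpression, and any other value stops the walk and
   becomes the return value of for_each_rtx.

     static int
     count_mems_1 (rtx *x, void *data)
     {
       if (*x != NULL_RTX && MEM_P (*x))
         ++*(int *) data;
       return 0;
     }

     int nmems = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &nmems);  */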
2850 /* Searches X for any reference to REGNO, returning the rtx of the
2851 reference found if any. Otherwise, returns NULL_RTX. */
2854 regno_use_in (unsigned int regno, rtx x)
2856 const char *fmt;
2857 int i, j;
2858 rtx tem;
2860 if (REG_P (x) && REGNO (x) == regno)
2861 return x;
2863 fmt = GET_RTX_FORMAT (GET_CODE (x));
2864 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2866 if (fmt[i] == 'e')
2868 if ((tem = regno_use_in (regno, XEXP (x, i))))
2869 return tem;
2871 else if (fmt[i] == 'E')
2872 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2873 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2874 return tem;
2877 return NULL_RTX;
2880 /* Return a value indicating whether OP, an operand of a commutative
2881 operation, is preferred as the first or second operand. The higher
2882 the value, the stronger the preference for being the first operand.
2883 We use negative values to indicate a preference for the first operand
2884 and positive values for the second operand. */
2887 commutative_operand_precedence (rtx op)
2889 enum rtx_code code = GET_CODE (op);
2891 /* Constants always go in the second operand. Prefer "nice" constants. */
2892 if (code == CONST_INT)
2893 return -7;
2894 if (code == CONST_DOUBLE)
2895 return -6;
2896 op = avoid_constant_pool_reference (op);
2897 code = GET_CODE (op);
2899 switch (GET_RTX_CLASS (code))
2901 case RTX_CONST_OBJ:
2902 if (code == CONST_INT)
2903 return -5;
2904 if (code == CONST_DOUBLE)
2905 return -4;
2906 return -3;
2908 case RTX_EXTRA:
2909 /* SUBREGs of objects should come second. */
2910 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2911 return -2;
2913 if (!CONSTANT_P (op))
2914 return 0;
2915 else
2916 /* As for RTX_CONST_OBJ. */
2917 return -3;
2919 case RTX_OBJ:
2920 /* Complex expressions should come first, so decrease the priority
2921 of objects. */
2922 return -1;
2924 case RTX_COMM_ARITH:
2925 /* Prefer operands that are themselves commutative to be first.
2926 This helps to make things linear. In particular,
2927 (and (and (reg) (reg)) (not (reg))) is canonical. */
2928 return 4;
2930 case RTX_BIN_ARITH:
2931 /* If only one operand is a binary expression, it will be the first
2932 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2933 is canonical, although it will usually be further simplified. */
2934 return 2;
2936 case RTX_UNARY:
2937 /* Then prefer NEG and NOT. */
2938 if (code == NEG || code == NOT)
2939 return 1;
2941 default:
2942 return 0;
2946 /* Return 1 iff it is necessary to swap operands of commutative operation
2947 in order to canonicalize the expression. */
2950 swap_commutative_operands_p (rtx x, rtx y)
2952 return (commutative_operand_precedence (x)
2953 < commutative_operand_precedence (y));
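/* Usage sketch: this predicate is how callers canonicalize commutative
   operations before matching or simplifying (hypothetical fragment):

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   After the swap a CONST_INT always ends up as op1, the canonical
   position given the precedences above.  */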
2956 /* Return 1 if X is an autoincrement side effect and the register is
2957 not the stack pointer. */
2959 auto_inc_p (rtx x)
2961 switch (GET_CODE (x))
2963 case PRE_INC:
2964 case POST_INC:
2965 case PRE_DEC:
2966 case POST_DEC:
2967 case PRE_MODIFY:
2968 case POST_MODIFY:
2969 /* There are no REG_INC notes for SP. */
2970 if (XEXP (x, 0) != stack_pointer_rtx)
2971 return 1;
2972 default:
2973 break;
2975 return 0;
2978 /* Return 1 if the sequence of instructions beginning with FROM and up
2979 to and including TO is safe to move. If NEW_TO is non-NULL, and
2980 the sequence is not already safe to move, but can be easily
2981 extended to a sequence which is safe, then NEW_TO will point to the
2982 end of the extended sequence.
2984 For now, this function only checks that the region contains whole
2985 exception regions, but it could be extended to check additional
2986 conditions as well. */
2989 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
2991 int eh_region_count = 0;
2992 int past_to_p = 0;
2993 rtx r = from;
2995 /* By default, assume the end of the region will be what was
2996 suggested. */
2997 if (new_to)
2998 *new_to = to;
3000 while (r)
3002 if (NOTE_P (r))
3004 switch (NOTE_LINE_NUMBER (r))
3006 case NOTE_INSN_EH_REGION_BEG:
3007 ++eh_region_count;
3008 break;
3010 case NOTE_INSN_EH_REGION_END:
3011 if (eh_region_count == 0)
3012 /* This sequence of instructions contains the end of
3013 an exception region, but not the beginning. Moving
3014 it will cause chaos. */
3015 return 0;
3017 --eh_region_count;
3018 break;
3020 default:
3021 break;
3024 else if (past_to_p)
3025 /* If we've passed TO, and we see a non-note instruction, we
3026 can't extend the sequence to a movable sequence. */
3027 return 0;
3029 if (r == to)
3031 if (!new_to)
3032 /* It's OK to move the sequence if there were matched sets of
3033 exception region notes. */
3034 return eh_region_count == 0;
3036 past_to_p = 1;
3039 /* It's OK to move the sequence if there were matched sets of
3040 exception region notes. */
3041 if (past_to_p && eh_region_count == 0)
3043 *new_to = r;
3044 return 1;
3047 /* Go to the next instruction. */
3048 r = NEXT_INSN (r);
3051 return 0;
3054 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3056 loc_mentioned_in_p (rtx *loc, rtx in)
3058 enum rtx_code code = GET_CODE (in);
3059 const char *fmt = GET_RTX_FORMAT (code);
3060 int i, j;
3062 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3064 if (loc == &in->u.fld[i].rt_rtx)
3065 return 1;
3066 if (fmt[i] == 'e')
3068 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3069 return 1;
3071 else if (fmt[i] == 'E')
3072 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3073 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3074 return 1;
3076 return 0;
3079 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3080 and SUBREG_BYTE, return the bit offset where the subreg begins
3081 (counting from the least significant bit of the operand). */
3083 unsigned int
3084 subreg_lsb_1 (enum machine_mode outer_mode,
3085 enum machine_mode inner_mode,
3086 unsigned int subreg_byte)
3088 unsigned int bitpos;
3089 unsigned int byte;
3090 unsigned int word;
3092 /* A paradoxical subreg begins at bit position 0. */
3093 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3094 return 0;
3096 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3097 /* If the subreg crosses a word boundary ensure that
3098 it also begins and ends on a word boundary. */
3099 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3100 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3101 && (subreg_byte % UNITS_PER_WORD
3102 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3104 if (WORDS_BIG_ENDIAN)
3105 word = (GET_MODE_SIZE (inner_mode)
3106 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3107 else
3108 word = subreg_byte / UNITS_PER_WORD;
3109 bitpos = word * BITS_PER_WORD;
3111 if (BYTES_BIG_ENDIAN)
3112 byte = (GET_MODE_SIZE (inner_mode)
3113 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3114 else
3115 byte = subreg_byte % UNITS_PER_WORD;
3116 bitpos += byte * BITS_PER_UNIT;
3118 return bitpos;
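/* Worked example (assuming UNITS_PER_WORD == 4): for
   (subreg:SI (reg:DI) 4), subreg_lsb_1 (SImode, DImode, 4) returns
   bit 32 when WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN are both 0
   (word = 4 / 4 = 1, byte = 4 % 4 = 0) and bit 0 when both are 1
   (word = (8 - (4 + 4)) / 4 = 0, byte = (8 - (4 + 4)) % 4 = 0):
   byte offset 4 names the most significant word of the DImode value
   on a little-endian target but the least significant word on a
   big-endian one.  */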
3121 /* Given a subreg X, return the bit offset where the subreg begins
3122 (counting from the least significant bit of the reg). */
3124 unsigned int
3125 subreg_lsb (rtx x)
3127 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3128 SUBREG_BYTE (x));
3131 /* This function returns the regno offset of a subreg expression.
3132 xregno - A regno of an inner hard subreg_reg (or what will become one).
3133 xmode - The mode of xregno.
3134 offset - The byte offset.
3135 ymode - The mode of a top level SUBREG (or what may become one).
3136 RETURN - The regno offset which would be used. */
3137 unsigned int
3138 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3139 unsigned int offset, enum machine_mode ymode)
3141 int nregs_xmode, nregs_ymode;
3142 int mode_multiple, nregs_multiple;
3143 int y_offset;
3145 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3147 /* Adjust nregs_xmode to allow for 'holes'. */
3148 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3149 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3150 else
3151 nregs_xmode = hard_regno_nregs[xregno][xmode];
3153 nregs_ymode = hard_regno_nregs[xregno][ymode];
3155 /* If this is a big endian paradoxical subreg, which uses more actual
3156 hard registers than the original register, we must return a negative
3157 offset so that we find the proper highpart of the register. */
3158 if (offset == 0
3159 && nregs_ymode > nregs_xmode
3160 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3161 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3162 return nregs_xmode - nregs_ymode;
3164 if (offset == 0 || nregs_xmode == nregs_ymode)
3165 return 0;
3167 /* Size of ymode must not be greater than the size of xmode. */
3168 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3169 gcc_assert (mode_multiple != 0);
3171 y_offset = offset / GET_MODE_SIZE (ymode);
3172 nregs_multiple = nregs_xmode / nregs_ymode;
3173 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
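/* Worked example (hypothetical target with word-sized hard registers
   and 4-byte words): for (subreg:SI (reg:DI k) 4) we have
   nregs_xmode = 2, nregs_ymode = 1, mode_multiple = 8 / 4 = 2,
   y_offset = 4 / 4 = 1 and nregs_multiple = 2, so the function
   returns (1 / (2 / 2)) * 1 = 1 and the subreg refers to hard
   register k + 1.  */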
3176 /* This function returns true when the offset is representable via
3177 subreg_offset in the given regno.
3178 xregno - A regno of an inner hard subreg_reg (or what will become one).
3179 xmode - The mode of xregno.
3180 offset - The byte offset.
3181 ymode - The mode of a top level SUBREG (or what may become one).
3182 RETURN - Whether the offset is representable. */
3183 bool
3184 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3185 unsigned int offset, enum machine_mode ymode)
3187 int nregs_xmode, nregs_ymode;
3188 int mode_multiple, nregs_multiple;
3189 int y_offset;
3190 int regsize_xmode, regsize_ymode;
3192 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3194 /* If there are holes in a non-scalar mode in registers, we expect
3195 that it is made up of its units concatenated together. */
3196 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3198 enum machine_mode xmode_unit;
3200 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3201 if (GET_MODE_INNER (xmode) == VOIDmode)
3202 xmode_unit = xmode;
3203 else
3204 xmode_unit = GET_MODE_INNER (xmode);
3205 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3206 gcc_assert (nregs_xmode
3207 == (GET_MODE_NUNITS (xmode)
3208 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3209 gcc_assert (hard_regno_nregs[xregno][xmode]
3210 == (hard_regno_nregs[xregno][xmode_unit]
3211 * GET_MODE_NUNITS (xmode)));
3213 /* You can only ask for a SUBREG of a value with holes in the middle
3214 if you don't cross the holes. (Such a SUBREG should be done by
3215 picking a different register class, or doing it in memory if
3216 necessary.) An example of a value with holes is XCmode on 32-bit
3217 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3218 3 for each part, but in memory it's two 128-bit parts.
3219 Padding is assumed to be at the end (not necessarily the 'high part')
3220 of each unit. */
3221 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3222 < GET_MODE_NUNITS (xmode))
3223 && (offset / GET_MODE_SIZE (xmode_unit)
3224 != ((offset + GET_MODE_SIZE (ymode) - 1)
3225 / GET_MODE_SIZE (xmode_unit))))
3226 return false;
3228 else
3229 nregs_xmode = hard_regno_nregs[xregno][xmode];
3231 nregs_ymode = hard_regno_nregs[xregno][ymode];
3233 /* Paradoxical subregs are otherwise valid. */
3234 if (offset == 0
3235 && nregs_ymode > nregs_xmode
3236 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3237 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3238 return true;
3240 /* If registers store different numbers of bits in the different
3241 modes, we cannot generally form this subreg. */
3242 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3243 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3244 if (regsize_xmode > regsize_ymode && nregs_ymode > 1)
3245 return false;
3246 if (regsize_ymode > regsize_xmode && nregs_xmode > 1)
3247 return false;
3249 /* Lowpart subregs are otherwise valid. */
3250 if (offset == subreg_lowpart_offset (ymode, xmode))
3251 return true;
3253 /* This should always pass, otherwise we don't know how to verify
3254 the constraint. These conditions may be relaxed but
3255 subreg_regno_offset would need to be redesigned. */
3256 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3257 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3259 /* The XMODE value can be seen as a vector of NREGS_XMODE
3260 values. The subreg must represent a lowpart of a given field.
3261 Compute what field it is. */
3262 offset -= subreg_lowpart_offset (ymode,
3263 mode_for_size (GET_MODE_BITSIZE (xmode)
3264 / nregs_xmode,
3265 MODE_INT, 0));
3267 /* Size of ymode must not be greater than the size of xmode. */
3268 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3269 gcc_assert (mode_multiple != 0);
3271 y_offset = offset / GET_MODE_SIZE (ymode);
3272 nregs_multiple = nregs_xmode / nregs_ymode;
3274 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3275 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3277 return (!(y_offset % (mode_multiple / nregs_multiple)));
3280 /* Return the final regno that a subreg expression refers to. */
3281 unsigned int
3282 subreg_regno (rtx x)
3284 unsigned int ret;
3285 rtx subreg = SUBREG_REG (x);
3286 int regno = REGNO (subreg);
3288 ret = regno + subreg_regno_offset (regno,
3289 GET_MODE (subreg),
3290 SUBREG_BYTE (x),
3291 GET_MODE (x));
3292 return ret;
3295 struct parms_set_data
3297 int nregs;
3298 HARD_REG_SET regs;
3301 /* Helper function for noticing stores to parameter registers. */
3302 static void
3303 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3305 struct parms_set_data *d = data;
3306 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3307 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3309 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3310 d->nregs--;
3314 /* Look backward for first parameter to be loaded.
3315 Note that loads of all parameters will not necessarily be
3316 found if CSE has eliminated some of them (e.g., an argument
3317 to the outer function is passed down as a parameter).
3318 Do not skip BOUNDARY. */
3320 find_first_parameter_load (rtx call_insn, rtx boundary)
3322 struct parms_set_data parm;
3323 rtx p, before, first_set;
3325 /* Since different machines initialize their parameter registers
3326 in different orders, assume nothing. Collect the set of all
3327 parameter registers. */
3328 CLEAR_HARD_REG_SET (parm.regs);
3329 parm.nregs = 0;
3330 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3331 if (GET_CODE (XEXP (p, 0)) == USE
3332 && REG_P (XEXP (XEXP (p, 0), 0)))
3334 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3336 /* We only care about registers which can hold function
3337 arguments. */
3338 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3339 continue;
3341 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3342 parm.nregs++;
3344 before = call_insn;
3345 first_set = call_insn;
3347 /* Search backward for the first set of a register in this set. */
3348 while (parm.nregs && before != boundary)
3350 before = PREV_INSN (before);
3352 /* It is possible that some loads got CSEed from one call to
3353 another. Stop in that case. */
3354 if (CALL_P (before))
3355 break;
3357 /* Our caller must either ensure that we will find all sets
3358 (in case the code has not been optimized yet), or guard
3359 against intervening labels by setting BOUNDARY to the preceding
3360 CODE_LABEL. */
3361 if (LABEL_P (before))
3363 gcc_assert (before == boundary);
3364 break;
3367 if (INSN_P (before))
3369 int nregs_old = parm.nregs;
3370 note_stores (PATTERN (before), parms_set, &parm);
3371 /* If we found something that did not set a parameter reg,
3372 we're done. Do not keep going, as that might result
3373 in hoisting an insn before the setting of a pseudo
3374 that is used by the hoisted insn. */
3375 if (nregs_old != parm.nregs)
3376 first_set = before;
3377 else
3378 break;
3381 return first_set;
3384 /* Return true if we should avoid inserting code between INSN and preceding
3385 call instruction. */
3387 bool
3388 keep_with_call_p (rtx insn)
3390 rtx set;
3392 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3394 if (REG_P (SET_DEST (set))
3395 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3396 && fixed_regs[REGNO (SET_DEST (set))]
3397 && general_operand (SET_SRC (set), VOIDmode))
3398 return true;
3399 if (REG_P (SET_SRC (set))
3400 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3401 && REG_P (SET_DEST (set))
3402 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3403 return true;
3404 /* There may be a stack pop just after the call and before the store
3405 of the return register. Search for the actual store when deciding
3406 if we can break or not. */
3407 if (SET_DEST (set) == stack_pointer_rtx)
3409 rtx i2 = next_nonnote_insn (insn);
3410 if (i2 && keep_with_call_p (i2))
3411 return true;
3414 return false;
3417 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3418 to non-complex jumps. That is, direct unconditional, conditional,
3419 and tablejumps, but not computed jumps or returns. It also does
3420 not apply to the fallthru case of a conditional jump. */
3422 bool
3423 label_is_jump_target_p (rtx label, rtx jump_insn)
3425 rtx tmp = JUMP_LABEL (jump_insn);
3427 if (label == tmp)
3428 return true;
3430 if (tablejump_p (jump_insn, NULL, &tmp))
3432 rtvec vec = XVEC (PATTERN (tmp),
3433 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3434 int i, veclen = GET_NUM_ELEM (vec);
3436 for (i = 0; i < veclen; ++i)
3437 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3438 return true;
3441 return false;
3445 /* Return an estimate of the cost of computing rtx X.
3446 One use is in cse, to decide which expression to keep in the hash table.
3447 Another is in rtl generation, to pick the cheapest way to multiply.
3448 Other uses like the latter are expected in the future. */
3451 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3453 int i, j;
3454 enum rtx_code code;
3455 const char *fmt;
3456 int total;
3458 if (x == 0)
3459 return 0;
3461 /* Compute the default costs of certain things.
3462 Note that targetm.rtx_costs can override the defaults. */
3464 code = GET_CODE (x);
3465 switch (code)
3467 case MULT:
3468 total = COSTS_N_INSNS (5);
3469 break;
3470 case DIV:
3471 case UDIV:
3472 case MOD:
3473 case UMOD:
3474 total = COSTS_N_INSNS (7);
3475 break;
3476 case USE:
3477 /* Used in loop.c and combine.c as a marker. */
3478 total = 0;
3479 break;
3480 default:
3481 total = COSTS_N_INSNS (1);
3484 switch (code)
3486 case REG:
3487 return 0;
3489 case SUBREG:
3490 total = 0;
3491 /* If we can't tie these modes, make this expensive. The larger
3492 the mode, the more expensive it is. */
3493 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3494 return COSTS_N_INSNS (2
3495 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3496 break;
3498 default:
3499 if (targetm.rtx_costs (x, code, outer_code, &total))
3500 return total;
3501 break;
3504 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3505 which is already in total. */
3507 fmt = GET_RTX_FORMAT (code);
3508 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3509 if (fmt[i] == 'e')
3510 total += rtx_cost (XEXP (x, i), code);
3511 else if (fmt[i] == 'E')
3512 for (j = 0; j < XVECLEN (x, i); j++)
3513 total += rtx_cost (XVECEXP (x, i, j), code);
3515 return total;
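/* Usage sketch: cse-style callers compare alternative forms of a value
   and keep the cheaper one (hypothetical fragment; SET is the outer
   code in which the expression will appear):

     rtx cheaper = (rtx_cost (new_rtx, SET) < rtx_cost (old_rtx, SET)
                    ? new_rtx : old_rtx);

   With the defaults above a MULT costs COSTS_N_INSNS (5) and a DIV
   COSTS_N_INSNS (7), but targetm.rtx_costs may override both.  */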
3518 /* Return cost of address expression X.
3519 Expect that X is a properly formed address reference. */
3522 address_cost (rtx x, enum machine_mode mode)
3524 /* We may be asked for the cost of various unusual addresses, such as the
3525 operands of a push instruction. It is not worthwhile to complicate
3526 the target hook with such cases. */
3528 if (!memory_address_p (mode, x))
3529 return 1000;
3531 return targetm.address_cost (x);
3534 /* If the target doesn't override, compute the cost as with arithmetic. */
3537 default_address_cost (rtx x)
3539 return rtx_cost (x, MEM);
3543 unsigned HOST_WIDE_INT
3544 nonzero_bits (rtx x, enum machine_mode mode)
3546 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3549 unsigned int
3550 num_sign_bit_copies (rtx x, enum machine_mode mode)
3552 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
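/* Worked examples of the two queries, in SImode and assuming nothing
   is known about the registers involved:
   nonzero_bits ((and:SI (reg:SI r) (const_int 255)), SImode) == 255,
   since the AND case below intersects the nonzero bits of the two
   operands; num_sign_bit_copies ((sign_extend:SI (reg:QI q)), SImode)
   is at least 25, since the SIGN_EXTEND case adds the 24 widening
   bits to the at least one sign-bit copy of the QImode operand.  */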
3555 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3556 It avoids exponential behavior in nonzero_bits1 when X has
3557 identical subexpressions on the first or the second level. */
3559 static unsigned HOST_WIDE_INT
3560 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3561 enum machine_mode known_mode,
3562 unsigned HOST_WIDE_INT known_ret)
3564 if (x == known_x && mode == known_mode)
3565 return known_ret;
3567 /* Try to find identical subexpressions. If found call
3568 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3569 precomputed value for the subexpression as KNOWN_RET. */
3571 if (ARITHMETIC_P (x))
3573 rtx x0 = XEXP (x, 0);
3574 rtx x1 = XEXP (x, 1);
3576 /* Check the first level. */
3577 if (x0 == x1)
3578 return nonzero_bits1 (x, mode, x0, mode,
3579 cached_nonzero_bits (x0, mode, known_x,
3580 known_mode, known_ret));
3582 /* Check the second level. */
3583 if (ARITHMETIC_P (x0)
3584 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3585 return nonzero_bits1 (x, mode, x1, mode,
3586 cached_nonzero_bits (x1, mode, known_x,
3587 known_mode, known_ret));
3589 if (ARITHMETIC_P (x1)
3590 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3591 return nonzero_bits1 (x, mode, x0, mode,
3592 cached_nonzero_bits (x0, mode, known_x,
3593 known_mode, known_ret));
3596 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3599 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3600 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3601 is less useful. We can't allow both, because that results in exponential
3602 run time recursion. There is a nullstone testcase that triggered
3603 this. This macro avoids accidental uses of num_sign_bit_copies. */
3604 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3606 /* Given an expression, X, compute which bits in X can be nonzero.
3607 We don't care about bits outside of those defined in MODE.
3609 For most X this is simply GET_MODE_MASK (MODE), but if X is
3610 an arithmetic operation, we can do better. */
3612 static unsigned HOST_WIDE_INT
3613 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3614 enum machine_mode known_mode,
3615 unsigned HOST_WIDE_INT known_ret)
3617 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3618 unsigned HOST_WIDE_INT inner_nz;
3619 enum rtx_code code;
3620 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3622 /* For floating-point values, assume all bits are needed. */
3623 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3624 return nonzero;
3626 /* If X is wider than MODE, use its mode instead. */
3627 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3629 mode = GET_MODE (x);
3630 nonzero = GET_MODE_MASK (mode);
3631 mode_width = GET_MODE_BITSIZE (mode);
3634 if (mode_width > HOST_BITS_PER_WIDE_INT)
3635 /* Our only callers in this case look for single bit values. So
3636 just return the mode mask. Those tests will then be false. */
3637 return nonzero;
3639 #ifndef WORD_REGISTER_OPERATIONS
3640 /* If MODE is wider than X, but both are a single word for both the host
3641 and target machines, we can compute this from which bits of the
3642 object might be nonzero in its own mode, taking into account the fact
3643 that on many CISC machines, accessing an object in a wider mode
3644 causes the high-order bits to become undefined. So they are
3645 not known to be zero. */
3647 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3648 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3649 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3650 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3652 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3653 known_x, known_mode, known_ret);
3654 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3655 return nonzero;
3657 #endif
3659 code = GET_CODE (x);
3660 switch (code)
3662 case REG:
3663 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3664 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3665 all the bits above ptr_mode are known to be zero. */
3666 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3667 && REG_POINTER (x))
3668 nonzero &= GET_MODE_MASK (ptr_mode);
3669 #endif
3671 /* Include declared information about alignment of pointers. */
3672 /* ??? We don't properly preserve REG_POINTER changes across
3673 pointer-to-integer casts, so we can't trust it except for
3674 things that we know must be pointers. See execute/960116-1.c. */
3675 if ((x == stack_pointer_rtx
3676 || x == frame_pointer_rtx
3677 || x == arg_pointer_rtx)
3678 && REGNO_POINTER_ALIGN (REGNO (x)))
3680 unsigned HOST_WIDE_INT alignment
3681 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3683 #ifdef PUSH_ROUNDING
3684 /* If PUSH_ROUNDING is defined, it is possible for the
3685 stack to be momentarily aligned only to that amount,
3686 so we pick the least alignment. */
3687 if (x == stack_pointer_rtx && PUSH_ARGS)
3688 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3689 alignment);
3690 #endif
3692 nonzero &= ~(alignment - 1);
3696 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3697 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3698 known_mode, known_ret,
3699 &nonzero_for_hook);
3701 if (new)
3702 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3703 known_mode, known_ret);
3705 return nonzero_for_hook;
3708 case CONST_INT:
3709 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3710 /* If X is negative in MODE, sign-extend the value. */
3711 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3712 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3713 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3714 #endif
3716 return INTVAL (x);
3718 case MEM:
3719 #ifdef LOAD_EXTEND_OP
3720 /* In many, if not most, RISC machines, reading a byte from memory
3721 zeros the rest of the register. Noticing that fact saves a lot
3722 of extra zero-extends. */
3723 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3724 nonzero &= GET_MODE_MASK (GET_MODE (x));
3725 #endif
3726 break;
3728 case EQ: case NE:
3729 case UNEQ: case LTGT:
3730 case GT: case GTU: case UNGT:
3731 case LT: case LTU: case UNLT:
3732 case GE: case GEU: case UNGE:
3733 case LE: case LEU: case UNLE:
3734 case UNORDERED: case ORDERED:
3735 /* If this produces an integer result, we know which bits are set.
3736 Code here used to clear bits outside the mode of X, but that is
3737 now done above. */
3738 /* Mind that MODE is the mode the caller wants to look at this
3739 operation in, and not the actual operation mode. We can wind
3740 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3741 that describes the results of a vector compare. */
3742 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3743 && mode_width <= HOST_BITS_PER_WIDE_INT)
3744 nonzero = STORE_FLAG_VALUE;
3745 break;
3747 case NEG:
3748 #if 0
3749 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3750 and num_sign_bit_copies. */
3751 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3752 == GET_MODE_BITSIZE (GET_MODE (x)))
3753 nonzero = 1;
3754 #endif
3756 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3757 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3758 break;
3760 case ABS:
3761 #if 0
3762 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3763 and num_sign_bit_copies. */
3764 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3765 == GET_MODE_BITSIZE (GET_MODE (x)))
3766 nonzero = 1;
3767 #endif
3768 break;
3770 case TRUNCATE:
3771 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3772 known_x, known_mode, known_ret)
3773 & GET_MODE_MASK (mode));
3774 break;
3776 case ZERO_EXTEND:
3777 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3778 known_x, known_mode, known_ret);
3779 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3780 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3781 break;
3783 case SIGN_EXTEND:
3784 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3785 Otherwise, show all the bits in the outer mode but not the inner
3786 may be nonzero. */
3787 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3788 known_x, known_mode, known_ret);
3789 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3791 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3792 if (inner_nz
3793 & (((HOST_WIDE_INT) 1
3794 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3795 inner_nz |= (GET_MODE_MASK (mode)
3796 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3799 nonzero &= inner_nz;
3800 break;
3802 case AND:
3803 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3804 known_x, known_mode, known_ret)
3805 & cached_nonzero_bits (XEXP (x, 1), mode,
3806 known_x, known_mode, known_ret);
3807 break;
3809 case XOR: case IOR:
3810 case UMIN: case UMAX: case SMIN: case SMAX:
3812 unsigned HOST_WIDE_INT nonzero0 =
3813 cached_nonzero_bits (XEXP (x, 0), mode,
3814 known_x, known_mode, known_ret);
3816 /* Don't call nonzero_bits for the second time if it cannot change
3817 anything. */
3818 if ((nonzero & nonzero0) != nonzero)
3819 nonzero &= nonzero0
3820 | cached_nonzero_bits (XEXP (x, 1), mode,
3821 known_x, known_mode, known_ret);
3823 break;
3825 case PLUS: case MINUS:
3826 case MULT:
3827 case DIV: case UDIV:
3828 case MOD: case UMOD:
3829 /* We can apply the rules of arithmetic to compute the number of
3830 high- and low-order zero bits of these operations. We start by
3831 computing the width (position of the highest-order nonzero bit)
3832 and the number of low-order zero bits for each value. */
3834 unsigned HOST_WIDE_INT nz0 =
3835 cached_nonzero_bits (XEXP (x, 0), mode,
3836 known_x, known_mode, known_ret);
3837 unsigned HOST_WIDE_INT nz1 =
3838 cached_nonzero_bits (XEXP (x, 1), mode,
3839 known_x, known_mode, known_ret);
3840 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3841 int width0 = floor_log2 (nz0) + 1;
3842 int width1 = floor_log2 (nz1) + 1;
3843 int low0 = floor_log2 (nz0 & -nz0);
3844 int low1 = floor_log2 (nz1 & -nz1);
3845 HOST_WIDE_INT op0_maybe_minusp
3846 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3847 HOST_WIDE_INT op1_maybe_minusp
3848 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3849 unsigned int result_width = mode_width;
3850 int result_low = 0;
3852 switch (code)
3854 case PLUS:
3855 result_width = MAX (width0, width1) + 1;
3856 result_low = MIN (low0, low1);
3857 break;
3858 case MINUS:
3859 result_low = MIN (low0, low1);
3860 break;
3861 case MULT:
3862 result_width = width0 + width1;
3863 result_low = low0 + low1;
3864 break;
3865 case DIV:
3866 if (width1 == 0)
3867 break;
3868 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3869 result_width = width0;
3870 break;
3871 case UDIV:
3872 if (width1 == 0)
3873 break;
3874 result_width = width0;
3875 break;
3876 case MOD:
3877 if (width1 == 0)
3878 break;
3879 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3880 result_width = MIN (width0, width1);
3881 result_low = MIN (low0, low1);
3882 break;
3883 case UMOD:
3884 if (width1 == 0)
3885 break;
3886 result_width = MIN (width0, width1);
3887 result_low = MIN (low0, low1);
3888 break;
3889 default:
3890 gcc_unreachable ();
3893 if (result_width < mode_width)
3894 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3896 if (result_low > 0)
3897 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
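/* Worked example: if nz0 == 0x0f (width0 = 4, low0 = 0) and
   nz1 == 0x03 (width1 = 2, low1 = 0), then for PLUS we get
   result_width = 5 and nonzero is masked down to 0x1f; the
   largest possible sum, 15 + 3 = 18, indeed fits in 5 bits.
   For MULT, result_width = 6 covers the largest product,
   15 * 3 = 45.  */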
3899 #ifdef POINTERS_EXTEND_UNSIGNED
3900 /* If pointers extend unsigned and this is an addition or subtraction
3901 to a pointer in Pmode, all the bits above ptr_mode are known to be
3902 zero. */
3903 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3904 && (code == PLUS || code == MINUS)
3905 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3906 nonzero &= GET_MODE_MASK (ptr_mode);
3907 #endif
3909 break;
3911 case ZERO_EXTRACT:
3912 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3913 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3914 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3915 break;
3917 case SUBREG:
3918 /* If this is a SUBREG formed for a promoted variable that has
3919 been zero-extended, we know that at least the high-order bits
3920 are zero, though others might be too. */
3922 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3923 nonzero = GET_MODE_MASK (GET_MODE (x))
3924 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3925 known_x, known_mode, known_ret);
3927 /* If the inner mode is a single word for both the host and target
3928 machines, we can compute this from which bits of the inner
3929 object might be nonzero. */
3930 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3931 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3932 <= HOST_BITS_PER_WIDE_INT))
3934 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3935 known_x, known_mode, known_ret);
3937 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3938 /* If this is a typical RISC machine, we only have to worry
3939 about the way loads are extended. */
3940 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3941 ? (((nonzero
3942 & (((unsigned HOST_WIDE_INT) 1
3943 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3944 != 0))
3945 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3946 || !MEM_P (SUBREG_REG (x)))
3947 #endif
3949 /* On many CISC machines, accessing an object in a wider mode
3950 causes the high-order bits to become undefined. So they are
3951 not known to be zero. */
3952 if (GET_MODE_SIZE (GET_MODE (x))
3953 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3954 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3955 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3958 break;
3960 case ASHIFTRT:
3961 case LSHIFTRT:
3962 case ASHIFT:
3963 case ROTATE:
3964 /* The nonzero bits are in two classes: any bits within MODE
3965 that aren't in GET_MODE (x) are always significant. The rest of the
3966 nonzero bits are those that are significant in the operand of
3967 the shift when shifted the appropriate number of bits. This
3968 shows that high-order bits are cleared by the right shift and
3969 low-order bits by left shifts. */
3970 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3971 && INTVAL (XEXP (x, 1)) >= 0
3972 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3974 enum machine_mode inner_mode = GET_MODE (x);
3975 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3976 int count = INTVAL (XEXP (x, 1));
3977 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3978 unsigned HOST_WIDE_INT op_nonzero =
3979 cached_nonzero_bits (XEXP (x, 0), mode,
3980 known_x, known_mode, known_ret);
3981 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3982 unsigned HOST_WIDE_INT outer = 0;
3984 if (mode_width > width)
3985 outer = (op_nonzero & nonzero & ~mode_mask);
3987 if (code == LSHIFTRT)
3988 inner >>= count;
3989 else if (code == ASHIFTRT)
3991 inner >>= count;
3993 /* If the sign bit may have been nonzero before the shift, we
3994 need to mark all the places it could have been copied to
3995 by the shift as possibly nonzero. */
3996 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3997 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3999 else if (code == ASHIFT)
4000 inner <<= count;
4001 else
4002 inner = ((inner << (count % width)
4003 | (inner >> (width - (count % width)))) & mode_mask);
4005 nonzero &= (outer | inner);
4007 break;
4009 case FFS:
4010 case POPCOUNT:
4011 /* This is at most the number of bits in the mode. */
4012 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4013 break;
4015 case CLZ:
4016 /* If CLZ has a known value at zero, then the nonzero bits are
4017 that value, plus the number of bits in the mode minus one. */
4018 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4019 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4020 else
4021 nonzero = -1;
4022 break;
4024 case CTZ:
4025 /* If CTZ has a known value at zero, then the nonzero bits are
4026 that value, plus the number of bits in the mode minus one. */
4027 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4028 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4029 else
4030 nonzero = -1;
4031 break;
4033 case PARITY:
4034 nonzero = 1;
4035 break;
4037 case IF_THEN_ELSE:
4039 unsigned HOST_WIDE_INT nonzero_true =
4040 cached_nonzero_bits (XEXP (x, 1), mode,
4041 known_x, known_mode, known_ret);
4043 /* Don't call nonzero_bits for the second time if it cannot change
4044 anything. */
4045 if ((nonzero & nonzero_true) != nonzero)
4046 nonzero &= nonzero_true
4047 | cached_nonzero_bits (XEXP (x, 2), mode,
4048 known_x, known_mode, known_ret);
4050 break;
4052 default:
4053 break;
4056 return nonzero;
4059 /* See the macro definition above. */
4060 #undef cached_num_sign_bit_copies
4063 /* The function cached_num_sign_bit_copies is a wrapper around
4064 num_sign_bit_copies1. It avoids exponential behavior in
4065 num_sign_bit_copies1 when X has identical subexpressions on the
4066 first or the second level. */
4068 static unsigned int
4069 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
4070 enum machine_mode known_mode,
4071 unsigned int known_ret)
4073 if (x == known_x && mode == known_mode)
4074 return known_ret;
4076 /* Try to find identical subexpressions. If found call
4077 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4078 the precomputed value for the subexpression as KNOWN_RET. */
4080 if (ARITHMETIC_P (x))
4082 rtx x0 = XEXP (x, 0);
4083 rtx x1 = XEXP (x, 1);
4085 /* Check the first level. */
4086 if (x0 == x1)
4087 return
4088 num_sign_bit_copies1 (x, mode, x0, mode,
4089 cached_num_sign_bit_copies (x0, mode, known_x,
4090 known_mode,
4091 known_ret));
4093 /* Check the second level. */
4094 if (ARITHMETIC_P (x0)
4095 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4096 return
4097 num_sign_bit_copies1 (x, mode, x1, mode,
4098 cached_num_sign_bit_copies (x1, mode, known_x,
4099 known_mode,
4100 known_ret));
4102 if (ARITHMETIC_P (x1)
4103 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4104 return
4105 num_sign_bit_copies1 (x, mode, x0, mode,
4106 cached_num_sign_bit_copies (x0, mode, known_x,
4107 known_mode,
4108 known_ret));
4111 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4114 /* Return the number of bits at the high-order end of X that are known to
4115 be equal to the sign bit. X will be used in mode MODE; if MODE is
4116 VOIDmode, X will be used in its own mode. The returned value will always
4117 be between 1 and the number of bits in MODE. */
4119 static unsigned int
4120 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
4121 enum machine_mode known_mode,
4122 unsigned int known_ret)
4124 enum rtx_code code = GET_CODE (x);
4125 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4126 int num0, num1, result;
4127 unsigned HOST_WIDE_INT nonzero;
4129 /* If we weren't given a mode, use the mode of X. If the mode is still
4130 VOIDmode, we don't know anything. Likewise if one of the modes is
4131 floating-point. */
4133 if (mode == VOIDmode)
4134 mode = GET_MODE (x);
4136 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4137 return 1;
4139 /* For a smaller object, just ignore the high bits. */
4140 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4142 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4143 known_x, known_mode, known_ret);
4144 return MAX (1,
4145 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4148 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4150 #ifndef WORD_REGISTER_OPERATIONS
4151 /* If this machine does not do all register operations on the entire
4152 register and MODE is wider than the mode of X, we can say nothing
4153 at all about the high-order bits. */
4154 return 1;
4155 #else
4156 /* Likewise on machines that do, if the mode of the object is smaller
4157 than a word and loads of that size don't sign extend, we can say
4158 nothing about the high order bits. */
4159 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4160 #ifdef LOAD_EXTEND_OP
4161 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4162 #endif
4164 return 1;
4165 #endif
4168 switch (code)
4170 case REG:
4172 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4173 /* If pointers extend signed and this is a pointer in Pmode, say that
4174 all the bits above ptr_mode are known to be sign bit copies. */
4175 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4176 && REG_POINTER (x))
4177 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4178 #endif
4181 unsigned int copies_for_hook = 1, copies = 1;
4182 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4183 known_mode, known_ret,
4184 &copies_for_hook);
4186 if (new)
4187 copies = cached_num_sign_bit_copies (new, mode, known_x,
4188 known_mode, known_ret);
4190 if (copies > 1 || copies_for_hook > 1)
4191 return MAX (copies, copies_for_hook);
4193 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4195 break;
4197 case MEM:
4198 #ifdef LOAD_EXTEND_OP
4199 /* Some RISC machines sign-extend all loads smaller than a word. */
4200 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4201 return MAX (1, ((int) bitwidth
4202 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4203 #endif
4204 break;
4206 case CONST_INT:
4207 /* If the constant is negative, take its 1's complement and remask.
4208 Then see how many zero bits we have. */
4209 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4210 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4211 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4212 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4214 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
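/* Worked example: in SImode (bitwidth 32), x == (const_int -4)
   gives nonzero == 0xfffffffc; its sign bit is set, so after the
   complement nonzero == 3 and the result is
   32 - floor_log2 (3) - 1 == 30, matching the 30 high-order
   copies of the sign bit in ...11100.  */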
    case SUBREG:
      /* If this is a SUBREG for a promoted object that is sign-extended
         and we are looking at it in a wider mode, at least the high-order
         bits are known to be sign bit copies.  */

      if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                             known_x, known_mode, known_ret);
          return MAX ((int) bitwidth
                      - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
                      num0);
        }

      /* For a smaller object, just ignore the high bits.  */
      if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
        {
          num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
                                             known_x, known_mode, known_ret);
          return MAX (1, (num0
                          - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
                                   - bitwidth)));
        }

#ifdef WORD_REGISTER_OPERATIONS
#ifdef LOAD_EXTEND_OP
      /* For paradoxical SUBREGs on machines where all register operations
         affect the entire register, just look inside.  Note that we are
         passing MODE to the recursive call, so the number of sign bit copies
         will remain relative to that mode, not the inner mode.

         This works only if loads sign extend.  Otherwise, if we get a
         reload for the inner part, it may be loaded from the stack, and
         then we lose all sign bit copies that existed before the store
         to the stack.  */

      if ((GET_MODE_SIZE (GET_MODE (x))
           > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
          && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
          && MEM_P (SUBREG_REG (x)))
        return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
                                           known_x, known_mode, known_ret);
#endif
#endif
      break;

    case SIGN_EXTRACT:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
      break;

    case SIGN_EXTEND:
      return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
              + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                            known_x, known_mode, known_ret));
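      /* E.g. (sign_extend:DI (reg:SI R)) has bitwidth 64 and an inner
         bitsize of 32, so even if R contributes only its own sign bit
         the result has at least 64 - 32 + 1 = 33 sign bit copies.  */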
    case TRUNCATE:
      /* For a smaller object, just ignore the high bits.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
                                         known_x, known_mode, known_ret);
      return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
                                    - bitwidth)));

    case NOT:
      return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);

    case ROTATE:  case ROTATERT:
      /* If we are rotating left by a number of bits less than the number
         of sign bit copies, we can just subtract that amount from the
         number.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) >= 0
          && INTVAL (XEXP (x, 1)) < (int) bitwidth)
        {
          num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                             known_x, known_mode, known_ret);
          return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
                                 : (int) bitwidth - INTVAL (XEXP (x, 1))));
        }
      break;

    case NEG:
      /* In general, this subtracts one sign bit copy.  But if the value
         is known to be positive, the number of sign bit copies is the
         same as that of the input.  Finally, if the input has just one bit
         that might be nonzero, all the bits are copies of the sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return num0 > 1 ? num0 - 1 : 1;

      nonzero = nonzero_bits (XEXP (x, 0), mode);
      if (nonzero == 1)
        return bitwidth;

      if (num0 > 1
          && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
        num0--;

      return num0;
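      /* E.g. if XEXP (x, 0) is known to be 0 or 1 (nonzero == 1), then
         (neg x) is 0 or -1 and every bit copies the sign bit.  Otherwise
         negation can cost one copy: in a 4-bit mode, negating -4 (1100,
         two copies) yields 4 (0100, one copy).  */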
    case IOR:   case AND:   case XOR:
    case SMIN:  case SMAX:  case UMIN:  case UMAX:
      /* Logical operations will preserve the number of sign-bit copies.
         MIN and MAX operations always return one of the operands.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case PLUS:  case MINUS:
      /* For addition and subtraction, we can have a 1-bit carry.  However,
         if we are subtracting 1 from a positive number, there will not
         be such a carry.  Furthermore, if the positive number is known to
         be 0 or 1, we know the result is either -1 or 0.  */

      if (code == PLUS && XEXP (x, 1) == constm1_rtx
          && bitwidth <= HOST_BITS_PER_WIDE_INT)
        {
          nonzero = nonzero_bits (XEXP (x, 0), mode);
          if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
            return (nonzero == 1 || nonzero == 0 ? bitwidth
                    : bitwidth - floor_log2 (nonzero) - 1);
        }

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      result = MAX (1, MIN (num0, num1) - 1);

#ifdef POINTERS_EXTEND_UNSIGNED
      /* If pointers extend signed and this is an addition or subtraction
         to a pointer in Pmode, all the bits above ptr_mode are known to be
         sign bit copies.  */
      if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
          && (code == PLUS || code == MINUS)
          && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
        result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
                             - GET_MODE_BITSIZE (ptr_mode) + 1),
                      result);
#endif
      return result;
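      /* The (plus X -1) special case above: if X is known to be 0 or 1,
         X - 1 is -1 or 0 and all bitwidth bits are sign bit copies; if X
         is merely known nonnegative with nonzero == 0xff, X - 1 still
         fits in nine bits, leaving bitwidth - 8 copies.  */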
    case MULT:
      /* The number of bits of the product is the sum of the number of
         bits of both terms.  However, unless one of the terms is known
         to be positive, we must allow for an additional bit since negating
         a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (((nonzero_bits (XEXP (x, 0), mode)
                    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
                  && ((nonzero_bits (XEXP (x, 1), mode)
                       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
        result--;

      return MAX (1, result);
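      /* E.g. two SImode terms with 23 sign bit copies each have at most
         nine significant (non-sign) bits, so result = 32 - 9 - 9 = 14;
         one further copy is conceded (13) when both terms might be
         negative.  */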
    case UDIV:
      /* The result must be <= the first operand.  If the first operand
         has the high bit set, we know nothing about the number of sign
         bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
                & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        return 1;
      else
        return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  */
      return cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
         the case where the divisor is negative, in which case we have
         to add 1.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case MOD:
      /* The magnitude of the result is bounded by the second operand, as
         for UMOD, but allow one fewer copy when the divisor may be
         negative, as for DIV.  */
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
         sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;
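      /* E.g. (ashiftrt:SI X (const_int 8)) replicates the sign bit into
         the top eight positions, so the result has at least num0 + 8
         copies, capped at the 32-bit width.  */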
    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) < 0
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
        return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* A comparison yields STORE_FLAG_VALUE or zero.  If that value is
         negative, take its 1's complement and remask.  Then see how many
         zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;
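  /* Worked example of this fallback: if nonzero_bits reports that only
     the low eight bits of X may be set (nonzero == 0xff in SImode), the
     top 24 bits are zero and hence copies of the (clear) sign bit, and
     we return 32 - floor_log2 (0xff) - 1 == 24.  */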
  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}
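/* Typical use (this is how the if-conversion pass weighs candidate
   insns) is to compare an insn's pattern against a cost budget:

     if (insn_rtx_cost (PATTERN (insn)) > COSTS_N_INSNS (max_cost))
       reject ();

   MAX_COST and reject are stand-ins for whatever limit and bail-out
   the caller uses.  */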
/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands; (cc0) will have been replaced.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to canonicalizing
   it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
   and at INSN.  */
rtx
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
                        rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx prev = insn;
  rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  enum machine_mode mode;

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.c does.  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
         && op1 == CONST0_RTX (GET_MODE (op0))
         && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

#ifdef HAVE_cc0
      /* If comparison with cc0, import actual comparison from compare
         insn.  */
      if (op0 == cc0_rtx)
        {
          if ((prev = prev_nonnote_insn (prev)) == 0
              || !NONJUMP_INSN_P (prev)
              || (set = single_set (prev)) == 0
              || SET_DEST (set) != cc0_rtx)
            return 0;

          op0 = SET_SRC (set);
          op1 = CONST0_RTX (GET_MODE (op0));
          if (earliest)
            *earliest = prev;
        }
#endif

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
        {
          op1 = XEXP (op0, 1);
          op0 = XEXP (op0, 0);
          continue;
        }
      else if (!REG_P (op0))
        break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
         stop if it isn't a single set or if it has a REG_INC note because
         we don't want to bother dealing with it.  */

      if ((prev = prev_nonnote_insn (prev)) == 0
          || !NONJUMP_INSN_P (prev)
          || FIND_REG_INC_NOTE (prev, NULL_RTX))
        break;

      set = set_of (op0, prev);

      if (set
          && (GET_CODE (set) != SET
              || !rtx_equal_p (SET_DEST (set), op0)))
        break;

      /* If this is setting OP0, get what it sets it to if it looks
         relevant.  */
      if (set)
        {
          enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* ??? We may not combine comparisons done in a CCmode with
             comparisons not done in a CCmode.  This is to aid targets
             like Alpha that have an IEEE compliant EQ instruction, and
             a non-IEEE compliant BEQ instruction.  The use of CCmode is
             actually artificial, simply to prevent the combination, but
             should not affect other platforms.

             However, we must allow VOIDmode comparisons to match either
             CCmode or non-CCmode comparison, because some ports have
             modeless comparisons inside branch patterns.

             ??? This mode check should perhaps look more like the mode check
             in simplify_comparison in combine.  */

          if ((GET_CODE (SET_SRC (set)) == COMPARE
               || (((code == NE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                    && COMPARISON_P (SET_SRC (set))))
              && (((GET_MODE_CLASS (mode) == MODE_CC)
                   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                  || mode == VOIDmode || inner_mode == VOIDmode))
            x = SET_SRC (set);
          else if (((code == EQ
                     || (code == GE
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == GE
                         && GET_MODE_CLASS (inner_mode) == MODE_FLOAT
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     ))
                   && COMPARISON_P (SET_SRC (set))
                   && (((GET_MODE_CLASS (mode) == MODE_CC)
                        == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                       || mode == VOIDmode || inner_mode == VOIDmode))
            {
              reverse_code = 1;
              x = SET_SRC (set);
            }
          else
            break;
        }

      else if (reg_set_p (op0, prev))
        /* If this sets OP0, but not directly, we have to give up.  */
        break;

      if (x)
        {
          /* If the caller is expecting the condition to be valid at INSN,
             make sure X doesn't change before INSN.  */
          if (valid_at_insn_p)
            if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
              break;
          if (COMPARISON_P (x))
            code = GET_CODE (x);
          if (reverse_code)
            {
              code = reversed_comparison_code (x, prev);
              if (code == UNKNOWN)
                return 0;
              reverse_code = 0;
            }

          op0 = XEXP (x, 0), op1 = XEXP (x, 1);
          if (earliest)
            *earliest = prev;
        }
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && GET_CODE (op1) == CONST_INT
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
          break;

        default:
          break;
        }
    }

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
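/* As an illustration of rule (4) above, a caller passing the condition
   (le (reg:SI 60) (const_int 4)) gets back (lt (reg:SI 60) (const_int 5));
   register number 60 is of course just an arbitrary example.  */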
/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at INSN.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare to a CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}
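/* A caller might recover the controlling condition of a conditional
   jump with, e.g.

     rtx earliest;
     rtx cond = get_condition (jump, &earliest, 0, 1);

   which yields 0 unless the condition can be parsed and is still valid
   at JUMP.  */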
/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }
}
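/* For instance, GET_RTX_FORMAT (PLUS) is "ee", so its entry becomes 0,
   while CONST_INT's format "w" has no rtx operands at all and yields -1;
   for_each_rtx consults this table to skip non-rtx operands quickly.  */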