/* Analyze RTL for GNU compiler.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software
   Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301, USA.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "toplev.h"
#include "rtl.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "real.h"
#include "regs.h"
#include "function.h"
/* Forward declarations */
static int global_reg_mentioned_p_1 (rtx *, void *);
static void set_of_1 (rtx, rtx, void *);
static bool covers_regno_p (rtx, unsigned int);
static bool covers_regno_no_parallel_p (rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (rtx);
static void parms_set (rtx, rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
                                                   rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
                                             enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
                                          enum machine_mode, unsigned int);

/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Bit flags that specify the machine subtype we are compiling for.
   Bits are tested using macros TARGET_... defined in the tm.h file
   and set by `-m...' switches.  Must be defined in rtlanal.c.  */

int target_flags;
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (rtx x)
{
  RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
#ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (x == pic_offset_table_rtx)
        return 0;
#endif
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

int
rtx_varies_p (rtx x, int for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
#ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && for_alias
#endif
          )
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
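
/* Illustrative contrast between the two predicates above, read off
   their cases: (mem (reg 100)) is both unstable and varying, since a
   pseudo may be reassigned, while (symbol_ref "x") is neither, because
   an object's address is fixed within one execution and comparable
   across executions.  When PIC_OFFSET_TABLE_REG_CALL_CLOBBERED is
   defined, the PIC register is treated as constant only for alias
   analysis (FOR_ALIAS nonzero).  */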
/* Return nonzero if the use of X as an address in a MEM can cause a trap.
   MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
   whether nonzero is returned for unaligned memory accesses on strict
   alignment machines.  */

static int
rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is an address that can't trap plus a constant integer,
           with the proper remainder modulo the mode size if we are
           considering unaligned memory references.  */
      if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
          && GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          HOST_WIDE_INT offset;

          if (!STRICT_ALIGNMENT
              || !unaligned_mems
              || GET_MODE_SIZE (mode) == 0)
            return 0;

          offset = INTVAL (XEXP (x, 1));

#ifdef SPARC_STACK_BOUNDARY_HACK
          /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
             the real alignment of %sp.  However, when it does this, the
             alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
          if (SPARC_STACK_BOUNDARY_HACK
              && (XEXP (x, 0) == stack_pointer_rtx
                  || XEXP (x, 0) == hard_frame_pointer_rtx))
            offset -= STACK_POINTER_OFFSET;
#endif

          return offset % GET_MODE_SIZE (mode) != 0;
        }

      /* - or it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}
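
/* Worked example for the PLUS case above: on a STRICT_ALIGNMENT target
   with UNALIGNED_MEMS set, an SImode access to
   (plus (reg sp) (const_int 6)) gives 6 % GET_MODE_SIZE (SImode)
   == 6 % 4 == 2, so the routine reports a possible trap, whereas an
   offset of 8 gives remainder 0 and the access is considered safe,
   assuming the base register itself is suitably aligned.  */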
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (rtx x)
{
  return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
}
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (rtx x)
{
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          /* Pointers aren't allowed to wrap.  If we've got a register
             that is known to be a pointer, and a positive offset, then
             the composite can't be zero.  */
          if (INTVAL (XEXP (x, 1)) > 0
              && REG_P (XEXP (x, 0))
              && REG_POINTER (XEXP (x, 0)))
            return true;

          return nonzero_address_p (XEXP (x, 0));
        }
      /* Handle PIC references.  */
      else if (XEXP (x, 0) == pic_offset_table_rtx
               && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

int
rtx_addr_varies_p (rtx x, int for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return INTVAL (XEXP (x, 1));
  return 0;
}

/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return XEXP (x, 0);
  return 0;
}
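
/* Illustrative example for the two helpers above: given
   (const (plus (symbol_ref "buf") (const_int 8))), get_integer_term
   returns 8 and get_related_value returns the (symbol_ref "buf")
   subexpression; for (const (minus (symbol_ref "buf") (const_int 8))),
   get_integer_term returns -8.  cse.c uses this pairing to relate
   addresses that differ only by a constant offset.  */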
/* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
   a global register.  */

static int
global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
{
  int regno;
  rtx x = *loc;

  if (! x)
    return 0;

  switch (GET_CODE (x))
    {
    case SUBREG:
      if (REG_P (SUBREG_REG (x)))
        {
          if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
              && global_regs[subreg_regno (x)])
            return 1;
          return 0;
        }
      break;

    case REG:
      regno = REGNO (x);
      if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
        return 1;
      return 0;

    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST:
    case LABEL_REF:
      return 0;

    case CALL:
      /* A non-constant call might use a global register.  */
      return 1;

    default:
      break;
    }

  return 0;
}
/* Returns nonzero if X mentions a global register.  */

int
global_reg_mentioned_p (rtx x)
{
  if (INSN_P (x))
    {
      if (CALL_P (x))
        {
          if (! CONST_OR_PURE_CALL_P (x))
            return 1;
          x = CALL_INSN_FUNCTION_USAGE (x);
          if (x == 0)
            return 0;
        }
      else
        x = PATTERN (x);
    }

  return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
}
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (rtx x, rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (rtx reg, rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    case CONST_INT:
    case CONST_VECTOR:
    case CONST_DOUBLE:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }

  return 0;
}
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (rtx beg, rtx end)
{
  rtx p;

  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}

/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (rtx x, rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}

/* Internals of reg_set_between_p.  */

int
reg_set_p (rtx reg, rtx insn)
{
  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && TEST_HARD_REG_BIT (regs_invalidated_by_call,
                                         REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return 1;

  return set_of (reg, insn) != NULL_RTX;
}
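
/* Illustrative sketch of how these predicates combine: a caller that
   wants to know whether REG is untouched strictly between two insns
   (hypothetical variables `from' and `to') can write

       if (!reg_set_between_p (reg, from, to)
           && !reg_used_between_p (reg, from, to))
         move_is_safe = 1;

   Both routines exclude FROM and TO themselves; test the endpoints
   separately with reg_set_p if they matter.  `move_is_safe' is just a
   name for the sketch.  */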
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (rtx x, rtx start, rtx end)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (rtx x, rtx insn)
{
  enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
/* Helper function for set_of.  */
struct set_of_data
{
  rtx found;
  rtx pat;
};

static void
set_of_1 (rtx x, rtx pat, void *data1)
{
  struct set_of_data *data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */

rtx
set_of (rtx pat, rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose outputs
   will not be used, which we ignore.  */

rtx
single_set_2 (rtx insn, rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead as single set insns.  In common case
                 only single set is present in the pattern so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach set first time, we just expect this is
                 the single set we are looking for and only when more
                 sets are found in the insn, we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
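
/* Illustrative use: callers normally reach this function through the
   single_set wrapper in rtl.h, which handles the plain-SET pattern
   inline and falls back to single_set_2 for PARALLELs:

       rtx set = single_set (insn);
       if (set)
         note_equivalence (SET_DEST (set), SET_SRC (set));

   `note_equivalence' is a made-up name standing for whatever the
   caller does with the unique assignment.  */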
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
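
/* Illustrative examples for set_noop_p: (set (reg:SI 100) (reg:SI 100))
   is a no-op, as is (set (pc) (pc)); (set (reg:SI 100) (reg:SI 101))
   is not.  A MEM-to-MEM copy counts only when the two MEMs are
   rtx_equal_p and free of side effects such as auto-increment.  */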
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* For now treat an insn with a REG_RETVAL note as a special insn
     which should not be considered a no-op.  */
  if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
    return 0;

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return (endregno > x_regno
              && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
                                    ? hard_regno_nregs[x_regno][GET_MODE (x)]
                                    : 1));

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }

  return 0;
}
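
/* Worked example: on a target where (reg:DI 0) occupies hard registers
   0 and 1, refers_to_regno_p (1, 2, x, NULL) returns 1 for
   x == (reg:DI 0), because the register's range [0, 2) overlaps the
   queried range [1, 2), even though regno 1 is only implicitly
   mentioned.  */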
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (rtx x, rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN; CONSTANT_P (x) is profitably
     combined into the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      goto do_reg;

    case REG:
      regno = REGNO (x);
    do_reg:
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).
   FUN receives two arguments:
     the REG, MEM, CC0 or PC being stored in or clobbered,
     the SET or CLOBBER rtx that does the store.

  If the item being stored in or clobbered is a SUBREG of a hard register,
  the SUBREG will be passed.  */

void
note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
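
/* Illustrative use of note_stores (set_of_1 above is a real in-file
   example): a pass that counts register stores in an insn might supply
   a callback like this; `struct store_count' and `count_reg_stores'
   are invented for the sketch:

       static void
       count_reg_stores (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
       {
         struct store_count *sc = (struct store_count *) data;
         if (REG_P (dest))
           sc->n++;
       }

       ...
         note_stores (PATTERN (insn), count_reg_stores, &sc);  */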
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed (in flow.c).

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (rtx insn, rtx x)
{
  unsigned int regno, last_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
                : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);

  for (i = regno; i <= last_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}
/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
              : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}
/* Utility function for dead_or_set_p to check an individual register.  Also
   called from flow.c.  */

int
dead_or_set_regno_p (rtx insn, unsigned int test_regno)
{
  rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  if (GET_CODE (pattern) == COND_EXEC)
    pattern = COND_EXEC_CODE (pattern);

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx link;

  gcc_assert (insn);

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
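
/* Illustrative use: passing a zero DATUM asks for any note of the given
   kind, which is the common idiom:

       rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
       if (note)
         equiv = XEXP (note, 0);

   `equiv' is just a name for the sketch; XEXP (note, 0) is the note's
   datum.  */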
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && ((REGNO (XEXP (link, 0))
             + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
                : hard_regno_nregs[REGNO (XEXP (link, 0))]
                                  [GET_MODE (XEXP (link, 0))]))
            > regno))
      return link;
  return 0;
}
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        if (single_set (insn) == 0)
          return 0;
        return link;
      }
  return NULL;
}
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno
            = regno + hard_regno_nregs[regno][GET_MODE (datum)];
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      unsigned int regnote;
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && (regnote = REGNO (reg)) <= regno
          && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
        return 1;
    }

  return 0;
}
/* Return true if INSN is a call to a pure function.  */

int
pure_call_p (rtx insn)
{
  rtx link;

  if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
    return 0;

  /* Look for the note that differentiates const and pure functions.  */
  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx u, m;

      if (GET_CODE (u = XEXP (link, 0)) == USE
          && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
          && GET_CODE (XEXP (m, 0)) == SCRATCH)
        return 1;
    }

  return 0;
}
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    {
      REG_NOTES (insn) = XEXP (note, 1);
      return;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (XEXP (link, 1) == note)
      {
        XEXP (link, 1) = XEXP (note, 1);
        return;
      }

  gcc_unreachable ();
}
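
/* Illustrative pairing of remove_note with find_reg_note above:

       remove_note (insn, find_reg_note (insn, REG_UNUSED, NULL_RTX));

   This is safe even when no such note exists, since remove_note
   returns immediately for a null NOTE.  */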
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (rtx listp, rtx node)
{
  rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}

/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable changes in machine state, and thus no
   instructions should be moved or combined across them.  This includes
   only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Similar to above, except that it also rejects register pre- and post-
   incrementing.  */

int
side_effects_p (rtx x)
{
  RTX_CODE code;

  code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST_INT:
    case CONST:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case CLOBBER:
      /* Reject CLOBBER with a non-VOID mode.  These are made by combine.c
         when some combination can't be done.  If we see one, don't think
         that we can simplify the expression.  */
      return (GET_MODE (x) != VOIDmode);

    case PRE_INC:
    case PRE_DEC:
    case POST_INC:
    case POST_DEC:
    case PRE_MODIFY:
    case POST_MODIFY:
    case CALL:
    case UNSPEC_VOLATILE:
 /* case TRAP_IF: This isn't clear yet.  */
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (side_effects_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (side_effects_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
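
/* Illustrative summary: the three predicates above form a hierarchy of
   increasing strictness.  volatile_insn_p flags only volatile asms and
   UNSPEC_VOLATILE; volatile_refs_p additionally flags volatile MEMs;
   side_effects_p additionally flags auto-increment addresses, CALLs,
   and mode-carrying CLOBBERs.  For example, (mem/v:SI (reg:SI 100))
   satisfies the latter two but not volatile_insn_p.  */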
/* Return nonzero if evaluating rtx X might cause a trap.  UNALIGNED_MEMS
   controls whether nonzero is returned for unaligned memory accesses on
   strict alignment machines.  */

static int
may_trap_p_1 (rtx x, bool unaligned_mems)
{
  int i;
  enum rtx_code code;
  const char *fmt;

  if (x == 0)
    return 0;
  code = GET_CODE (x);
  switch (code)
    {
      /* Handle these cases quickly.  */
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CONST:
    case PC:
    case CC0:
    case REG:
    case SCRATCH:
      return 0;

    case ASM_INPUT:
    case UNSPEC_VOLATILE:
    case TRAP_IF:
      return 1;

    case ASM_OPERANDS:
      return MEM_VOLATILE_P (x);

      /* Memory ref can trap unless it's a static var or a stack slot.  */
    case MEM:
      if (MEM_NOTRAP_P (x)
          && (!STRICT_ALIGNMENT || !unaligned_mems))
        return 0;
      return
        rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);

      /* Division by a non-constant might trap.  */
    case DIV:
    case MOD:
    case UDIV:
    case UMOD:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
        return flag_trapping_math;
      if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
        return 1;
      break;

    case EXPR_LIST:
      /* An EXPR_LIST is used to represent a function call.  This
         certainly may trap.  */
      return 1;

    case GE:
    case GT:
    case LE:
    case LT:
    case LTGT:
    case COMPARE:
      /* Some floating point comparisons may trap.  */
      if (!flag_trapping_math)
        break;
      /* ??? There is no machine independent way to check for tests that trap
         when COMPARE is used, though many targets do make this distinction.
         For instance, sparc uses CCFPE for compares which generate exceptions
         and CCFP for compares which do not generate exceptions.  */
      if (HONOR_NANS (GET_MODE (x)))
        return 1;
      /* But often the compare has some CC mode, so check operand
         modes as well.  */
      if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
          || HONOR_NANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case EQ:
    case NE:
      if (HONOR_SNANS (GET_MODE (x)))
        return 1;
      /* Often comparison is CC mode, so check operand modes.  */
      if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
          || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
        return 1;
      break;

    case FIX:
      /* Conversion of floating point might trap.  */
      if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
        return 1;
      break;

    case NEG:
    case ABS:
    case SUBREG:
      /* These operations don't trap even with floating point.  */
      break;

    default:
      /* Any floating arithmetic may trap.  */
      if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
          && flag_trapping_math)
        return 1;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        {
          if (may_trap_p_1 (XEXP (x, i), unaligned_mems))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = 0; j < XVECLEN (x, i); j++)
            if (may_trap_p_1 (XVECEXP (x, i, j), unaligned_mems))
              return 1;
        }
    }
  return 0;
}
/* Return nonzero if evaluating rtx X might cause a trap.  */

int
may_trap_p (rtx x)
{
  return may_trap_p_1 (x, false);
}
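
/* Illustrative examples: may_trap_p returns 1 for
   (div:SI (reg:SI 100) (reg:SI 101)), since the divisor is not known
   to be a nonzero constant, and 0 for
   (plus:SI (reg:SI 100) (const_int 1)), since integer addition cannot
   trap.  */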
/* Same as above, but additionally return nonzero if evaluating rtx X might
   cause a fault.  We define a fault for the purpose of this function as an
   erroneous execution condition that cannot be encountered during the normal
   execution of a valid program; the typical example is an unaligned memory
   access on a strict alignment machine.  The compiler guarantees that it
   doesn't generate code that will fault from a valid program, but this
   guarantee doesn't mean anything for individual instructions.  Consider
   the following example:

      struct S { int d; union { char *cp; int *ip; }; };

      int foo(struct S *s)
      {
        if (s->d == 1)
          return *s->ip;
        else
          return *s->cp;
      }

   on a strict alignment machine.  In a valid program, foo will never be
   invoked on a structure for which d is equal to 1 and the underlying
   unique field of the union not aligned on a 4-byte boundary, but the
   expression *s->ip might cause a fault if considered individually.

   At the RTL level, potentially problematic expressions will almost always
   verify may_trap_p; for example, the above dereference can be emitted as
   (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
   However, suppose that foo is inlined in a caller that causes s->cp to
   point to a local character variable and guarantees that s->d is not set
   to 1; foo may have been effectively translated into pseudo-RTL as:

      if ((reg:SI) == 1)
        (set (reg:SI) (mem:SI (%fp - 7)))
      else
        (set (reg:QI) (mem:QI (%fp - 7)))

   Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
   memory reference to a stack slot, but it will certainly cause a fault
   on a strict alignment machine.  */

int
may_trap_or_fault_p (rtx x)
{
  return may_trap_p_1 (x, true);
}
/* Return nonzero if X contains a comparison that is not either EQ or NE,
   i.e., an inequality.  */

int
inequality_comparisons_p (rtx x)
{
  const char *fmt;
  int len, i;
  enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case REG:
    case SCRATCH:
    case PC:
    case CC0:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_VECTOR:
    case CONST:
    case LABEL_REF:
    case SYMBOL_REF:
      return 0;

    case LT:
    case LTU:
    case GT:
    case GTU:
    case LE:
    case LEU:
    case GE:
    case GEU:
      return 1;

    default:
      break;
    }

  len = GET_RTX_LENGTH (code);
  fmt = GET_RTX_FORMAT (code);

  for (i = 0; i < len; i++)
    {
      if (fmt[i] == 'e')
        {
          if (inequality_comparisons_p (XEXP (x, i)))
            return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (inequality_comparisons_p (XVECEXP (x, i, j)))
              return 1;
        }
    }

  return 0;
}
2356 /* Replace any occurrence of FROM in X with TO. The function does
2357 not descend into CONST_DOUBLEs when replacing.
2359 Note that copying is not done so X must not be shared unless all copies
2360 are to be modified. */
2363 replace_rtx (rtx x, rtx from, rtx to)
2365 int i, j;
2366 const char *fmt;
2368 /* The following prevents infinite loops when we replace a MEM inside
2369 a CONST_DOUBLE with the same CONST_DOUBLE. */
2370 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2371 return x;
2373 if (x == from)
2374 return to;
2376 /* Allow this function to make replacements in EXPR_LISTs. */
2377 if (x == 0)
2378 return 0;
2380 if (GET_CODE (x) == SUBREG)
2382 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2384 if (GET_CODE (new) == CONST_INT)
2386 x = simplify_subreg (GET_MODE (x), new,
2387 GET_MODE (SUBREG_REG (x)),
2388 SUBREG_BYTE (x));
2389 gcc_assert (x);
2391 else
2392 SUBREG_REG (x) = new;
2394 return x;
2396 else if (GET_CODE (x) == ZERO_EXTEND)
2398 rtx new = replace_rtx (XEXP (x, 0), from, to);
2400 if (GET_CODE (new) == CONST_INT)
2402 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2403 new, GET_MODE (XEXP (x, 0)));
2404 gcc_assert (x);
2406 else
2407 XEXP (x, 0) = new;
2409 return x;
2412 fmt = GET_RTX_FORMAT (GET_CODE (x));
2413 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2415 if (fmt[i] == 'e')
2416 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2417 else if (fmt[i] == 'E')
2418 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2419 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2422 return x;
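/* Usage sketch (illustrative only): rewriting every use of one rtx by
   another inside an insn pattern.  Because replace_rtx does no
   copying, a shared pattern must be copied first:

     PATTERN (insn) = replace_rtx (copy_rtx (PATTERN (insn)),
                                   old_reg, new_reg);

   OLD_REG and NEW_REG are hypothetical variables of the caller.  */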
2425 /* Throughout the rtx X, replace many registers according to REG_MAP.
2426 Return the replacement for X (which may be X with altered contents).
2427 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2428 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2430 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2431 should not be mapped to pseudos or vice versa since validate_change
2432 is not called.
2434 If REPLACE_DEST is 1, replacements are also done in destinations;
2435 otherwise, only sources are replaced. */
2438 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2440 enum rtx_code code;
2441 int i;
2442 const char *fmt;
2444 if (x == 0)
2445 return x;
2447 code = GET_CODE (x);
2448 switch (code)
2450 case SCRATCH:
2451 case PC:
2452 case CC0:
2453 case CONST_INT:
2454 case CONST_DOUBLE:
2455 case CONST_VECTOR:
2456 case CONST:
2457 case SYMBOL_REF:
2458 case LABEL_REF:
2459 return x;
2461 case REG:
2462 /* Verify that the register has an entry before trying to access it. */
2463 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2465 /* SUBREGs can't be shared. Always return a copy to ensure that if
2466 this replacement occurs more than once then each instance will
2467 get a distinct rtx. */
2468 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2469 return copy_rtx (reg_map[REGNO (x)]);
2470 return reg_map[REGNO (x)];
2472 return x;
2474 case SUBREG:
2475 /* Prevent making nested SUBREGs. */
2476 if (REG_P (SUBREG_REG (x)) && REGNO (SUBREG_REG (x)) < nregs
2477 && reg_map[REGNO (SUBREG_REG (x))] != 0
2478 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2480 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2481 return simplify_gen_subreg (GET_MODE (x), map_val,
2482 GET_MODE (SUBREG_REG (x)),
2483 SUBREG_BYTE (x));
2485 break;
2487 case SET:
2488 if (replace_dest)
2489 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2491 else if (MEM_P (SET_DEST (x))
2492 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2493 /* Even if we are not to replace destinations, replace a register if
2494 it is contained in the destination (i.e. the destination is memory
2495 or a STRICT_LOW_PART). */
2496 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2497 reg_map, nregs, 0);
2498 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2499 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2500 break;
2502 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2503 return x;
2505 default:
2506 break;
2509 fmt = GET_RTX_FORMAT (code);
2510 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2512 if (fmt[i] == 'e')
2513 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2514 else if (fmt[i] == 'E')
2516 int j;
2517 for (j = 0; j < XVECLEN (x, i); j++)
2518 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2519 nregs, replace_dest);
2522 return x;
2525 /* Replace occurrences of the old label in *X with the new one.
2526 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2529 replace_label (rtx *x, void *data)
2531 rtx l = *x;
2532 rtx old_label = ((replace_label_data *) data)->r1;
2533 rtx new_label = ((replace_label_data *) data)->r2;
2534 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2536 if (l == NULL_RTX)
2537 return 0;
2539 if (GET_CODE (l) == SYMBOL_REF
2540 && CONSTANT_POOL_ADDRESS_P (l))
2542 rtx c = get_pool_constant (l);
2543 if (rtx_referenced_p (old_label, c))
2545 rtx new_c, new_l;
2546 replace_label_data *d = (replace_label_data *) data;
2548 /* Create a copy of constant C; replace the label inside
2549 but do not update LABEL_NUSES because uses in constant pool
2550 are not counted. */
2551 new_c = copy_rtx (c);
2552 d->update_label_nuses = false;
2553 for_each_rtx (&new_c, replace_label, data);
2554 d->update_label_nuses = update_label_nuses;
2556 /* Add the new constant NEW_C to constant pool and replace
2557 the old reference to constant by new reference. */
2558 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2559 *x = replace_rtx (l, l, new_l);
2561 return 0;
2564 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2565 field. This is not handled by for_each_rtx because it doesn't
2566 handle unprinted ('0') fields. */
2567 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2568 JUMP_LABEL (l) = new_label;
2570 if ((GET_CODE (l) == LABEL_REF
2571 || GET_CODE (l) == INSN_LIST)
2572 && XEXP (l, 0) == old_label)
2574 XEXP (l, 0) = new_label;
2575 if (update_label_nuses)
2577 ++LABEL_NUSES (new_label);
2578 --LABEL_NUSES (old_label);
2580 return 0;
2583 return 0;
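/* Usage sketch (illustrative, variable names hypothetical): callers
   drive replace_label through for_each_rtx, e.g. when redirecting a
   jump from OLD_LABEL to NEW_LABEL while keeping LABEL_NUSES exact:

     replace_label_data data;
     data.r1 = old_label;
     data.r2 = new_label;
     data.update_label_nuses = true;
     for_each_rtx (&insn, replace_label, &data);  */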
2586 /* Return nonzero when *BODY is equal to X or when X is directly
2587 referenced by *BODY, so that FOR_EACH_RTX stops traversing and
2588 returns nonzero too; otherwise FOR_EACH_RTX keeps traversing *BODY. */
2590 static int
2591 rtx_referenced_p_1 (rtx *body, void *x)
2593 rtx y = (rtx) x;
2595 if (*body == NULL_RTX)
2596 return y == NULL_RTX;
2598 /* Return true if a label_ref *BODY refers to label Y. */
2599 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2600 return XEXP (*body, 0) == y;
2602 /* If *BODY is a reference to pool constant traverse the constant. */
2603 if (GET_CODE (*body) == SYMBOL_REF
2604 && CONSTANT_POOL_ADDRESS_P (*body))
2605 return rtx_referenced_p (y, get_pool_constant (*body));
2607 /* By default, compare the RTL expressions. */
2608 return rtx_equal_p (*body, y);
2611 /* Return true if X is referenced in BODY. */
2614 rtx_referenced_p (rtx x, rtx body)
2616 return for_each_rtx (&body, rtx_referenced_p_1, x);
2619 /* If INSN is a tablejump, return true and store the label preceding the jump
2620 table in *LABELP and the jump table itself in *TABLEP. LABELP and TABLEP may be NULL. */
2622 bool
2623 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2625 rtx label, table;
2627 if (JUMP_P (insn)
2628 && (label = JUMP_LABEL (insn)) != NULL_RTX
2629 && (table = next_active_insn (label)) != NULL_RTX
2630 && JUMP_P (table)
2631 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2632 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2634 if (labelp)
2635 *labelp = label;
2636 if (tablep)
2637 *tablep = table;
2638 return true;
2640 return false;
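/* Illustrative use (hypothetical variables): finding the dispatch
   table that INSN jumps through.

     rtx label, table;
     if (tablejump_p (insn, &label, &table))
       {
         rtvec vec = XVEC (PATTERN (table),
                           GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC);
         ... walk the GET_NUM_ELEM (vec) case labels ...
       }

   The XVEC index 1 skips the base label operand of an ADDR_DIFF_VEC;
   compare label_is_jump_target_p below.  */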
2643 /* A subroutine of computed_jump_p. Return 1 if X contains a REG or MEM or
2644 constant that is not in the constant pool and not in the condition
2645 of an IF_THEN_ELSE. */
2647 static int
2648 computed_jump_p_1 (rtx x)
2650 enum rtx_code code = GET_CODE (x);
2651 int i, j;
2652 const char *fmt;
2654 switch (code)
2656 case LABEL_REF:
2657 case PC:
2658 return 0;
2660 case CONST:
2661 case CONST_INT:
2662 case CONST_DOUBLE:
2663 case CONST_VECTOR:
2664 case SYMBOL_REF:
2665 case REG:
2666 return 1;
2668 case MEM:
2669 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2670 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2672 case IF_THEN_ELSE:
2673 return (computed_jump_p_1 (XEXP (x, 1))
2674 || computed_jump_p_1 (XEXP (x, 2)));
2676 default:
2677 break;
2680 fmt = GET_RTX_FORMAT (code);
2681 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2683 if (fmt[i] == 'e'
2684 && computed_jump_p_1 (XEXP (x, i)))
2685 return 1;
2687 else if (fmt[i] == 'E')
2688 for (j = 0; j < XVECLEN (x, i); j++)
2689 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2690 return 1;
2693 return 0;
2696 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2698 Tablejumps and casesi insns are not considered indirect jumps;
2699 we can recognize them by a (use (label_ref)). */
2702 computed_jump_p (rtx insn)
2704 int i;
2705 if (JUMP_P (insn))
2707 rtx pat = PATTERN (insn);
2709 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2710 return 0;
2711 else if (GET_CODE (pat) == PARALLEL)
2713 int len = XVECLEN (pat, 0);
2714 int has_use_labelref = 0;
2716 for (i = len - 1; i >= 0; i--)
2717 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2718 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2719 == LABEL_REF))
2720 has_use_labelref = 1;
2722 if (! has_use_labelref)
2723 for (i = len - 1; i >= 0; i--)
2724 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2725 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2726 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2727 return 1;
2729 else if (GET_CODE (pat) == SET
2730 && SET_DEST (pat) == pc_rtx
2731 && computed_jump_p_1 (SET_SRC (pat)))
2732 return 1;
2734 return 0;
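/* For reference (an illustrative sketch; exact patterns are
   target-specific): an indirect jump typically looks like

     (jump_insn (set (pc) (reg:SI 3)))

   and is accepted here, whereas a tablejump such as

     (jump_insn (parallel [(set (pc) (mem (plus (reg) (label_ref L))))
                           (use (label_ref L))]))

   is rejected because of the (use (label_ref ...)).  */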
2737 /* Optimized inner loop of for_each_rtx, trying to avoid useless recursive
2738 calls. Processes the sub-expressions of EXP, starting at operand N, and passes them to F. */
2739 static int
2740 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2742 int result, i, j;
2743 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2744 rtx *x;
2746 for (; format[n] != '\0'; n++)
2748 switch (format[n])
2750 case 'e':
2751 /* Call F on X. */
2752 x = &XEXP (exp, n);
2753 result = (*f) (x, data);
2754 if (result == -1)
2755 /* Do not traverse sub-expressions. */
2756 continue;
2757 else if (result != 0)
2758 /* Stop the traversal. */
2759 return result;
2761 if (*x == NULL_RTX)
2762 /* There are no sub-expressions. */
2763 continue;
2765 i = non_rtx_starting_operands[GET_CODE (*x)];
2766 if (i >= 0)
2768 result = for_each_rtx_1 (*x, i, f, data);
2769 if (result != 0)
2770 return result;
2772 break;
2774 case 'V':
2775 case 'E':
2776 if (XVEC (exp, n) == 0)
2777 continue;
2778 for (j = 0; j < XVECLEN (exp, n); ++j)
2780 /* Call F on X. */
2781 x = &XVECEXP (exp, n, j);
2782 result = (*f) (x, data);
2783 if (result == -1)
2784 /* Do not traverse sub-expressions. */
2785 continue;
2786 else if (result != 0)
2787 /* Stop the traversal. */
2788 return result;
2790 if (*x == NULL_RTX)
2791 /* There are no sub-expressions. */
2792 continue;
2794 i = non_rtx_starting_operands[GET_CODE (*x)];
2795 if (i >= 0)
2797 result = for_each_rtx_1 (*x, i, f, data);
2798 if (result != 0)
2799 return result;
2802 break;
2804 default:
2805 /* Nothing to do. */
2806 break;
2810 return 0;
2813 /* Traverse X via depth-first search, calling F for each
2814 sub-expression (including X itself). F is also passed the DATA.
2815 If F returns -1, do not traverse sub-expressions, but continue
2816 traversing the rest of the tree. If F ever returns any other
2817 nonzero value, stop the traversal, and return the value returned
2818 by F. Otherwise, return 0. This function does not traverse inside
2819 tree structure that contains RTX_EXPRs, or into sub-expressions
2820 whose format code is `0' since it is not known whether or not those
2821 codes are actually RTL.
2823 This routine is very general, and could (should?) be used to
2824 implement many of the other routines in this file. */
2827 for_each_rtx (rtx *x, rtx_function f, void *data)
2829 int result;
2830 int i;
2832 /* Call F on X. */
2833 result = (*f) (x, data);
2834 if (result == -1)
2835 /* Do not traverse sub-expressions. */
2836 return 0;
2837 else if (result != 0)
2838 /* Stop the traversal. */
2839 return result;
2841 if (*x == NULL_RTX)
2842 /* There are no sub-expressions. */
2843 return 0;
2845 i = non_rtx_starting_operands[GET_CODE (*x)];
2846 if (i < 0)
2847 return 0;
2849 return for_each_rtx_1 (*x, i, f, data);
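/* Usage sketch (illustrative only; COUNT_MEMS_1 and COUNT are
   hypothetical names): a callback that counts MEMs and uses the -1
   protocol to prune the walk so MEM addresses are not entered.

     static int
     count_mems_1 (rtx *x, void *data)
     {
       if (MEM_P (*x))
         {
           ++*(int *) data;
           return -1;
         }
       return 0;
     }

     int count = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &count);

   Returning -1 after a MEM prunes its address sub-expressions;
   returning 0 continues the depth-first traversal.  */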
2853 /* Searches X for any reference to REGNO, returning the rtx of the
2854 reference found if any. Otherwise, returns NULL_RTX. */
2857 regno_use_in (unsigned int regno, rtx x)
2859 const char *fmt;
2860 int i, j;
2861 rtx tem;
2863 if (REG_P (x) && REGNO (x) == regno)
2864 return x;
2866 fmt = GET_RTX_FORMAT (GET_CODE (x));
2867 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2869 if (fmt[i] == 'e')
2871 if ((tem = regno_use_in (regno, XEXP (x, i))))
2872 return tem;
2874 else if (fmt[i] == 'E')
2875 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2876 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2877 return tem;
2880 return NULL_RTX;
2883 /* Return a value indicating whether OP, an operand of a commutative
2884 operation, is preferred as the first or second operand. The higher
2885 the value, the stronger the preference for being the first operand.
2886 We use negative values to indicate a preference for the first operand
2887 and positive values for the second operand. */
2890 commutative_operand_precedence (rtx op)
2892 enum rtx_code code = GET_CODE (op);
2894 /* Constants always come second. Prefer "nice" constants. */
2895 if (code == CONST_INT)
2896 return -7;
2897 if (code == CONST_DOUBLE)
2898 return -6;
2899 op = avoid_constant_pool_reference (op);
2900 code = GET_CODE (op);
2902 switch (GET_RTX_CLASS (code))
2904 case RTX_CONST_OBJ:
2905 if (code == CONST_INT)
2906 return -5;
2907 if (code == CONST_DOUBLE)
2908 return -4;
2909 return -3;
2911 case RTX_EXTRA:
2912 /* SUBREGs of objects should come second. */
2913 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2914 return -2;
2916 if (!CONSTANT_P (op))
2917 return 0;
2918 else
2919 /* As for RTX_CONST_OBJ. */
2920 return -3;
2922 case RTX_OBJ:
2923 /* Complex expressions should come first, so decrease the priority
2924 of objects. */
2925 return -1;
2927 case RTX_COMM_ARITH:
2928 /* Prefer operands that are themselves commutative to be first.
2929 This helps to make things linear. In particular,
2930 (and (and (reg) (reg)) (not (reg))) is canonical. */
2931 return 4;
2933 case RTX_BIN_ARITH:
2934 /* If only one operand is a binary expression, it will be the first
2935 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2936 is canonical, although it will usually be further simplified. */
2937 return 2;
2939 case RTX_UNARY:
2940 /* Then prefer NEG and NOT. */
2941 if (code == NEG || code == NOT)
2942 return 1;
2944 default:
2945 return 0;
2949 /* Return 1 iff it is necessary to swap the operands of a commutative
2950 operation in order to canonicalize the expression. */
2953 swap_commutative_operands_p (rtx x, rtx y)
2955 return (commutative_operand_precedence (x)
2956 < commutative_operand_precedence (y));
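/* Usage sketch (illustrative): canonicalizing a commutative operation
   before constructing it.

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   After this, e.g. (plus (const_int 4) (reg)) becomes
   (plus (reg) (const_int 4)), since constants score lowest.  */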
2959 /* Return 1 if X is an autoincrement side effect and the register is
2960 not the stack pointer. */
2962 auto_inc_p (rtx x)
2964 switch (GET_CODE (x))
2966 case PRE_INC:
2967 case POST_INC:
2968 case PRE_DEC:
2969 case POST_DEC:
2970 case PRE_MODIFY:
2971 case POST_MODIFY:
2972 /* There are no REG_INC notes for SP. */
2973 if (XEXP (x, 0) != stack_pointer_rtx)
2974 return 1;
2975 default:
2976 break;
2978 return 0;
2981 /* Return 1 if the sequence of instructions beginning with FROM and up
2982 to and including TO is safe to move. If NEW_TO is non-NULL, and
2983 the sequence is not already safe to move, but can be easily
2984 extended to a sequence which is safe, then NEW_TO will point to the
2985 end of the extended sequence.
2987 For now, this function only checks that the region contains whole
2988 exception regions, but it could be extended to check additional
2989 conditions as well. */
2992 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
2994 int eh_region_count = 0;
2995 int past_to_p = 0;
2996 rtx r = from;
2998 /* By default, assume the end of the region will be what was
2999 suggested. */
3000 if (new_to)
3001 *new_to = to;
3003 while (r)
3005 if (NOTE_P (r))
3007 switch (NOTE_LINE_NUMBER (r))
3009 case NOTE_INSN_EH_REGION_BEG:
3010 ++eh_region_count;
3011 break;
3013 case NOTE_INSN_EH_REGION_END:
3014 if (eh_region_count == 0)
3015 /* This sequence of instructions contains the end of
3016 an exception region, but not the beginning. Moving
3017 it will cause chaos. */
3018 return 0;
3020 --eh_region_count;
3021 break;
3023 default:
3024 break;
3027 else if (past_to_p)
3028 /* If we've passed TO, and we see a non-note instruction, we
3029 can't extend the sequence to a movable sequence. */
3030 return 0;
3032 if (r == to)
3034 if (!new_to)
3035 /* It's OK to move the sequence if there were matched sets of
3036 exception region notes. */
3037 return eh_region_count == 0;
3039 past_to_p = 1;
3042 /* It's OK to move the sequence if there were matched sets of
3043 exception region notes. */
3044 if (past_to_p && eh_region_count == 0)
3046 *new_to = r;
3047 return 1;
3050 /* Go to the next instruction. */
3051 r = NEXT_INSN (r);
3054 return 0;
3057 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3059 loc_mentioned_in_p (rtx *loc, rtx in)
3061 enum rtx_code code = GET_CODE (in);
3062 const char *fmt = GET_RTX_FORMAT (code);
3063 int i, j;
3065 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3067 if (loc == &in->u.fld[i].rt_rtx)
3068 return 1;
3069 if (fmt[i] == 'e')
3071 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3072 return 1;
3074 else if (fmt[i] == 'E')
3075 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3076 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3077 return 1;
3079 return 0;
3082 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3083 and SUBREG_BYTE, return the bit offset where the subreg begins
3084 (counting from the least significant bit of the operand). */
3086 unsigned int
3087 subreg_lsb_1 (enum machine_mode outer_mode,
3088 enum machine_mode inner_mode,
3089 unsigned int subreg_byte)
3091 unsigned int bitpos;
3092 unsigned int byte;
3093 unsigned int word;
3095 /* A paradoxical subreg begins at bit position 0. */
3096 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3097 return 0;
3099 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3100 /* If the subreg crosses a word boundary, ensure that
3101 it also begins and ends on a word boundary. */
3102 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3103 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3104 && (subreg_byte % UNITS_PER_WORD
3105 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3107 if (WORDS_BIG_ENDIAN)
3108 word = (GET_MODE_SIZE (inner_mode)
3109 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3110 else
3111 word = subreg_byte / UNITS_PER_WORD;
3112 bitpos = word * BITS_PER_WORD;
3114 if (BYTES_BIG_ENDIAN)
3115 byte = (GET_MODE_SIZE (inner_mode)
3116 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3117 else
3118 byte = subreg_byte % UNITS_PER_WORD;
3119 bitpos += byte * BITS_PER_UNIT;
3121 return bitpos;
3124 /* Given a subreg X, return the bit offset where the subreg begins
3125 (counting from the least significant bit of the reg). */
3127 unsigned int
3128 subreg_lsb (rtx x)
3130 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3131 SUBREG_BYTE (x));
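/* Worked example (illustrative): on a little-endian target with
   32-bit words, (subreg:SI (reg:DI) 4) yields word = 4 / 4 = 1 and
   byte = 4 % 4 = 0, hence bit offset 32: the subreg is the high half
   of the DImode value.  On a fully big-endian target the same
   SUBREG_BYTE selects bit offset 0 instead.  */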
3134 /* This function returns the regno offset of a subreg expression.
3135 xregno - A regno of an inner hard subreg_reg (or what will become one).
3136 xmode - The mode of xregno.
3137 offset - The byte offset.
3138 ymode - The mode of a top level SUBREG (or what may become one).
3139 RETURN - The regno offset which would be used. */
3140 unsigned int
3141 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3142 unsigned int offset, enum machine_mode ymode)
3144 int nregs_xmode, nregs_ymode, nregs_xmode_unit_int;
3145 int mode_multiple, nregs_multiple;
3146 int y_offset;
3147 enum machine_mode xmode_unit, xmode_unit_int;
3149 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3151 if (GET_MODE_INNER (xmode) == VOIDmode)
3152 xmode_unit = xmode;
3153 else
3154 xmode_unit = GET_MODE_INNER (xmode);
3156 if (FLOAT_MODE_P (xmode_unit))
3158 xmode_unit_int = int_mode_for_mode (xmode_unit);
3159 if (xmode_unit_int == BLKmode)
3160 /* It's probably bad to be here; a port should have an integer mode
3161 that's the same size as anything of which it takes a SUBREG. */
3162 xmode_unit_int = xmode_unit;
3164 else
3165 xmode_unit_int = xmode_unit;
3167 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
3169 /* Adjust nregs_xmode to allow for 'holes'. */
3170 if (nregs_xmode_unit_int != hard_regno_nregs[xregno][xmode_unit])
3171 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
3172 else
3173 nregs_xmode = hard_regno_nregs[xregno][xmode];
3175 nregs_ymode = hard_regno_nregs[xregno][ymode];
3177 /* If this is a big endian paradoxical subreg, which uses more actual
3178 hard registers than the original register, we must return a negative
3179 offset so that we find the proper highpart of the register. */
3180 if (offset == 0
3181 && nregs_ymode > nregs_xmode
3182 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3183 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3184 return nregs_xmode - nregs_ymode;
3186 if (offset == 0 || nregs_xmode == nregs_ymode)
3187 return 0;
3189 /* Size of ymode must not be greater than the size of xmode. */
3190 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3191 gcc_assert (mode_multiple != 0);
3193 y_offset = offset / GET_MODE_SIZE (ymode);
3194 nregs_multiple = nregs_xmode / nregs_ymode;
3195 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
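/* Worked example (illustrative, assuming 32-bit hard registers and a
   mode without holes): for (subreg:SI (reg:DI k) 4) we get
   nregs_xmode = 2, nregs_ymode = 1, mode_multiple = 2,
   nregs_multiple = 2 and y_offset = 1, so the offset returned is
   (1 / (2 / 2)) * 1 = 1: the subreg lives in hard register k + 1.  */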
3198 /* This function returns true when the offset is representable via
3199 subreg_offset in the given regno.
3200 xregno - A regno of an inner hard subreg_reg (or what will become one).
3201 xmode - The mode of xregno.
3202 offset - The byte offset.
3203 ymode - The mode of a top level SUBREG (or what may become one).
3204 RETURN - Whether the offset is representable. */
3205 bool
3206 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3207 unsigned int offset, enum machine_mode ymode)
3209 int nregs_xmode, nregs_ymode, nregs_xmode_unit, nregs_xmode_unit_int;
3210 int mode_multiple, nregs_multiple;
3211 int y_offset;
3212 enum machine_mode xmode_unit, xmode_unit_int;
3214 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3216 if (GET_MODE_INNER (xmode) == VOIDmode)
3217 xmode_unit = xmode;
3218 else
3219 xmode_unit = GET_MODE_INNER (xmode);
3221 if (FLOAT_MODE_P (xmode_unit))
3223 xmode_unit_int = int_mode_for_mode (xmode_unit);
3224 if (xmode_unit_int == BLKmode)
3225 /* It's probably bad to be here; a port should have an integer mode
3226 that's the same size as anything of which it takes a SUBREG. */
3227 xmode_unit_int = xmode_unit;
3229 else
3230 xmode_unit_int = xmode_unit;
3232 nregs_xmode_unit = hard_regno_nregs[xregno][xmode_unit];
3233 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
3235 /* If there are holes in a non-scalar mode in registers, we expect
3236 that it is made up of its units concatenated together. */
3237 if (nregs_xmode_unit != nregs_xmode_unit_int)
3239 gcc_assert (nregs_xmode_unit * GET_MODE_NUNITS (xmode)
3240 == hard_regno_nregs[xregno][xmode]);
3242 /* You can only ask for a SUBREG of a value with holes in the middle
3243 if you don't cross the holes. (Such a SUBREG should be done by
3244 picking a different register class, or doing it in memory if
3245 necessary.) An example of a value with holes is XCmode on 32-bit
3246 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3247 3 for each part, but in memory it's two 128-bit parts.
3248 Padding is assumed to be at the end (not necessarily the 'high part')
3249 of each unit. */
3250 if (nregs_xmode_unit != nregs_xmode_unit_int
3251 && (offset / GET_MODE_SIZE (xmode_unit_int) + 1
3252 < GET_MODE_NUNITS (xmode))
3253 && (offset / GET_MODE_SIZE (xmode_unit_int)
3254 != ((offset + GET_MODE_SIZE (ymode) - 1)
3255 / GET_MODE_SIZE (xmode_unit_int))))
3256 return false;
3258 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
3260 else
3261 nregs_xmode = hard_regno_nregs[xregno][xmode];
3263 nregs_ymode = hard_regno_nregs[xregno][ymode];
3265 /* Paradoxical subregs are otherwise valid. */
3266 if (offset == 0
3267 && nregs_ymode > nregs_xmode
3268 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3269 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3270 return true;
3272 /* Lowpart subregs are otherwise valid. */
3273 if (offset == subreg_lowpart_offset (ymode, xmode))
3274 return true;
3276 /* This should always pass, otherwise we don't know how to verify
3277 the constraint. These conditions may be relaxed but
3278 subreg_regno_offset would need to be redesigned. */
3279 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3280 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3282 /* The XMODE value can be seen as a vector of NREGS_XMODE
3283 values. The subreg must represent a lowpart of the given field.
3284 Compute what field it is. */
3285 offset -= subreg_lowpart_offset (ymode,
3286 mode_for_size (GET_MODE_BITSIZE (xmode)
3287 / nregs_xmode,
3288 MODE_INT, 0));
3290 /* Size of ymode must not be greater than the size of xmode. */
3291 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3292 gcc_assert (mode_multiple != 0);
3294 y_offset = offset / GET_MODE_SIZE (ymode);
3295 nregs_multiple = nregs_xmode / nregs_ymode;
3297 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3298 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3300 return (!(y_offset % (mode_multiple / nregs_multiple)));
3303 /* Return the final regno that a subreg expression refers to. */
3304 unsigned int
3305 subreg_regno (rtx x)
3307 unsigned int ret;
3308 rtx subreg = SUBREG_REG (x);
3309 int regno = REGNO (subreg);
3311 ret = regno + subreg_regno_offset (regno,
3312 GET_MODE (subreg),
3313 SUBREG_BYTE (x),
3314 GET_MODE (x));
3315 return ret;
3318 struct parms_set_data
3320 int nregs;
3321 HARD_REG_SET regs;
3324 /* Helper function for noticing stores to parameter registers. */
3325 static void
3326 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3328 struct parms_set_data *d = data;
3329 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3330 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3332 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3333 d->nregs--;
3337 /* Look backward for the first parameter to be loaded.
3338 Note that loads of all parameters will not necessarily be
3339 found if CSE has eliminated some of them (e.g., an argument
3340 to the outer function is passed down as a parameter).
3341 Do not skip BOUNDARY. */
3343 find_first_parameter_load (rtx call_insn, rtx boundary)
3345 struct parms_set_data parm;
3346 rtx p, before, first_set;
3348 /* Since different machines initialize their parameter registers
3349 in different orders, assume nothing. Collect the set of all
3350 parameter registers. */
3351 CLEAR_HARD_REG_SET (parm.regs);
3352 parm.nregs = 0;
3353 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3354 if (GET_CODE (XEXP (p, 0)) == USE
3355 && REG_P (XEXP (XEXP (p, 0), 0)))
3357 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3359 /* We only care about registers which can hold function
3360 arguments. */
3361 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3362 continue;
3364 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3365 parm.nregs++;
3367 before = call_insn;
3368 first_set = call_insn;
3370 /* Search backward for the first set of a register in this set. */
3371 while (parm.nregs && before != boundary)
3373 before = PREV_INSN (before);
3375 /* It is possible that some loads got CSEed from one call to
3376 another. Stop in that case. */
3377 if (CALL_P (before))
3378 break;
3380 /* Our caller must either ensure that we will find all sets
3381 (in case the code has not been optimized yet), or guard against
3382 intervening labels by setting BOUNDARY to the preceding
3383 CODE_LABEL. */
3384 if (LABEL_P (before))
3386 gcc_assert (before == boundary);
3387 break;
3390 if (INSN_P (before))
3392 int nregs_old = parm.nregs;
3393 note_stores (PATTERN (before), parms_set, &parm);
3394 /* If we found something that did not set a parameter reg,
3395 we're done. Do not keep going, as that might result
3396 in hoisting an insn before the setting of a pseudo
3397 that is used by the hoisted insn. */
3398 if (nregs_old != parm.nregs)
3399 first_set = before;
3400 else
3401 break;
3404 return first_set;
3407 /* Return true if we should avoid inserting code between INSN and the
3408 preceding call instruction. */
3410 bool
3411 keep_with_call_p (rtx insn)
3413 rtx set;
3415 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3417 if (REG_P (SET_DEST (set))
3418 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3419 && fixed_regs[REGNO (SET_DEST (set))]
3420 && general_operand (SET_SRC (set), VOIDmode))
3421 return true;
3422 if (REG_P (SET_SRC (set))
3423 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3424 && REG_P (SET_DEST (set))
3425 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3426 return true;
3427 /* There may be a stack pop just after the call and before the store
3428 of the return register. Search for the actual store when deciding
3429 if we can break or not. */
3430 if (SET_DEST (set) == stack_pointer_rtx)
3432 rtx i2 = next_nonnote_insn (insn);
3433 if (i2 && keep_with_call_p (i2))
3434 return true;
3437 return false;
3440 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3441 to non-complex jumps. That is, direct unconditional, conditional,
3442 and tablejumps, but not computed jumps or returns. It also does
3443 not apply to the fallthru case of a conditional jump. */
3445 bool
3446 label_is_jump_target_p (rtx label, rtx jump_insn)
3448 rtx tmp = JUMP_LABEL (jump_insn);
3450 if (label == tmp)
3451 return true;
3453 if (tablejump_p (jump_insn, NULL, &tmp))
3455 rtvec vec = XVEC (PATTERN (tmp),
3456 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3457 int i, veclen = GET_NUM_ELEM (vec);
3459 for (i = 0; i < veclen; ++i)
3460 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3461 return true;
3464 return false;
3468 /* Return an estimate of the cost of computing rtx X.
3469 One use is in cse, to decide which expression to keep in the hash table.
3470 Another is in rtl generation, to pick the cheapest way to multiply.
3471 Other uses like the latter are expected in the future. */
3474 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3476 int i, j;
3477 enum rtx_code code;
3478 const char *fmt;
3479 int total;
3481 if (x == 0)
3482 return 0;
3484 /* Compute the default costs of certain things.
3485 Note that targetm.rtx_costs can override the defaults. */
3487 code = GET_CODE (x);
3488 switch (code)
3490 case MULT:
3491 total = COSTS_N_INSNS (5);
3492 break;
3493 case DIV:
3494 case UDIV:
3495 case MOD:
3496 case UMOD:
3497 total = COSTS_N_INSNS (7);
3498 break;
3499 case USE:
3500 /* Used in loop.c and combine.c as a marker. */
3501 total = 0;
3502 break;
3503 default:
3504 total = COSTS_N_INSNS (1);
3507 switch (code)
3509 case REG:
3510 return 0;
3512 case SUBREG:
3513 total = 0;
3514 /* If we can't tie these modes, make this expensive. The larger
3515 the mode, the more expensive it is. */
3516 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3517 return COSTS_N_INSNS (2
3518 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3519 break;
3521 default:
3522 if (targetm.rtx_costs (x, code, outer_code, &total))
3523 return total;
3524 break;
3527 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3528 which is already in total. */
3530 fmt = GET_RTX_FORMAT (code);
3531 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3532 if (fmt[i] == 'e')
3533 total += rtx_cost (XEXP (x, i), code);
3534 else if (fmt[i] == 'E')
3535 for (j = 0; j < XVECLEN (x, i); j++)
3536 total += rtx_cost (XVECEXP (x, i, j), code);
3538 return total;
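/* Worked example (illustrative, assuming targetm.rtx_costs declines
   to override): for (plus:SI (mult:SI (reg) (reg)) (reg)) the sum is
   COSTS_N_INSNS (1) for the PLUS, COSTS_N_INSNS (5) for the MULT and
   0 for each REG, giving a default estimate of COSTS_N_INSNS (6).  */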
3541 /* Return the cost of address expression X.
3542 Expect that X is a properly formed address reference. */
3545 address_cost (rtx x, enum machine_mode mode)
3547 /* We may be asked for the cost of various unusual addresses, such as the
3548 operands of a push instruction. It is not worthwhile to complicate
3549 the target hook for the sake of such cases. */
3551 if (!memory_address_p (mode, x))
3552 return 1000;
3554 return targetm.address_cost (x);
3557 /* If the target doesn't override, compute the cost as with arithmetic. */
3560 default_address_cost (rtx x)
3562 return rtx_cost (x, MEM);
3566 unsigned HOST_WIDE_INT
3567 nonzero_bits (rtx x, enum machine_mode mode)
3569 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3572 unsigned int
3573 num_sign_bit_copies (rtx x, enum machine_mode mode)
3575 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3578 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3579 It avoids exponential behavior in nonzero_bits1 when X has
3580 identical subexpressions on the first or the second level. */
3582 static unsigned HOST_WIDE_INT
3583 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3584 enum machine_mode known_mode,
3585 unsigned HOST_WIDE_INT known_ret)
3587 if (x == known_x && mode == known_mode)
3588 return known_ret;
3590 /* Try to find identical subexpressions. If found call
3591 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3592 precomputed value for the subexpression as KNOWN_RET. */
3594 if (ARITHMETIC_P (x))
3596 rtx x0 = XEXP (x, 0);
3597 rtx x1 = XEXP (x, 1);
3599 /* Check the first level. */
3600 if (x0 == x1)
3601 return nonzero_bits1 (x, mode, x0, mode,
3602 cached_nonzero_bits (x0, mode, known_x,
3603 known_mode, known_ret));
3605 /* Check the second level. */
3606 if (ARITHMETIC_P (x0)
3607 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3608 return nonzero_bits1 (x, mode, x1, mode,
3609 cached_nonzero_bits (x1, mode, known_x,
3610 known_mode, known_ret));
3612 if (ARITHMETIC_P (x1)
3613 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3614 return nonzero_bits1 (x, mode, x0, mode,
3615 cached_nonzero_bits (x0, mode, known_x,
3616 known_mode, known_ret));
3619 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
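/* For instance (illustrative), given (plus (reg A) (reg A)) both
   operands are the same rtx, so the shared operand is evaluated once
   and the PLUS reuses that value via KNOWN_X/KNOWN_RET instead of
   recomputing it, keeping the walk roughly linear in the size of the
   expression.  */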
3622 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3623 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3624 is less useful. We can't allow both, because that results in exponential
3625 run time recursion. There is a nullstone testcase that triggered
3626 this. This macro avoids accidental uses of num_sign_bit_copies. */
3627 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3629 /* Given an expression, X, compute which bits in X can be nonzero.
3630 We don't care about bits outside of those defined in MODE.
3632 For most X this is simply GET_MODE_MASK (MODE), but if X is
3633 an arithmetic operation, we can do better. */
3635 static unsigned HOST_WIDE_INT
3636 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3637 enum machine_mode known_mode,
3638 unsigned HOST_WIDE_INT known_ret)
3640 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3641 unsigned HOST_WIDE_INT inner_nz;
3642 enum rtx_code code;
3643 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3645 /* For floating-point values, assume all bits are needed. */
3646 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3647 return nonzero;
3649 /* If X is wider than MODE, use its mode instead. */
3650 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3652 mode = GET_MODE (x);
3653 nonzero = GET_MODE_MASK (mode);
3654 mode_width = GET_MODE_BITSIZE (mode);
3657 if (mode_width > HOST_BITS_PER_WIDE_INT)
3658 /* Our only callers in this case look for single bit values. So
3659 just return the mode mask. Those tests will then be false. */
3660 return nonzero;
3662 #ifndef WORD_REGISTER_OPERATIONS
3663 /* If MODE is wider than X, but both are a single word for both the host
3664 and target machines, we can compute this from which bits of the
3665 object might be nonzero in its own mode, taking into account the fact
3666 that on many CISC machines, accessing an object in a wider mode
3667 causes the high-order bits to become undefined. So they are
3668 not known to be zero. */
3670 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3671 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3672 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3673 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3675 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3676 known_x, known_mode, known_ret);
3677 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3678 return nonzero;
3680 #endif
3682 code = GET_CODE (x);
3683 switch (code)
3685 case REG:
3686 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3687 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3688 all the bits above ptr_mode are known to be zero. */
3689 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3690 && REG_POINTER (x))
3691 nonzero &= GET_MODE_MASK (ptr_mode);
3692 #endif
3694 /* Include declared information about alignment of pointers. */
3695 /* ??? We don't properly preserve REG_POINTER changes across
3696 pointer-to-integer casts, so we can't trust it except for
3697 things that we know must be pointers. See execute/960116-1.c. */
3698 if ((x == stack_pointer_rtx
3699 || x == frame_pointer_rtx
3700 || x == arg_pointer_rtx)
3701 && REGNO_POINTER_ALIGN (REGNO (x)))
3703 unsigned HOST_WIDE_INT alignment
3704 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3706 #ifdef PUSH_ROUNDING
3707 /* If PUSH_ROUNDING is defined, it is possible for the
3708 stack to be momentarily aligned only to that amount,
3709 so we pick the least alignment. */
3710 if (x == stack_pointer_rtx && PUSH_ARGS)
3711 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3712 alignment);
3713 #endif
3715 nonzero &= ~(alignment - 1);
3719 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3720 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3721 known_mode, known_ret,
3722 &nonzero_for_hook);
3724 if (new)
3725 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3726 known_mode, known_ret);
3728 return nonzero_for_hook;
3731 case CONST_INT:
3732 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3733 /* If X is negative in MODE, sign-extend the value. */
3734 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3735 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3736 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3737 #endif
3739 return INTVAL (x);
3741 case MEM:
3742 #ifdef LOAD_EXTEND_OP
3743 /* In many, if not most, RISC machines, reading a byte from memory
3744 zeros the rest of the register. Noticing that fact saves a lot
3745 of extra zero-extends. */
3746 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3747 nonzero &= GET_MODE_MASK (GET_MODE (x));
3748 #endif
3749 break;
3751 case EQ: case NE:
3752 case UNEQ: case LTGT:
3753 case GT: case GTU: case UNGT:
3754 case LT: case LTU: case UNLT:
3755 case GE: case GEU: case UNGE:
3756 case LE: case LEU: case UNLE:
3757 case UNORDERED: case ORDERED:
3758 /* If this produces an integer result, we know which bits are set.
3759 Code here used to clear bits outside the mode of X, but that is
3760 now done above. */
3761 /* Mind that MODE is the mode the caller wants to look at this
3762 operation in, and not the actual operation mode. We can wind
3763 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3764 that describes the results of a vector compare. */
3765 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3766 && mode_width <= HOST_BITS_PER_WIDE_INT)
3767 nonzero = STORE_FLAG_VALUE;
3768 break;
3770 case NEG:
3771 #if 0
3772 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3773 and num_sign_bit_copies. */
3774 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3775 == GET_MODE_BITSIZE (GET_MODE (x)))
3776 nonzero = 1;
3777 #endif
3779 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3780 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3781 break;
3783 case ABS:
3784 #if 0
3785 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3786 and num_sign_bit_copies. */
3787 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3788 == GET_MODE_BITSIZE (GET_MODE (x)))
3789 nonzero = 1;
3790 #endif
3791 break;
3793 case TRUNCATE:
3794 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3795 known_x, known_mode, known_ret)
3796 & GET_MODE_MASK (mode));
3797 break;
3799 case ZERO_EXTEND:
3800 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3801 known_x, known_mode, known_ret);
3802 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3803 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3804 break;
3806 case SIGN_EXTEND:
3807 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3808 Otherwise, show that all the bits in the outer mode but not in the
3809 inner mode may be nonzero. */
3810 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3811 known_x, known_mode, known_ret);
3812 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3814 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3815 if (inner_nz
3816 & (((HOST_WIDE_INT) 1
3817 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3818 inner_nz |= (GET_MODE_MASK (mode)
3819 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3822 nonzero &= inner_nz;
3823 break;
3825 case AND:
3826 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3827 known_x, known_mode, known_ret)
3828 & cached_nonzero_bits (XEXP (x, 1), mode,
3829 known_x, known_mode, known_ret);
3830 break;
3832 case XOR: case IOR:
3833 case UMIN: case UMAX: case SMIN: case SMAX:
3835 unsigned HOST_WIDE_INT nonzero0 =
3836 cached_nonzero_bits (XEXP (x, 0), mode,
3837 known_x, known_mode, known_ret);
3839 /* Don't call nonzero_bits for the second time if it cannot change
3840 anything. */
3841 if ((nonzero & nonzero0) != nonzero)
3842 nonzero &= nonzero0
3843 | cached_nonzero_bits (XEXP (x, 1), mode,
3844 known_x, known_mode, known_ret);
3846 break;
3848 case PLUS: case MINUS:
3849 case MULT:
3850 case DIV: case UDIV:
3851 case MOD: case UMOD:
3852 /* We can apply the rules of arithmetic to compute the number of
3853 high- and low-order zero bits of these operations. We start by
3854 computing the width (position of the highest-order nonzero bit)
3855 and the number of low-order zero bits for each value. */
3857 unsigned HOST_WIDE_INT nz0 =
3858 cached_nonzero_bits (XEXP (x, 0), mode,
3859 known_x, known_mode, known_ret);
3860 unsigned HOST_WIDE_INT nz1 =
3861 cached_nonzero_bits (XEXP (x, 1), mode,
3862 known_x, known_mode, known_ret);
3863 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3864 int width0 = floor_log2 (nz0) + 1;
3865 int width1 = floor_log2 (nz1) + 1;
3866 int low0 = floor_log2 (nz0 & -nz0);
3867 int low1 = floor_log2 (nz1 & -nz1);
3868 HOST_WIDE_INT op0_maybe_minusp
3869 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3870 HOST_WIDE_INT op1_maybe_minusp
3871 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3872 unsigned int result_width = mode_width;
3873 int result_low = 0;
3875 switch (code)
3877 case PLUS:
3878 result_width = MAX (width0, width1) + 1;
3879 result_low = MIN (low0, low1);
3880 break;
3881 case MINUS:
3882 result_low = MIN (low0, low1);
3883 break;
3884 case MULT:
3885 result_width = width0 + width1;
3886 result_low = low0 + low1;
3887 break;
3888 case DIV:
3889 if (width1 == 0)
3890 break;
3891 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3892 result_width = width0;
3893 break;
3894 case UDIV:
3895 if (width1 == 0)
3896 break;
3897 result_width = width0;
3898 break;
3899 case MOD:
3900 if (width1 == 0)
3901 break;
3902 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3903 result_width = MIN (width0, width1);
3904 result_low = MIN (low0, low1);
3905 break;
3906 case UMOD:
3907 if (width1 == 0)
3908 break;
3909 result_width = MIN (width0, width1);
3910 result_low = MIN (low0, low1);
3911 break;
3912 default:
3913 gcc_unreachable ();
3916 if (result_width < mode_width)
3917 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3919 if (result_low > 0)
3920 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3922 #ifdef POINTERS_EXTEND_UNSIGNED
3923 /* If pointers extend unsigned and this is an addition or subtraction
3924 to a pointer in Pmode, all the bits above ptr_mode are known to be
3925 zero. */
3926 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3927 && (code == PLUS || code == MINUS)
3928 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3929 nonzero &= GET_MODE_MASK (ptr_mode);
3930 #endif
3932 break;
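/* Worked example (illustrative): if nz0 == 0x0c (width0 = 4,
   low0 = 2) and nz1 == 0x30 (width1 = 6, low1 = 4), then for PLUS
   result_width = 7 and result_low = 2, so NONZERO is narrowed to
   0x7c: any sum of such operands has its nonzero bits within
   bits 2..6.  */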
3934 case ZERO_EXTRACT:
3935 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3936 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3937 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3938 break;
3940 case SUBREG:
3941 /* If this is a SUBREG formed for a promoted variable that has
3942 been zero-extended, we know that at least the high-order bits
3943 are zero, though others might be too. */
3945 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3946 nonzero = GET_MODE_MASK (GET_MODE (x))
3947 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3948 known_x, known_mode, known_ret);
3950 /* If the inner mode is a single word for both the host and target
3951 machines, we can compute this from which bits of the inner
3952 object might be nonzero. */
3953 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3954 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3955 <= HOST_BITS_PER_WIDE_INT))
3957 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3958 known_x, known_mode, known_ret);
3960 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3961 /* If this is a typical RISC machine, we only have to worry
3962 about the way loads are extended. */
3963 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3964 ? (((nonzero
3965 & (((unsigned HOST_WIDE_INT) 1
3966 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3967 != 0))
3968 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3969 || !MEM_P (SUBREG_REG (x)))
3970 #endif
3972 /* On many CISC machines, accessing an object in a wider mode
3973 causes the high-order bits to become undefined. So they are
3974 not known to be zero. */
3975 if (GET_MODE_SIZE (GET_MODE (x))
3976 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3977 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3978 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3981 break;
3983 case ASHIFTRT:
3984 case LSHIFTRT:
3985 case ASHIFT:
3986 case ROTATE:
3987 /* The nonzero bits are in two classes: any bits within MODE
3988 that aren't in GET_MODE (x) are always significant. The rest of the
3989 nonzero bits are those that are significant in the operand of
3990 the shift when shifted the appropriate number of bits. This
3991 shows that high-order bits are cleared by the right shift and
3992 low-order bits by left shifts. */
3993 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3994 && INTVAL (XEXP (x, 1)) >= 0
3995 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3997 enum machine_mode inner_mode = GET_MODE (x);
3998 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3999 int count = INTVAL (XEXP (x, 1));
4000 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4001 unsigned HOST_WIDE_INT op_nonzero =
4002 cached_nonzero_bits (XEXP (x, 0), mode,
4003 known_x, known_mode, known_ret);
4004 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4005 unsigned HOST_WIDE_INT outer = 0;
4007 if (mode_width > width)
4008 outer = (op_nonzero & nonzero & ~mode_mask);
4010 if (code == LSHIFTRT)
4011 inner >>= count;
4012 else if (code == ASHIFTRT)
4014 inner >>= count;
4016 /* If the sign bit may have been nonzero before the shift, we
4017 need to mark all the places it could have been copied to
4018 by the shift as possibly nonzero. */
4019 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
4020 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
4022 else if (code == ASHIFT)
4023 inner <<= count;
4024 else
4025 inner = ((inner << (count % width)
4026 | (inner >> (width - (count % width)))) & mode_mask);
4028 nonzero &= (outer | inner);
4030 break;
4032 case FFS:
4033 case POPCOUNT:
4034 /* This is at most the number of bits in the mode. */
4035 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4036 break;
4038 case CLZ:
4039 /* If CLZ has a known value at zero, then the nonzero bits are
4040 that value, plus the number of bits in the mode minus one. */
4041 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4042 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4043 else
4044 nonzero = -1;
4045 break;
4047 case CTZ:
4048 /* If CTZ has a known value at zero, then the nonzero bits are
4049 that value, plus the number of bits in the mode minus one. */
4050 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4051 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4052 else
4053 nonzero = -1;
4054 break;
4056 case PARITY:
4057 nonzero = 1;
4058 break;
4060 case IF_THEN_ELSE:
4062 unsigned HOST_WIDE_INT nonzero_true =
4063 cached_nonzero_bits (XEXP (x, 1), mode,
4064 known_x, known_mode, known_ret);
4066 /* Don't call nonzero_bits for the second time if it cannot change
4067 anything. */
4068 if ((nonzero & nonzero_true) != nonzero)
4069 nonzero &= nonzero_true
4070 | cached_nonzero_bits (XEXP (x, 2), mode,
4071 known_x, known_mode, known_ret);
4073 break;
4075 default:
4076 break;
4079 return nonzero;
4082 /* See the macro definition above. */
4083 #undef cached_num_sign_bit_copies
4086 /* The function cached_num_sign_bit_copies is a wrapper around
4087 num_sign_bit_copies1. It avoids exponential behavior in
4088 num_sign_bit_copies1 when X has identical subexpressions on the
4089 first or the second level. */
4091 static unsigned int
4092 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
4093 enum machine_mode known_mode,
4094 unsigned int known_ret)
4096 if (x == known_x && mode == known_mode)
4097 return known_ret;
4099 /* Try to find identical subexpressions. If found call
4100 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4101 the precomputed value for the subexpression as KNOWN_RET. */
4103 if (ARITHMETIC_P (x))
4105 rtx x0 = XEXP (x, 0);
4106 rtx x1 = XEXP (x, 1);
4108 /* Check the first level. */
4109 if (x0 == x1)
4110 return
4111 num_sign_bit_copies1 (x, mode, x0, mode,
4112 cached_num_sign_bit_copies (x0, mode, known_x,
4113 known_mode,
4114 known_ret));
4116 /* Check the second level. */
4117 if (ARITHMETIC_P (x0)
4118 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4119 return
4120 num_sign_bit_copies1 (x, mode, x1, mode,
4121 cached_num_sign_bit_copies (x1, mode, known_x,
4122 known_mode,
4123 known_ret));
4125 if (ARITHMETIC_P (x1)
4126 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4127 return
4128 num_sign_bit_copies1 (x, mode, x0, mode,
4129 cached_num_sign_bit_copies (x0, mode, known_x,
4130 known_mode,
4131 known_ret));
4134 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4137 /* Return the number of bits at the high-order end of X that are known to
4138 be equal to the sign bit. X will be used in mode MODE; if MODE is
4139 VOIDmode, X will be used in its own mode. The returned value will always
4140 be between 1 and the number of bits in MODE. */
4142 static unsigned int
4143 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
4144 enum machine_mode known_mode,
4145 unsigned int known_ret)
4147 enum rtx_code code = GET_CODE (x);
4148 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
4149 int num0, num1, result;
4150 unsigned HOST_WIDE_INT nonzero;
4152 /* If we weren't given a mode, use the mode of X. If the mode is still
4153 VOIDmode, we don't know anything. Likewise if one of the modes is
4154 floating-point. */
4156 if (mode == VOIDmode)
4157 mode = GET_MODE (x);
4159 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
4160 return 1;
4162 /* For a smaller object, just ignore the high bits. */
4163 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
4165 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4166 known_x, known_mode, known_ret);
4167 return MAX (1,
4168 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
4171 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
4173 #ifndef WORD_REGISTER_OPERATIONS
4174 /* If this machine does not do all register operations on the entire
4175 register and MODE is wider than the mode of X, we can say nothing
4176 at all about the high-order bits. */
4177 return 1;
4178 #else
4179 /* Likewise on machines that do, if the mode of the object is smaller
4180 than a word and loads of that size don't sign extend, we can say
4181 nothing about the high order bits. */
4182 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
4183 #ifdef LOAD_EXTEND_OP
4184 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4185 #endif
4187 return 1;
4188 #endif
4191 switch (code)
4193 case REG:
4195 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4196 /* If pointers extend signed and this is a pointer in Pmode, say that
4197 all the bits above ptr_mode are known to be sign bit copies. */
4198 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
4199 && REG_POINTER (x))
4200 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
4201 #endif
4204 unsigned int copies_for_hook = 1, copies = 1;
4205 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4206 known_mode, known_ret,
4207 &copies_for_hook);
4209 if (new)
4210 copies = cached_num_sign_bit_copies (new, mode, known_x,
4211 known_mode, known_ret);
4213 if (copies > 1 || copies_for_hook > 1)
4214 return MAX (copies, copies_for_hook);
4216 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4218 break;
4220 case MEM:
4221 #ifdef LOAD_EXTEND_OP
4222 /* Some RISC machines sign-extend all loads of smaller than a word. */
4223 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4224 return MAX (1, ((int) bitwidth
4225 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4226 #endif
4227 break;
4229 case CONST_INT:
4230 /* If the constant is negative, take its 1's complement and remask.
4231 Then see how many zero bits we have. */
4232 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4233 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4234 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4235 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4237 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
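/* Worked example (illustrative, assuming bitwidth 32 does not exceed
   HOST_BITS_PER_WIDE_INT): for (const_int -4) looked at in SImode,
   NONZERO becomes ~(-4) & 0xffffffff == 3, floor_log2 (3) == 1, and
   the result is 32 - 1 - 1 == 30: the top 30 bits of ...11111100
   are all copies of the sign bit.  */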
4239 case SUBREG:
4240 /* If this is a SUBREG for a promoted object that is sign-extended
4241 and we are looking at it in a wider mode, at least the
4242 high-order bits are known to be sign bit copies. */
4244 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4246 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4247 known_x, known_mode, known_ret);
4248 return MAX ((int) bitwidth
4249 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4250 num0);
4253 /* For a smaller object, just ignore the high bits. */
4254 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4256 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4257 known_x, known_mode, known_ret);
4258 return MAX (1, (num0
4259 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4260 - bitwidth)));
4263 #ifdef WORD_REGISTER_OPERATIONS
4264 #ifdef LOAD_EXTEND_OP
4265 /* For paradoxical SUBREGs on machines where all register operations
4266 affect the entire register, just look inside. Note that we are
4267 passing MODE to the recursive call, so the number of sign bit copies
4268 will remain relative to that mode, not the inner mode. */
4270 /* This works only if loads sign extend. Otherwise, if we get a
4271 reload for the inner part, it may be loaded from the stack, and
4272 then we lose all sign bit copies that existed before the store
4273 to the stack. */
4275 if ((GET_MODE_SIZE (GET_MODE (x))
4276 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4277 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4278 && MEM_P (SUBREG_REG (x)))
4279 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4280 known_x, known_mode, known_ret);
4281 #endif
4282 #endif
4283 break;
4285 case SIGN_EXTRACT:
4286 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4287 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4288 break;
4290 case SIGN_EXTEND:
4291 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4292 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4293 known_x, known_mode, known_ret));
4295 case TRUNCATE:
4296 /* For a smaller object, just ignore the high bits. */
4297 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4298 known_x, known_mode, known_ret);
4299 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4300 - bitwidth)));
4302 case NOT:
4303 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4304 known_x, known_mode, known_ret);
4306 case ROTATE: case ROTATERT:
4307 /* If we are rotating left by a number of bits less than the number
4308 of sign bit copies, we can just subtract that amount from the
4309 number. */
4310 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4311 && INTVAL (XEXP (x, 1)) >= 0
4312 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4314 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4315 known_x, known_mode, known_ret);
4316 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4317 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4319 break;
4321 case NEG:
4322 /* In general, this subtracts one sign bit copy. But if the value
4323 is known to be positive, the number of sign bit copies is the
4324 same as that of the input. Finally, if the input has just one bit
4325 that might be nonzero, all the bits are copies of the sign bit. */
4326 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4327 known_x, known_mode, known_ret);
4328 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4329 return num0 > 1 ? num0 - 1 : 1;
4331 nonzero = nonzero_bits (XEXP (x, 0), mode);
4332 if (nonzero == 1)
4333 return bitwidth;
4335 if (num0 > 1
4336 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4337 num0--;
4339 return num0;
4341 case IOR: case AND: case XOR:
4342 case SMIN: case SMAX: case UMIN: case UMAX:
4343 /* Logical operations will preserve the number of sign-bit copies.
4344 MIN and MAX operations always return one of the operands. */
4345 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4346 known_x, known_mode, known_ret);
4347 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4348 known_x, known_mode, known_ret);
4349 return MIN (num0, num1);
4351 case PLUS: case MINUS:
4352 /* For addition and subtraction, we can have a 1-bit carry. However,
4353 if we are subtracting 1 from a positive number, there will not
4354 be such a carry. Furthermore, if the positive number is known to
4355 be 0 or 1, we know the result is either -1 or 0. */
4357 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4358 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4360 nonzero = nonzero_bits (XEXP (x, 0), mode);
4361 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4362 return (nonzero == 1 || nonzero == 0 ? bitwidth
4363 : bitwidth - floor_log2 (nonzero) - 1);
4366 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4367 known_x, known_mode, known_ret);
4368 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4369 known_x, known_mode, known_ret);
4370 result = MAX (1, MIN (num0, num1) - 1);
4372 #ifdef POINTERS_EXTEND_UNSIGNED
4373 /* If pointers extend signed and this is an addition or subtraction
4374 to a pointer in Pmode, all the bits above ptr_mode are known to be
4375 sign bit copies. */
4376 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4377 && (code == PLUS || code == MINUS)
4378 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4379 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4380 - GET_MODE_BITSIZE (ptr_mode) + 1),
4381 result);
4382 #endif
4383 return result;
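
      /* A worked example (illustrative only): for (plus X (const_int -1))
         where nonzero_bits proves X is 0 or 1, the sum is -1 or 0, so every
         bit is a copy of the sign bit and the early return above yields
         BITWIDTH rather than the conservative MIN (num0, num1) - 1.  */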
    case MULT:
      /* The number of bits of the product is the sum of the number of
         bits of both terms.  However, unless one of the terms is known
         to be positive, we must allow for an additional bit since negating
         a negative number can remove one sign bit copy.  */

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

      result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
      if (result > 0
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (((nonzero_bits (XEXP (x, 0), mode)
                    & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
                  && ((nonzero_bits (XEXP (x, 1), mode)
                       & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
        result--;

      return MAX (1, result);

    case UDIV:
      /* The result must be <= the first operand.  If the first operand
         has the high bit set, we know nothing about the number of sign
         bit copies.  */
      if (bitwidth > HOST_BITS_PER_WIDE_INT)
        return 1;
      else if ((nonzero_bits (XEXP (x, 0), mode)
                & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        return 1;
      else
        return cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);

    case UMOD:
      /* The result must be <= the second operand.  */
      return cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);

    case DIV:
      /* Similar to unsigned division, except that we have to worry about
         the case where the divisor is negative, in which case we have
         to add 1.  */
      result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case MOD:
      result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                           known_x, known_mode, known_ret);
      if (result > 1
          && (bitwidth > HOST_BITS_PER_WIDE_INT
              || (nonzero_bits (XEXP (x, 1), mode)
                  & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
        result--;

      return result;

    case ASHIFTRT:
      /* Shifts by a constant add to the number of bits equal to the
         sign bit.  */
      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      if (GET_CODE (XEXP (x, 1)) == CONST_INT
          && INTVAL (XEXP (x, 1)) > 0)
        num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));

      return num0;

    case ASHIFT:
      /* Left shifts destroy copies.  */
      if (GET_CODE (XEXP (x, 1)) != CONST_INT
          || INTVAL (XEXP (x, 1)) < 0
          || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
        return 1;

      num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
                                         known_x, known_mode, known_ret);
      return MAX (1, num0 - INTVAL (XEXP (x, 1)));

    case IF_THEN_ELSE:
      num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
                                         known_x, known_mode, known_ret);
      num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
                                         known_x, known_mode, known_ret);
      return MIN (num0, num1);

    case EQ:  case NE:  case GE:  case GT:  case LE:  case LT:
    case UNEQ:  case LTGT:  case UNGE:  case UNGT:  case UNLE:  case UNLT:
    case GEU: case GTU: case LEU: case LTU:
    case UNORDERED: case ORDERED:
      /* If the constant STORE_FLAG_VALUE is negative, take its 1's
         complement and remask.  Then see how many zero bits we have.  */
      nonzero = STORE_FLAG_VALUE;
      if (bitwidth <= HOST_BITS_PER_WIDE_INT
          && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
        nonzero = (~nonzero) & GET_MODE_MASK (mode);

      return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);

    default:
      break;
    }

  /* If we haven't been able to figure it out by one of the above rules,
     see if some of the high-order bits are known to be zero.  If so,
     count those bits and return one less than that amount.  If we can't
     safely compute the mask for this mode, always return BITWIDTH.  */

  bitwidth = GET_MODE_BITSIZE (mode);
  if (bitwidth > HOST_BITS_PER_WIDE_INT)
    return 1;

  nonzero = nonzero_bits (x, mode);
  return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
         ? 1 : bitwidth - floor_log2 (nonzero) - 1;
}
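
/* The fallback rule above is worth illustrating.  The sketch below is
   purely illustrative and is not part of GCC: the helper name is
   hypothetical, nothing calls it, and it is compiled out.  It restates the
   final rule: if the sign bit may be nonzero we can only guarantee one
   copy; otherwise every known-zero high-order bit is a copy of the (zero)
   sign bit.  */
#if 0
static unsigned int
example_copies_from_nonzero (unsigned HOST_WIDE_INT nonzero,
                             unsigned int bitwidth)
{
  /* If the sign bit might be set, we know nothing more.  */
  if (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
    return 1;

  /* Otherwise count the bits above the highest possibly-nonzero bit;
     e.g. nonzero == 3 with bitwidth 32 gives 32 - 1 - 1 = 30 copies.  */
  return nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1;
}
#endif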
/* Calculate the rtx_cost of a single instruction.  A return value of
   zero indicates an instruction pattern without a known cost.  */

int
insn_rtx_cost (rtx pat)
{
  int i, cost;
  rtx set;

  /* Extract the single set rtx from the instruction pattern.
     We can't use single_set since we only have the pattern.  */
  if (GET_CODE (pat) == SET)
    set = pat;
  else if (GET_CODE (pat) == PARALLEL)
    {
      set = NULL_RTX;
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx x = XVECEXP (pat, 0, i);
          if (GET_CODE (x) == SET)
            {
              if (set)
                return 0;
              set = x;
            }
        }
      if (!set)
        return 0;
    }
  else
    return 0;

  cost = rtx_cost (SET_SRC (set), SET);
  return cost > 0 ? cost : COSTS_N_INSNS (1);
}
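
/* For example (illustrative only; INSN and LIMIT are hypothetical):

     int cost = insn_rtx_cost (PATTERN (insn));
     if (cost == 0 || cost > COSTS_N_INSNS (limit))
       ...give up on the transformation...

   Since zero means "no known cost", callers must test for it explicitly
   rather than treating the return value as an ordinary cost.  */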
/* Given an insn INSN and condition COND, return the condition in a
   canonical form to simplify testing by callers.  Specifically:

   (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
   (2) Both operands will be machine operands; (cc0) will have been replaced.
   (3) If an operand is a constant, it will be the second operand.
   (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
       for GE, GEU, and LEU.

   If the condition cannot be understood, or is an inequality floating-point
   comparison which needs to be reversed, 0 will be returned.

   If REVERSE is nonzero, then reverse the condition prior to
   canonicalizing it.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.

   If WANT_REG is nonzero, we wish the condition to be relative to that
   register, if possible.  Therefore, do not canonicalize the condition
   further.  If ALLOW_CC_MODE is nonzero, allow the condition returned
   to be a compare to a CC mode register.

   If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
   and at INSN.  */

rtx
canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
                        rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
{
  enum rtx_code code;
  rtx prev = insn;
  rtx set;
  rtx tem;
  rtx op0, op1;
  int reverse_code = 0;
  enum machine_mode mode;

  code = GET_CODE (cond);
  mode = GET_MODE (cond);
  op0 = XEXP (cond, 0);
  op1 = XEXP (cond, 1);

  if (reverse)
    code = reversed_comparison_code (cond, insn);
  if (code == UNKNOWN)
    return 0;

  if (earliest)
    *earliest = insn;

  /* If we are comparing a register with zero, see if the register is set
     in the previous insn to a COMPARE or a comparison operation.  Perform
     the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
     in cse.c.  */

  while ((GET_RTX_CLASS (code) == RTX_COMPARE
          || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
         && op1 == CONST0_RTX (GET_MODE (op0))
         && op0 != want_reg)
    {
      /* Set nonzero when we find something of interest.  */
      rtx x = 0;

#ifdef HAVE_cc0
      /* If comparison with cc0, import actual comparison from compare
         insn.  */
      if (op0 == cc0_rtx)
        {
          if ((prev = prev_nonnote_insn (prev)) == 0
              || !NONJUMP_INSN_P (prev)
              || (set = single_set (prev)) == 0
              || SET_DEST (set) != cc0_rtx)
            return 0;

          op0 = SET_SRC (set);
          op1 = CONST0_RTX (GET_MODE (op0));
          if (earliest)
            *earliest = prev;
        }
#endif

      /* If this is a COMPARE, pick up the two things being compared.  */
      if (GET_CODE (op0) == COMPARE)
        {
          op1 = XEXP (op0, 1);
          op0 = XEXP (op0, 0);
          continue;
        }
      else if (!REG_P (op0))
        break;

      /* Go back to the previous insn.  Stop if it is not an INSN.  We also
         stop if it isn't a single set or if it has a REG_INC note because
         we don't want to bother dealing with it.  */

      if ((prev = prev_nonnote_insn (prev)) == 0
          || !NONJUMP_INSN_P (prev)
          || FIND_REG_INC_NOTE (prev, NULL_RTX))
        break;

      set = set_of (op0, prev);

      if (set
          && (GET_CODE (set) != SET
              || !rtx_equal_p (SET_DEST (set), op0)))
        break;

      /* If this is setting OP0, get what it sets it to if it looks
         relevant.  */
      if (set)
        {
          enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
#ifdef FLOAT_STORE_FLAG_VALUE
          REAL_VALUE_TYPE fsfv;
#endif

          /* ??? We may not combine comparisons done in a CCmode with
             comparisons not done in a CCmode.  This is to aid targets
             like Alpha that have an IEEE compliant EQ instruction, and
             a non-IEEE compliant BEQ instruction.  The use of CCmode is
             actually artificial, simply to prevent the combination, but
             should not affect other platforms.

             However, we must allow VOIDmode comparisons to match either
             CCmode or non-CCmode comparison, because some ports have
             modeless comparisons inside branch patterns.

             ??? This mode check should perhaps look more like the mode check
             in simplify_comparison in combine.  */

          if ((GET_CODE (SET_SRC (set)) == COMPARE
               || (((code == NE
                     || (code == LT
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == LT
                         && SCALAR_FLOAT_MODE_P (inner_mode)
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     )
                    && COMPARISON_P (SET_SRC (set))))
              && (((GET_MODE_CLASS (mode) == MODE_CC)
                   == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                  || mode == VOIDmode || inner_mode == VOIDmode))
            x = SET_SRC (set);
          else if (((code == EQ
                     || (code == GE
                         && (GET_MODE_BITSIZE (inner_mode)
                             <= HOST_BITS_PER_WIDE_INT)
                         && GET_MODE_CLASS (inner_mode) == MODE_INT
                         && (STORE_FLAG_VALUE
                             & ((HOST_WIDE_INT) 1
                                << (GET_MODE_BITSIZE (inner_mode) - 1))))
#ifdef FLOAT_STORE_FLAG_VALUE
                     || (code == GE
                         && SCALAR_FLOAT_MODE_P (inner_mode)
                         && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
                             REAL_VALUE_NEGATIVE (fsfv)))
#endif
                     )
                    && COMPARISON_P (SET_SRC (set))
                    && (((GET_MODE_CLASS (mode) == MODE_CC)
                         == (GET_MODE_CLASS (inner_mode) == MODE_CC))
                        || mode == VOIDmode || inner_mode == VOIDmode))
            {
              reverse_code = 1;
              x = SET_SRC (set);
            }
          else
            break;
        }

      else if (reg_set_p (op0, prev))
        /* If this sets OP0, but not directly, we have to give up.  */
        break;

      if (x)
        {
          /* If the caller is expecting the condition to be valid at INSN,
             make sure X doesn't change before INSN.  */
          if (valid_at_insn_p)
            if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
              break;
          if (COMPARISON_P (x))
            code = GET_CODE (x);
          if (reverse_code)
            {
              code = reversed_comparison_code (x, prev);
              if (code == UNKNOWN)
                return 0;
              reverse_code = 0;
            }

          op0 = XEXP (x, 0), op1 = XEXP (x, 1);
          if (earliest)
            *earliest = prev;
        }
    }

  /* If constant is first, put it last.  */
  if (CONSTANT_P (op0))
    code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;

  /* If OP0 is the result of a comparison, we weren't able to find what
     was really being compared, so fail.  */
  if (!allow_cc_mode
      && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
    return 0;

  /* Canonicalize any ordered comparison with integers involving equality
     if we can do computations in the relevant mode and we do not
     overflow.  */

  if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
      && GET_CODE (op1) == CONST_INT
      && GET_MODE (op0) != VOIDmode
      && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT const_val = INTVAL (op1);
      unsigned HOST_WIDE_INT uconst_val = const_val;
      unsigned HOST_WIDE_INT max_val
        = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));

      switch (code)
        {
        case LE:
          if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
            code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
          break;

        /* When cross-compiling, const_val might be sign-extended from
           BITS_PER_WORD to HOST_BITS_PER_WIDE_INT.  */
        case GE:
          if ((HOST_WIDE_INT) (const_val & max_val)
              != (((HOST_WIDE_INT) 1
                   << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
            code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
          break;

        case LEU:
          if (uconst_val < max_val)
            code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
          break;

        case GEU:
          if (uconst_val != 0)
            code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
          break;

        default:
          break;
        }
    }

  /* Never return CC0; return zero instead.  */
  if (CC0_P (op0))
    return 0;

  return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
}
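
/* For example, given the condition (le (reg:SI 60) (const_int 4)), rule (4)
   above rewrites it as (lt (reg:SI 60) (const_int 5)); and a condition such
   as (gt (const_int 0) (reg:SI 60)) first has its constant moved to the
   second position, becoming (lt (reg:SI 60) (const_int 0)).  (The register
   numbers here are illustrative.)  */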
/* Given a jump insn JUMP, return the condition that will cause it to branch
   to its JUMP_LABEL.  If the condition cannot be understood, or is an
   inequality floating-point comparison which needs to be reversed, 0 will
   be returned.

   If EARLIEST is nonzero, it is a pointer to a place where the earliest
   insn used in locating the condition was found.  If a replacement test
   of the condition is desired, it should be placed in front of that
   insn and we will be sure that the inputs are still valid.  If EARLIEST
   is null, the returned condition will be valid at JUMP.

   If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
   compare to a CC mode register.

   VALID_AT_INSN_P is the same as for canonicalize_condition.  */

rtx
get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
{
  rtx cond;
  int reverse;
  rtx set;

  /* If this is not a standard conditional jump, we can't parse it.  */
  if (!JUMP_P (jump)
      || ! any_condjump_p (jump))
    return 0;
  set = pc_set (jump);

  cond = XEXP (SET_SRC (set), 0);

  /* If this branches to JUMP_LABEL when the condition is false, reverse
     the condition.  */
  reverse
    = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
      && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);

  return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
                                 allow_cc_mode, valid_at_insn_p);
}
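
/* A typical condjump pattern this handles looks like (illustrative):

     (set (pc) (if_then_else (lt (reg:CC 17) (const_int 0))
                             (label_ref 23)
                             (pc)))

   Here the branch is taken when the condition is true, so REVERSE stays
   zero.  If the (label_ref) appears in the else arm instead, the condition
   is reversed before being canonicalized.  */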
/* Initialize non_rtx_starting_operands, which is used to speed up
   for_each_rtx.  */
void
init_rtlanal (void)
{
  int i;
  for (i = 0; i < NUM_RTX_CODE; i++)
    {
      const char *format = GET_RTX_FORMAT (i);
      const char *first = strpbrk (format, "eEV");
      non_rtx_starting_operands[i] = first ? first - format : -1;
    }
}