1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software
4 Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "toplev.h"
29 #include "rtl.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "target.h"
34 #include "output.h"
35 #include "tm_p.h"
36 #include "flags.h"
37 #include "real.h"
38 #include "regs.h"
39 #include "function.h"
41 /* Forward declarations */
42 static void set_of_1 (rtx, rtx, void *);
43 static bool covers_regno_p (rtx, unsigned int);
44 static bool covers_regno_no_parallel_p (rtx, unsigned int);
45 static int rtx_referenced_p_1 (rtx *, void *);
46 static int computed_jump_p_1 (rtx);
47 static void parms_set (rtx, rtx, void *);
49 static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
50 rtx, enum machine_mode,
51 unsigned HOST_WIDE_INT);
52 static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
53 enum machine_mode,
54 unsigned HOST_WIDE_INT);
55 static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
56 enum machine_mode,
57 unsigned int);
58 static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
59 enum machine_mode, unsigned int);
61 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
62 -1 if a code has no such operand. */
63 static int non_rtx_starting_operands[NUM_RTX_CODE];
65 /* Bit flags that specify the machine subtype we are compiling for.
66 Bits are tested using macros TARGET_... defined in the tm.h file
67 and set by `-m...' switches. Must be defined in rtlanal.c. */
69 int target_flags;
71 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
72 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
73 SIGN_EXTEND then while narrowing we also have to enforce the
74 representation and sign-extend the value to mode DESTINATION_REP.
76 If the value is already sign-extended to DESTINATION_REP mode we
77 can just switch to DESTINATION mode on it. For each pair of
78 integral modes SOURCE and DESTINATION, when truncating from SOURCE
79 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
80 contains the number of high-order bits in SOURCE that have to be
81 copies of the sign-bit so that we can do this mode-switch to
82 DESTINATION. */
84 static unsigned int
85 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
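/* A hypothetical illustration of the table above: with SOURCE == SImode,
   DESTINATION == HImode and TARGET_MODE_REP_EXTENDED (HImode, SImode)
   == SIGN_EXTEND, the truncation is a pure mode switch only when bits 15
   through 31 of the SImode value are all copies of the HImode sign bit;
   the entry for this pair records how many such high-order sign-bit
   copies are required.  */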
87 /* Return 1 if the value of X is unstable
88 (would be different at a different point in the program).
89 The frame pointer, arg pointer, etc. are considered stable
90 (within one function) and so is anything marked `unchanging'. */
92 int
93 rtx_unstable_p (rtx x)
95 RTX_CODE code = GET_CODE (x);
96 int i;
97 const char *fmt;
99 switch (code)
101 case MEM:
102 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
104 case CONST:
105 case CONST_INT:
106 case CONST_DOUBLE:
107 case CONST_VECTOR:
108 case SYMBOL_REF:
109 case LABEL_REF:
110 return 0;
112 case REG:
113 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
114 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
115 /* The arg pointer varies if it is not a fixed register. */
116 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
117 return 0;
118 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
119 /* ??? When call-clobbered, the value is stable modulo the restore
120 that must happen after a call. This currently screws up local-alloc
121 into believing that the restore is not needed. */
122 if (x == pic_offset_table_rtx)
123 return 0;
124 #endif
125 return 1;
127 case ASM_OPERANDS:
128 if (MEM_VOLATILE_P (x))
129 return 1;
131 /* Fall through. */
133 default:
134 break;
137 fmt = GET_RTX_FORMAT (code);
138 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
139 if (fmt[i] == 'e')
141 if (rtx_unstable_p (XEXP (x, i)))
142 return 1;
144 else if (fmt[i] == 'E')
146 int j;
147 for (j = 0; j < XVECLEN (x, i); j++)
148 if (rtx_unstable_p (XVECEXP (x, i, j)))
149 return 1;
152 return 0;
155 /* Return 1 if X has a value that can vary even between two
156 executions of the program. 0 means X can be compared reliably
157 against certain constants or near-constants.
158 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
159 zero, we are slightly more conservative.
160 The frame pointer and the arg pointer are considered constant. */
163 rtx_varies_p (rtx x, int for_alias)
165 RTX_CODE code;
166 int i;
167 const char *fmt;
169 if (!x)
170 return 0;
172 code = GET_CODE (x);
173 switch (code)
175 case MEM:
176 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
178 case CONST:
179 case CONST_INT:
180 case CONST_DOUBLE:
181 case CONST_VECTOR:
182 case SYMBOL_REF:
183 case LABEL_REF:
184 return 0;
186 case REG:
187 /* Note that we have to test for the actual rtx used for the frame
188 and arg pointers and not just the register number in case we have
189 eliminated the frame and/or arg pointer and are using it
190 for pseudos. */
191 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
192 /* The arg pointer varies if it is not a fixed register. */
193 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
194 return 0;
195 if (x == pic_offset_table_rtx
196 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
197 /* ??? When call-clobbered, the value is stable modulo the restore
198 that must happen after a call. This currently screws up
199 local-alloc into believing that the restore is not needed, so we
200 must return 0 only if we are called from alias analysis. */
201 && for_alias
202 #endif
204 return 0;
205 return 1;
207 case LO_SUM:
208 /* The operand 0 of a LO_SUM is considered constant
209 (in fact it is related specifically to operand 1)
210 during alias analysis. */
211 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
212 || rtx_varies_p (XEXP (x, 1), for_alias);
214 case ASM_OPERANDS:
215 if (MEM_VOLATILE_P (x))
216 return 1;
218 /* Fall through. */
220 default:
221 break;
224 fmt = GET_RTX_FORMAT (code);
225 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
226 if (fmt[i] == 'e')
228 if (rtx_varies_p (XEXP (x, i), for_alias))
229 return 1;
231 else if (fmt[i] == 'E')
233 int j;
234 for (j = 0; j < XVECLEN (x, i); j++)
235 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
236 return 1;
239 return 0;
242 /* Return nonzero if the use of X as an address in a MEM can cause a trap.
243 MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
244 whether nonzero is returned for unaligned memory accesses on strict
245 alignment machines. */
247 static int
248 rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
250 enum rtx_code code = GET_CODE (x);
252 switch (code)
254 case SYMBOL_REF:
255 return SYMBOL_REF_WEAK (x);
257 case LABEL_REF:
258 return 0;
260 case REG:
261 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
262 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
263 || x == stack_pointer_rtx
264 /* The arg pointer varies if it is not a fixed register. */
265 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
266 return 0;
267 /* All of the virtual frame registers are stack references. */
268 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
269 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
270 return 0;
271 return 1;
273 case CONST:
274 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
276 case PLUS:
277 /* An address is assumed not to trap if:
278 - it is an address that can't trap plus a constant integer,
279 with the proper remainder modulo the mode size if we are
280 considering unaligned memory references. */
281 if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
282 && GET_CODE (XEXP (x, 1)) == CONST_INT)
284 HOST_WIDE_INT offset;
286 if (!STRICT_ALIGNMENT
287 || !unaligned_mems
288 || GET_MODE_SIZE (mode) == 0)
289 return 0;
291 offset = INTVAL (XEXP (x, 1));
293 #ifdef SPARC_STACK_BOUNDARY_HACK
294 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
295 the real alignment of %sp. However, when it does this, the
296 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
297 if (SPARC_STACK_BOUNDARY_HACK
298 && (XEXP (x, 0) == stack_pointer_rtx
299 || XEXP (x, 0) == hard_frame_pointer_rtx))
300 offset -= STACK_POINTER_OFFSET;
301 #endif
303 return offset % GET_MODE_SIZE (mode) != 0;
306 /* - or it is the pic register plus a constant. */
307 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
308 return 0;
310 return 1;
312 case LO_SUM:
313 case PRE_MODIFY:
314 return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);
316 case PRE_DEC:
317 case PRE_INC:
318 case POST_DEC:
319 case POST_INC:
320 case POST_MODIFY:
321 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
323 default:
324 break;
327 /* If it isn't one of the cases above, it can cause a trap. */
328 return 1;
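/* Hypothetical illustrations of the above: (plus stack_pointer_rtx
   (const_int 8)) is judged unable to trap (barring the unaligned-access
   check on strict alignment machines) because the stack pointer cannot trap
   and the offset is a CONST_INT, whereas a weak SYMBOL_REF may trap since
   the symbol might resolve to address zero.  */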
331 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
334 rtx_addr_can_trap_p (rtx x)
336 return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
339 /* Return true if X is an address that is known to not be zero. */
341 bool
342 nonzero_address_p (rtx x)
344 enum rtx_code code = GET_CODE (x);
346 switch (code)
348 case SYMBOL_REF:
349 return !SYMBOL_REF_WEAK (x);
351 case LABEL_REF:
352 return true;
354 case REG:
355 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
356 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
357 || x == stack_pointer_rtx
358 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
359 return true;
360 /* All of the virtual frame registers are stack references. */
361 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
362 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
363 return true;
364 return false;
366 case CONST:
367 return nonzero_address_p (XEXP (x, 0));
369 case PLUS:
370 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
371 return nonzero_address_p (XEXP (x, 0));
372 /* Handle PIC references. */
373 else if (XEXP (x, 0) == pic_offset_table_rtx
374 && CONSTANT_P (XEXP (x, 1)))
375 return true;
376 return false;
378 case PRE_MODIFY:
379 /* Similar to the above; allow positive offsets. Further, since
380 auto-inc is only allowed in memories, the register must be a
381 pointer. */
382 if (GET_CODE (XEXP (x, 1)) == CONST_INT
383 && INTVAL (XEXP (x, 1)) > 0)
384 return true;
385 return nonzero_address_p (XEXP (x, 0));
387 case PRE_INC:
388 /* Similarly. Further, the offset is always positive. */
389 return true;
391 case PRE_DEC:
392 case POST_DEC:
393 case POST_INC:
394 case POST_MODIFY:
395 return nonzero_address_p (XEXP (x, 0));
397 case LO_SUM:
398 return nonzero_address_p (XEXP (x, 1));
400 default:
401 break;
404 /* If it isn't one of the cases above, it might be zero. */
405 return false;
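/* Hypothetical examples: a LABEL_REF and (plus stack_pointer_rtx
   (const_int 8)) are known to be nonzero addresses, whereas a weak
   SYMBOL_REF is not, since a weak symbol may resolve to address zero.  */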
408 /* Return 1 if X refers to a memory location whose address
409 cannot be compared reliably with constant addresses,
410 or if X refers to a BLKmode memory object.
411 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
412 zero, we are slightly more conservative. */
415 rtx_addr_varies_p (rtx x, int for_alias)
417 enum rtx_code code;
418 int i;
419 const char *fmt;
421 if (x == 0)
422 return 0;
424 code = GET_CODE (x);
425 if (code == MEM)
426 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
428 fmt = GET_RTX_FORMAT (code);
429 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
430 if (fmt[i] == 'e')
432 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
433 return 1;
435 else if (fmt[i] == 'E')
437 int j;
438 for (j = 0; j < XVECLEN (x, i); j++)
439 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
440 return 1;
442 return 0;
445 /* Return the value of the integer term in X, if one is apparent;
446 otherwise return 0.
447 Only obvious integer terms are detected.
448 This is used in cse.c with the `related_value' field. */
450 HOST_WIDE_INT
451 get_integer_term (rtx x)
453 if (GET_CODE (x) == CONST)
454 x = XEXP (x, 0);
456 if (GET_CODE (x) == MINUS
457 && GET_CODE (XEXP (x, 1)) == CONST_INT)
458 return - INTVAL (XEXP (x, 1));
459 if (GET_CODE (x) == PLUS
460 && GET_CODE (XEXP (x, 1)) == CONST_INT)
461 return INTVAL (XEXP (x, 1));
462 return 0;
465 /* If X is a constant, return the value sans apparent integer term;
466 otherwise return 0.
467 Only obvious integer terms are detected. */
470 get_related_value (rtx x)
472 if (GET_CODE (x) != CONST)
473 return 0;
474 x = XEXP (x, 0);
475 if (GET_CODE (x) == PLUS
476 && GET_CODE (XEXP (x, 1)) == CONST_INT)
477 return XEXP (x, 0);
478 else if (GET_CODE (x) == MINUS
479 && GET_CODE (XEXP (x, 1)) == CONST_INT)
480 return XEXP (x, 0);
481 return 0;
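/* A hypothetical example for the two functions above: given
   (const (plus (symbol_ref "x") (const_int 12))), get_integer_term
   returns 12 and get_related_value returns the (symbol_ref "x") operand;
   when no obvious integer term is present, both return 0.  */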
484 /* Return the number of places FIND appears within X. If COUNT_DEST is
485 zero, we do not count occurrences inside the destination of a SET. */
488 count_occurrences (rtx x, rtx find, int count_dest)
490 int i, j;
491 enum rtx_code code;
492 const char *format_ptr;
493 int count;
495 if (x == find)
496 return 1;
498 code = GET_CODE (x);
500 switch (code)
502 case REG:
503 case CONST_INT:
504 case CONST_DOUBLE:
505 case CONST_VECTOR:
506 case SYMBOL_REF:
507 case CODE_LABEL:
508 case PC:
509 case CC0:
510 return 0;
512 case MEM:
513 if (MEM_P (find) && rtx_equal_p (x, find))
514 return 1;
515 break;
517 case SET:
518 if (SET_DEST (x) == find && ! count_dest)
519 return count_occurrences (SET_SRC (x), find, count_dest);
520 break;
522 default:
523 break;
526 format_ptr = GET_RTX_FORMAT (code);
527 count = 0;
529 for (i = 0; i < GET_RTX_LENGTH (code); i++)
531 switch (*format_ptr++)
533 case 'e':
534 count += count_occurrences (XEXP (x, i), find, count_dest);
535 break;
537 case 'E':
538 for (j = 0; j < XVECLEN (x, i); j++)
539 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
540 break;
543 return count;
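/* A hypothetical example: with R being the shared rtx (reg:SI 5) and X the
   pattern (set (reg:SI 5) (plus:SI (reg:SI 5) (const_int 1))),
   count_occurrences (X, R, 1) returns 2, while count_occurrences (X, R, 0)
   returns 1 because the occurrence in the SET_DEST is skipped.  */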
546 /* Nonzero if register REG appears somewhere within IN.
547 Also works if REG is not a register; in this case it checks
548 for a subexpression of IN that is Lisp "equal" to REG. */
551 reg_mentioned_p (rtx reg, rtx in)
553 const char *fmt;
554 int i;
555 enum rtx_code code;
557 if (in == 0)
558 return 0;
560 if (reg == in)
561 return 1;
563 if (GET_CODE (in) == LABEL_REF)
564 return reg == XEXP (in, 0);
566 code = GET_CODE (in);
568 switch (code)
570 /* Compare registers by number. */
571 case REG:
572 return REG_P (reg) && REGNO (in) == REGNO (reg);
574 /* These codes have no constituent expressions
575 and are unique. */
576 case SCRATCH:
577 case CC0:
578 case PC:
579 return 0;
581 case CONST_INT:
582 case CONST_VECTOR:
583 case CONST_DOUBLE:
584 /* These are kept unique for a given value. */
585 return 0;
587 default:
588 break;
591 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
592 return 1;
594 fmt = GET_RTX_FORMAT (code);
596 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
598 if (fmt[i] == 'E')
600 int j;
601 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
602 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
603 return 1;
605 else if (fmt[i] == 'e'
606 && reg_mentioned_p (reg, XEXP (in, i)))
607 return 1;
609 return 0;
612 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
613 no CODE_LABEL insn. */
616 no_labels_between_p (rtx beg, rtx end)
618 rtx p;
619 if (beg == end)
620 return 0;
621 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
622 if (LABEL_P (p))
623 return 0;
624 return 1;
627 /* Nonzero if register REG is used in an insn between
628 FROM_INSN and TO_INSN (exclusive of those two). */
631 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
633 rtx insn;
635 if (from_insn == to_insn)
636 return 0;
638 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
639 if (INSN_P (insn)
640 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
641 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
642 return 1;
643 return 0;
646 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
647 is entirely replaced by a new value and the only use is as a SET_DEST,
648 we do not consider it a reference. */
651 reg_referenced_p (rtx x, rtx body)
653 int i;
655 switch (GET_CODE (body))
657 case SET:
658 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
659 return 1;
661 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
662 of a REG that occupies all of the REG, the insn references X if
663 it is mentioned in the destination. */
664 if (GET_CODE (SET_DEST (body)) != CC0
665 && GET_CODE (SET_DEST (body)) != PC
666 && !REG_P (SET_DEST (body))
667 && ! (GET_CODE (SET_DEST (body)) == SUBREG
668 && REG_P (SUBREG_REG (SET_DEST (body)))
669 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
670 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
671 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
672 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
673 && reg_overlap_mentioned_p (x, SET_DEST (body)))
674 return 1;
675 return 0;
677 case ASM_OPERANDS:
678 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
679 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
680 return 1;
681 return 0;
683 case CALL:
684 case USE:
685 case IF_THEN_ELSE:
686 return reg_overlap_mentioned_p (x, body);
688 case TRAP_IF:
689 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
691 case PREFETCH:
692 return reg_overlap_mentioned_p (x, XEXP (body, 0));
694 case UNSPEC:
695 case UNSPEC_VOLATILE:
696 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
697 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
698 return 1;
699 return 0;
701 case PARALLEL:
702 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
703 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
704 return 1;
705 return 0;
707 case CLOBBER:
708 if (MEM_P (XEXP (body, 0)))
709 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
710 return 1;
711 return 0;
713 case COND_EXEC:
714 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
715 return 1;
716 return reg_referenced_p (x, COND_EXEC_CODE (body));
718 default:
719 return 0;
723 /* Nonzero if register REG is set or clobbered in an insn between
724 FROM_INSN and TO_INSN (exclusive of those two). */
727 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
729 rtx insn;
731 if (from_insn == to_insn)
732 return 0;
734 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
735 if (INSN_P (insn) && reg_set_p (reg, insn))
736 return 1;
737 return 0;
740 /* Internals of reg_set_between_p. */
742 reg_set_p (rtx reg, rtx insn)
744 /* We can be passed an insn or part of one. If we are passed an insn,
745 check if a side-effect of the insn clobbers REG. */
746 if (INSN_P (insn)
747 && (FIND_REG_INC_NOTE (insn, reg)
748 || (CALL_P (insn)
749 && ((REG_P (reg)
750 && REGNO (reg) < FIRST_PSEUDO_REGISTER
751 && TEST_HARD_REG_BIT (regs_invalidated_by_call,
752 REGNO (reg)))
753 || MEM_P (reg)
754 || find_reg_fusage (insn, CLOBBER, reg)))))
755 return 1;
757 return set_of (reg, insn) != NULL_RTX;
760 /* Similar to reg_set_between_p, but check all registers in X. Return 0
761 only if none of them are modified between START and END. Return 1 if
762 X contains a MEM; this routine does use memory aliasing. */
765 modified_between_p (rtx x, rtx start, rtx end)
767 enum rtx_code code = GET_CODE (x);
768 const char *fmt;
769 int i, j;
770 rtx insn;
772 if (start == end)
773 return 0;
775 switch (code)
777 case CONST_INT:
778 case CONST_DOUBLE:
779 case CONST_VECTOR:
780 case CONST:
781 case SYMBOL_REF:
782 case LABEL_REF:
783 return 0;
785 case PC:
786 case CC0:
787 return 1;
789 case MEM:
790 if (modified_between_p (XEXP (x, 0), start, end))
791 return 1;
792 if (MEM_READONLY_P (x))
793 return 0;
794 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
795 if (memory_modified_in_insn_p (x, insn))
796 return 1;
797 return 0;
798 break;
800 case REG:
801 return reg_set_between_p (x, start, end);
803 default:
804 break;
807 fmt = GET_RTX_FORMAT (code);
808 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
810 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
811 return 1;
813 else if (fmt[i] == 'E')
814 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
815 if (modified_between_p (XVECEXP (x, i, j), start, end))
816 return 1;
819 return 0;
822 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
823 of them are modified in INSN. Return 1 if X contains a MEM; this routine
824 does use memory aliasing. */
827 modified_in_p (rtx x, rtx insn)
829 enum rtx_code code = GET_CODE (x);
830 const char *fmt;
831 int i, j;
833 switch (code)
835 case CONST_INT:
836 case CONST_DOUBLE:
837 case CONST_VECTOR:
838 case CONST:
839 case SYMBOL_REF:
840 case LABEL_REF:
841 return 0;
843 case PC:
844 case CC0:
845 return 1;
847 case MEM:
848 if (modified_in_p (XEXP (x, 0), insn))
849 return 1;
850 if (MEM_READONLY_P (x))
851 return 0;
852 if (memory_modified_in_insn_p (x, insn))
853 return 1;
854 return 0;
855 break;
857 case REG:
858 return reg_set_p (x, insn);
860 default:
861 break;
864 fmt = GET_RTX_FORMAT (code);
865 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
867 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
868 return 1;
870 else if (fmt[i] == 'E')
871 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
872 if (modified_in_p (XVECEXP (x, i, j), insn))
873 return 1;
876 return 0;
879 /* Helper function for set_of. */
880 struct set_of_data
882 rtx found;
883 rtx pat;
886 static void
887 set_of_1 (rtx x, rtx pat, void *data1)
889 struct set_of_data *data = (struct set_of_data *) (data1);
890 if (rtx_equal_p (x, data->pat)
891 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
892 data->found = pat;
895 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
896 (either directly or via STRICT_LOW_PART and similar modifiers). */
898 set_of (rtx pat, rtx insn)
900 struct set_of_data data;
901 data.found = NULL_RTX;
902 data.pat = pat;
903 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
904 return data.found;
907 /* Given an INSN, return a SET expression if this insn has only a single SET.
908 It may also have CLOBBERs, USEs, or SETs whose output
909 will not be used, which we ignore. */
912 single_set_2 (rtx insn, rtx pat)
914 rtx set = NULL;
915 int set_verified = 1;
916 int i;
918 if (GET_CODE (pat) == PARALLEL)
920 for (i = 0; i < XVECLEN (pat, 0); i++)
922 rtx sub = XVECEXP (pat, 0, i);
923 switch (GET_CODE (sub))
925 case USE:
926 case CLOBBER:
927 break;
929 case SET:
930 /* We can consider insns having multiple sets, where all
931 but one are dead, as single set insns. In the common case
932 only a single set is present in the pattern, so we want
933 to avoid checking for REG_UNUSED notes unless necessary.
935 When we reach a set for the first time, we just expect it to be
936 the single set we are looking for; only when more
937 sets are found in the insn do we check them. */
938 if (!set_verified)
940 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
941 && !side_effects_p (set))
942 set = NULL;
943 else
944 set_verified = 1;
946 if (!set)
947 set = sub, set_verified = 0;
948 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
949 || side_effects_p (sub))
950 return NULL_RTX;
951 break;
953 default:
954 return NULL_RTX;
958 return set;
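/* A hypothetical example: a pattern such as
   (parallel [(set (reg:SI 100) (plus:SI (reg:SI 100) (const_int 4)))
   (clobber (reg:CC 17))])
   still counts as a single set, since the CLOBBER is ignored; a second live
   SET without a REG_UNUSED note would make this return NULL_RTX.  */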
961 /* Given an INSN, return nonzero if it has more than one SET, else return
962 zero. */
965 multiple_sets (rtx insn)
967 int found;
968 int i;
970 /* INSN must be an insn. */
971 if (! INSN_P (insn))
972 return 0;
974 /* Only a PARALLEL can have multiple SETs. */
975 if (GET_CODE (PATTERN (insn)) == PARALLEL)
977 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
978 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
980 /* If we have already found a SET, then return now. */
981 if (found)
982 return 1;
983 else
984 found = 1;
988 /* Either zero or one SET. */
989 return 0;
992 /* Return nonzero if the destination of SET equals the source
993 and there are no side effects. */
996 set_noop_p (rtx set)
998 rtx src = SET_SRC (set);
999 rtx dst = SET_DEST (set);
1001 if (dst == pc_rtx && src == pc_rtx)
1002 return 1;
1004 if (MEM_P (dst) && MEM_P (src))
1005 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1007 if (GET_CODE (dst) == ZERO_EXTRACT)
1008 return rtx_equal_p (XEXP (dst, 0), src)
1009 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1010 && !side_effects_p (src);
1012 if (GET_CODE (dst) == STRICT_LOW_PART)
1013 dst = XEXP (dst, 0);
1015 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1017 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1018 return 0;
1019 src = SUBREG_REG (src);
1020 dst = SUBREG_REG (dst);
1023 return (REG_P (src) && REG_P (dst)
1024 && REGNO (src) == REGNO (dst));
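/* Hypothetical examples: (set (reg:SI 100) (reg:SI 100)) and
   (set (pc) (pc)) are no-op sets; copying between different registers, or
   through SUBREGs with different SUBREG_BYTE values, is not.  */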
1027 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1028 value to itself. */
1031 noop_move_p (rtx insn)
1033 rtx pat = PATTERN (insn);
1035 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1036 return 1;
1038 /* Insns carrying these notes are useful later on. */
1039 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1040 return 0;
1042 /* For now treat an insn with a REG_RETVAL note as a
1043 special insn which should not be considered a no-op. */
1044 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1045 return 0;
1047 if (GET_CODE (pat) == SET && set_noop_p (pat))
1048 return 1;
1050 if (GET_CODE (pat) == PARALLEL)
1052 int i;
1053 /* If nothing but SETs of registers to themselves,
1054 this insn can also be deleted. */
1055 for (i = 0; i < XVECLEN (pat, 0); i++)
1057 rtx tem = XVECEXP (pat, 0, i);
1059 if (GET_CODE (tem) == USE
1060 || GET_CODE (tem) == CLOBBER)
1061 continue;
1063 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1064 return 0;
1067 return 1;
1069 return 0;
1073 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1074 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1075 If the object was modified, if we hit a partial assignment to X, or hit a
1076 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1077 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1078 be the src. */
1081 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1083 rtx p;
1085 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1086 p = PREV_INSN (p))
1087 if (INSN_P (p))
1089 rtx set = single_set (p);
1090 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1092 if (set && rtx_equal_p (x, SET_DEST (set)))
1094 rtx src = SET_SRC (set);
1096 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1097 src = XEXP (note, 0);
1099 if ((valid_to == NULL_RTX
1100 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1101 /* Reject hard registers because we don't usually want
1102 to use them; we'd rather use a pseudo. */
1103 && (! (REG_P (src)
1104 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1106 *pinsn = p;
1107 return src;
1111 /* If set in non-simple way, we don't have a value. */
1112 if (reg_set_p (x, p))
1113 break;
1116 return x;
1119 /* Return nonzero if register in range [REGNO, ENDREGNO)
1120 appears either explicitly or implicitly in X
1121 other than being stored into.
1123 References contained within the substructure at LOC do not count.
1124 LOC may be zero, meaning don't ignore anything. */
1127 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1128 rtx *loc)
1130 int i;
1131 unsigned int x_regno;
1132 RTX_CODE code;
1133 const char *fmt;
1135 repeat:
1136 /* The contents of a REG_NONNEG note are always zero, so we must come here
1137 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1138 if (x == 0)
1139 return 0;
1141 code = GET_CODE (x);
1143 switch (code)
1145 case REG:
1146 x_regno = REGNO (x);
1148 /* If we are modifying the stack, frame, or argument pointer, it will
1149 clobber a virtual register. In fact, we could be more precise,
1150 but it isn't worth it. */
1151 if ((x_regno == STACK_POINTER_REGNUM
1152 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1153 || x_regno == ARG_POINTER_REGNUM
1154 #endif
1155 || x_regno == FRAME_POINTER_REGNUM)
1156 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1157 return 1;
1159 return (endregno > x_regno
1160 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1161 ? hard_regno_nregs[x_regno][GET_MODE (x)]
1162 : 1));
1164 case SUBREG:
1165 /* If this is a SUBREG of a hard reg, we can see exactly which
1166 registers are being modified. Otherwise, handle normally. */
1167 if (REG_P (SUBREG_REG (x))
1168 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1170 unsigned int inner_regno = subreg_regno (x);
1171 unsigned int inner_endregno
1172 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1173 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
1175 return endregno > inner_regno && regno < inner_endregno;
1177 break;
1179 case CLOBBER:
1180 case SET:
1181 if (&SET_DEST (x) != loc
1182 /* Note setting a SUBREG counts as referring to the REG it is in for
1183 a pseudo but not for hard registers since we can
1184 treat each word individually. */
1185 && ((GET_CODE (SET_DEST (x)) == SUBREG
1186 && loc != &SUBREG_REG (SET_DEST (x))
1187 && REG_P (SUBREG_REG (SET_DEST (x)))
1188 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1189 && refers_to_regno_p (regno, endregno,
1190 SUBREG_REG (SET_DEST (x)), loc))
1191 || (!REG_P (SET_DEST (x))
1192 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1193 return 1;
1195 if (code == CLOBBER || loc == &SET_SRC (x))
1196 return 0;
1197 x = SET_SRC (x);
1198 goto repeat;
1200 default:
1201 break;
1204 /* X does not match, so try its subexpressions. */
1206 fmt = GET_RTX_FORMAT (code);
1207 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1209 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1211 if (i == 0)
1213 x = XEXP (x, 0);
1214 goto repeat;
1216 else
1217 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1218 return 1;
1220 else if (fmt[i] == 'E')
1222 int j;
1223 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1224 if (loc != &XVECEXP (x, i, j)
1225 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1226 return 1;
1229 return 0;
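/* A hypothetical use: refers_to_regno_p (2, 4, PATTERN (insn), NULL) asks
   whether hard register 2 or 3 is referenced anywhere in the pattern other
   than being stored into; passing the address of a subexpression as LOC
   additionally ignores references inside that subexpression.  */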
1232 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1233 we check if any register number in X conflicts with the relevant register
1234 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1235 contains a MEM (we don't bother checking for memory addresses that can't
1236 conflict because we expect this to be a rare case). */
1239 reg_overlap_mentioned_p (rtx x, rtx in)
1241 unsigned int regno, endregno;
1243 /* If either argument is a constant, then modifying X can not
1244 affect IN. Here we look at IN, we can profitably combine
1245 CONSTANT_P (x) with the switch statement below. */
1246 if (CONSTANT_P (in))
1247 return 0;
1249 recurse:
1250 switch (GET_CODE (x))
1252 case STRICT_LOW_PART:
1253 case ZERO_EXTRACT:
1254 case SIGN_EXTRACT:
1255 /* Overly conservative. */
1256 x = XEXP (x, 0);
1257 goto recurse;
1259 case SUBREG:
1260 regno = REGNO (SUBREG_REG (x));
1261 if (regno < FIRST_PSEUDO_REGISTER)
1262 regno = subreg_regno (x);
1263 goto do_reg;
1265 case REG:
1266 regno = REGNO (x);
1267 do_reg:
1268 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1269 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1270 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1272 case MEM:
1274 const char *fmt;
1275 int i;
1277 if (MEM_P (in))
1278 return 1;
1280 fmt = GET_RTX_FORMAT (GET_CODE (in));
1281 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1282 if (fmt[i] == 'e')
1284 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1285 return 1;
1287 else if (fmt[i] == 'E')
1289 int j;
1290 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1291 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1292 return 1;
1295 return 0;
1298 case SCRATCH:
1299 case PC:
1300 case CC0:
1301 return reg_mentioned_p (x, in);
1303 case PARALLEL:
1305 int i;
1307 /* If any register in here refers to it we return true. */
1308 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1309 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1310 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1311 return 1;
1312 return 0;
1315 default:
1316 gcc_assert (CONSTANT_P (x));
1317 return 0;
1321 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1322 (X would be the pattern of an insn).
1323 FUN receives two arguments:
1324 the REG, MEM, CC0 or PC being stored in or clobbered,
1325 the SET or CLOBBER rtx that does the store.
1327 If the item being stored in or clobbered is a SUBREG of a hard register,
1328 the SUBREG will be passed. */
1330 void
1331 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1333 int i;
1335 if (GET_CODE (x) == COND_EXEC)
1336 x = COND_EXEC_CODE (x);
1338 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1340 rtx dest = SET_DEST (x);
1342 while ((GET_CODE (dest) == SUBREG
1343 && (!REG_P (SUBREG_REG (dest))
1344 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1345 || GET_CODE (dest) == ZERO_EXTRACT
1346 || GET_CODE (dest) == STRICT_LOW_PART)
1347 dest = XEXP (dest, 0);
1349 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1350 each of whose first operand is a register. */
1351 if (GET_CODE (dest) == PARALLEL)
1353 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1354 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1355 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1357 else
1358 (*fun) (dest, x, data);
1361 else if (GET_CODE (x) == PARALLEL)
1362 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1363 note_stores (XVECEXP (x, 0, i), fun, data);
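/* A hypothetical usage sketch: given a callback
   record_dest (rtx dest, rtx setter, void *data) that accumulates DEST in
   the structure pointed to by DATA, calling
   note_stores (PATTERN (insn), record_dest, &info) invokes it once for
   every REG, MEM, CC0 or PC that the insn stores into or clobbers.  */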
1366 /* Like note_stores, but call FUN for each expression that is being
1367 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1368 FUN for each expression, not any interior subexpressions. FUN receives a
1369 pointer to the expression and the DATA passed to this function.
1371 Note that this is not quite the same test as that done in reg_referenced_p
1372 since that considers something as being referenced if it is being
1373 partially set, while we do not. */
1375 void
1376 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1378 rtx body = *pbody;
1379 int i;
1381 switch (GET_CODE (body))
1383 case COND_EXEC:
1384 (*fun) (&COND_EXEC_TEST (body), data);
1385 note_uses (&COND_EXEC_CODE (body), fun, data);
1386 return;
1388 case PARALLEL:
1389 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1390 note_uses (&XVECEXP (body, 0, i), fun, data);
1391 return;
1393 case USE:
1394 (*fun) (&XEXP (body, 0), data);
1395 return;
1397 case ASM_OPERANDS:
1398 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1399 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1400 return;
1402 case TRAP_IF:
1403 (*fun) (&TRAP_CONDITION (body), data);
1404 return;
1406 case PREFETCH:
1407 (*fun) (&XEXP (body, 0), data);
1408 return;
1410 case UNSPEC:
1411 case UNSPEC_VOLATILE:
1412 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1413 (*fun) (&XVECEXP (body, 0, i), data);
1414 return;
1416 case CLOBBER:
1417 if (MEM_P (XEXP (body, 0)))
1418 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1419 return;
1421 case SET:
1423 rtx dest = SET_DEST (body);
1425 /* For sets we replace everything in the source plus registers in the memory
1426 expression of the store and the operands of a ZERO_EXTRACT. */
1427 (*fun) (&SET_SRC (body), data);
1429 if (GET_CODE (dest) == ZERO_EXTRACT)
1431 (*fun) (&XEXP (dest, 1), data);
1432 (*fun) (&XEXP (dest, 2), data);
1435 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1436 dest = XEXP (dest, 0);
1438 if (MEM_P (dest))
1439 (*fun) (&XEXP (dest, 0), data);
1441 return;
1443 default:
1444 /* All the other possibilities never store. */
1445 (*fun) (pbody, data);
1446 return;
1450 /* Return nonzero if X's old contents don't survive after INSN.
1451 This will be true if X is (cc0) or if X is a register and
1452 X dies in INSN or INSN entirely sets X.
1454 "Entirely set" means set directly and not through a SUBREG, or
1455 ZERO_EXTRACT, so no trace of the old contents remains.
1456 Likewise, REG_INC does not count.
1458 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1459 but for this use that makes no difference, since regs don't overlap
1460 during their lifetimes. Therefore, this function may be used
1461 at any time after deaths have been computed (in flow.c).
1463 If REG is a hard reg that occupies multiple machine registers, this
1464 function will only return 1 if each of those registers will be replaced
1465 by INSN. */
1468 dead_or_set_p (rtx insn, rtx x)
1470 unsigned int regno, last_regno;
1471 unsigned int i;
1473 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1474 if (GET_CODE (x) == CC0)
1475 return 1;
1477 gcc_assert (REG_P (x));
1479 regno = REGNO (x);
1480 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1481 : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
1483 for (i = regno; i <= last_regno; i++)
1484 if (! dead_or_set_regno_p (insn, i))
1485 return 0;
1487 return 1;
1490 /* Return TRUE iff DEST is a register or subreg of a register and
1491 doesn't change the number of words of the inner register, and any
1492 part of the register is TEST_REGNO. */
1494 static bool
1495 covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
1497 unsigned int regno, endregno;
1499 if (GET_CODE (dest) == SUBREG
1500 && (((GET_MODE_SIZE (GET_MODE (dest))
1501 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1502 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1503 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1504 dest = SUBREG_REG (dest);
1506 if (!REG_P (dest))
1507 return false;
1509 regno = REGNO (dest);
1510 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1511 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1512 return (test_regno >= regno && test_regno < endregno);
1515 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1516 any member matches the covers_regno_no_parallel_p criteria. */
1518 static bool
1519 covers_regno_p (rtx dest, unsigned int test_regno)
1521 if (GET_CODE (dest) == PARALLEL)
1523 /* Some targets place small structures in registers for return
1524 values of functions, and those registers are wrapped in
1525 PARALLELs that we may see as the destination of a SET. */
1526 int i;
1528 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1530 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1531 if (inner != NULL_RTX
1532 && covers_regno_no_parallel_p (inner, test_regno))
1533 return true;
1536 return false;
1538 else
1539 return covers_regno_no_parallel_p (dest, test_regno);
1542 /* Utility function for dead_or_set_p to check an individual register. Also
1543 called from flow.c. */
1546 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1548 rtx pattern;
1550 /* See if there is a death note for something that includes TEST_REGNO. */
1551 if (find_regno_note (insn, REG_DEAD, test_regno))
1552 return 1;
1554 if (CALL_P (insn)
1555 && find_regno_fusage (insn, CLOBBER, test_regno))
1556 return 1;
1558 pattern = PATTERN (insn);
1560 if (GET_CODE (pattern) == COND_EXEC)
1561 pattern = COND_EXEC_CODE (pattern);
1563 if (GET_CODE (pattern) == SET)
1564 return covers_regno_p (SET_DEST (pattern), test_regno);
1565 else if (GET_CODE (pattern) == PARALLEL)
1567 int i;
1569 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1571 rtx body = XVECEXP (pattern, 0, i);
1573 if (GET_CODE (body) == COND_EXEC)
1574 body = COND_EXEC_CODE (body);
1576 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1577 && covers_regno_p (SET_DEST (body), test_regno))
1578 return 1;
1582 return 0;
1585 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1586 If DATUM is nonzero, look for one whose datum is DATUM. */
1589 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1591 rtx link;
1593 gcc_assert (insn);
1595 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1596 if (! INSN_P (insn))
1597 return 0;
1598 if (datum == 0)
1600 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1601 if (REG_NOTE_KIND (link) == kind)
1602 return link;
1603 return 0;
1606 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1607 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1608 return link;
1609 return 0;
1612 /* Return the reg-note of kind KIND in insn INSN which applies to register
1613 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1614 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1615 it might be the case that the note overlaps REGNO. */
1618 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1620 rtx link;
1622 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1623 if (! INSN_P (insn))
1624 return 0;
1626 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1627 if (REG_NOTE_KIND (link) == kind
1628 /* Verify that it is a register, so that scratch and MEM won't cause a
1629 problem here. */
1630 && REG_P (XEXP (link, 0))
1631 && REGNO (XEXP (link, 0)) <= regno
1632 && ((REGNO (XEXP (link, 0))
1633 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1634 : hard_regno_nregs[REGNO (XEXP (link, 0))]
1635 [GET_MODE (XEXP (link, 0))]))
1636 > regno))
1637 return link;
1638 return 0;
1641 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1642 has such a note. */
1645 find_reg_equal_equiv_note (rtx insn)
1647 rtx link;
1649 if (!INSN_P (insn))
1650 return 0;
1651 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1652 if (REG_NOTE_KIND (link) == REG_EQUAL
1653 || REG_NOTE_KIND (link) == REG_EQUIV)
1655 if (single_set (insn) == 0)
1656 return 0;
1657 return link;
1659 return NULL;
1662 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1663 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1666 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1668 /* If it's not a CALL_INSN, it can't possibly have a
1669 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1670 if (!CALL_P (insn))
1671 return 0;
1673 gcc_assert (datum);
1675 if (!REG_P (datum))
1677 rtx link;
1679 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1680 link;
1681 link = XEXP (link, 1))
1682 if (GET_CODE (XEXP (link, 0)) == code
1683 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1684 return 1;
1686 else
1688 unsigned int regno = REGNO (datum);
1690 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1691 to pseudo registers, so don't bother checking. */
1693 if (regno < FIRST_PSEUDO_REGISTER)
1695 unsigned int end_regno
1696 = regno + hard_regno_nregs[regno][GET_MODE (datum)];
1697 unsigned int i;
1699 for (i = regno; i < end_regno; i++)
1700 if (find_regno_fusage (insn, code, i))
1701 return 1;
1705 return 0;
1708 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1709 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1712 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
1714 rtx link;
1716 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1717 to pseudo registers, so don't bother checking. */
1719 if (regno >= FIRST_PSEUDO_REGISTER
1720 || !CALL_P (insn) )
1721 return 0;
1723 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1725 unsigned int regnote;
1726 rtx op, reg;
1728 if (GET_CODE (op = XEXP (link, 0)) == code
1729 && REG_P (reg = XEXP (op, 0))
1730 && (regnote = REGNO (reg)) <= regno
1731 && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
1732 return 1;
1735 return 0;
1738 /* Return true if INSN is a call to a pure function. */
1741 pure_call_p (rtx insn)
1743 rtx link;
1745 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1746 return 0;
1748 /* Look for the note that differentiates const and pure functions. */
1749 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1751 rtx u, m;
1753 if (GET_CODE (u = XEXP (link, 0)) == USE
1754 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1755 && GET_CODE (XEXP (m, 0)) == SCRATCH)
1756 return 1;
1759 return 0;
1762 /* Remove register note NOTE from the REG_NOTES of INSN. */
1764 void
1765 remove_note (rtx insn, rtx note)
1767 rtx link;
1769 if (note == NULL_RTX)
1770 return;
1772 if (REG_NOTES (insn) == note)
1774 REG_NOTES (insn) = XEXP (note, 1);
1775 return;
1778 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1779 if (XEXP (link, 1) == note)
1781 XEXP (link, 1) = XEXP (note, 1);
1782 return;
1785 gcc_unreachable ();
1788 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1789 return 1 if it is found. A simple equality test is used to determine if
1790 NODE matches. */
1793 in_expr_list_p (rtx listp, rtx node)
1795 rtx x;
1797 for (x = listp; x; x = XEXP (x, 1))
1798 if (node == XEXP (x, 0))
1799 return 1;
1801 return 0;
1804 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1805 remove that entry from the list if it is found.
1807 A simple equality test is used to determine if NODE matches. */
1809 void
1810 remove_node_from_expr_list (rtx node, rtx *listp)
1812 rtx temp = *listp;
1813 rtx prev = NULL_RTX;
1815 while (temp)
1817 if (node == XEXP (temp, 0))
1819 /* Splice the node out of the list. */
1820 if (prev)
1821 XEXP (prev, 1) = XEXP (temp, 1);
1822 else
1823 *listp = XEXP (temp, 1);
1825 return;
1828 prev = temp;
1829 temp = XEXP (temp, 1);
1833 /* Nonzero if X contains any volatile instructions. These are instructions
1834 which may cause unpredictable machine state, and thus no
1835 instructions should be moved or combined across them. This includes
1836 only volatile asms and UNSPEC_VOLATILE instructions. */
1839 volatile_insn_p (rtx x)
1841 RTX_CODE code;
1843 code = GET_CODE (x);
1844 switch (code)
1846 case LABEL_REF:
1847 case SYMBOL_REF:
1848 case CONST_INT:
1849 case CONST:
1850 case CONST_DOUBLE:
1851 case CONST_VECTOR:
1852 case CC0:
1853 case PC:
1854 case REG:
1855 case SCRATCH:
1856 case CLOBBER:
1857 case ADDR_VEC:
1858 case ADDR_DIFF_VEC:
1859 case CALL:
1860 case MEM:
1861 return 0;
1863 case UNSPEC_VOLATILE:
1864 /* case TRAP_IF: This isn't clear yet. */
1865 return 1;
1867 case ASM_INPUT:
1868 case ASM_OPERANDS:
1869 if (MEM_VOLATILE_P (x))
1870 return 1;
1872 default:
1873 break;
1876 /* Recursively scan the operands of this expression. */
1879 const char *fmt = GET_RTX_FORMAT (code);
1880 int i;
1882 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1884 if (fmt[i] == 'e')
1886 if (volatile_insn_p (XEXP (x, i)))
1887 return 1;
1889 else if (fmt[i] == 'E')
1891 int j;
1892 for (j = 0; j < XVECLEN (x, i); j++)
1893 if (volatile_insn_p (XVECEXP (x, i, j)))
1894 return 1;
1898 return 0;
1901 /* Nonzero if X contains any volatile memory references
1902 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
1905 volatile_refs_p (rtx x)
1907 RTX_CODE code;
1909 code = GET_CODE (x);
1910 switch (code)
1912 case LABEL_REF:
1913 case SYMBOL_REF:
1914 case CONST_INT:
1915 case CONST:
1916 case CONST_DOUBLE:
1917 case CONST_VECTOR:
1918 case CC0:
1919 case PC:
1920 case REG:
1921 case SCRATCH:
1922 case CLOBBER:
1923 case ADDR_VEC:
1924 case ADDR_DIFF_VEC:
1925 return 0;
1927 case UNSPEC_VOLATILE:
1928 return 1;
1930 case MEM:
1931 case ASM_INPUT:
1932 case ASM_OPERANDS:
1933 if (MEM_VOLATILE_P (x))
1934 return 1;
1936 default:
1937 break;
1940 /* Recursively scan the operands of this expression. */
1943 const char *fmt = GET_RTX_FORMAT (code);
1944 int i;
1946 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1948 if (fmt[i] == 'e')
1950 if (volatile_refs_p (XEXP (x, i)))
1951 return 1;
1953 else if (fmt[i] == 'E')
1955 int j;
1956 for (j = 0; j < XVECLEN (x, i); j++)
1957 if (volatile_refs_p (XVECEXP (x, i, j)))
1958 return 1;
1962 return 0;
1965 /* Similar to above, except that it also rejects register pre- and post-
1966 incrementing. */
1969 side_effects_p (rtx x)
1971 RTX_CODE code;
1973 code = GET_CODE (x);
1974 switch (code)
1976 case LABEL_REF:
1977 case SYMBOL_REF:
1978 case CONST_INT:
1979 case CONST:
1980 case CONST_DOUBLE:
1981 case CONST_VECTOR:
1982 case CC0:
1983 case PC:
1984 case REG:
1985 case SCRATCH:
1986 case ADDR_VEC:
1987 case ADDR_DIFF_VEC:
1988 return 0;
1990 case CLOBBER:
1991 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
1992 when some combination can't be done. If we see one, don't think
1993 that we can simplify the expression. */
1994 return (GET_MODE (x) != VOIDmode);
1996 case PRE_INC:
1997 case PRE_DEC:
1998 case POST_INC:
1999 case POST_DEC:
2000 case PRE_MODIFY:
2001 case POST_MODIFY:
2002 case CALL:
2003 case UNSPEC_VOLATILE:
2004 /* case TRAP_IF: This isn't clear yet. */
2005 return 1;
2007 case MEM:
2008 case ASM_INPUT:
2009 case ASM_OPERANDS:
2010 if (MEM_VOLATILE_P (x))
2011 return 1;
2013 default:
2014 break;
2017 /* Recursively scan the operands of this expression. */
2020 const char *fmt = GET_RTX_FORMAT (code);
2021 int i;
2023 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2025 if (fmt[i] == 'e')
2027 if (side_effects_p (XEXP (x, i)))
2028 return 1;
2030 else if (fmt[i] == 'E')
2032 int j;
2033 for (j = 0; j < XVECLEN (x, i); j++)
2034 if (side_effects_p (XVECEXP (x, i, j)))
2035 return 1;
2039 return 0;
2042 enum may_trap_p_flags
2044 MTP_UNALIGNED_MEMS = 1,
2045 MTP_AFTER_MOVE = 2
2047 /* Return nonzero if evaluating rtx X might cause a trap.
2048 (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
2049 unaligned memory accesses on strict alignment machines. If
2050 (FLAGS & MTP_AFTER_MOVE) is true, returns nonzero even if the expression
2051 cannot trap at its current location, but it might become trapping if moved
2052 elsewhere. */
2054 static int
2055 may_trap_p_1 (rtx x, unsigned flags)
2057 int i;
2058 enum rtx_code code;
2059 const char *fmt;
2060 bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;
2062 if (x == 0)
2063 return 0;
2064 code = GET_CODE (x);
2065 switch (code)
2067 /* Handle these cases quickly. */
2068 case CONST_INT:
2069 case CONST_DOUBLE:
2070 case CONST_VECTOR:
2071 case SYMBOL_REF:
2072 case LABEL_REF:
2073 case CONST:
2074 case PC:
2075 case CC0:
2076 case REG:
2077 case SCRATCH:
2078 return 0;
2080 case ASM_INPUT:
2081 case UNSPEC_VOLATILE:
2082 case TRAP_IF:
2083 return 1;
2085 case ASM_OPERANDS:
2086 return MEM_VOLATILE_P (x);
2088 /* Memory ref can trap unless it's a static var or a stack slot. */
2089 case MEM:
2090 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2091 reference; moving it out of a condition might cause its address
2092 to become invalid. */
2093 !(flags & MTP_AFTER_MOVE)
2094 && MEM_NOTRAP_P (x)
2095 && (!STRICT_ALIGNMENT || !unaligned_mems))
2096 return 0;
2097 return
2098 rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);
2100 /* Division by a non-constant might trap. */
2101 case DIV:
2102 case MOD:
2103 case UDIV:
2104 case UMOD:
2105 if (HONOR_SNANS (GET_MODE (x)))
2106 return 1;
2107 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2108 return flag_trapping_math;
2109 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2110 return 1;
2111 break;
2113 case EXPR_LIST:
2114 /* An EXPR_LIST is used to represent a function call. This
2115 certainly may trap. */
2116 return 1;
2118 case GE:
2119 case GT:
2120 case LE:
2121 case LT:
2122 case LTGT:
2123 case COMPARE:
2124 /* Some floating point comparisons may trap. */
2125 if (!flag_trapping_math)
2126 break;
2127 /* ??? There is no machine independent way to check for tests that trap
2128 when COMPARE is used, though many targets do make this distinction.
2129 For instance, sparc uses CCFPE for compares which generate exceptions
2130 and CCFP for compares which do not generate exceptions. */
2131 if (HONOR_NANS (GET_MODE (x)))
2132 return 1;
2133 /* But often the compare has some CC mode, so check operand
2134 modes as well. */
2135 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2136 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2137 return 1;
2138 break;
2140 case EQ:
2141 case NE:
2142 if (HONOR_SNANS (GET_MODE (x)))
2143 return 1;
2144 /* Often comparison is CC mode, so check operand modes. */
2145 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2146 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2147 return 1;
2148 break;
2150 case FIX:
2151 /* Conversion of floating point might trap. */
2152 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2153 return 1;
2154 break;
2156 case NEG:
2157 case ABS:
2158 case SUBREG:
2159 /* These operations don't trap even with floating point. */
2160 break;
2162 default:
2163 /* Any floating arithmetic may trap. */
2164 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2165 && flag_trapping_math)
2166 return 1;
2169 fmt = GET_RTX_FORMAT (code);
2170 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2172 if (fmt[i] == 'e')
2174 if (may_trap_p_1 (XEXP (x, i), flags))
2175 return 1;
2177 else if (fmt[i] == 'E')
2179 int j;
2180 for (j = 0; j < XVECLEN (x, i); j++)
2181 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2182 return 1;
2185 return 0;
2188 /* Return nonzero if evaluating rtx X might cause a trap. */
2191 may_trap_p (rtx x)
2193 return may_trap_p_1 (x, 0);
2196 /* Return nonzero if evaluating rtx X might cause a trap, when the expression
2197 is moved from its current location by some optimization. */
2200 may_trap_after_code_motion_p (rtx x)
2202 return may_trap_p_1 (x, MTP_AFTER_MOVE);
2205 /* Same as above, but additionally return nonzero if evaluating rtx X might
2206 cause a fault. We define a fault for the purpose of this function as an
2207 erroneous execution condition that cannot be encountered during the normal
2208 execution of a valid program; the typical example is an unaligned memory
2209 access on a strict alignment machine. The compiler guarantees that it
2210 doesn't generate code that will fault from a valid program, but this
2211 guarantee doesn't mean anything for individual instructions. Consider
2212 the following example:
2214 struct S { int d; union { char *cp; int *ip; }; };
2216 int foo(struct S *s)
2217 {
2218 if (s->d == 1)
2219 return *s->ip;
2220 else
2221 return *s->cp;
2222 }
2224 on a strict alignment machine. In a valid program, foo will never be
2225 invoked on a structure for which d is equal to 1 and the underlying
2226 unique field of the union is not aligned on a 4-byte boundary, but the
2227 expression *s->ip might cause a fault if considered individually.
2229 At the RTL level, potentially problematic expressions will almost always
2230 satisfy may_trap_p; for example, the above dereference can be emitted as
2231 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2232 However, suppose that foo is inlined in a caller that causes s->cp to
2233 point to a local character variable and guarantees that s->d is not set
2234 to 1; foo may have been effectively translated into pseudo-RTL as:
2236 if ((reg:SI) == 1)
2237 (set (reg:SI) (mem:SI (%fp - 7)))
2238 else
2239 (set (reg:QI) (mem:QI (%fp - 7)))
2241 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2242 memory reference to a stack slot, but it will certainly cause a fault
2243 on a strict alignment machine. */
2246 may_trap_or_fault_p (rtx x)
2248 return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
2251 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2252 i.e., an inequality. */
2255 inequality_comparisons_p (rtx x)
2257 const char *fmt;
2258 int len, i;
2259 enum rtx_code code = GET_CODE (x);
2261 switch (code)
2263 case REG:
2264 case SCRATCH:
2265 case PC:
2266 case CC0:
2267 case CONST_INT:
2268 case CONST_DOUBLE:
2269 case CONST_VECTOR:
2270 case CONST:
2271 case LABEL_REF:
2272 case SYMBOL_REF:
2273 return 0;
2275 case LT:
2276 case LTU:
2277 case GT:
2278 case GTU:
2279 case LE:
2280 case LEU:
2281 case GE:
2282 case GEU:
2283 return 1;
2285 default:
2286 break;
2289 len = GET_RTX_LENGTH (code);
2290 fmt = GET_RTX_FORMAT (code);
2292 for (i = 0; i < len; i++)
2294 if (fmt[i] == 'e')
2296 if (inequality_comparisons_p (XEXP (x, i)))
2297 return 1;
2299 else if (fmt[i] == 'E')
2301 int j;
2302 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2303 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2304 return 1;
2308 return 0;
2311 /* Replace any occurrence of FROM in X with TO. The function does
2312 not descend into CONST_DOUBLE expressions when replacing.
2314 Note that copying is not done so X must not be shared unless all copies
2315 are to be modified. */
2318 replace_rtx (rtx x, rtx from, rtx to)
2320 int i, j;
2321 const char *fmt;
2323 /* The following prevents infinite loops when we change a MEM inside a
2324 CONST_DOUBLE into the same CONST_DOUBLE. */
2325 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2326 return x;
2328 if (x == from)
2329 return to;
2331 /* Allow this function to make replacements in EXPR_LISTs. */
2332 if (x == 0)
2333 return 0;
2335 if (GET_CODE (x) == SUBREG)
2337 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2339 if (GET_CODE (new) == CONST_INT)
2341 x = simplify_subreg (GET_MODE (x), new,
2342 GET_MODE (SUBREG_REG (x)),
2343 SUBREG_BYTE (x));
2344 gcc_assert (x);
2346 else
2347 SUBREG_REG (x) = new;
2349 return x;
2351 else if (GET_CODE (x) == ZERO_EXTEND)
2353 rtx new = replace_rtx (XEXP (x, 0), from, to);
2355 if (GET_CODE (new) == CONST_INT)
2357 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2358 new, GET_MODE (XEXP (x, 0)));
2359 gcc_assert (x);
2361 else
2362 XEXP (x, 0) = new;
2364 return x;
2367 fmt = GET_RTX_FORMAT (GET_CODE (x));
2368 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2370 if (fmt[i] == 'e')
2371 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2372 else if (fmt[i] == 'E')
2373 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2374 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2377 return x;
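/* Note that FROM is matched by pointer identity (X == FROM), not by
   rtx_equal_p, and X is rewritten in place.  For instance, if PAT is
   (set (reg 100) (plus (reg 101) (const_int 1))) and FROM is the very
   (reg 101) object contained in PAT, replace_rtx (PAT, FROM, TO)
   overwrites that operand with TO; an equal-looking but distinct
   (reg 101) elsewhere would be left untouched.  */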
2380 /* Replace occurrences of the old label in *X with the new one.
2381 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2384 replace_label (rtx *x, void *data)
2386 rtx l = *x;
2387 rtx old_label = ((replace_label_data *) data)->r1;
2388 rtx new_label = ((replace_label_data *) data)->r2;
2389 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2391 if (l == NULL_RTX)
2392 return 0;
2394 if (GET_CODE (l) == SYMBOL_REF
2395 && CONSTANT_POOL_ADDRESS_P (l))
2397 rtx c = get_pool_constant (l);
2398 if (rtx_referenced_p (old_label, c))
2400 rtx new_c, new_l;
2401 replace_label_data *d = (replace_label_data *) data;
2403 /* Create a copy of constant C; replace the label inside
2404 but do not update LABEL_NUSES because uses in constant pool
2405 are not counted. */
2406 new_c = copy_rtx (c);
2407 d->update_label_nuses = false;
2408 for_each_rtx (&new_c, replace_label, data);
2409 d->update_label_nuses = update_label_nuses;
2411 /* Add the new constant NEW_C to constant pool and replace
2412 the old reference to constant by new reference. */
2413 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2414 *x = replace_rtx (l, l, new_l);
2416 return 0;
2419 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2420 field. This is not handled by for_each_rtx because it doesn't
2421 handle unprinted ('0') fields. */
2422 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2423 JUMP_LABEL (l) = new_label;
2425 if ((GET_CODE (l) == LABEL_REF
2426 || GET_CODE (l) == INSN_LIST)
2427 && XEXP (l, 0) == old_label)
2429 XEXP (l, 0) = new_label;
2430 if (update_label_nuses)
2432 ++LABEL_NUSES (new_label);
2433 --LABEL_NUSES (old_label);
2435 return 0;
2438 return 0;
2441 /* When *BODY is equal to X or X is directly referenced by *BODY
2442 return nonzero, so that FOR_EACH_RTX stops traversing and returns nonzero
2443 too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2445 static int
2446 rtx_referenced_p_1 (rtx *body, void *x)
2448 rtx y = (rtx) x;
2450 if (*body == NULL_RTX)
2451 return y == NULL_RTX;
2453 /* Return true if a label_ref *BODY refers to label Y. */
2454 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2455 return XEXP (*body, 0) == y;
2457 /* If *BODY is a reference to pool constant traverse the constant. */
2458 if (GET_CODE (*body) == SYMBOL_REF
2459 && CONSTANT_POOL_ADDRESS_P (*body))
2460 return rtx_referenced_p (y, get_pool_constant (*body));
2462 /* By default, compare the RTL expressions. */
2463 return rtx_equal_p (*body, y);
2466 /* Return true if X is referenced in BODY. */
2469 rtx_referenced_p (rtx x, rtx body)
2471 return for_each_rtx (&body, rtx_referenced_p_1, x);
2474 /* If INSN is a tablejump, return true and store the label (before the jump table) to
2475 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2477 bool
2478 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2480 rtx label, table;
2482 if (JUMP_P (insn)
2483 && (label = JUMP_LABEL (insn)) != NULL_RTX
2484 && (table = next_active_insn (label)) != NULL_RTX
2485 && JUMP_P (table)
2486 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2487 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2489 if (labelp)
2490 *labelp = label;
2491 if (tablep)
2492 *tablep = table;
2493 return true;
2495 return false;
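/* As a rough sketch, a dispatch sequence typically looks like

     (jump_insn ... JUMP_LABEL -> L)
     (code_label L)
     (jump_insn (addr_diff_vec [...]))

   in which case tablejump_p returns true, sets *LABELP to L and
   *TABLEP to the insn holding the ADDR_DIFF_VEC (or ADDR_VEC).  */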
2498 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2499 constant that is not in the constant pool and not in the condition
2500 of an IF_THEN_ELSE. */
2502 static int
2503 computed_jump_p_1 (rtx x)
2505 enum rtx_code code = GET_CODE (x);
2506 int i, j;
2507 const char *fmt;
2509 switch (code)
2511 case LABEL_REF:
2512 case PC:
2513 return 0;
2515 case CONST:
2516 case CONST_INT:
2517 case CONST_DOUBLE:
2518 case CONST_VECTOR:
2519 case SYMBOL_REF:
2520 case REG:
2521 return 1;
2523 case MEM:
2524 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2525 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2527 case IF_THEN_ELSE:
2528 return (computed_jump_p_1 (XEXP (x, 1))
2529 || computed_jump_p_1 (XEXP (x, 2)));
2531 default:
2532 break;
2535 fmt = GET_RTX_FORMAT (code);
2536 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2538 if (fmt[i] == 'e'
2539 && computed_jump_p_1 (XEXP (x, i)))
2540 return 1;
2542 else if (fmt[i] == 'E')
2543 for (j = 0; j < XVECLEN (x, i); j++)
2544 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2545 return 1;
2548 return 0;
2551 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2553 Tablejumps and casesi insns are not considered indirect jumps;
2554 we can recognize them by a (use (label_ref)). */
2557 computed_jump_p (rtx insn)
2559 int i;
2560 if (JUMP_P (insn))
2562 rtx pat = PATTERN (insn);
2564 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2565 return 0;
2566 else if (GET_CODE (pat) == PARALLEL)
2568 int len = XVECLEN (pat, 0);
2569 int has_use_labelref = 0;
2571 for (i = len - 1; i >= 0; i--)
2572 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2573 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2574 == LABEL_REF))
2575 has_use_labelref = 1;
2577 if (! has_use_labelref)
2578 for (i = len - 1; i >= 0; i--)
2579 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2580 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2581 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2582 return 1;
2584 else if (GET_CODE (pat) == SET
2585 && SET_DEST (pat) == pc_rtx
2586 && computed_jump_p_1 (SET_SRC (pat)))
2587 return 1;
2589 return 0;
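/* For instance, (set (pc) (reg:SI 100)) is a computed jump, while
   (set (pc) (label_ref L)) and a conditional
   (set (pc) (if_then_else (cond) (label_ref L1) (pc))) are not;
   only the arms of the IF_THEN_ELSE are inspected, so a register used
   in the condition does not make the jump computed.  */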
2592 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2593 calls. Processes the subexpressions of EXP and passes them to F. */
2594 static int
2595 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2597 int result, i, j;
2598 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2599 rtx *x;
2601 for (; format[n] != '\0'; n++)
2603 switch (format[n])
2605 case 'e':
2606 /* Call F on X. */
2607 x = &XEXP (exp, n);
2608 result = (*f) (x, data);
2609 if (result == -1)
2610 /* Do not traverse sub-expressions. */
2611 continue;
2612 else if (result != 0)
2613 /* Stop the traversal. */
2614 return result;
2616 if (*x == NULL_RTX)
2617 /* There are no sub-expressions. */
2618 continue;
2620 i = non_rtx_starting_operands[GET_CODE (*x)];
2621 if (i >= 0)
2623 result = for_each_rtx_1 (*x, i, f, data);
2624 if (result != 0)
2625 return result;
2627 break;
2629 case 'V':
2630 case 'E':
2631 if (XVEC (exp, n) == 0)
2632 continue;
2633 for (j = 0; j < XVECLEN (exp, n); ++j)
2635 /* Call F on X. */
2636 x = &XVECEXP (exp, n, j);
2637 result = (*f) (x, data);
2638 if (result == -1)
2639 /* Do not traverse sub-expressions. */
2640 continue;
2641 else if (result != 0)
2642 /* Stop the traversal. */
2643 return result;
2645 if (*x == NULL_RTX)
2646 /* There are no sub-expressions. */
2647 continue;
2649 i = non_rtx_starting_operands[GET_CODE (*x)];
2650 if (i >= 0)
2652 result = for_each_rtx_1 (*x, i, f, data);
2653 if (result != 0)
2654 return result;
2657 break;
2659 default:
2660 /* Nothing to do. */
2661 break;
2665 return 0;
2668 /* Traverse X via depth-first search, calling F for each
2669 sub-expression (including X itself). F is also passed the DATA.
2670 If F returns -1, do not traverse sub-expressions, but continue
2671 traversing the rest of the tree. If F ever returns any other
2672 nonzero value, stop the traversal, and return the value returned
2673 by F. Otherwise, return 0. This function does not traverse inside
2674 tree structure that contains RTX_EXPRs, or into sub-expressions
2675 whose format code is `0' since it is not known whether or not those
2676 codes are actually RTL.
2678 This routine is very general, and could (should?) be used to
2679 implement many of the other routines in this file. */
2682 for_each_rtx (rtx *x, rtx_function f, void *data)
2684 int result;
2685 int i;
2687 /* Call F on X. */
2688 result = (*f) (x, data);
2689 if (result == -1)
2690 /* Do not traverse sub-expressions. */
2691 return 0;
2692 else if (result != 0)
2693 /* Stop the traversal. */
2694 return result;
2696 if (*x == NULL_RTX)
2697 /* There are no sub-expressions. */
2698 return 0;
2700 i = non_rtx_starting_operands[GET_CODE (*x)];
2701 if (i < 0)
2702 return 0;
2704 return for_each_rtx_1 (*x, i, f, data);
2708 /* Searches X for any reference to REGNO, returning the rtx of the
2709 reference found if any. Otherwise, returns NULL_RTX. */
2712 regno_use_in (unsigned int regno, rtx x)
2714 const char *fmt;
2715 int i, j;
2716 rtx tem;
2718 if (REG_P (x) && REGNO (x) == regno)
2719 return x;
2721 fmt = GET_RTX_FORMAT (GET_CODE (x));
2722 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2724 if (fmt[i] == 'e')
2726 if ((tem = regno_use_in (regno, XEXP (x, i))))
2727 return tem;
2729 else if (fmt[i] == 'E')
2730 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2731 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2732 return tem;
2735 return NULL_RTX;
2738 /* Return a value indicating whether OP, an operand of a commutative
2739 operation, is preferred as the first or second operand. The higher
2740 the value, the stronger the preference for being the first operand.
2741 We use negative values to indicate a preference for the second operand
2742 and positive values for the first operand. */
2745 commutative_operand_precedence (rtx op)
2747 enum rtx_code code = GET_CODE (op);
2749 /* Constants are always the second operand. Prefer "nice" constants. */
2750 if (code == CONST_INT)
2751 return -7;
2752 if (code == CONST_DOUBLE)
2753 return -6;
2754 op = avoid_constant_pool_reference (op);
2755 code = GET_CODE (op);
2757 switch (GET_RTX_CLASS (code))
2759 case RTX_CONST_OBJ:
2760 if (code == CONST_INT)
2761 return -5;
2762 if (code == CONST_DOUBLE)
2763 return -4;
2764 return -3;
2766 case RTX_EXTRA:
2767 /* SUBREGs of objects should come second. */
2768 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2769 return -2;
2771 if (!CONSTANT_P (op))
2772 return 0;
2773 else
2774 /* As for RTX_CONST_OBJ. */
2775 return -3;
2777 case RTX_OBJ:
2778 /* Complex expressions should be the first, so decrease priority
2779 of objects. */
2780 return -1;
2782 case RTX_COMM_ARITH:
2783 /* Prefer operands that are themselves commutative to be first.
2784 This helps to make things linear. In particular,
2785 (and (and (reg) (reg)) (not (reg))) is canonical. */
2786 return 4;
2788 case RTX_BIN_ARITH:
2789 /* If only one operand is a binary expression, it will be the first
2790 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2791 is canonical, although it will usually be further simplified. */
2792 return 2;
2794 case RTX_UNARY:
2795 /* Then prefer NEG and NOT. */
2796 if (code == NEG || code == NOT)
2797 return 1;
2799 default:
2800 return 0;
2804 /* Return 1 iff it is necessary to swap operands of a commutative operation
2805 in order to canonicalize the expression. */
2808 swap_commutative_operands_p (rtx x, rtx y)
2810 return (commutative_operand_precedence (x)
2811 < commutative_operand_precedence (y));
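/* For instance, for (plus (const_int 4) (reg)), the CONST_INT has
   precedence -7 and the REG (an RTX_OBJ) has precedence -1, so
   swap_commutative_operands_p returns 1 and the canonical form puts
   the register first: (plus (reg) (const_int 4)).  */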
2814 /* Return 1 if X is an autoincrement side effect and the register is
2815 not the stack pointer. */
2817 auto_inc_p (rtx x)
2819 switch (GET_CODE (x))
2821 case PRE_INC:
2822 case POST_INC:
2823 case PRE_DEC:
2824 case POST_DEC:
2825 case PRE_MODIFY:
2826 case POST_MODIFY:
2827 /* There are no REG_INC notes for SP. */
2828 if (XEXP (x, 0) != stack_pointer_rtx)
2829 return 1;
2830 default:
2831 break;
2833 return 0;
2836 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2838 loc_mentioned_in_p (rtx *loc, rtx in)
2840 enum rtx_code code = GET_CODE (in);
2841 const char *fmt = GET_RTX_FORMAT (code);
2842 int i, j;
2844 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2846 if (loc == &in->u.fld[i].rt_rtx)
2847 return 1;
2848 if (fmt[i] == 'e')
2850 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2851 return 1;
2853 else if (fmt[i] == 'E')
2854 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2855 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2856 return 1;
2858 return 0;
2861 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2862 and SUBREG_BYTE, return the bit offset where the subreg begins
2863 (counting from the least significant bit of the operand). */
2865 unsigned int
2866 subreg_lsb_1 (enum machine_mode outer_mode,
2867 enum machine_mode inner_mode,
2868 unsigned int subreg_byte)
2870 unsigned int bitpos;
2871 unsigned int byte;
2872 unsigned int word;
2874 /* A paradoxical subreg begins at bit position 0. */
2875 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2876 return 0;
2878 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2879 /* If the subreg crosses a word boundary ensure that
2880 it also begins and ends on a word boundary. */
2881 gcc_assert (!((subreg_byte % UNITS_PER_WORD
2882 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2883 && (subreg_byte % UNITS_PER_WORD
2884 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
2886 if (WORDS_BIG_ENDIAN)
2887 word = (GET_MODE_SIZE (inner_mode)
2888 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
2889 else
2890 word = subreg_byte / UNITS_PER_WORD;
2891 bitpos = word * BITS_PER_WORD;
2893 if (BYTES_BIG_ENDIAN)
2894 byte = (GET_MODE_SIZE (inner_mode)
2895 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
2896 else
2897 byte = subreg_byte % UNITS_PER_WORD;
2898 bitpos += byte * BITS_PER_UNIT;
2900 return bitpos;
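/* For instance, on a little-endian target (BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN both false) with 32-bit words,
   (subreg:SI (reg:DI ...) 4) yields word 1 and byte 0, so the subreg
   starts at bit 32 of the DImode value; with byte offset 0 it would
   start at bit 0.  */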
2903 /* Given a subreg X, return the bit offset where the subreg begins
2904 (counting from the least significant bit of the reg). */
2906 unsigned int
2907 subreg_lsb (rtx x)
2909 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
2910 SUBREG_BYTE (x));
2913 /* This function returns the regno offset of a subreg expression.
2914 xregno - A regno of an inner hard subreg_reg (or what will become one).
2915 xmode - The mode of xregno.
2916 offset - The byte offset.
2917 ymode - The mode of a top level SUBREG (or what may become one).
2918 RETURN - The regno offset which would be used. */
2919 unsigned int
2920 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
2921 unsigned int offset, enum machine_mode ymode)
2923 int nregs_xmode, nregs_ymode, nregs_xmode_unit_int;
2924 int mode_multiple, nregs_multiple;
2925 int y_offset;
2926 enum machine_mode xmode_unit, xmode_unit_int;
2928 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
2930 if (GET_MODE_INNER (xmode) == VOIDmode)
2931 xmode_unit = xmode;
2932 else
2933 xmode_unit = GET_MODE_INNER (xmode);
2935 if (FLOAT_MODE_P (xmode_unit))
2937 xmode_unit_int = int_mode_for_mode (xmode_unit);
2938 if (xmode_unit_int == BLKmode)
2939 /* It's probably bad to be here; a port should have an integer mode
2940 that's the same size as anything of which it takes a SUBREG. */
2941 xmode_unit_int = xmode_unit;
2943 else
2944 xmode_unit_int = xmode_unit;
2946 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
2948 /* Adjust nregs_xmode to allow for 'holes'. */
2949 if (nregs_xmode_unit_int != hard_regno_nregs[xregno][xmode_unit])
2950 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
2951 else
2952 nregs_xmode = hard_regno_nregs[xregno][xmode];
2954 nregs_ymode = hard_regno_nregs[xregno][ymode];
2956 /* If this is a big endian paradoxical subreg, which uses more actual
2957 hard registers than the original register, we must return a negative
2958 offset so that we find the proper highpart of the register. */
2959 if (offset == 0
2960 && nregs_ymode > nregs_xmode
2961 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2962 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
2963 return nregs_xmode - nregs_ymode;
2965 if (offset == 0 || nregs_xmode == nregs_ymode)
2966 return 0;
2968 /* Size of ymode must not be greater than the size of xmode. */
2969 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
2970 gcc_assert (mode_multiple != 0);
2972 y_offset = offset / GET_MODE_SIZE (ymode);
2973 nregs_multiple = nregs_xmode / nregs_ymode;
2974 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
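/* As an illustration, assume a little-endian target where SImode
   occupies one hard register, DImode occupies two, and no holes are
   involved: for xmode = DImode, ymode = SImode and offset = 4,
   nregs_xmode = 2, nregs_ymode = 1, mode_multiple = 2, y_offset = 1
   and nregs_multiple = 2, so the function returns 1 and the SUBREG
   refers to hard register XREGNO + 1.  */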
2977 /* This function returns true when the offset is representable via
2978 subreg_offset in the given regno.
2979 xregno - A regno of an inner hard subreg_reg (or what will become one).
2980 xmode - The mode of xregno.
2981 offset - The byte offset.
2982 ymode - The mode of a top level SUBREG (or what may become one).
2983 RETURN - Whether the offset is representable. */
2984 bool
2985 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
2986 unsigned int offset, enum machine_mode ymode)
2988 int nregs_xmode, nregs_ymode, nregs_xmode_unit, nregs_xmode_unit_int;
2989 int mode_multiple, nregs_multiple;
2990 int y_offset;
2991 enum machine_mode xmode_unit, xmode_unit_int;
2993 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
2995 if (GET_MODE_INNER (xmode) == VOIDmode)
2996 xmode_unit = xmode;
2997 else
2998 xmode_unit = GET_MODE_INNER (xmode);
3000 if (FLOAT_MODE_P (xmode_unit))
3002 xmode_unit_int = int_mode_for_mode (xmode_unit);
3003 if (xmode_unit_int == BLKmode)
3004 /* It's probably bad to be here; a port should have an integer mode
3005 that's the same size as anything of which it takes a SUBREG. */
3006 xmode_unit_int = xmode_unit;
3008 else
3009 xmode_unit_int = xmode_unit;
3011 nregs_xmode_unit = hard_regno_nregs[xregno][xmode_unit];
3012 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
3014 /* If there are holes in a non-scalar mode in registers, we expect
3015 that it is made up of its units concatenated together. */
3016 if (nregs_xmode_unit != nregs_xmode_unit_int)
3018 gcc_assert (nregs_xmode_unit * GET_MODE_NUNITS (xmode)
3019 == hard_regno_nregs[xregno][xmode]);
3021 /* You can only ask for a SUBREG of a value with holes in the middle
3022 if you don't cross the holes. (Such a SUBREG should be done by
3023 picking a different register class, or doing it in memory if
3024 necessary.) An example of a value with holes is XCmode on 32-bit
3025 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3026 3 for each part, but in memory it's two 128-bit parts.
3027 Padding is assumed to be at the end (not necessarily the 'high part')
3028 of each unit. */
3029 if (nregs_xmode_unit != nregs_xmode_unit_int
3030 && (offset / GET_MODE_SIZE (xmode_unit_int) + 1
3031 < GET_MODE_NUNITS (xmode))
3032 && (offset / GET_MODE_SIZE (xmode_unit_int)
3033 != ((offset + GET_MODE_SIZE (ymode) - 1)
3034 / GET_MODE_SIZE (xmode_unit_int))))
3035 return false;
3037 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
3039 else
3040 nregs_xmode = hard_regno_nregs[xregno][xmode];
3042 nregs_ymode = hard_regno_nregs[xregno][ymode];
3044 /* Paradoxical subregs are otherwise valid. */
3045 if (offset == 0
3046 && nregs_ymode > nregs_xmode
3047 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3048 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3049 return true;
3051 /* Lowpart subregs are otherwise valid. */
3052 if (offset == subreg_lowpart_offset (ymode, xmode))
3053 return true;
3055 /* This should always pass, otherwise we don't know how to verify
3056 the constraint. These conditions may be relaxed but
3057 subreg_regno_offset would need to be redesigned. */
3058 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3059 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3061 /* The XMODE value can be seen as a vector of NREGS_XMODE
3062 values. The subreg must represent a lowpart of a given field.
3063 Compute what field it is. */
3064 offset -= subreg_lowpart_offset (ymode,
3065 mode_for_size (GET_MODE_BITSIZE (xmode)
3066 / nregs_xmode,
3067 MODE_INT, 0));
3069 /* Size of ymode must not be greater than the size of xmode. */
3070 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3071 gcc_assert (mode_multiple != 0);
3073 y_offset = offset / GET_MODE_SIZE (ymode);
3074 nregs_multiple = nregs_xmode / nregs_ymode;
3076 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3077 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3079 return (!(y_offset % (mode_multiple / nregs_multiple)));
3082 /* Return the final regno that a subreg expression refers to. */
3083 unsigned int
3084 subreg_regno (rtx x)
3086 unsigned int ret;
3087 rtx subreg = SUBREG_REG (x);
3088 int regno = REGNO (subreg);
3090 ret = regno + subreg_regno_offset (regno,
3091 GET_MODE (subreg),
3092 SUBREG_BYTE (x),
3093 GET_MODE (x));
3094 return ret;
3097 struct parms_set_data
3099 int nregs;
3100 HARD_REG_SET regs;
3103 /* Helper function for noticing stores to parameter registers. */
3104 static void
3105 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3107 struct parms_set_data *d = data;
3108 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3109 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3111 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3112 d->nregs--;
3116 /* Look backward for first parameter to be loaded.
3117 Note that loads of all parameters will not necessarily be
3118 found if CSE has eliminated some of them (e.g., an argument
3119 to the outer function is passed down as a parameter).
3120 Do not skip BOUNDARY. */
3122 find_first_parameter_load (rtx call_insn, rtx boundary)
3124 struct parms_set_data parm;
3125 rtx p, before, first_set;
3127 /* Since different machines initialize their parameter registers
3128 in different orders, assume nothing. Collect the set of all
3129 parameter registers. */
3130 CLEAR_HARD_REG_SET (parm.regs);
3131 parm.nregs = 0;
3132 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3133 if (GET_CODE (XEXP (p, 0)) == USE
3134 && REG_P (XEXP (XEXP (p, 0), 0)))
3136 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3138 /* We only care about registers which can hold function
3139 arguments. */
3140 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3141 continue;
3143 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3144 parm.nregs++;
3146 before = call_insn;
3147 first_set = call_insn;
3149 /* Search backward for the first set of a register in this set. */
3150 while (parm.nregs && before != boundary)
3152 before = PREV_INSN (before);
3154 /* It is possible that some loads got CSEed from one call to
3155 another. Stop in that case. */
3156 if (CALL_P (before))
3157 break;
3159 /* Our caller must either ensure that we will find all sets
3160 (in case the code has not been optimized yet), or take care
3161 of possible labels by setting BOUNDARY to the preceding
3162 CODE_LABEL. */
3163 if (LABEL_P (before))
3165 gcc_assert (before == boundary);
3166 break;
3169 if (INSN_P (before))
3171 int nregs_old = parm.nregs;
3172 note_stores (PATTERN (before), parms_set, &parm);
3173 /* If we found something that did not set a parameter reg,
3174 we're done. Do not keep going, as that might result
3175 in hoisting an insn before the setting of a pseudo
3176 that is used by the hoisted insn. */
3177 if (nregs_old != parm.nregs)
3178 first_set = before;
3179 else
3180 break;
3183 return first_set;
3186 /* Return true if we should avoid inserting code between INSN and preceding
3187 call instruction. */
3189 bool
3190 keep_with_call_p (rtx insn)
3192 rtx set;
3194 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3196 if (REG_P (SET_DEST (set))
3197 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3198 && fixed_regs[REGNO (SET_DEST (set))]
3199 && general_operand (SET_SRC (set), VOIDmode))
3200 return true;
3201 if (REG_P (SET_SRC (set))
3202 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3203 && REG_P (SET_DEST (set))
3204 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3205 return true;
3206 /* There may be a stack pop just after the call and before the store
3207 of the return register. Search for the actual store when deciding
3208 if we can break or not. */
3209 if (SET_DEST (set) == stack_pointer_rtx)
3211 rtx i2 = next_nonnote_insn (insn);
3212 if (i2 && keep_with_call_p (i2))
3213 return true;
3216 return false;
3219 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3220 to non-complex jumps. That is, direct unconditional, conditional,
3221 and tablejumps, but not computed jumps or returns. It also does
3222 not apply to the fallthru case of a conditional jump. */
3224 bool
3225 label_is_jump_target_p (rtx label, rtx jump_insn)
3227 rtx tmp = JUMP_LABEL (jump_insn);
3229 if (label == tmp)
3230 return true;
3232 if (tablejump_p (jump_insn, NULL, &tmp))
3234 rtvec vec = XVEC (PATTERN (tmp),
3235 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3236 int i, veclen = GET_NUM_ELEM (vec);
3238 for (i = 0; i < veclen; ++i)
3239 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3240 return true;
3243 return false;
3247 /* Return an estimate of the cost of computing rtx X.
3248 One use is in cse, to decide which expression to keep in the hash table.
3249 Another is in rtl generation, to pick the cheapest way to multiply.
3250 Other uses like the latter are expected in the future. */
3253 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3255 int i, j;
3256 enum rtx_code code;
3257 const char *fmt;
3258 int total;
3260 if (x == 0)
3261 return 0;
3263 /* Compute the default costs of certain things.
3264 Note that targetm.rtx_costs can override the defaults. */
3266 code = GET_CODE (x);
3267 switch (code)
3269 case MULT:
3270 total = COSTS_N_INSNS (5);
3271 break;
3272 case DIV:
3273 case UDIV:
3274 case MOD:
3275 case UMOD:
3276 total = COSTS_N_INSNS (7);
3277 break;
3278 case USE:
3279 /* Used in combine.c as a marker. */
3280 total = 0;
3281 break;
3282 default:
3283 total = COSTS_N_INSNS (1);
3286 switch (code)
3288 case REG:
3289 return 0;
3291 case SUBREG:
3292 total = 0;
3293 /* If we can't tie these modes, make this expensive. The larger
3294 the mode, the more expensive it is. */
3295 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3296 return COSTS_N_INSNS (2
3297 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3298 break;
3300 default:
3301 if (targetm.rtx_costs (x, code, outer_code, &total))
3302 return total;
3303 break;
3306 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3307 which is already in total. */
3309 fmt = GET_RTX_FORMAT (code);
3310 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3311 if (fmt[i] == 'e')
3312 total += rtx_cost (XEXP (x, i), code);
3313 else if (fmt[i] == 'E')
3314 for (j = 0; j < XVECLEN (x, i); j++)
3315 total += rtx_cost (XVECEXP (x, i, j), code);
3317 return total;
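/* As a rough illustration, if the target's rtx_costs hook does not
   intervene, rtx_cost of (plus:SI (mult:SI (reg) (reg)) (reg)) is
   COSTS_N_INSNS (1) for the PLUS plus COSTS_N_INSNS (5) for the MULT,
   with the REG operands contributing nothing.  */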
3320 /* Return cost of address expression X.
3321 Expect that X is a properly formed address reference. */
3324 address_cost (rtx x, enum machine_mode mode)
3326 /* We may be asked for the cost of various unusual addresses, such as operands
3327 of a push instruction. It is not worthwhile to complicate the target hook
3328 with such cases. */
3330 if (!memory_address_p (mode, x))
3331 return 1000;
3333 return targetm.address_cost (x);
3336 /* If the target doesn't override, compute the cost as with arithmetic. */
3339 default_address_cost (rtx x)
3341 return rtx_cost (x, MEM);
3345 unsigned HOST_WIDE_INT
3346 nonzero_bits (rtx x, enum machine_mode mode)
3348 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3351 unsigned int
3352 num_sign_bit_copies (rtx x, enum machine_mode mode)
3354 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3357 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3358 It avoids exponential behavior in nonzero_bits1 when X has
3359 identical subexpressions on the first or the second level. */
3361 static unsigned HOST_WIDE_INT
3362 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3363 enum machine_mode known_mode,
3364 unsigned HOST_WIDE_INT known_ret)
3366 if (x == known_x && mode == known_mode)
3367 return known_ret;
3369 /* Try to find identical subexpressions. If found call
3370 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3371 precomputed value for the subexpression as KNOWN_RET. */
3373 if (ARITHMETIC_P (x))
3375 rtx x0 = XEXP (x, 0);
3376 rtx x1 = XEXP (x, 1);
3378 /* Check the first level. */
3379 if (x0 == x1)
3380 return nonzero_bits1 (x, mode, x0, mode,
3381 cached_nonzero_bits (x0, mode, known_x,
3382 known_mode, known_ret));
3384 /* Check the second level. */
3385 if (ARITHMETIC_P (x0)
3386 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3387 return nonzero_bits1 (x, mode, x1, mode,
3388 cached_nonzero_bits (x1, mode, known_x,
3389 known_mode, known_ret));
3391 if (ARITHMETIC_P (x1)
3392 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3393 return nonzero_bits1 (x, mode, x0, mode,
3394 cached_nonzero_bits (x0, mode, known_x,
3395 known_mode, known_ret));
3398 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
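/* For instance, for (plus:SI (reg:SI 100) (reg:SI 100)) the two
   operands are the same rtx, so the nonzero bits of the register are
   computed once and handed to nonzero_bits1 as KNOWN_X/KNOWN_RET
   rather than being recomputed for the second operand.  */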
3401 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3402 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3403 is less useful. We can't allow both, because that results in exponential
3404 run time recursion. There is a nullstone testcase that triggered
3405 this. This macro avoids accidental uses of num_sign_bit_copies. */
3406 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3408 /* Given an expression, X, compute which bits in X can be nonzero.
3409 We don't care about bits outside of those defined in MODE.
3411 For most X this is simply GET_MODE_MASK (MODE), but if X is
3412 an arithmetic operation, we can do better. */
3414 static unsigned HOST_WIDE_INT
3415 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3416 enum machine_mode known_mode,
3417 unsigned HOST_WIDE_INT known_ret)
3419 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3420 unsigned HOST_WIDE_INT inner_nz;
3421 enum rtx_code code;
3422 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3424 /* For floating-point values, assume all bits are needed. */
3425 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3426 return nonzero;
3428 /* If X is wider than MODE, use its mode instead. */
3429 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3431 mode = GET_MODE (x);
3432 nonzero = GET_MODE_MASK (mode);
3433 mode_width = GET_MODE_BITSIZE (mode);
3436 if (mode_width > HOST_BITS_PER_WIDE_INT)
3437 /* Our only callers in this case look for single bit values. So
3438 just return the mode mask. Those tests will then be false. */
3439 return nonzero;
3441 #ifndef WORD_REGISTER_OPERATIONS
3442 /* If MODE is wider than X, but both are a single word for both the host
3443 and target machines, we can compute this from which bits of the
3444 object might be nonzero in its own mode, taking into account the fact
3445 that on many CISC machines, accessing an object in a wider mode
3446 causes the high-order bits to become undefined. So they are
3447 not known to be zero. */
3449 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3450 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3451 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3452 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3454 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3455 known_x, known_mode, known_ret);
3456 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3457 return nonzero;
3459 #endif
3461 code = GET_CODE (x);
3462 switch (code)
3464 case REG:
3465 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3466 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3467 all the bits above ptr_mode are known to be zero. */
3468 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3469 && REG_POINTER (x))
3470 nonzero &= GET_MODE_MASK (ptr_mode);
3471 #endif
3473 /* Include declared information about alignment of pointers. */
3474 /* ??? We don't properly preserve REG_POINTER changes across
3475 pointer-to-integer casts, so we can't trust it except for
3476 things that we know must be pointers. See execute/960116-1.c. */
3477 if ((x == stack_pointer_rtx
3478 || x == frame_pointer_rtx
3479 || x == arg_pointer_rtx)
3480 && REGNO_POINTER_ALIGN (REGNO (x)))
3482 unsigned HOST_WIDE_INT alignment
3483 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3485 #ifdef PUSH_ROUNDING
3486 /* If PUSH_ROUNDING is defined, it is possible for the
3487 stack to be momentarily aligned only to that amount,
3488 so we pick the least alignment. */
3489 if (x == stack_pointer_rtx && PUSH_ARGS)
3490 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3491 alignment);
3492 #endif
3494 nonzero &= ~(alignment - 1);
3498 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3499 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3500 known_mode, known_ret,
3501 &nonzero_for_hook);
3503 if (new)
3504 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3505 known_mode, known_ret);
3507 return nonzero_for_hook;
3510 case CONST_INT:
3511 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3512 /* If X is negative in MODE, sign-extend the value. */
3513 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3514 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3515 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3516 #endif
3518 return INTVAL (x);
3520 case MEM:
3521 #ifdef LOAD_EXTEND_OP
3522 /* In many, if not most, RISC machines, reading a byte from memory
3523 zeros the rest of the register. Noticing that fact saves a lot
3524 of extra zero-extends. */
3525 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3526 nonzero &= GET_MODE_MASK (GET_MODE (x));
3527 #endif
3528 break;
3530 case EQ: case NE:
3531 case UNEQ: case LTGT:
3532 case GT: case GTU: case UNGT:
3533 case LT: case LTU: case UNLT:
3534 case GE: case GEU: case UNGE:
3535 case LE: case LEU: case UNLE:
3536 case UNORDERED: case ORDERED:
3537 /* If this produces an integer result, we know which bits are set.
3538 Code here used to clear bits outside the mode of X, but that is
3539 now done above. */
3540 /* Mind that MODE is the mode the caller wants to look at this
3541 operation in, and not the actual operation mode. We can wind
3542 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3543 that describes the results of a vector compare. */
3544 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3545 && mode_width <= HOST_BITS_PER_WIDE_INT)
3546 nonzero = STORE_FLAG_VALUE;
3547 break;
3549 case NEG:
3550 #if 0
3551 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3552 and num_sign_bit_copies. */
3553 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3554 == GET_MODE_BITSIZE (GET_MODE (x)))
3555 nonzero = 1;
3556 #endif
3558 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3559 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3560 break;
3562 case ABS:
3563 #if 0
3564 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3565 and num_sign_bit_copies. */
3566 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3567 == GET_MODE_BITSIZE (GET_MODE (x)))
3568 nonzero = 1;
3569 #endif
3570 break;
3572 case TRUNCATE:
3573 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3574 known_x, known_mode, known_ret)
3575 & GET_MODE_MASK (mode));
3576 break;
3578 case ZERO_EXTEND:
3579 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3580 known_x, known_mode, known_ret);
3581 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3582 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3583 break;
3585 case SIGN_EXTEND:
3586 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3587 Otherwise, show that all the bits in the outer mode but not in the inner
3588 mode may be nonzero. */
3589 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3590 known_x, known_mode, known_ret);
3591 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3593 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3594 if (inner_nz
3595 & (((HOST_WIDE_INT) 1
3596 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3597 inner_nz |= (GET_MODE_MASK (mode)
3598 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3601 nonzero &= inner_nz;
3602 break;
3604 case AND:
3605 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3606 known_x, known_mode, known_ret)
3607 & cached_nonzero_bits (XEXP (x, 1), mode,
3608 known_x, known_mode, known_ret);
3609 break;
3611 case XOR: case IOR:
3612 case UMIN: case UMAX: case SMIN: case SMAX:
3614 unsigned HOST_WIDE_INT nonzero0 =
3615 cached_nonzero_bits (XEXP (x, 0), mode,
3616 known_x, known_mode, known_ret);
3618 /* Don't call nonzero_bits for the second time if it cannot change
3619 anything. */
3620 if ((nonzero & nonzero0) != nonzero)
3621 nonzero &= nonzero0
3622 | cached_nonzero_bits (XEXP (x, 1), mode,
3623 known_x, known_mode, known_ret);
3625 break;
3627 case PLUS: case MINUS:
3628 case MULT:
3629 case DIV: case UDIV:
3630 case MOD: case UMOD:
3631 /* We can apply the rules of arithmetic to compute the number of
3632 high- and low-order zero bits of these operations. We start by
3633 computing the width (position of the highest-order nonzero bit)
3634 and the number of low-order zero bits for each value. */
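      /* For instance, if the first operand can be nonzero only in
	 0x0000000f (width 4, no trailing zeros) and the second only in
	 0x00000030 (width 6, four trailing zeros), then a PLUS can be
	 nonzero only in the low 7 bits and a MULT only in bits 4
	 through 9.  */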
3636 unsigned HOST_WIDE_INT nz0 =
3637 cached_nonzero_bits (XEXP (x, 0), mode,
3638 known_x, known_mode, known_ret);
3639 unsigned HOST_WIDE_INT nz1 =
3640 cached_nonzero_bits (XEXP (x, 1), mode,
3641 known_x, known_mode, known_ret);
3642 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3643 int width0 = floor_log2 (nz0) + 1;
3644 int width1 = floor_log2 (nz1) + 1;
3645 int low0 = floor_log2 (nz0 & -nz0);
3646 int low1 = floor_log2 (nz1 & -nz1);
3647 HOST_WIDE_INT op0_maybe_minusp
3648 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3649 HOST_WIDE_INT op1_maybe_minusp
3650 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3651 unsigned int result_width = mode_width;
3652 int result_low = 0;
3654 switch (code)
3656 case PLUS:
3657 result_width = MAX (width0, width1) + 1;
3658 result_low = MIN (low0, low1);
3659 break;
3660 case MINUS:
3661 result_low = MIN (low0, low1);
3662 break;
3663 case MULT:
3664 result_width = width0 + width1;
3665 result_low = low0 + low1;
3666 break;
3667 case DIV:
3668 if (width1 == 0)
3669 break;
3670 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3671 result_width = width0;
3672 break;
3673 case UDIV:
3674 if (width1 == 0)
3675 break;
3676 result_width = width0;
3677 break;
3678 case MOD:
3679 if (width1 == 0)
3680 break;
3681 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3682 result_width = MIN (width0, width1);
3683 result_low = MIN (low0, low1);
3684 break;
3685 case UMOD:
3686 if (width1 == 0)
3687 break;
3688 result_width = MIN (width0, width1);
3689 result_low = MIN (low0, low1);
3690 break;
3691 default:
3692 gcc_unreachable ();
3695 if (result_width < mode_width)
3696 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3698 if (result_low > 0)
3699 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3701 #ifdef POINTERS_EXTEND_UNSIGNED
3702 /* If pointers extend unsigned and this is an addition or subtraction
3703 to a pointer in Pmode, all the bits above ptr_mode are known to be
3704 zero. */
3705 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3706 && (code == PLUS || code == MINUS)
3707 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3708 nonzero &= GET_MODE_MASK (ptr_mode);
3709 #endif
3711 break;
3713 case ZERO_EXTRACT:
3714 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3715 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3716 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3717 break;
3719 case SUBREG:
3720 /* If this is a SUBREG formed for a promoted variable that has
3721 been zero-extended, we know that at least the high-order bits
3722 are zero, though others might be too. */
3724 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3725 nonzero = GET_MODE_MASK (GET_MODE (x))
3726 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3727 known_x, known_mode, known_ret);
3729 /* If the inner mode is a single word for both the host and target
3730 machines, we can compute this from which bits of the inner
3731 object might be nonzero. */
3732 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3733 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3734 <= HOST_BITS_PER_WIDE_INT))
3736 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3737 known_x, known_mode, known_ret);
3739 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3740 /* If this is a typical RISC machine, we only have to worry
3741 about the way loads are extended. */
3742 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3743 ? (((nonzero
3744 & (((unsigned HOST_WIDE_INT) 1
3745 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3746 != 0))
3747 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3748 || !MEM_P (SUBREG_REG (x)))
3749 #endif
3751 /* On many CISC machines, accessing an object in a wider mode
3752 causes the high-order bits to become undefined. So they are
3753 not known to be zero. */
3754 if (GET_MODE_SIZE (GET_MODE (x))
3755 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3756 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3757 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3760 break;
3762 case ASHIFTRT:
3763 case LSHIFTRT:
3764 case ASHIFT:
3765 case ROTATE:
3766 /* The nonzero bits are in two classes: any bits within MODE
3767 that aren't in GET_MODE (x) are always significant. The rest of the
3768 nonzero bits are those that are significant in the operand of
3769 the shift when shifted the appropriate number of bits. This
3770 shows that high-order bits are cleared by the right shift and
3771 low-order bits by left shifts. */
3772 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3773 && INTVAL (XEXP (x, 1)) >= 0
3774 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3776 enum machine_mode inner_mode = GET_MODE (x);
3777 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3778 int count = INTVAL (XEXP (x, 1));
3779 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3780 unsigned HOST_WIDE_INT op_nonzero =
3781 cached_nonzero_bits (XEXP (x, 0), mode,
3782 known_x, known_mode, known_ret);
3783 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3784 unsigned HOST_WIDE_INT outer = 0;
3786 if (mode_width > width)
3787 outer = (op_nonzero & nonzero & ~mode_mask);
3789 if (code == LSHIFTRT)
3790 inner >>= count;
3791 else if (code == ASHIFTRT)
3793 inner >>= count;
3795 /* If the sign bit may have been nonzero before the shift, we
3796 need to mark all the places it could have been copied to
3797 by the shift as possibly nonzero. */
3798 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3799 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3801 else if (code == ASHIFT)
3802 inner <<= count;
3803 else
3804 inner = ((inner << (count % width)
3805 | (inner >> (width - (count % width)))) & mode_mask);
3807 nonzero &= (outer | inner);
3809 break;
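    /* For instance, for (lshiftrt:SI X (const_int 4)) where the operand
       can be nonzero only in the low byte, INNER becomes 0x0f after the
       shift, so only the low four bits of the result can be nonzero.  */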
3811 case FFS:
3812 case POPCOUNT:
3813 /* This is at most the number of bits in the mode. */
3814 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3815 break;
3817 case CLZ:
3818 /* If CLZ has a known value at zero, then the nonzero bits are
3819 that value, plus the number of bits in the mode minus one. */
3820 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3821 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3822 else
3823 nonzero = -1;
3824 break;
3826 case CTZ:
3827 /* If CTZ has a known value at zero, then the nonzero bits are
3828 that value, plus the number of bits in the mode minus one. */
3829 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3830 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3831 else
3832 nonzero = -1;
3833 break;
3835 case PARITY:
3836 nonzero = 1;
3837 break;
3839 case IF_THEN_ELSE:
3841 unsigned HOST_WIDE_INT nonzero_true =
3842 cached_nonzero_bits (XEXP (x, 1), mode,
3843 known_x, known_mode, known_ret);
3845 /* Don't call nonzero_bits for the second time if it cannot change
3846 anything. */
3847 if ((nonzero & nonzero_true) != nonzero)
3848 nonzero &= nonzero_true
3849 | cached_nonzero_bits (XEXP (x, 2), mode,
3850 known_x, known_mode, known_ret);
3852 break;
3854 default:
3855 break;
3858 return nonzero;
3861 /* See the macro definition above. */
3862 #undef cached_num_sign_bit_copies
3865 /* The function cached_num_sign_bit_copies is a wrapper around
3866 num_sign_bit_copies1. It avoids exponential behavior in
3867 num_sign_bit_copies1 when X has identical subexpressions on the
3868 first or the second level. */
3870 static unsigned int
3871 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3872 enum machine_mode known_mode,
3873 unsigned int known_ret)
3875 if (x == known_x && mode == known_mode)
3876 return known_ret;
3878 /* Try to find identical subexpressions. If found call
3879 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3880 the precomputed value for the subexpression as KNOWN_RET. */
3882 if (ARITHMETIC_P (x))
3884 rtx x0 = XEXP (x, 0);
3885 rtx x1 = XEXP (x, 1);
3887 /* Check the first level. */
3888 if (x0 == x1)
3889 return
3890 num_sign_bit_copies1 (x, mode, x0, mode,
3891 cached_num_sign_bit_copies (x0, mode, known_x,
3892 known_mode,
3893 known_ret));
3895 /* Check the second level. */
3896 if (ARITHMETIC_P (x0)
3897 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3898 return
3899 num_sign_bit_copies1 (x, mode, x1, mode,
3900 cached_num_sign_bit_copies (x1, mode, known_x,
3901 known_mode,
3902 known_ret));
3904 if (ARITHMETIC_P (x1)
3905 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3906 return
3907 num_sign_bit_copies1 (x, mode, x0, mode,
3908 cached_num_sign_bit_copies (x0, mode, known_x,
3909 known_mode,
3910 known_ret));
3913 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
3916 /* Return the number of bits at the high-order end of X that are known to
3917 be equal to the sign bit. X will be used in mode MODE; if MODE is
3918 VOIDmode, X will be used in its own mode. The returned value will always
3919 be between 1 and the number of bits in MODE. */
3921 static unsigned int
3922 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
3923 enum machine_mode known_mode,
3924 unsigned int known_ret)
3926 enum rtx_code code = GET_CODE (x);
3927 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
3928 int num0, num1, result;
3929 unsigned HOST_WIDE_INT nonzero;
3931 /* If we weren't given a mode, use the mode of X. If the mode is still
3932 VOIDmode, we don't know anything. Likewise if one of the modes is
3933 floating-point. */
3935 if (mode == VOIDmode)
3936 mode = GET_MODE (x);
3938 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
3939 return 1;
3941 /* For a smaller object, just ignore the high bits. */
3942 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
3944 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
3945 known_x, known_mode, known_ret);
3946 return MAX (1,
3947 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
3950 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
3952 #ifndef WORD_REGISTER_OPERATIONS
3953 /* If this machine does not do all register operations on the entire
3954 register and MODE is wider than the mode of X, we can say nothing
3955 at all about the high-order bits. */
3956 return 1;
3957 #else
3958 /* Likewise on machines that do, if the mode of the object is smaller
3959 than a word and loads of that size don't sign extend, we can say
3960 nothing about the high order bits. */
3961 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
3962 #ifdef LOAD_EXTEND_OP
3963 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
3964 #endif
3966 return 1;
3967 #endif
3970 switch (code)
3972 case REG:
3974 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3975 /* If pointers extend signed and this is a pointer in Pmode, say that
3976 all the bits above ptr_mode are known to be sign bit copies. */
3977 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
3978 && REG_POINTER (x))
3979 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
3980 #endif
3983 unsigned int copies_for_hook = 1, copies = 1;
3984 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
3985 known_mode, known_ret,
3986 &copies_for_hook);
3988 if (new)
3989 copies = cached_num_sign_bit_copies (new, mode, known_x,
3990 known_mode, known_ret);
3992 if (copies > 1 || copies_for_hook > 1)
3993 return MAX (copies, copies_for_hook);
3995 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
3997 break;
3999 case MEM:
4000 #ifdef LOAD_EXTEND_OP
4001 /* Some RISC machines sign-extend all loads of smaller than a word. */
4002 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4003 return MAX (1, ((int) bitwidth
4004 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4005 #endif
4006 break;
4008 case CONST_INT:
4009 /* If the constant is negative, take its 1's complement and remask.
4010 Then see how many zero bits we have. */
4011 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4012 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4013 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4014 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4016 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
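    /* For instance, in SImode the constant -4 is 0xfffffffc; its
       complement masked to the mode is 3, floor_log2 (3) is 1, so we
       report 32 - 1 - 1 = 30 sign bit copies, matching the 30 leading
       one bits of the value.  */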
4018 case SUBREG:
4019 /* If this is a SUBREG for a promoted object that is sign-extended
4020 and we are looking at it in a wider mode, we know that at least the
4021 high-order bits are known to be sign bit copies. */
4023 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4025 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4026 known_x, known_mode, known_ret);
4027 return MAX ((int) bitwidth
4028 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4029 num0);
4032 /* For a smaller object, just ignore the high bits. */
4033 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4035 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4036 known_x, known_mode, known_ret);
4037 return MAX (1, (num0
4038 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4039 - bitwidth)));
4042 #ifdef WORD_REGISTER_OPERATIONS
4043 #ifdef LOAD_EXTEND_OP
4044 /* For paradoxical SUBREGs on machines where all register operations
4045 affect the entire register, just look inside. Note that we are
4046 passing MODE to the recursive call, so the number of sign bit copies
4047 will remain relative to that mode, not the inner mode. */
4049 /* This works only if loads sign extend. Otherwise, if we get a
4050 reload for the inner part, it may be loaded from the stack, and
4051 then we lose all sign bit copies that existed before the store
4052 to the stack. */
4054 if ((GET_MODE_SIZE (GET_MODE (x))
4055 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4056 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4057 && MEM_P (SUBREG_REG (x)))
4058 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4059 known_x, known_mode, known_ret);
4060 #endif
4061 #endif
4062 break;
4064 case SIGN_EXTRACT:
4065 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4066 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4067 break;
4069 case SIGN_EXTEND:
4070 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4071 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4072 known_x, known_mode, known_ret));
4074 case TRUNCATE:
4075 /* For a smaller object, just ignore the high bits. */
4076 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4077 known_x, known_mode, known_ret);
4078 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4079 - bitwidth)));
4081 case NOT:
4082 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4083 known_x, known_mode, known_ret);
4085 case ROTATE: case ROTATERT:
4086 /* If we are rotating left by a number of bits less than the number
4087 of sign bit copies, we can just subtract that amount from the
4088 number. */
4089 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4090 && INTVAL (XEXP (x, 1)) >= 0
4091 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4093 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4094 known_x, known_mode, known_ret);
4095 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4096 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4098 break;
4100 case NEG:
4101 /* In general, this subtracts one sign bit copy. But if the value
4102 is known to be positive, the number of sign bit copies is the
4103 same as that of the input. Finally, if the input has just one bit
4104 that might be nonzero, all the bits are copies of the sign bit. */
4105 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4106 known_x, known_mode, known_ret);
4107 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4108 return num0 > 1 ? num0 - 1 : 1;
4110 nonzero = nonzero_bits (XEXP (x, 0), mode);
4111 if (nonzero == 1)
4112 return bitwidth;
4114 if (num0 > 1
4115 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4116 num0--;
4118 return num0;
4120 case IOR: case AND: case XOR:
4121 case SMIN: case SMAX: case UMIN: case UMAX:
4122 /* Logical operations will preserve the number of sign-bit copies.
4123 MIN and MAX operations always return one of the operands. */
4124 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4125 known_x, known_mode, known_ret);
4126 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4127 known_x, known_mode, known_ret);
4128 return MIN (num0, num1);
4130 case PLUS: case MINUS:
4131 /* For addition and subtraction, we can have a 1-bit carry. However,
4132 if we are subtracting 1 from a positive number, there will not
4133 be such a carry. Furthermore, if the positive number is known to
4134 be 0 or 1, we know the result is either -1 or 0. */
4136 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4137 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4139 nonzero = nonzero_bits (XEXP (x, 0), mode);
4140 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4141 return (nonzero == 1 || nonzero == 0 ? bitwidth
4142 : bitwidth - floor_log2 (nonzero) - 1);
4145 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4146 known_x, known_mode, known_ret);
4147 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4148 known_x, known_mode, known_ret);
4149 result = MAX (1, MIN (num0, num1) - 1);
4151 #ifdef POINTERS_EXTEND_UNSIGNED
4152 /* If pointers extend signed and this is an addition or subtraction
4153 to a pointer in Pmode, all the bits above ptr_mode are known to be
4154 sign bit copies. */
4155 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4156 && (code == PLUS || code == MINUS)
4157 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4158 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4159 - GET_MODE_BITSIZE (ptr_mode) + 1),
4160 result);
4161 #endif
4162 return result;
4164 case MULT:
4165 /* The number of bits of the product is the sum of the number of
4166 bits of both terms. However, unless one of the terms is known
4167 to be positive, we must allow for an additional bit since negating
4168 a negative number can remove one sign bit copy. */
4170 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4171 known_x, known_mode, known_ret);
4172 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4173 known_x, known_mode, known_ret);
4175 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4176 if (result > 0
4177 && (bitwidth > HOST_BITS_PER_WIDE_INT
4178 || (((nonzero_bits (XEXP (x, 0), mode)
4179 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4180 && ((nonzero_bits (XEXP (x, 1), mode)
4181 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4182 result--;
4184 return MAX (1, result);
4186 case UDIV:
4187 /* The result must be <= the first operand. If the first operand
4188 has the high bit set, we know nothing about the number of sign
4189 bit copies. */
4190 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4191 return 1;
4192 else if ((nonzero_bits (XEXP (x, 0), mode)
4193 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4194 return 1;
4195 else
4196 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4197 known_x, known_mode, known_ret);
4199 case UMOD:
4200 /* The result must be <= the second operand. */
4201 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4202 known_x, known_mode, known_ret);
4204 case DIV:
4205 /* Similar to unsigned division, except that we have to worry about
4206 the case where the divisor is negative, in which case we have
4207 to add 1. */
4208 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4209 known_x, known_mode, known_ret);
4210 if (result > 1
4211 && (bitwidth > HOST_BITS_PER_WIDE_INT
4212 || (nonzero_bits (XEXP (x, 1), mode)
4213 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4214 result--;
4216 return result;
4218 case MOD:
4219 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4220 known_x, known_mode, known_ret);
4221 if (result > 1
4222 && (bitwidth > HOST_BITS_PER_WIDE_INT
4223 || (nonzero_bits (XEXP (x, 1), mode)
4224 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4225 result--;
4227 return result;
4229 case ASHIFTRT:
4230 /* An arithmetic right shift by a positive constant adds the shift
4231 count to the number of bits that equal the sign bit. */
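/* For example, (ashiftrt:SI X (const_int 8)) where X has 5 sign bit
copies has MIN (32, 5 + 8) = 13 copies.  */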
4232 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4233 known_x, known_mode, known_ret);
4234 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4235 && INTVAL (XEXP (x, 1)) > 0)
4236 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4238 return num0;
4240 case ASHIFT:
4241 /* Left shifts destroy copies. */
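/* For example, shifting left by 3 a value with 10 sign bit copies leaves
at least 10 - 3 = 7; a variable or out-of-range shift count tells us
nothing, so 1 is returned.  */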
4242 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4243 || INTVAL (XEXP (x, 1)) < 0
4244 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4245 return 1;
4247 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4248 known_x, known_mode, known_ret);
4249 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4251 case IF_THEN_ELSE:
4252 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4253 known_x, known_mode, known_ret);
4254 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4255 known_x, known_mode, known_ret);
4256 return MIN (num0, num1);
4258 case EQ: case NE: case GE: case GT: case LE: case LT:
4259 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4260 case GEU: case GTU: case LEU: case LTU:
4261 case UNORDERED: case ORDERED:
4262 /* If the constant is negative, take its 1's complement and remask.
4263 Then see how many zero bits we have. */
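/* For example, with STORE_FLAG_VALUE == 1 a comparison result is either
0 or 1, so bitwidth - floor_log2 (1) - 1 = bitwidth - 1 high-order bits
are copies of the sign bit.  */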
4264 nonzero = STORE_FLAG_VALUE;
4265 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4266 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4267 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4269 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4271 default:
4272 break;
4275 /* If we haven't been able to figure it out by one of the above rules,
4276 see if some of the high-order bits are known to be zero. If so,
4277 count those bits and return one less than that amount. If we can't
4278 safely compute the mask for this mode, always return BITWIDTH. */
4280 bitwidth = GET_MODE_BITSIZE (mode);
4281 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4282 return 1;
4284 nonzero = nonzero_bits (x, mode);
4285 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4286 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4289 /* Calculate the rtx_cost of a single instruction. A return value of
4290 zero indicates an instruction pattern without a known cost. */
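/* Illustrative use: a pass that holds an insn can call
insn_rtx_cost (PATTERN (insn)) and treat a zero return as "cost unknown"
rather than as a free instruction.  */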
4292 int
4293 insn_rtx_cost (rtx pat)
4295 int i, cost;
4296 rtx set;
4298 /* Extract the single set rtx from the instruction pattern.
4299 We can't use single_set since we only have the pattern. */
4300 if (GET_CODE (pat) == SET)
4301 set = pat;
4302 else if (GET_CODE (pat) == PARALLEL)
4304 set = NULL_RTX;
4305 for (i = 0; i < XVECLEN (pat, 0); i++)
4307 rtx x = XVECEXP (pat, 0, i);
4308 if (GET_CODE (x) == SET)
4310 if (set)
4311 return 0;
4312 set = x;
4315 if (!set)
4316 return 0;
4318 else
4319 return 0;
4321 cost = rtx_cost (SET_SRC (set), SET);
4322 return cost > 0 ? cost : COSTS_N_INSNS (1);
4325 /* Given an insn INSN and condition COND, return the condition in a
4326 canonical form to simplify testing by callers. Specifically:
4328 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4329 (2) Both operands will be machine operands; (cc0) will have been replaced.
4330 (3) If an operand is a constant, it will be the second operand.
4331 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4332 for GE, GEU, and LEU.
4334 If the condition cannot be understood, or is an inequality floating-point
4335 comparison which needs to be reversed, 0 will be returned.
4337 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4339 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4340 insn used in locating the condition was found. If a replacement test
4341 of the condition is desired, it should be placed in front of that
4342 insn and we will be sure that the inputs are still valid.
4344 If WANT_REG is nonzero, we wish the condition to be relative to that
4345 register, if possible. Therefore, do not canonicalize the condition
4346 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4347 to be a compare to a CC mode register.
4349 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4350 and at INSN. */
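/* For example, under rule (4) a condition such as (le (reg:SI 60)
(const_int 4)) is returned as (lt (reg:SI 60) (const_int 5)), and under
rule (3) a leading constant is swapped into the second operand position.
The register number here is purely illustrative.  */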
4352 rtx
4353 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4354 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4356 enum rtx_code code;
4357 rtx prev = insn;
4358 rtx set;
4359 rtx tem;
4360 rtx op0, op1;
4361 int reverse_code = 0;
4362 enum machine_mode mode;
4363 basic_block bb = BLOCK_FOR_INSN (insn);
4365 code = GET_CODE (cond);
4366 mode = GET_MODE (cond);
4367 op0 = XEXP (cond, 0);
4368 op1 = XEXP (cond, 1);
4370 if (reverse)
4371 code = reversed_comparison_code (cond, insn);
4372 if (code == UNKNOWN)
4373 return 0;
4375 if (earliest)
4376 *earliest = insn;
4378 /* If we are comparing a register with zero, see if the register is set
4379 in the previous insn to a COMPARE or a comparison operation. Perform
4380 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4381 in cse.c */
4383 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4384 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4385 && op1 == CONST0_RTX (GET_MODE (op0))
4386 && op0 != want_reg)
4388 /* Set nonzero when we find something of interest. */
4389 rtx x = 0;
4391 #ifdef HAVE_cc0
4392 /* If comparison with cc0, import actual comparison from compare
4393 insn. */
4394 if (op0 == cc0_rtx)
4396 if ((prev = prev_nonnote_insn (prev)) == 0
4397 || !NONJUMP_INSN_P (prev)
4398 || (set = single_set (prev)) == 0
4399 || SET_DEST (set) != cc0_rtx)
4400 return 0;
4402 op0 = SET_SRC (set);
4403 op1 = CONST0_RTX (GET_MODE (op0));
4404 if (earliest)
4405 *earliest = prev;
4407 #endif
4409 /* If this is a COMPARE, pick up the two things being compared. */
4410 if (GET_CODE (op0) == COMPARE)
4412 op1 = XEXP (op0, 1);
4413 op0 = XEXP (op0, 0);
4414 continue;
4416 else if (!REG_P (op0))
4417 break;
4419 /* Go back to the previous insn. Stop if it is not an INSN. We also
4420 stop if it isn't a single set or if it has a REG_INC note because
4421 we don't want to bother dealing with it. */
4423 if ((prev = prev_nonnote_insn (prev)) == 0
4424 || !NONJUMP_INSN_P (prev)
4425 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4426 /* In cfglayout mode, there do not have to be labels at the
4427 beginning of a block, or jumps at the end, so the previous
4428 conditions would not stop us when we reach bb boundary. */
4429 || BLOCK_FOR_INSN (prev) != bb)
4430 break;
4432 set = set_of (op0, prev);
4434 if (set
4435 && (GET_CODE (set) != SET
4436 || !rtx_equal_p (SET_DEST (set), op0)))
4437 break;
4439 /* If this is setting OP0, get what it sets it to if it looks
4440 relevant. */
4441 if (set)
4443 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4444 #ifdef FLOAT_STORE_FLAG_VALUE
4445 REAL_VALUE_TYPE fsfv;
4446 #endif
4448 /* ??? We may not combine comparisons done in a CCmode with
4449 comparisons not done in a CCmode. This is to aid targets
4450 like Alpha that have an IEEE compliant EQ instruction, and
4451 a non-IEEE compliant BEQ instruction. The use of CCmode is
4452 actually artificial, simply to prevent the combination, but
4453 should not affect other platforms.
4455 However, we must allow VOIDmode comparisons to match either
4456 CCmode or non-CCmode comparison, because some ports have
4457 modeless comparisons inside branch patterns.
4459 ??? This mode check should perhaps look more like the mode check
4460 in simplify_comparison in combine. */
4462 if ((GET_CODE (SET_SRC (set)) == COMPARE
4463 || (((code == NE
4464 || (code == LT
4465 && GET_MODE_CLASS (inner_mode) == MODE_INT
4466 && (GET_MODE_BITSIZE (inner_mode)
4467 <= HOST_BITS_PER_WIDE_INT)
4468 && (STORE_FLAG_VALUE
4469 & ((HOST_WIDE_INT) 1
4470 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4471 #ifdef FLOAT_STORE_FLAG_VALUE
4472 || (code == LT
4473 && SCALAR_FLOAT_MODE_P (inner_mode)
4474 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4475 REAL_VALUE_NEGATIVE (fsfv)))
4476 #endif
4478 && COMPARISON_P (SET_SRC (set))))
4479 && (((GET_MODE_CLASS (mode) == MODE_CC)
4480 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4481 || mode == VOIDmode || inner_mode == VOIDmode))
4482 x = SET_SRC (set);
4483 else if (((code == EQ
4484 || (code == GE
4485 && (GET_MODE_BITSIZE (inner_mode)
4486 <= HOST_BITS_PER_WIDE_INT)
4487 && GET_MODE_CLASS (inner_mode) == MODE_INT
4488 && (STORE_FLAG_VALUE
4489 & ((HOST_WIDE_INT) 1
4490 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4491 #ifdef FLOAT_STORE_FLAG_VALUE
4492 || (code == GE
4493 && SCALAR_FLOAT_MODE_P (inner_mode)
4494 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4495 REAL_VALUE_NEGATIVE (fsfv)))
4496 #endif
4498 && COMPARISON_P (SET_SRC (set))
4499 && (((GET_MODE_CLASS (mode) == MODE_CC)
4500 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4501 || mode == VOIDmode || inner_mode == VOIDmode))
4504 reverse_code = 1;
4505 x = SET_SRC (set);
4507 else
4508 break;
4511 else if (reg_set_p (op0, prev))
4512 /* If this sets OP0, but not directly, we have to give up. */
4513 break;
4515 if (x)
4517 /* If the caller is expecting the condition to be valid at INSN,
4518 make sure X doesn't change before INSN. */
4519 if (valid_at_insn_p)
4520 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4521 break;
4522 if (COMPARISON_P (x))
4523 code = GET_CODE (x);
4524 if (reverse_code)
4526 code = reversed_comparison_code (x, prev);
4527 if (code == UNKNOWN)
4528 return 0;
4529 reverse_code = 0;
4532 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4533 if (earliest)
4534 *earliest = prev;
4538 /* If constant is first, put it last. */
4539 if (CONSTANT_P (op0))
4540 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4542 /* If OP0 is the result of a comparison, we weren't able to find what
4543 was really being compared, so fail. */
4544 if (!allow_cc_mode
4545 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4546 return 0;
4548 /* Canonicalize any ordered comparison with integers involving equality
4549 if we can do computations in the relevant mode and we do not
4550 overflow. */
4552 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4553 && GET_CODE (op1) == CONST_INT
4554 && GET_MODE (op0) != VOIDmode
4555 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4557 HOST_WIDE_INT const_val = INTVAL (op1);
4558 unsigned HOST_WIDE_INT uconst_val = const_val;
4559 unsigned HOST_WIDE_INT max_val
4560 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4562 switch (code)
4564 case LE:
4565 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4566 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4567 break;
4569 /* When cross-compiling, const_val might be sign-extended from
4570 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT. */
4571 case GE:
4572 if ((HOST_WIDE_INT) (const_val & max_val)
4573 != (((HOST_WIDE_INT) 1
4574 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4575 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4576 break;
4578 case LEU:
4579 if (uconst_val < max_val)
4580 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4581 break;
4583 case GEU:
4584 if (uconst_val != 0)
4585 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4586 break;
4588 default:
4589 break;
4593 /* Never return CC0; return zero instead. */
4594 if (CC0_P (op0))
4595 return 0;
4597 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4600 /* Given a jump insn JUMP, return the condition that will cause it to branch
4601 to its JUMP_LABEL. If the condition cannot be understood, or is an
4602 inequality floating-point comparison which needs to be reversed, 0 will
4603 be returned.
4605 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4606 insn used in locating the condition was found. If a replacement test
4607 of the condition is desired, it should be placed in front of that
4608 insn and we will be sure that the inputs are still valid. If EARLIEST
4609 is null, the returned condition will be valid at INSN.
4611 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4612 compare to a CC mode register.
4614 VALID_AT_INSN_P is the same as for canonicalize_condition. */
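/* For example, for a conditional jump whose pattern is
(set (pc) (if_then_else (ne (reg) (const_int 0)) (label_ref L) (pc)))
this returns the canonicalized (ne (reg) (const_int 0)); if the label_ref
instead sits in the "else" arm, the condition is reversed before it is
canonicalized.  */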
4616 rtx
4617 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4619 rtx cond;
4620 int reverse;
4621 rtx set;
4623 /* If this is not a standard conditional jump, we can't parse it. */
4624 if (!JUMP_P (jump)
4625 || ! any_condjump_p (jump))
4626 return 0;
4627 set = pc_set (jump);
4629 cond = XEXP (SET_SRC (set), 0);
4631 /* If this branches to JUMP_LABEL when the condition is false, reverse
4632 the condition. */
4633 reverse
4634 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4635 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4637 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4638 allow_cc_mode, valid_at_insn_p);
4641 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4642 TARGET_MODE_REP_EXTENDED.
4644 Note that we assume that the property of
4645 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4646 narrower than mode B. I.e., if A is a mode narrower than B then in
4647 order to be able to operate on it in mode B, mode A needs to
4648 satisfy the requirements set by the representation of mode B. */
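/* For instance, on a hypothetical 64-bit target whose
mode_rep_extended (SImode, DImode) hook returns SIGN_EXTEND, the loop
below records num_sign_bit_copies_in_rep[DImode][SImode] == 32: all 32
high-order DImode bits must be copies of the SImode sign bit.  */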
4650 static void
4651 init_num_sign_bit_copies_in_rep (void)
4653 enum machine_mode mode, in_mode;
4655 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4656 in_mode = GET_MODE_WIDER_MODE (mode))
4657 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4658 mode = GET_MODE_WIDER_MODE (mode))
4660 enum machine_mode i;
4662 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4663 extends to the next widest mode. */
4664 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4665 || GET_MODE_WIDER_MODE (mode) == in_mode);
4667 /* We are in in_mode. Count how many bits outside of mode
4668 have to be copies of the sign-bit. */
4669 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4671 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4673 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4674 /* We can only check sign-bit copies starting from the
4675 top-bit. In order to be able to check the bits we
4676 have already seen we pretend that subsequent bits
4677 have to be sign-bit copies too. */
4678 || num_sign_bit_copies_in_rep [in_mode][mode])
4679 num_sign_bit_copies_in_rep [in_mode][mode]
4680 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4685 /* Suppose that truncation from the machine mode of X to MODE is not a
4686 no-op. See if there is anything special about X so that we can
4687 assume it already contains a truncated value of MODE. */
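/* Continuing the DImode/SImode illustration above: a DImode value whose
num_sign_bit_copies is at least 32 + 1 = 33 already satisfies the SImode
representation, so truncated_to_mode returns true for it.  */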
4689 bool
4690 truncated_to_mode (enum machine_mode mode, rtx x)
4692 /* This register has already been used in MODE without explicit
4693 truncation. */
4694 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4695 return true;
4697 /* See if we already satisfy the requirements of MODE. If yes we
4698 can just switch to MODE. */
4699 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4700 && (num_sign_bit_copies (x, GET_MODE (x))
4701 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4702 return true;
4704 return false;
4707 /* Initialize non_rtx_starting_operands, which is used to speed up
4708 for_each_rtx. */
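/* For example, SET has format "ee", so its entry becomes 0 (its first
operand is already an rtx), while CONST_INT has format "w" and gets -1
(no rtx operands to walk).  */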
4709 void
4710 init_rtlanal (void)
4712 int i;
4713 for (i = 0; i < NUM_RTX_CODE; i++)
4715 const char *format = GET_RTX_FORMAT (i);
4716 const char *first = strpbrk (format, "eEV");
4717 non_rtx_starting_operands[i] = first ? first - format : -1;
4720 init_num_sign_bit_copies_in_rep ();
4723 /* Check whether this is a constant pool constant. */
4724 bool
4725 constant_pool_constant_p (rtx x)
4727 x = avoid_constant_pool_reference (x);
4728 return GET_CODE (x) == CONST_DOUBLE;