Daily bump.
[official-gcc.git] / gcc / rtlanal.c
blobfd7fa017eec05a9f8296b0a226d7404c6e833a79
1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software
4 Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "toplev.h"
29 #include "rtl.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "target.h"
34 #include "output.h"
35 #include "tm_p.h"
36 #include "flags.h"
37 #include "real.h"
38 #include "regs.h"
39 #include "function.h"
41 /* Forward declarations */
42 static void set_of_1 (rtx, rtx, void *);
43 static bool covers_regno_p (rtx, unsigned int);
44 static bool covers_regno_no_parallel_p (rtx, unsigned int);
45 static int rtx_referenced_p_1 (rtx *, void *);
46 static int computed_jump_p_1 (rtx);
47 static void parms_set (rtx, rtx, void *);
49 static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
50 rtx, enum machine_mode,
51 unsigned HOST_WIDE_INT);
52 static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
53 enum machine_mode,
54 unsigned HOST_WIDE_INT);
55 static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
56 enum machine_mode,
57 unsigned int);
58 static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
59 enum machine_mode, unsigned int);
61 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
62 -1 if a code has no such operand. */
63 static int non_rtx_starting_operands[NUM_RTX_CODE];
65 /* Bit flags that specify the machine subtype we are compiling for.
66 Bits are tested using macros TARGET_... defined in the tm.h file
67 and set by `-m...' switches. Must be defined in rtlanal.c. */
69 int target_flags;
71 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
72 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
73 SIGN_EXTEND then while narrowing we also have to enforce the
74 representation and sign-extend the value to mode DESTINATION_REP.
76 If the value is already sign-extended to DESTINATION_REP mode we
77 can just switch to DESTINATION mode on it. For each pair of
78 integral modes SOURCE and DESTINATION, when truncating from SOURCE
79 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
80 contains the number of high-order bits in SOURCE that have to be
81 copies of the sign-bit so that we can do this mode-switch to
82 DESTINATION. */
84 static unsigned int
85 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
87 /* Return 1 if the value of X is unstable
88 (would be different at a different point in the program).
89 The frame pointer, arg pointer, etc. are considered stable
90 (within one function) and so is anything marked `unchanging'. */
92 int
93 rtx_unstable_p (rtx x)
95 RTX_CODE code = GET_CODE (x);
96 int i;
97 const char *fmt;
99 switch (code)
101 case MEM:
102 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
104 case CONST:
105 case CONST_INT:
106 case CONST_DOUBLE:
107 case CONST_VECTOR:
108 case SYMBOL_REF:
109 case LABEL_REF:
110 return 0;
112 case REG:
113 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
114 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
115 /* The arg pointer varies if it is not a fixed register. */
116 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
117 return 0;
118 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
119 /* ??? When call-clobbered, the value is stable modulo the restore
120 that must happen after a call. This currently screws up local-alloc
121 into believing that the restore is not needed. */
122 if (x == pic_offset_table_rtx)
123 return 0;
124 #endif
125 return 1;
127 case ASM_OPERANDS:
128 if (MEM_VOLATILE_P (x))
129 return 1;
131 /* Fall through. */
133 default:
134 break;
137 fmt = GET_RTX_FORMAT (code);
138 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
139 if (fmt[i] == 'e')
141 if (rtx_unstable_p (XEXP (x, i)))
142 return 1;
144 else if (fmt[i] == 'E')
146 int j;
147 for (j = 0; j < XVECLEN (x, i); j++)
148 if (rtx_unstable_p (XVECEXP (x, i, j)))
149 return 1;
152 return 0;
155 /* Return 1 if X has a value that can vary even between two
156 executions of the program. 0 means X can be compared reliably
157 against certain constants or near-constants.
158 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
159 zero, we are slightly more conservative.
160 The frame pointer and the arg pointer are considered constant. */
163 rtx_varies_p (rtx x, int for_alias)
165 RTX_CODE code;
166 int i;
167 const char *fmt;
169 if (!x)
170 return 0;
172 code = GET_CODE (x);
173 switch (code)
175 case MEM:
176 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
178 case CONST:
179 case CONST_INT:
180 case CONST_DOUBLE:
181 case CONST_VECTOR:
182 case SYMBOL_REF:
183 case LABEL_REF:
184 return 0;
186 case REG:
187 /* Note that we have to test for the actual rtx used for the frame
188 and arg pointers and not just the register number in case we have
189 eliminated the frame and/or arg pointer and are using it
190 for pseudos. */
191 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
192 /* The arg pointer varies if it is not a fixed register. */
193 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
194 return 0;
195 if (x == pic_offset_table_rtx
196 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
197 /* ??? When call-clobbered, the value is stable modulo the restore
198 that must happen after a call. This currently screws up
199 local-alloc into believing that the restore is not needed, so we
200 must return 0 only if we are called from alias analysis. */
201 && for_alias
202 #endif
204 return 0;
205 return 1;
207 case LO_SUM:
208 /* The operand 0 of a LO_SUM is considered constant
209 (in fact it is related specifically to operand 1)
210 during alias analysis. */
211 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
212 || rtx_varies_p (XEXP (x, 1), for_alias);
214 case ASM_OPERANDS:
215 if (MEM_VOLATILE_P (x))
216 return 1;
218 /* Fall through. */
220 default:
221 break;
224 fmt = GET_RTX_FORMAT (code);
225 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
226 if (fmt[i] == 'e')
228 if (rtx_varies_p (XEXP (x, i), for_alias))
229 return 1;
231 else if (fmt[i] == 'E')
233 int j;
234 for (j = 0; j < XVECLEN (x, i); j++)
235 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
236 return 1;
239 return 0;
242 /* Return nonzero if the use of X as an address in a MEM can cause a trap.
243 MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
244 whether nonzero is returned for unaligned memory accesses on strict
245 alignment machines. */
247 static int
248 rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
250 enum rtx_code code = GET_CODE (x);
252 switch (code)
254 case SYMBOL_REF:
255 return SYMBOL_REF_WEAK (x);
257 case LABEL_REF:
258 return 0;
260 case REG:
261 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
262 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
263 || x == stack_pointer_rtx
264 /* The arg pointer varies if it is not a fixed register. */
265 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
266 return 0;
267 /* All of the virtual frame registers are stack references. */
268 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
269 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
270 return 0;
271 return 1;
273 case CONST:
274 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
276 case PLUS:
277 /* An address is assumed not to trap if:
278 - it is an address that can't trap plus a constant integer,
279 with the proper remainder modulo the mode size if we are
280 considering unaligned memory references. */
281 if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
282 && GET_CODE (XEXP (x, 1)) == CONST_INT)
284 HOST_WIDE_INT offset;
286 if (!STRICT_ALIGNMENT
287 || !unaligned_mems
288 || GET_MODE_SIZE (mode) == 0)
289 return 0;
291 offset = INTVAL (XEXP (x, 1));
293 #ifdef SPARC_STACK_BOUNDARY_HACK
294 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
295 the real alignment of %sp. However, when it does this, the
296 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
297 if (SPARC_STACK_BOUNDARY_HACK
298 && (XEXP (x, 0) == stack_pointer_rtx
299 || XEXP (x, 0) == hard_frame_pointer_rtx))
300 offset -= STACK_POINTER_OFFSET;
301 #endif
303 return offset % GET_MODE_SIZE (mode) != 0;
306 /* - or it is the pic register plus a constant. */
307 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
308 return 0;
310 return 1;
312 case LO_SUM:
313 case PRE_MODIFY:
314 return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);
316 case PRE_DEC:
317 case PRE_INC:
318 case POST_DEC:
319 case POST_INC:
320 case POST_MODIFY:
321 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
323 default:
324 break;
327 /* If it isn't one of the case above, it can cause a trap. */
328 return 1;
331 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
334 rtx_addr_can_trap_p (rtx x)
336 return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
339 /* Return true if X is an address that is known to not be zero. */
341 bool
342 nonzero_address_p (rtx x)
344 enum rtx_code code = GET_CODE (x);
346 switch (code)
348 case SYMBOL_REF:
349 return !SYMBOL_REF_WEAK (x);
351 case LABEL_REF:
352 return true;
354 case REG:
355 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
356 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
357 || x == stack_pointer_rtx
358 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
359 return true;
360 /* All of the virtual frame registers are stack references. */
361 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
362 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
363 return true;
364 return false;
366 case CONST:
367 return nonzero_address_p (XEXP (x, 0));
369 case PLUS:
370 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
371 return nonzero_address_p (XEXP (x, 0));
372 /* Handle PIC references. */
373 else if (XEXP (x, 0) == pic_offset_table_rtx
374 && CONSTANT_P (XEXP (x, 1)))
375 return true;
376 return false;
378 case PRE_MODIFY:
379 /* Similar to the above; allow positive offsets. Further, since
380 auto-inc is only allowed in memories, the register must be a
381 pointer. */
382 if (GET_CODE (XEXP (x, 1)) == CONST_INT
383 && INTVAL (XEXP (x, 1)) > 0)
384 return true;
385 return nonzero_address_p (XEXP (x, 0));
387 case PRE_INC:
388 /* Similarly. Further, the offset is always positive. */
389 return true;
391 case PRE_DEC:
392 case POST_DEC:
393 case POST_INC:
394 case POST_MODIFY:
395 return nonzero_address_p (XEXP (x, 0));
397 case LO_SUM:
398 return nonzero_address_p (XEXP (x, 1));
400 default:
401 break;
404 /* If it isn't one of the case above, might be zero. */
405 return false;
408 /* Return 1 if X refers to a memory location whose address
409 cannot be compared reliably with constant addresses,
410 or if X refers to a BLKmode memory object.
411 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
412 zero, we are slightly more conservative. */
415 rtx_addr_varies_p (rtx x, int for_alias)
417 enum rtx_code code;
418 int i;
419 const char *fmt;
421 if (x == 0)
422 return 0;
424 code = GET_CODE (x);
425 if (code == MEM)
426 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
428 fmt = GET_RTX_FORMAT (code);
429 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
430 if (fmt[i] == 'e')
432 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
433 return 1;
435 else if (fmt[i] == 'E')
437 int j;
438 for (j = 0; j < XVECLEN (x, i); j++)
439 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
440 return 1;
442 return 0;
445 /* Return the value of the integer term in X, if one is apparent;
446 otherwise return 0.
447 Only obvious integer terms are detected.
448 This is used in cse.c with the `related_value' field. */
450 HOST_WIDE_INT
451 get_integer_term (rtx x)
453 if (GET_CODE (x) == CONST)
454 x = XEXP (x, 0);
456 if (GET_CODE (x) == MINUS
457 && GET_CODE (XEXP (x, 1)) == CONST_INT)
458 return - INTVAL (XEXP (x, 1));
459 if (GET_CODE (x) == PLUS
460 && GET_CODE (XEXP (x, 1)) == CONST_INT)
461 return INTVAL (XEXP (x, 1));
462 return 0;
465 /* If X is a constant, return the value sans apparent integer term;
466 otherwise return 0.
467 Only obvious integer terms are detected. */
470 get_related_value (rtx x)
472 if (GET_CODE (x) != CONST)
473 return 0;
474 x = XEXP (x, 0);
475 if (GET_CODE (x) == PLUS
476 && GET_CODE (XEXP (x, 1)) == CONST_INT)
477 return XEXP (x, 0);
478 else if (GET_CODE (x) == MINUS
479 && GET_CODE (XEXP (x, 1)) == CONST_INT)
480 return XEXP (x, 0);
481 return 0;
484 /* Return the number of places FIND appears within X. If COUNT_DEST is
485 zero, we do not count occurrences inside the destination of a SET. */
488 count_occurrences (rtx x, rtx find, int count_dest)
490 int i, j;
491 enum rtx_code code;
492 const char *format_ptr;
493 int count;
495 if (x == find)
496 return 1;
498 code = GET_CODE (x);
500 switch (code)
502 case REG:
503 case CONST_INT:
504 case CONST_DOUBLE:
505 case CONST_VECTOR:
506 case SYMBOL_REF:
507 case CODE_LABEL:
508 case PC:
509 case CC0:
510 return 0;
512 case MEM:
513 if (MEM_P (find) && rtx_equal_p (x, find))
514 return 1;
515 break;
517 case SET:
518 if (SET_DEST (x) == find && ! count_dest)
519 return count_occurrences (SET_SRC (x), find, count_dest);
520 break;
522 default:
523 break;
526 format_ptr = GET_RTX_FORMAT (code);
527 count = 0;
529 for (i = 0; i < GET_RTX_LENGTH (code); i++)
531 switch (*format_ptr++)
533 case 'e':
534 count += count_occurrences (XEXP (x, i), find, count_dest);
535 break;
537 case 'E':
538 for (j = 0; j < XVECLEN (x, i); j++)
539 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
540 break;
543 return count;
546 /* Nonzero if register REG appears somewhere within IN.
547 Also works if REG is not a register; in this case it checks
548 for a subexpression of IN that is Lisp "equal" to REG. */
551 reg_mentioned_p (rtx reg, rtx in)
553 const char *fmt;
554 int i;
555 enum rtx_code code;
557 if (in == 0)
558 return 0;
560 if (reg == in)
561 return 1;
563 if (GET_CODE (in) == LABEL_REF)
564 return reg == XEXP (in, 0);
566 code = GET_CODE (in);
568 switch (code)
570 /* Compare registers by number. */
571 case REG:
572 return REG_P (reg) && REGNO (in) == REGNO (reg);
574 /* These codes have no constituent expressions
575 and are unique. */
576 case SCRATCH:
577 case CC0:
578 case PC:
579 return 0;
581 case CONST_INT:
582 case CONST_VECTOR:
583 case CONST_DOUBLE:
584 /* These are kept unique for a given value. */
585 return 0;
587 default:
588 break;
591 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
592 return 1;
594 fmt = GET_RTX_FORMAT (code);
596 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
598 if (fmt[i] == 'E')
600 int j;
601 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
602 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
603 return 1;
605 else if (fmt[i] == 'e'
606 && reg_mentioned_p (reg, XEXP (in, i)))
607 return 1;
609 return 0;
612 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
613 no CODE_LABEL insn. */
616 no_labels_between_p (rtx beg, rtx end)
618 rtx p;
619 if (beg == end)
620 return 0;
621 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
622 if (LABEL_P (p))
623 return 0;
624 return 1;
627 /* Nonzero if register REG is used in an insn between
628 FROM_INSN and TO_INSN (exclusive of those two). */
631 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
633 rtx insn;
635 if (from_insn == to_insn)
636 return 0;
638 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
639 if (INSN_P (insn)
640 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
641 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
642 return 1;
643 return 0;
646 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
647 is entirely replaced by a new value and the only use is as a SET_DEST,
648 we do not consider it a reference. */
651 reg_referenced_p (rtx x, rtx body)
653 int i;
655 switch (GET_CODE (body))
657 case SET:
658 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
659 return 1;
661 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
662 of a REG that occupies all of the REG, the insn references X if
663 it is mentioned in the destination. */
664 if (GET_CODE (SET_DEST (body)) != CC0
665 && GET_CODE (SET_DEST (body)) != PC
666 && !REG_P (SET_DEST (body))
667 && ! (GET_CODE (SET_DEST (body)) == SUBREG
668 && REG_P (SUBREG_REG (SET_DEST (body)))
669 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
670 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
671 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
672 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
673 && reg_overlap_mentioned_p (x, SET_DEST (body)))
674 return 1;
675 return 0;
677 case ASM_OPERANDS:
678 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
679 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
680 return 1;
681 return 0;
683 case CALL:
684 case USE:
685 case IF_THEN_ELSE:
686 return reg_overlap_mentioned_p (x, body);
688 case TRAP_IF:
689 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
691 case PREFETCH:
692 return reg_overlap_mentioned_p (x, XEXP (body, 0));
694 case UNSPEC:
695 case UNSPEC_VOLATILE:
696 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
697 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
698 return 1;
699 return 0;
701 case PARALLEL:
702 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
703 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
704 return 1;
705 return 0;
707 case CLOBBER:
708 if (MEM_P (XEXP (body, 0)))
709 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
710 return 1;
711 return 0;
713 case COND_EXEC:
714 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
715 return 1;
716 return reg_referenced_p (x, COND_EXEC_CODE (body));
718 default:
719 return 0;
723 /* Nonzero if register REG is set or clobbered in an insn between
724 FROM_INSN and TO_INSN (exclusive of those two). */
727 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
729 rtx insn;
731 if (from_insn == to_insn)
732 return 0;
734 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
735 if (INSN_P (insn) && reg_set_p (reg, insn))
736 return 1;
737 return 0;
740 /* Internals of reg_set_between_p. */
742 reg_set_p (rtx reg, rtx insn)
744 /* We can be passed an insn or part of one. If we are passed an insn,
745 check if a side-effect of the insn clobbers REG. */
746 if (INSN_P (insn)
747 && (FIND_REG_INC_NOTE (insn, reg)
748 || (CALL_P (insn)
749 && ((REG_P (reg)
750 && REGNO (reg) < FIRST_PSEUDO_REGISTER
751 && TEST_HARD_REG_BIT (regs_invalidated_by_call,
752 REGNO (reg)))
753 || MEM_P (reg)
754 || find_reg_fusage (insn, CLOBBER, reg)))))
755 return 1;
757 return set_of (reg, insn) != NULL_RTX;
760 /* Similar to reg_set_between_p, but check all registers in X. Return 0
761 only if none of them are modified between START and END. Return 1 if
762 X contains a MEM; this routine does usememory aliasing. */
765 modified_between_p (rtx x, rtx start, rtx end)
767 enum rtx_code code = GET_CODE (x);
768 const char *fmt;
769 int i, j;
770 rtx insn;
772 if (start == end)
773 return 0;
775 switch (code)
777 case CONST_INT:
778 case CONST_DOUBLE:
779 case CONST_VECTOR:
780 case CONST:
781 case SYMBOL_REF:
782 case LABEL_REF:
783 return 0;
785 case PC:
786 case CC0:
787 return 1;
789 case MEM:
790 if (modified_between_p (XEXP (x, 0), start, end))
791 return 1;
792 if (MEM_READONLY_P (x))
793 return 0;
794 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
795 if (memory_modified_in_insn_p (x, insn))
796 return 1;
797 return 0;
798 break;
800 case REG:
801 return reg_set_between_p (x, start, end);
803 default:
804 break;
807 fmt = GET_RTX_FORMAT (code);
808 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
810 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
811 return 1;
813 else if (fmt[i] == 'E')
814 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
815 if (modified_between_p (XVECEXP (x, i, j), start, end))
816 return 1;
819 return 0;
822 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
823 of them are modified in INSN. Return 1 if X contains a MEM; this routine
824 does use memory aliasing. */
827 modified_in_p (rtx x, rtx insn)
829 enum rtx_code code = GET_CODE (x);
830 const char *fmt;
831 int i, j;
833 switch (code)
835 case CONST_INT:
836 case CONST_DOUBLE:
837 case CONST_VECTOR:
838 case CONST:
839 case SYMBOL_REF:
840 case LABEL_REF:
841 return 0;
843 case PC:
844 case CC0:
845 return 1;
847 case MEM:
848 if (modified_in_p (XEXP (x, 0), insn))
849 return 1;
850 if (MEM_READONLY_P (x))
851 return 0;
852 if (memory_modified_in_insn_p (x, insn))
853 return 1;
854 return 0;
855 break;
857 case REG:
858 return reg_set_p (x, insn);
860 default:
861 break;
864 fmt = GET_RTX_FORMAT (code);
865 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
867 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
868 return 1;
870 else if (fmt[i] == 'E')
871 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
872 if (modified_in_p (XVECEXP (x, i, j), insn))
873 return 1;
876 return 0;
879 /* Helper function for set_of. */
880 struct set_of_data
882 rtx found;
883 rtx pat;
886 static void
887 set_of_1 (rtx x, rtx pat, void *data1)
889 struct set_of_data *data = (struct set_of_data *) (data1);
890 if (rtx_equal_p (x, data->pat)
891 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
892 data->found = pat;
895 /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
896 (either directly or via STRICT_LOW_PART and similar modifiers). */
898 set_of (rtx pat, rtx insn)
900 struct set_of_data data;
901 data.found = NULL_RTX;
902 data.pat = pat;
903 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
904 return data.found;
907 /* Given an INSN, return a SET expression if this insn has only a single SET.
908 It may also have CLOBBERs, USEs, or SET whose output
909 will not be used, which we ignore. */
912 single_set_2 (rtx insn, rtx pat)
914 rtx set = NULL;
915 int set_verified = 1;
916 int i;
918 if (GET_CODE (pat) == PARALLEL)
920 for (i = 0; i < XVECLEN (pat, 0); i++)
922 rtx sub = XVECEXP (pat, 0, i);
923 switch (GET_CODE (sub))
925 case USE:
926 case CLOBBER:
927 break;
929 case SET:
930 /* We can consider insns having multiple sets, where all
931 but one are dead as single set insns. In common case
932 only single set is present in the pattern so we want
933 to avoid checking for REG_UNUSED notes unless necessary.
935 When we reach set first time, we just expect this is
936 the single set we are looking for and only when more
937 sets are found in the insn, we check them. */
938 if (!set_verified)
940 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
941 && !side_effects_p (set))
942 set = NULL;
943 else
944 set_verified = 1;
946 if (!set)
947 set = sub, set_verified = 0;
948 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
949 || side_effects_p (sub))
950 return NULL_RTX;
951 break;
953 default:
954 return NULL_RTX;
958 return set;
961 /* Given an INSN, return nonzero if it has more than one SET, else return
962 zero. */
965 multiple_sets (rtx insn)
967 int found;
968 int i;
970 /* INSN must be an insn. */
971 if (! INSN_P (insn))
972 return 0;
974 /* Only a PARALLEL can have multiple SETs. */
975 if (GET_CODE (PATTERN (insn)) == PARALLEL)
977 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
978 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
980 /* If we have already found a SET, then return now. */
981 if (found)
982 return 1;
983 else
984 found = 1;
988 /* Either zero or one SET. */
989 return 0;
992 /* Return nonzero if the destination of SET equals the source
993 and there are no side effects. */
996 set_noop_p (rtx set)
998 rtx src = SET_SRC (set);
999 rtx dst = SET_DEST (set);
1001 if (dst == pc_rtx && src == pc_rtx)
1002 return 1;
1004 if (MEM_P (dst) && MEM_P (src))
1005 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1007 if (GET_CODE (dst) == ZERO_EXTRACT)
1008 return rtx_equal_p (XEXP (dst, 0), src)
1009 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1010 && !side_effects_p (src);
1012 if (GET_CODE (dst) == STRICT_LOW_PART)
1013 dst = XEXP (dst, 0);
1015 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1017 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1018 return 0;
1019 src = SUBREG_REG (src);
1020 dst = SUBREG_REG (dst);
1023 return (REG_P (src) && REG_P (dst)
1024 && REGNO (src) == REGNO (dst));
1027 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1028 value to itself. */
1031 noop_move_p (rtx insn)
1033 rtx pat = PATTERN (insn);
1035 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1036 return 1;
1038 /* Insns carrying these notes are useful later on. */
1039 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1040 return 0;
1042 /* For now treat an insn with a REG_RETVAL note as a
1043 a special insn which should not be considered a no-op. */
1044 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1045 return 0;
1047 if (GET_CODE (pat) == SET && set_noop_p (pat))
1048 return 1;
1050 if (GET_CODE (pat) == PARALLEL)
1052 int i;
1053 /* If nothing but SETs of registers to themselves,
1054 this insn can also be deleted. */
1055 for (i = 0; i < XVECLEN (pat, 0); i++)
1057 rtx tem = XVECEXP (pat, 0, i);
1059 if (GET_CODE (tem) == USE
1060 || GET_CODE (tem) == CLOBBER)
1061 continue;
1063 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1064 return 0;
1067 return 1;
1069 return 0;
1073 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1074 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1075 If the object was modified, if we hit a partial assignment to X, or hit a
1076 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1077 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1078 be the src. */
1081 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1083 rtx p;
1085 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1086 p = PREV_INSN (p))
1087 if (INSN_P (p))
1089 rtx set = single_set (p);
1090 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1092 if (set && rtx_equal_p (x, SET_DEST (set)))
1094 rtx src = SET_SRC (set);
1096 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1097 src = XEXP (note, 0);
1099 if ((valid_to == NULL_RTX
1100 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1101 /* Reject hard registers because we don't usually want
1102 to use them; we'd rather use a pseudo. */
1103 && (! (REG_P (src)
1104 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1106 *pinsn = p;
1107 return src;
1111 /* If set in non-simple way, we don't have a value. */
1112 if (reg_set_p (x, p))
1113 break;
1116 return x;
1119 /* Return nonzero if register in range [REGNO, ENDREGNO)
1120 appears either explicitly or implicitly in X
1121 other than being stored into.
1123 References contained within the substructure at LOC do not count.
1124 LOC may be zero, meaning don't ignore anything. */
1127 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1128 rtx *loc)
1130 int i;
1131 unsigned int x_regno;
1132 RTX_CODE code;
1133 const char *fmt;
1135 repeat:
1136 /* The contents of a REG_NONNEG note is always zero, so we must come here
1137 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1138 if (x == 0)
1139 return 0;
1141 code = GET_CODE (x);
1143 switch (code)
1145 case REG:
1146 x_regno = REGNO (x);
1148 /* If we modifying the stack, frame, or argument pointer, it will
1149 clobber a virtual register. In fact, we could be more precise,
1150 but it isn't worth it. */
1151 if ((x_regno == STACK_POINTER_REGNUM
1152 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1153 || x_regno == ARG_POINTER_REGNUM
1154 #endif
1155 || x_regno == FRAME_POINTER_REGNUM)
1156 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1157 return 1;
1159 return (endregno > x_regno
1160 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1161 ? hard_regno_nregs[x_regno][GET_MODE (x)]
1162 : 1));
1164 case SUBREG:
1165 /* If this is a SUBREG of a hard reg, we can see exactly which
1166 registers are being modified. Otherwise, handle normally. */
1167 if (REG_P (SUBREG_REG (x))
1168 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1170 unsigned int inner_regno = subreg_regno (x);
1171 unsigned int inner_endregno
1172 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1173 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
1175 return endregno > inner_regno && regno < inner_endregno;
1177 break;
1179 case CLOBBER:
1180 case SET:
1181 if (&SET_DEST (x) != loc
1182 /* Note setting a SUBREG counts as referring to the REG it is in for
1183 a pseudo but not for hard registers since we can
1184 treat each word individually. */
1185 && ((GET_CODE (SET_DEST (x)) == SUBREG
1186 && loc != &SUBREG_REG (SET_DEST (x))
1187 && REG_P (SUBREG_REG (SET_DEST (x)))
1188 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1189 && refers_to_regno_p (regno, endregno,
1190 SUBREG_REG (SET_DEST (x)), loc))
1191 || (!REG_P (SET_DEST (x))
1192 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1193 return 1;
1195 if (code == CLOBBER || loc == &SET_SRC (x))
1196 return 0;
1197 x = SET_SRC (x);
1198 goto repeat;
1200 default:
1201 break;
1204 /* X does not match, so try its subexpressions. */
1206 fmt = GET_RTX_FORMAT (code);
1207 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1209 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1211 if (i == 0)
1213 x = XEXP (x, 0);
1214 goto repeat;
1216 else
1217 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1218 return 1;
1220 else if (fmt[i] == 'E')
1222 int j;
1223 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1224 if (loc != &XVECEXP (x, i, j)
1225 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1226 return 1;
1229 return 0;
1232 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1233 we check if any register number in X conflicts with the relevant register
1234 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1235 contains a MEM (we don't bother checking for memory addresses that can't
1236 conflict because we expect this to be a rare case. */
1239 reg_overlap_mentioned_p (rtx x, rtx in)
1241 unsigned int regno, endregno;
1243 /* If either argument is a constant, then modifying X can not
1244 affect IN. Here we look at IN, we can profitably combine
1245 CONSTANT_P (x) with the switch statement below. */
1246 if (CONSTANT_P (in))
1247 return 0;
1249 recurse:
1250 switch (GET_CODE (x))
1252 case STRICT_LOW_PART:
1253 case ZERO_EXTRACT:
1254 case SIGN_EXTRACT:
1255 /* Overly conservative. */
1256 x = XEXP (x, 0);
1257 goto recurse;
1259 case SUBREG:
1260 regno = REGNO (SUBREG_REG (x));
1261 if (regno < FIRST_PSEUDO_REGISTER)
1262 regno = subreg_regno (x);
1263 goto do_reg;
1265 case REG:
1266 regno = REGNO (x);
1267 do_reg:
1268 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1269 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1270 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1272 case MEM:
1274 const char *fmt;
1275 int i;
1277 if (MEM_P (in))
1278 return 1;
1280 fmt = GET_RTX_FORMAT (GET_CODE (in));
1281 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1282 if (fmt[i] == 'e')
1284 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1285 return 1;
1287 else if (fmt[i] == 'E')
1289 int j;
1290 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1291 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1292 return 1;
1295 return 0;
1298 case SCRATCH:
1299 case PC:
1300 case CC0:
1301 return reg_mentioned_p (x, in);
1303 case PARALLEL:
1305 int i;
1307 /* If any register in here refers to it we return true. */
1308 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1309 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1310 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1311 return 1;
1312 return 0;
1315 default:
1316 gcc_assert (CONSTANT_P (x));
1317 return 0;
1321 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1322 (X would be the pattern of an insn).
1323 FUN receives two arguments:
1324 the REG, MEM, CC0 or PC being stored in or clobbered,
1325 the SET or CLOBBER rtx that does the store.
1327 If the item being stored in or clobbered is a SUBREG of a hard register,
1328 the SUBREG will be passed. */
1330 void
1331 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1333 int i;
1335 if (GET_CODE (x) == COND_EXEC)
1336 x = COND_EXEC_CODE (x);
1338 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1340 rtx dest = SET_DEST (x);
1342 while ((GET_CODE (dest) == SUBREG
1343 && (!REG_P (SUBREG_REG (dest))
1344 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1345 || GET_CODE (dest) == ZERO_EXTRACT
1346 || GET_CODE (dest) == STRICT_LOW_PART)
1347 dest = XEXP (dest, 0);
1349 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1350 each of whose first operand is a register. */
1351 if (GET_CODE (dest) == PARALLEL)
1353 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1354 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1355 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1357 else
1358 (*fun) (dest, x, data);
1361 else if (GET_CODE (x) == PARALLEL)
1362 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1363 note_stores (XVECEXP (x, 0, i), fun, data);
1366 /* Like notes_stores, but call FUN for each expression that is being
1367 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1368 FUN for each expression, not any interior subexpressions. FUN receives a
1369 pointer to the expression and the DATA passed to this function.
1371 Note that this is not quite the same test as that done in reg_referenced_p
1372 since that considers something as being referenced if it is being
1373 partially set, while we do not. */
1375 void
1376 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1378 rtx body = *pbody;
1379 int i;
1381 switch (GET_CODE (body))
1383 case COND_EXEC:
1384 (*fun) (&COND_EXEC_TEST (body), data);
1385 note_uses (&COND_EXEC_CODE (body), fun, data);
1386 return;
1388 case PARALLEL:
1389 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1390 note_uses (&XVECEXP (body, 0, i), fun, data);
1391 return;
1393 case USE:
1394 (*fun) (&XEXP (body, 0), data);
1395 return;
1397 case ASM_OPERANDS:
1398 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1399 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1400 return;
1402 case TRAP_IF:
1403 (*fun) (&TRAP_CONDITION (body), data);
1404 return;
1406 case PREFETCH:
1407 (*fun) (&XEXP (body, 0), data);
1408 return;
1410 case UNSPEC:
1411 case UNSPEC_VOLATILE:
1412 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1413 (*fun) (&XVECEXP (body, 0, i), data);
1414 return;
1416 case CLOBBER:
1417 if (MEM_P (XEXP (body, 0)))
1418 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1419 return;
1421 case SET:
1423 rtx dest = SET_DEST (body);
1425 /* For sets we replace everything in source plus registers in memory
1426 expression in store and operands of a ZERO_EXTRACT. */
1427 (*fun) (&SET_SRC (body), data);
1429 if (GET_CODE (dest) == ZERO_EXTRACT)
1431 (*fun) (&XEXP (dest, 1), data);
1432 (*fun) (&XEXP (dest, 2), data);
1435 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1436 dest = XEXP (dest, 0);
1438 if (MEM_P (dest))
1439 (*fun) (&XEXP (dest, 0), data);
1441 return;
1443 default:
1444 /* All the other possibilities never store. */
1445 (*fun) (pbody, data);
1446 return;
1450 /* Return nonzero if X's old contents don't survive after INSN.
1451 This will be true if X is (cc0) or if X is a register and
1452 X dies in INSN or because INSN entirely sets X.
1454 "Entirely set" means set directly and not through a SUBREG, or
1455 ZERO_EXTRACT, so no trace of the old contents remains.
1456 Likewise, REG_INC does not count.
1458 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1459 but for this use that makes no difference, since regs don't overlap
1460 during their lifetimes. Therefore, this function may be used
1461 at any time after deaths have been computed (in flow.c).
1463 If REG is a hard reg that occupies multiple machine registers, this
1464 function will only return 1 if each of those registers will be replaced
1465 by INSN. */
1468 dead_or_set_p (rtx insn, rtx x)
1470 unsigned int regno, last_regno;
1471 unsigned int i;
1473 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1474 if (GET_CODE (x) == CC0)
1475 return 1;
1477 gcc_assert (REG_P (x));
1479 regno = REGNO (x);
1480 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1481 : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
1483 for (i = regno; i <= last_regno; i++)
1484 if (! dead_or_set_regno_p (insn, i))
1485 return 0;
1487 return 1;
1490 /* Return TRUE iff DEST is a register or subreg of a register and
1491 doesn't change the number of words of the inner register, and any
1492 part of the register is TEST_REGNO. */
1494 static bool
1495 covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
1497 unsigned int regno, endregno;
1499 if (GET_CODE (dest) == SUBREG
1500 && (((GET_MODE_SIZE (GET_MODE (dest))
1501 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1502 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1503 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1504 dest = SUBREG_REG (dest);
1506 if (!REG_P (dest))
1507 return false;
1509 regno = REGNO (dest);
1510 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1511 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1512 return (test_regno >= regno && test_regno < endregno);
1515 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1516 any member matches the covers_regno_no_parallel_p criteria. */
1518 static bool
1519 covers_regno_p (rtx dest, unsigned int test_regno)
1521 if (GET_CODE (dest) == PARALLEL)
1523 /* Some targets place small structures in registers for return
1524 values of functions, and those registers are wrapped in
1525 PARALLELs that we may see as the destination of a SET. */
1526 int i;
1528 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1530 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1531 if (inner != NULL_RTX
1532 && covers_regno_no_parallel_p (inner, test_regno))
1533 return true;
1536 return false;
1538 else
1539 return covers_regno_no_parallel_p (dest, test_regno);
1542 /* Utility function for dead_or_set_p to check an individual register. Also
1543 called from flow.c. */
1546 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1548 rtx pattern;
1550 /* See if there is a death note for something that includes TEST_REGNO. */
1551 if (find_regno_note (insn, REG_DEAD, test_regno))
1552 return 1;
1554 if (CALL_P (insn)
1555 && find_regno_fusage (insn, CLOBBER, test_regno))
1556 return 1;
1558 pattern = PATTERN (insn);
1560 if (GET_CODE (pattern) == COND_EXEC)
1561 pattern = COND_EXEC_CODE (pattern);
1563 if (GET_CODE (pattern) == SET)
1564 return covers_regno_p (SET_DEST (pattern), test_regno);
1565 else if (GET_CODE (pattern) == PARALLEL)
1567 int i;
1569 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1571 rtx body = XVECEXP (pattern, 0, i);
1573 if (GET_CODE (body) == COND_EXEC)
1574 body = COND_EXEC_CODE (body);
1576 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1577 && covers_regno_p (SET_DEST (body), test_regno))
1578 return 1;
1582 return 0;
1585 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1586 If DATUM is nonzero, look for one whose datum is DATUM. */
1589 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1591 rtx link;
1593 gcc_assert (insn);
1595 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1596 if (! INSN_P (insn))
1597 return 0;
1598 if (datum == 0)
1600 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1601 if (REG_NOTE_KIND (link) == kind)
1602 return link;
1603 return 0;
1606 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1607 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1608 return link;
1609 return 0;
1612 /* Return the reg-note of kind KIND in insn INSN which applies to register
1613 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1614 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1615 it might be the case that the note overlaps REGNO. */
1618 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1620 rtx link;
1622 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1623 if (! INSN_P (insn))
1624 return 0;
1626 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1627 if (REG_NOTE_KIND (link) == kind
1628 /* Verify that it is a register, so that scratch and MEM won't cause a
1629 problem here. */
1630 && REG_P (XEXP (link, 0))
1631 && REGNO (XEXP (link, 0)) <= regno
1632 && ((REGNO (XEXP (link, 0))
1633 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1634 : hard_regno_nregs[REGNO (XEXP (link, 0))]
1635 [GET_MODE (XEXP (link, 0))]))
1636 > regno))
1637 return link;
1638 return 0;
1641 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1642 has such a note. */
1645 find_reg_equal_equiv_note (rtx insn)
1647 rtx link;
1649 if (!INSN_P (insn))
1650 return 0;
1651 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1652 if (REG_NOTE_KIND (link) == REG_EQUAL
1653 || REG_NOTE_KIND (link) == REG_EQUIV)
1655 if (single_set (insn) == 0)
1656 return 0;
1657 return link;
1659 return NULL;
1662 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1663 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1666 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1668 /* If it's not a CALL_INSN, it can't possibly have a
1669 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1670 if (!CALL_P (insn))
1671 return 0;
1673 gcc_assert (datum);
1675 if (!REG_P (datum))
1677 rtx link;
1679 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1680 link;
1681 link = XEXP (link, 1))
1682 if (GET_CODE (XEXP (link, 0)) == code
1683 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1684 return 1;
1686 else
1688 unsigned int regno = REGNO (datum);
1690 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1691 to pseudo registers, so don't bother checking. */
1693 if (regno < FIRST_PSEUDO_REGISTER)
1695 unsigned int end_regno
1696 = regno + hard_regno_nregs[regno][GET_MODE (datum)];
1697 unsigned int i;
1699 for (i = regno; i < end_regno; i++)
1700 if (find_regno_fusage (insn, code, i))
1701 return 1;
1705 return 0;
1708 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1709 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1712 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
1714 rtx link;
1716 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1717 to pseudo registers, so don't bother checking. */
1719 if (regno >= FIRST_PSEUDO_REGISTER
1720 || !CALL_P (insn) )
1721 return 0;
1723 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1725 unsigned int regnote;
1726 rtx op, reg;
1728 if (GET_CODE (op = XEXP (link, 0)) == code
1729 && REG_P (reg = XEXP (op, 0))
1730 && (regnote = REGNO (reg)) <= regno
1731 && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
1732 return 1;
1735 return 0;
1738 /* Return true if INSN is a call to a pure function. */
1741 pure_call_p (rtx insn)
1743 rtx link;
1745 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1746 return 0;
1748 /* Look for the note that differentiates const and pure functions. */
1749 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1751 rtx u, m;
1753 if (GET_CODE (u = XEXP (link, 0)) == USE
1754 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1755 && GET_CODE (XEXP (m, 0)) == SCRATCH)
1756 return 1;
1759 return 0;
1762 /* Remove register note NOTE from the REG_NOTES of INSN. */
1764 void
1765 remove_note (rtx insn, rtx note)
1767 rtx link;
1769 if (note == NULL_RTX)
1770 return;
1772 if (REG_NOTES (insn) == note)
1774 REG_NOTES (insn) = XEXP (note, 1);
1775 return;
1778 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1779 if (XEXP (link, 1) == note)
1781 XEXP (link, 1) = XEXP (note, 1);
1782 return;
1785 gcc_unreachable ();
1788 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1789 return 1 if it is found. A simple equality test is used to determine if
1790 NODE matches. */
1793 in_expr_list_p (rtx listp, rtx node)
1795 rtx x;
1797 for (x = listp; x; x = XEXP (x, 1))
1798 if (node == XEXP (x, 0))
1799 return 1;
1801 return 0;
1804 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1805 remove that entry from the list if it is found.
1807 A simple equality test is used to determine if NODE matches. */
1809 void
1810 remove_node_from_expr_list (rtx node, rtx *listp)
1812 rtx temp = *listp;
1813 rtx prev = NULL_RTX;
1815 while (temp)
1817 if (node == XEXP (temp, 0))
1819 /* Splice the node out of the list. */
1820 if (prev)
1821 XEXP (prev, 1) = XEXP (temp, 1);
1822 else
1823 *listp = XEXP (temp, 1);
1825 return;
1828 prev = temp;
1829 temp = XEXP (temp, 1);
1833 /* Nonzero if X contains any volatile instructions. These are instructions
1834 which may cause unpredictable machine state instructions, and thus no
1835 instructions should be moved or combined across them. This includes
1836 only volatile asms and UNSPEC_VOLATILE instructions. */
1839 volatile_insn_p (rtx x)
1841 RTX_CODE code;
1843 code = GET_CODE (x);
1844 switch (code)
1846 case LABEL_REF:
1847 case SYMBOL_REF:
1848 case CONST_INT:
1849 case CONST:
1850 case CONST_DOUBLE:
1851 case CONST_VECTOR:
1852 case CC0:
1853 case PC:
1854 case REG:
1855 case SCRATCH:
1856 case CLOBBER:
1857 case ADDR_VEC:
1858 case ADDR_DIFF_VEC:
1859 case CALL:
1860 case MEM:
1861 return 0;
1863 case UNSPEC_VOLATILE:
1864 /* case TRAP_IF: This isn't clear yet. */
1865 return 1;
1867 case ASM_INPUT:
1868 case ASM_OPERANDS:
1869 if (MEM_VOLATILE_P (x))
1870 return 1;
1872 default:
1873 break;
1876 /* Recursively scan the operands of this expression. */
1879 const char *fmt = GET_RTX_FORMAT (code);
1880 int i;
1882 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1884 if (fmt[i] == 'e')
1886 if (volatile_insn_p (XEXP (x, i)))
1887 return 1;
1889 else if (fmt[i] == 'E')
1891 int j;
1892 for (j = 0; j < XVECLEN (x, i); j++)
1893 if (volatile_insn_p (XVECEXP (x, i, j)))
1894 return 1;
1898 return 0;
1901 /* Nonzero if X contains any volatile memory references
1902 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
1905 volatile_refs_p (rtx x)
1907 RTX_CODE code;
1909 code = GET_CODE (x);
1910 switch (code)
1912 case LABEL_REF:
1913 case SYMBOL_REF:
1914 case CONST_INT:
1915 case CONST:
1916 case CONST_DOUBLE:
1917 case CONST_VECTOR:
1918 case CC0:
1919 case PC:
1920 case REG:
1921 case SCRATCH:
1922 case CLOBBER:
1923 case ADDR_VEC:
1924 case ADDR_DIFF_VEC:
1925 return 0;
1927 case UNSPEC_VOLATILE:
1928 return 1;
1930 case MEM:
1931 case ASM_INPUT:
1932 case ASM_OPERANDS:
1933 if (MEM_VOLATILE_P (x))
1934 return 1;
1936 default:
1937 break;
1940 /* Recursively scan the operands of this expression. */
1943 const char *fmt = GET_RTX_FORMAT (code);
1944 int i;
1946 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1948 if (fmt[i] == 'e')
1950 if (volatile_refs_p (XEXP (x, i)))
1951 return 1;
1953 else if (fmt[i] == 'E')
1955 int j;
1956 for (j = 0; j < XVECLEN (x, i); j++)
1957 if (volatile_refs_p (XVECEXP (x, i, j)))
1958 return 1;
1962 return 0;
1965 /* Similar to above, except that it also rejects register pre- and post-
1966 incrementing. */
1969 side_effects_p (rtx x)
1971 RTX_CODE code;
1973 code = GET_CODE (x);
1974 switch (code)
1976 case LABEL_REF:
1977 case SYMBOL_REF:
1978 case CONST_INT:
1979 case CONST:
1980 case CONST_DOUBLE:
1981 case CONST_VECTOR:
1982 case CC0:
1983 case PC:
1984 case REG:
1985 case SCRATCH:
1986 case ADDR_VEC:
1987 case ADDR_DIFF_VEC:
1988 return 0;
1990 case CLOBBER:
1991 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
1992 when some combination can't be done. If we see one, don't think
1993 that we can simplify the expression. */
1994 return (GET_MODE (x) != VOIDmode);
1996 case PRE_INC:
1997 case PRE_DEC:
1998 case POST_INC:
1999 case POST_DEC:
2000 case PRE_MODIFY:
2001 case POST_MODIFY:
2002 case CALL:
2003 case UNSPEC_VOLATILE:
2004 /* case TRAP_IF: This isn't clear yet. */
2005 return 1;
2007 case MEM:
2008 case ASM_INPUT:
2009 case ASM_OPERANDS:
2010 if (MEM_VOLATILE_P (x))
2011 return 1;
2013 default:
2014 break;
2017 /* Recursively scan the operands of this expression. */
2020 const char *fmt = GET_RTX_FORMAT (code);
2021 int i;
2023 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2025 if (fmt[i] == 'e')
2027 if (side_effects_p (XEXP (x, i)))
2028 return 1;
2030 else if (fmt[i] == 'E')
2032 int j;
2033 for (j = 0; j < XVECLEN (x, i); j++)
2034 if (side_effects_p (XVECEXP (x, i, j)))
2035 return 1;
2039 return 0;
2042 enum may_trap_p_flags
2044 MTP_UNALIGNED_MEMS = 1,
2045 MTP_AFTER_MOVE = 2
2047 /* Return nonzero if evaluating rtx X might cause a trap.
2048 (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
2049 unaligned memory accesses on strict alignment machines. If
2050 (FLAGS & AFTER_MOVE) is true, returns nonzero even in case the expression
2051 cannot trap at its current location, but it might become trapping if moved
2052 elsewhere. */
2054 static int
2055 may_trap_p_1 (rtx x, unsigned flags)
2057 int i;
2058 enum rtx_code code;
2059 const char *fmt;
2060 bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;
2062 if (x == 0)
2063 return 0;
2064 code = GET_CODE (x);
2065 switch (code)
2067 /* Handle these cases quickly. */
2068 case CONST_INT:
2069 case CONST_DOUBLE:
2070 case CONST_VECTOR:
2071 case SYMBOL_REF:
2072 case LABEL_REF:
2073 case CONST:
2074 case PC:
2075 case CC0:
2076 case REG:
2077 case SCRATCH:
2078 return 0;
2080 case ASM_INPUT:
2081 case UNSPEC_VOLATILE:
2082 case TRAP_IF:
2083 return 1;
2085 case ASM_OPERANDS:
2086 return MEM_VOLATILE_P (x);
2088 /* Memory ref can trap unless it's a static var or a stack slot. */
2089 case MEM:
2090 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2091 reference; moving it out of condition might cause its address
2092 become invalid. */
2093 !(flags & MTP_AFTER_MOVE)
2094 && MEM_NOTRAP_P (x)
2095 && (!STRICT_ALIGNMENT || !unaligned_mems))
2096 return 0;
2097 return
2098 rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);
2100 /* Division by a non-constant might trap. */
2101 case DIV:
2102 case MOD:
2103 case UDIV:
2104 case UMOD:
2105 if (HONOR_SNANS (GET_MODE (x)))
2106 return 1;
2107 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2108 return flag_trapping_math;
2109 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2110 return 1;
2111 break;
2113 case EXPR_LIST:
2114 /* An EXPR_LIST is used to represent a function call. This
2115 certainly may trap. */
2116 return 1;
2118 case GE:
2119 case GT:
2120 case LE:
2121 case LT:
2122 case LTGT:
2123 case COMPARE:
2124 /* Some floating point comparisons may trap. */
2125 if (!flag_trapping_math)
2126 break;
2127 /* ??? There is no machine independent way to check for tests that trap
2128 when COMPARE is used, though many targets do make this distinction.
2129 For instance, sparc uses CCFPE for compares which generate exceptions
2130 and CCFP for compares which do not generate exceptions. */
2131 if (HONOR_NANS (GET_MODE (x)))
2132 return 1;
2133 /* But often the compare has some CC mode, so check operand
2134 modes as well. */
2135 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2136 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2137 return 1;
2138 break;
2140 case EQ:
2141 case NE:
2142 if (HONOR_SNANS (GET_MODE (x)))
2143 return 1;
2144 /* Often comparison is CC mode, so check operand modes. */
2145 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2146 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2147 return 1;
2148 break;
2150 case FIX:
2151 /* Conversion of floating point might trap. */
2152 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2153 return 1;
2154 break;
2156 case NEG:
2157 case ABS:
2158 case SUBREG:
2159 /* These operations don't trap even with floating point. */
2160 break;
2162 default:
2163 /* Any floating arithmetic may trap. */
2164 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2165 && flag_trapping_math)
2166 return 1;
2169 fmt = GET_RTX_FORMAT (code);
2170 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2172 if (fmt[i] == 'e')
2174 if (may_trap_p_1 (XEXP (x, i), flags))
2175 return 1;
2177 else if (fmt[i] == 'E')
2179 int j;
2180 for (j = 0; j < XVECLEN (x, i); j++)
2181 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2182 return 1;
2185 return 0;
2188 /* Return nonzero if evaluating rtx X might cause a trap. */
2191 may_trap_p (rtx x)
2193 return may_trap_p_1 (x, 0);
2196 /* Return nonzero if evaluating rtx X might cause a trap, when the expression
2197 is moved from its current location by some optimization. */
2200 may_trap_after_code_motion_p (rtx x)
2202 return may_trap_p_1 (x, MTP_AFTER_MOVE);
2205 /* Same as above, but additionally return nonzero if evaluating rtx X might
2206 cause a fault. We define a fault for the purpose of this function as a
2207 erroneous execution condition that cannot be encountered during the normal
2208 execution of a valid program; the typical example is an unaligned memory
2209 access on a strict alignment machine. The compiler guarantees that it
2210 doesn't generate code that will fault from a valid program, but this
2211 guarantee doesn't mean anything for individual instructions. Consider
2212 the following example:
2214 struct S { int d; union { char *cp; int *ip; }; };
2216 int foo(struct S *s)
2218 if (s->d == 1)
2219 return *s->ip;
2220 else
2221 return *s->cp;
2224 on a strict alignment machine. In a valid program, foo will never be
2225 invoked on a structure for which d is equal to 1 and the underlying
2226 unique field of the union not aligned on a 4-byte boundary, but the
2227 expression *s->ip might cause a fault if considered individually.
2229 At the RTL level, potentially problematic expressions will almost always
2230 verify may_trap_p; for example, the above dereference can be emitted as
2231 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2232 However, suppose that foo is inlined in a caller that causes s->cp to
2233 point to a local character variable and guarantees that s->d is not set
2234 to 1; foo may have been effectively translated into pseudo-RTL as:
2236 if ((reg:SI) == 1)
2237 (set (reg:SI) (mem:SI (%fp - 7)))
2238 else
2239 (set (reg:QI) (mem:QI (%fp - 7)))
2241 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2242 memory reference to a stack slot, but it will certainly cause a fault
2243 on a strict alignment machine. */
2246 may_trap_or_fault_p (rtx x)
2248 return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
2251 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2252 i.e., an inequality. */
2255 inequality_comparisons_p (rtx x)
2257 const char *fmt;
2258 int len, i;
2259 enum rtx_code code = GET_CODE (x);
2261 switch (code)
2263 case REG:
2264 case SCRATCH:
2265 case PC:
2266 case CC0:
2267 case CONST_INT:
2268 case CONST_DOUBLE:
2269 case CONST_VECTOR:
2270 case CONST:
2271 case LABEL_REF:
2272 case SYMBOL_REF:
2273 return 0;
2275 case LT:
2276 case LTU:
2277 case GT:
2278 case GTU:
2279 case LE:
2280 case LEU:
2281 case GE:
2282 case GEU:
2283 return 1;
2285 default:
2286 break;
2289 len = GET_RTX_LENGTH (code);
2290 fmt = GET_RTX_FORMAT (code);
2292 for (i = 0; i < len; i++)
2294 if (fmt[i] == 'e')
2296 if (inequality_comparisons_p (XEXP (x, i)))
2297 return 1;
2299 else if (fmt[i] == 'E')
2301 int j;
2302 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2303 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2304 return 1;
2308 return 0;
2311 /* Replace any occurrence of FROM in X with TO. The function does
2312 not enter into CONST_DOUBLE for the replace.
2314 Note that copying is not done so X must not be shared unless all copies
2315 are to be modified. */
2318 replace_rtx (rtx x, rtx from, rtx to)
2320 int i, j;
2321 const char *fmt;
2323 /* The following prevents loops occurrence when we change MEM in
2324 CONST_DOUBLE onto the same CONST_DOUBLE. */
2325 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2326 return x;
2328 if (x == from)
2329 return to;
2331 /* Allow this function to make replacements in EXPR_LISTs. */
2332 if (x == 0)
2333 return 0;
2335 if (GET_CODE (x) == SUBREG)
2337 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2339 if (GET_CODE (new) == CONST_INT)
2341 x = simplify_subreg (GET_MODE (x), new,
2342 GET_MODE (SUBREG_REG (x)),
2343 SUBREG_BYTE (x));
2344 gcc_assert (x);
2346 else
2347 SUBREG_REG (x) = new;
2349 return x;
2351 else if (GET_CODE (x) == ZERO_EXTEND)
2353 rtx new = replace_rtx (XEXP (x, 0), from, to);
2355 if (GET_CODE (new) == CONST_INT)
2357 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2358 new, GET_MODE (XEXP (x, 0)));
2359 gcc_assert (x);
2361 else
2362 XEXP (x, 0) = new;
2364 return x;
2367 fmt = GET_RTX_FORMAT (GET_CODE (x));
2368 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2370 if (fmt[i] == 'e')
2371 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2372 else if (fmt[i] == 'E')
2373 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2374 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2377 return x;
2380 /* Replace occurrences of the old label in *X with the new one.
2381 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2384 replace_label (rtx *x, void *data)
2386 rtx l = *x;
2387 rtx old_label = ((replace_label_data *) data)->r1;
2388 rtx new_label = ((replace_label_data *) data)->r2;
2389 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2391 if (l == NULL_RTX)
2392 return 0;
2394 if (GET_CODE (l) == SYMBOL_REF
2395 && CONSTANT_POOL_ADDRESS_P (l))
2397 rtx c = get_pool_constant (l);
2398 if (rtx_referenced_p (old_label, c))
2400 rtx new_c, new_l;
2401 replace_label_data *d = (replace_label_data *) data;
2403 /* Create a copy of constant C; replace the label inside
2404 but do not update LABEL_NUSES because uses in constant pool
2405 are not counted. */
2406 new_c = copy_rtx (c);
2407 d->update_label_nuses = false;
2408 for_each_rtx (&new_c, replace_label, data);
2409 d->update_label_nuses = update_label_nuses;
2411 /* Add the new constant NEW_C to constant pool and replace
2412 the old reference to constant by new reference. */
2413 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2414 *x = replace_rtx (l, l, new_l);
2416 return 0;
2419 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2420 field. This is not handled by for_each_rtx because it doesn't
2421 handle unprinted ('0') fields. */
2422 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2423 JUMP_LABEL (l) = new_label;
2425 if ((GET_CODE (l) == LABEL_REF
2426 || GET_CODE (l) == INSN_LIST)
2427 && XEXP (l, 0) == old_label)
2429 XEXP (l, 0) = new_label;
2430 if (update_label_nuses)
2432 ++LABEL_NUSES (new_label);
2433 --LABEL_NUSES (old_label);
2435 return 0;
2438 return 0;
2441 /* When *BODY is equal to X or X is directly referenced by *BODY
2442 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2443 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2445 static int
2446 rtx_referenced_p_1 (rtx *body, void *x)
2448 rtx y = (rtx) x;
2450 if (*body == NULL_RTX)
2451 return y == NULL_RTX;
2453 /* Return true if a label_ref *BODY refers to label Y. */
2454 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2455 return XEXP (*body, 0) == y;
2457 /* If *BODY is a reference to pool constant traverse the constant. */
2458 if (GET_CODE (*body) == SYMBOL_REF
2459 && CONSTANT_POOL_ADDRESS_P (*body))
2460 return rtx_referenced_p (y, get_pool_constant (*body));
2462 /* By default, compare the RTL expressions. */
2463 return rtx_equal_p (*body, y);
2466 /* Return true if X is referenced in BODY. */
2469 rtx_referenced_p (rtx x, rtx body)
2471 return for_each_rtx (&body, rtx_referenced_p_1, x);
2474 /* If INSN is a tablejump return true and store the label (before jump table) to
2475 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2477 bool
2478 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2480 rtx label, table;
2482 if (JUMP_P (insn)
2483 && (label = JUMP_LABEL (insn)) != NULL_RTX
2484 && (table = next_active_insn (label)) != NULL_RTX
2485 && JUMP_P (table)
2486 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2487 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2489 if (labelp)
2490 *labelp = label;
2491 if (tablep)
2492 *tablep = table;
2493 return true;
2495 return false;
2498 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2499 constant that is not in the constant pool and not in the condition
2500 of an IF_THEN_ELSE. */
2502 static int
2503 computed_jump_p_1 (rtx x)
2505 enum rtx_code code = GET_CODE (x);
2506 int i, j;
2507 const char *fmt;
2509 switch (code)
2511 case LABEL_REF:
2512 case PC:
2513 return 0;
2515 case CONST:
2516 case CONST_INT:
2517 case CONST_DOUBLE:
2518 case CONST_VECTOR:
2519 case SYMBOL_REF:
2520 case REG:
2521 return 1;
2523 case MEM:
2524 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2525 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2527 case IF_THEN_ELSE:
2528 return (computed_jump_p_1 (XEXP (x, 1))
2529 || computed_jump_p_1 (XEXP (x, 2)));
2531 default:
2532 break;
2535 fmt = GET_RTX_FORMAT (code);
2536 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2538 if (fmt[i] == 'e'
2539 && computed_jump_p_1 (XEXP (x, i)))
2540 return 1;
2542 else if (fmt[i] == 'E')
2543 for (j = 0; j < XVECLEN (x, i); j++)
2544 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2545 return 1;
2548 return 0;
2551 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2553 Tablejumps and casesi insns are not considered indirect jumps;
2554 we can recognize them by a (use (label_ref)). */
2557 computed_jump_p (rtx insn)
2559 int i;
2560 if (JUMP_P (insn))
2562 rtx pat = PATTERN (insn);
2564 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2565 return 0;
2566 else if (GET_CODE (pat) == PARALLEL)
2568 int len = XVECLEN (pat, 0);
2569 int has_use_labelref = 0;
2571 for (i = len - 1; i >= 0; i--)
2572 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2573 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2574 == LABEL_REF))
2575 has_use_labelref = 1;
2577 if (! has_use_labelref)
2578 for (i = len - 1; i >= 0; i--)
2579 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2580 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2581 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2582 return 1;
2584 else if (GET_CODE (pat) == SET
2585 && SET_DEST (pat) == pc_rtx
2586 && computed_jump_p_1 (SET_SRC (pat)))
2587 return 1;
2589 return 0;
2592 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2593 calls. Processes the subexpressions of EXP and passes them to F. */
2594 static int
2595 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2597 int result, i, j;
2598 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2599 rtx *x;
2601 for (; format[n] != '\0'; n++)
2603 switch (format[n])
2605 case 'e':
2606 /* Call F on X. */
2607 x = &XEXP (exp, n);
2608 result = (*f) (x, data);
2609 if (result == -1)
2610 /* Do not traverse sub-expressions. */
2611 continue;
2612 else if (result != 0)
2613 /* Stop the traversal. */
2614 return result;
2616 if (*x == NULL_RTX)
2617 /* There are no sub-expressions. */
2618 continue;
2620 i = non_rtx_starting_operands[GET_CODE (*x)];
2621 if (i >= 0)
2623 result = for_each_rtx_1 (*x, i, f, data);
2624 if (result != 0)
2625 return result;
2627 break;
2629 case 'V':
2630 case 'E':
2631 if (XVEC (exp, n) == 0)
2632 continue;
2633 for (j = 0; j < XVECLEN (exp, n); ++j)
2635 /* Call F on X. */
2636 x = &XVECEXP (exp, n, j);
2637 result = (*f) (x, data);
2638 if (result == -1)
2639 /* Do not traverse sub-expressions. */
2640 continue;
2641 else if (result != 0)
2642 /* Stop the traversal. */
2643 return result;
2645 if (*x == NULL_RTX)
2646 /* There are no sub-expressions. */
2647 continue;
2649 i = non_rtx_starting_operands[GET_CODE (*x)];
2650 if (i >= 0)
2652 result = for_each_rtx_1 (*x, i, f, data);
2653 if (result != 0)
2654 return result;
2657 break;
2659 default:
2660 /* Nothing to do. */
2661 break;
2665 return 0;
2668 /* Traverse X via depth-first search, calling F for each
2669 sub-expression (including X itself). F is also passed the DATA.
2670 If F returns -1, do not traverse sub-expressions, but continue
2671 traversing the rest of the tree. If F ever returns any other
2672 nonzero value, stop the traversal, and return the value returned
2673 by F. Otherwise, return 0. This function does not traverse inside
2674 tree structure that contains RTX_EXPRs, or into sub-expressions
2675 whose format code is `0' since it is not known whether or not those
2676 codes are actually RTL.
2678 This routine is very general, and could (should?) be used to
2679 implement many of the other routines in this file. */
2682 for_each_rtx (rtx *x, rtx_function f, void *data)
2684 int result;
2685 int i;
2687 /* Call F on X. */
2688 result = (*f) (x, data);
2689 if (result == -1)
2690 /* Do not traverse sub-expressions. */
2691 return 0;
2692 else if (result != 0)
2693 /* Stop the traversal. */
2694 return result;
2696 if (*x == NULL_RTX)
2697 /* There are no sub-expressions. */
2698 return 0;
2700 i = non_rtx_starting_operands[GET_CODE (*x)];
2701 if (i < 0)
2702 return 0;
2704 return for_each_rtx_1 (*x, i, f, data);
2708 /* Searches X for any reference to REGNO, returning the rtx of the
2709 reference found if any. Otherwise, returns NULL_RTX. */
2712 regno_use_in (unsigned int regno, rtx x)
2714 const char *fmt;
2715 int i, j;
2716 rtx tem;
2718 if (REG_P (x) && REGNO (x) == regno)
2719 return x;
2721 fmt = GET_RTX_FORMAT (GET_CODE (x));
2722 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2724 if (fmt[i] == 'e')
2726 if ((tem = regno_use_in (regno, XEXP (x, i))))
2727 return tem;
2729 else if (fmt[i] == 'E')
2730 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2731 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2732 return tem;
2735 return NULL_RTX;
2738 /* Return a value indicating whether OP, an operand of a commutative
2739 operation, is preferred as the first or second operand. The higher
2740 the value, the stronger the preference for being the first operand.
2741 We use negative values to indicate a preference for the first operand
2742 and positive values for the second operand. */
2745 commutative_operand_precedence (rtx op)
2747 enum rtx_code code = GET_CODE (op);
2749 /* Constants always come the second operand. Prefer "nice" constants. */
2750 if (code == CONST_INT)
2751 return -7;
2752 if (code == CONST_DOUBLE)
2753 return -6;
2754 op = avoid_constant_pool_reference (op);
2755 code = GET_CODE (op);
2757 switch (GET_RTX_CLASS (code))
2759 case RTX_CONST_OBJ:
2760 if (code == CONST_INT)
2761 return -5;
2762 if (code == CONST_DOUBLE)
2763 return -4;
2764 return -3;
2766 case RTX_EXTRA:
2767 /* SUBREGs of objects should come second. */
2768 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2769 return -2;
2771 if (!CONSTANT_P (op))
2772 return 0;
2773 else
2774 /* As for RTX_CONST_OBJ. */
2775 return -3;
2777 case RTX_OBJ:
2778 /* Complex expressions should be the first, so decrease priority
2779 of objects. */
2780 return -1;
2782 case RTX_COMM_ARITH:
2783 /* Prefer operands that are themselves commutative to be first.
2784 This helps to make things linear. In particular,
2785 (and (and (reg) (reg)) (not (reg))) is canonical. */
2786 return 4;
2788 case RTX_BIN_ARITH:
2789 /* If only one operand is a binary expression, it will be the first
2790 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2791 is canonical, although it will usually be further simplified. */
2792 return 2;
2794 case RTX_UNARY:
2795 /* Then prefer NEG and NOT. */
2796 if (code == NEG || code == NOT)
2797 return 1;
2799 default:
2800 return 0;
2804 /* Return 1 iff it is necessary to swap operands of commutative operation
2805 in order to canonicalize expression. */
2808 swap_commutative_operands_p (rtx x, rtx y)
2810 return (commutative_operand_precedence (x)
2811 < commutative_operand_precedence (y));
2814 /* Return 1 if X is an autoincrement side effect and the register is
2815 not the stack pointer. */
2817 auto_inc_p (rtx x)
2819 switch (GET_CODE (x))
2821 case PRE_INC:
2822 case POST_INC:
2823 case PRE_DEC:
2824 case POST_DEC:
2825 case PRE_MODIFY:
2826 case POST_MODIFY:
2827 /* There are no REG_INC notes for SP. */
2828 if (XEXP (x, 0) != stack_pointer_rtx)
2829 return 1;
2830 default:
2831 break;
2833 return 0;
2836 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2838 loc_mentioned_in_p (rtx *loc, rtx in)
2840 enum rtx_code code;
2841 const char *fmt;
2842 int i, j;
2844 if (!in)
2845 return 0;
2847 code = GET_CODE (in);
2848 fmt = GET_RTX_FORMAT (code);
2849 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2851 if (loc == &in->u.fld[i].rt_rtx)
2852 return 1;
2853 if (fmt[i] == 'e')
2855 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2856 return 1;
2858 else if (fmt[i] == 'E')
2859 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2860 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2861 return 1;
2863 return 0;
2866 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2867 and SUBREG_BYTE, return the bit offset where the subreg begins
2868 (counting from the least significant bit of the operand). */
2870 unsigned int
2871 subreg_lsb_1 (enum machine_mode outer_mode,
2872 enum machine_mode inner_mode,
2873 unsigned int subreg_byte)
2875 unsigned int bitpos;
2876 unsigned int byte;
2877 unsigned int word;
2879 /* A paradoxical subreg begins at bit position 0. */
2880 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2881 return 0;
2883 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2884 /* If the subreg crosses a word boundary ensure that
2885 it also begins and ends on a word boundary. */
2886 gcc_assert (!((subreg_byte % UNITS_PER_WORD
2887 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2888 && (subreg_byte % UNITS_PER_WORD
2889 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
2891 if (WORDS_BIG_ENDIAN)
2892 word = (GET_MODE_SIZE (inner_mode)
2893 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
2894 else
2895 word = subreg_byte / UNITS_PER_WORD;
2896 bitpos = word * BITS_PER_WORD;
2898 if (BYTES_BIG_ENDIAN)
2899 byte = (GET_MODE_SIZE (inner_mode)
2900 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
2901 else
2902 byte = subreg_byte % UNITS_PER_WORD;
2903 bitpos += byte * BITS_PER_UNIT;
2905 return bitpos;
2908 /* Given a subreg X, return the bit offset where the subreg begins
2909 (counting from the least significant bit of the reg). */
2911 unsigned int
2912 subreg_lsb (rtx x)
2914 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
2915 SUBREG_BYTE (x));
2918 /* This function returns the regno offset of a subreg expression.
2919 xregno - A regno of an inner hard subreg_reg (or what will become one).
2920 xmode - The mode of xregno.
2921 offset - The byte offset.
2922 ymode - The mode of a top level SUBREG (or what may become one).
2923 RETURN - The regno offset which would be used. */
2924 unsigned int
2925 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
2926 unsigned int offset, enum machine_mode ymode)
2928 int nregs_xmode, nregs_ymode;
2929 int mode_multiple, nregs_multiple;
2930 int y_offset;
2932 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
2934 /* Adjust nregs_xmode to allow for 'holes'. */
2935 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
2936 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
2937 else
2938 nregs_xmode = hard_regno_nregs[xregno][xmode];
2940 nregs_ymode = hard_regno_nregs[xregno][ymode];
2942 /* If this is a big endian paradoxical subreg, which uses more actual
2943 hard registers than the original register, we must return a negative
2944 offset so that we find the proper highpart of the register. */
2945 if (offset == 0
2946 && nregs_ymode > nregs_xmode
2947 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2948 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
2949 return nregs_xmode - nregs_ymode;
2951 if (offset == 0 || nregs_xmode == nregs_ymode)
2952 return 0;
2954 /* Size of ymode must not be greater than the size of xmode. */
2955 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
2956 gcc_assert (mode_multiple != 0);
2958 y_offset = offset / GET_MODE_SIZE (ymode);
2959 nregs_multiple = nregs_xmode / nregs_ymode;
2960 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
2963 /* This function returns true when the offset is representable via
2964 subreg_offset in the given regno.
2965 xregno - A regno of an inner hard subreg_reg (or what will become one).
2966 xmode - The mode of xregno.
2967 offset - The byte offset.
2968 ymode - The mode of a top level SUBREG (or what may become one).
2969 RETURN - Whether the offset is representable. */
2970 bool
2971 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
2972 unsigned int offset, enum machine_mode ymode)
2974 int nregs_xmode, nregs_ymode;
2975 int mode_multiple, nregs_multiple;
2976 int y_offset;
2977 int regsize_xmode, regsize_ymode;
2979 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
2981 /* If there are holes in a non-scalar mode in registers, we expect
2982 that it is made up of its units concatenated together. */
2983 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
2985 enum machine_mode xmode_unit;
2987 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
2988 if (GET_MODE_INNER (xmode) == VOIDmode)
2989 xmode_unit = xmode;
2990 else
2991 xmode_unit = GET_MODE_INNER (xmode);
2992 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
2993 gcc_assert (nregs_xmode
2994 == (GET_MODE_NUNITS (xmode)
2995 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
2996 gcc_assert (hard_regno_nregs[xregno][xmode]
2997 == (hard_regno_nregs[xregno][xmode_unit]
2998 * GET_MODE_NUNITS (xmode)));
3000 /* You can only ask for a SUBREG of a value with holes in the middle
3001 if you don't cross the holes. (Such a SUBREG should be done by
3002 picking a different register class, or doing it in memory if
3003 necessary.) An example of a value with holes is XCmode on 32-bit
3004 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3005 3 for each part, but in memory it's two 128-bit parts.
3006 Padding is assumed to be at the end (not necessarily the 'high part')
3007 of each unit. */
3008 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3009 < GET_MODE_NUNITS (xmode))
3010 && (offset / GET_MODE_SIZE (xmode_unit)
3011 != ((offset + GET_MODE_SIZE (ymode) - 1)
3012 / GET_MODE_SIZE (xmode_unit))))
3013 return false;
3015 else
3016 nregs_xmode = hard_regno_nregs[xregno][xmode];
3018 nregs_ymode = hard_regno_nregs[xregno][ymode];
3020 /* Paradoxical subregs are otherwise valid. */
3021 if (offset == 0
3022 && nregs_ymode > nregs_xmode
3023 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3024 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3025 return true;
3027 /* If registers store different numbers of bits in the different
3028 modes, we cannot generally form this subreg. */
3029 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3030 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3031 if (regsize_xmode > regsize_ymode && nregs_ymode > 1)
3032 return false;
3033 if (regsize_ymode > regsize_xmode && nregs_xmode > 1)
3034 return false;
3036 /* Lowpart subregs are otherwise valid. */
3037 if (offset == subreg_lowpart_offset (ymode, xmode))
3038 return true;
3040 /* This should always pass, otherwise we don't know how to verify
3041 the constraint. These conditions may be relaxed but
3042 subreg_regno_offset would need to be redesigned. */
3043 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3044 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3046 /* The XMODE value can be seen as a vector of NREGS_XMODE
3047 values. The subreg must represent a lowpart of given field.
3048 Compute what field it is. */
3049 offset -= subreg_lowpart_offset (ymode,
3050 mode_for_size (GET_MODE_BITSIZE (xmode)
3051 / nregs_xmode,
3052 MODE_INT, 0));
3054 /* Size of ymode must not be greater than the size of xmode. */
3055 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3056 gcc_assert (mode_multiple != 0);
3058 y_offset = offset / GET_MODE_SIZE (ymode);
3059 nregs_multiple = nregs_xmode / nregs_ymode;
3061 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3062 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3064 return (!(y_offset % (mode_multiple / nregs_multiple)));
3067 /* Return the final regno that a subreg expression refers to. */
3068 unsigned int
3069 subreg_regno (rtx x)
3071 unsigned int ret;
3072 rtx subreg = SUBREG_REG (x);
3073 int regno = REGNO (subreg);
3075 ret = regno + subreg_regno_offset (regno,
3076 GET_MODE (subreg),
3077 SUBREG_BYTE (x),
3078 GET_MODE (x));
3079 return ret;
3082 struct parms_set_data
3084 int nregs;
3085 HARD_REG_SET regs;
3088 /* Helper function for noticing stores to parameter registers. */
3089 static void
3090 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3092 struct parms_set_data *d = data;
3093 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3094 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3096 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3097 d->nregs--;
3101 /* Look backward for first parameter to be loaded.
3102 Note that loads of all parameters will not necessarily be
3103 found if CSE has eliminated some of them (e.g., an argument
3104 to the outer function is passed down as a parameter).
3105 Do not skip BOUNDARY. */
3107 find_first_parameter_load (rtx call_insn, rtx boundary)
3109 struct parms_set_data parm;
3110 rtx p, before, first_set;
3112 /* Since different machines initialize their parameter registers
3113 in different orders, assume nothing. Collect the set of all
3114 parameter registers. */
3115 CLEAR_HARD_REG_SET (parm.regs);
3116 parm.nregs = 0;
3117 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3118 if (GET_CODE (XEXP (p, 0)) == USE
3119 && REG_P (XEXP (XEXP (p, 0), 0)))
3121 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3123 /* We only care about registers which can hold function
3124 arguments. */
3125 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3126 continue;
3128 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3129 parm.nregs++;
3131 before = call_insn;
3132 first_set = call_insn;
3134 /* Search backward for the first set of a register in this set. */
3135 while (parm.nregs && before != boundary)
3137 before = PREV_INSN (before);
3139 /* It is possible that some loads got CSEed from one call to
3140 another. Stop in that case. */
3141 if (CALL_P (before))
3142 break;
3144 /* Our caller needs either ensure that we will find all sets
3145 (in case code has not been optimized yet), or take care
3146 for possible labels in a way by setting boundary to preceding
3147 CODE_LABEL. */
3148 if (LABEL_P (before))
3150 gcc_assert (before == boundary);
3151 break;
3154 if (INSN_P (before))
3156 int nregs_old = parm.nregs;
3157 note_stores (PATTERN (before), parms_set, &parm);
3158 /* If we found something that did not set a parameter reg,
3159 we're done. Do not keep going, as that might result
3160 in hoisting an insn before the setting of a pseudo
3161 that is used by the hoisted insn. */
3162 if (nregs_old != parm.nregs)
3163 first_set = before;
3164 else
3165 break;
3168 return first_set;
3171 /* Return true if we should avoid inserting code between INSN and preceding
3172 call instruction. */
3174 bool
3175 keep_with_call_p (rtx insn)
3177 rtx set;
3179 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3181 if (REG_P (SET_DEST (set))
3182 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3183 && fixed_regs[REGNO (SET_DEST (set))]
3184 && general_operand (SET_SRC (set), VOIDmode))
3185 return true;
3186 if (REG_P (SET_SRC (set))
3187 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3188 && REG_P (SET_DEST (set))
3189 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3190 return true;
3191 /* There may be a stack pop just after the call and before the store
3192 of the return register. Search for the actual store when deciding
3193 if we can break or not. */
3194 if (SET_DEST (set) == stack_pointer_rtx)
3196 rtx i2 = next_nonnote_insn (insn);
3197 if (i2 && keep_with_call_p (i2))
3198 return true;
3201 return false;
3204 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3205 to non-complex jumps. That is, direct unconditional, conditional,
3206 and tablejumps, but not computed jumps or returns. It also does
3207 not apply to the fallthru case of a conditional jump. */
3209 bool
3210 label_is_jump_target_p (rtx label, rtx jump_insn)
3212 rtx tmp = JUMP_LABEL (jump_insn);
3214 if (label == tmp)
3215 return true;
3217 if (tablejump_p (jump_insn, NULL, &tmp))
3219 rtvec vec = XVEC (PATTERN (tmp),
3220 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3221 int i, veclen = GET_NUM_ELEM (vec);
3223 for (i = 0; i < veclen; ++i)
3224 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3225 return true;
3228 return false;
3232 /* Return an estimate of the cost of computing rtx X.
3233 One use is in cse, to decide which expression to keep in the hash table.
3234 Another is in rtl generation, to pick the cheapest way to multiply.
3235 Other uses like the latter are expected in the future. */
3238 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3240 int i, j;
3241 enum rtx_code code;
3242 const char *fmt;
3243 int total;
3245 if (x == 0)
3246 return 0;
3248 /* Compute the default costs of certain things.
3249 Note that targetm.rtx_costs can override the defaults. */
3251 code = GET_CODE (x);
3252 switch (code)
3254 case MULT:
3255 total = COSTS_N_INSNS (5);
3256 break;
3257 case DIV:
3258 case UDIV:
3259 case MOD:
3260 case UMOD:
3261 total = COSTS_N_INSNS (7);
3262 break;
3263 case USE:
3264 /* Used in combine.c as a marker. */
3265 total = 0;
3266 break;
3267 default:
3268 total = COSTS_N_INSNS (1);
3271 switch (code)
3273 case REG:
3274 return 0;
3276 case SUBREG:
3277 total = 0;
3278 /* If we can't tie these modes, make this expensive. The larger
3279 the mode, the more expensive it is. */
3280 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3281 return COSTS_N_INSNS (2
3282 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3283 break;
3285 default:
3286 if (targetm.rtx_costs (x, code, outer_code, &total))
3287 return total;
3288 break;
3291 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3292 which is already in total. */
3294 fmt = GET_RTX_FORMAT (code);
3295 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3296 if (fmt[i] == 'e')
3297 total += rtx_cost (XEXP (x, i), code);
3298 else if (fmt[i] == 'E')
3299 for (j = 0; j < XVECLEN (x, i); j++)
3300 total += rtx_cost (XVECEXP (x, i, j), code);
3302 return total;
3305 /* Return cost of address expression X.
3306 Expect that X is properly formed address reference. */
3309 address_cost (rtx x, enum machine_mode mode)
3311 /* We may be asked for cost of various unusual addresses, such as operands
3312 of push instruction. It is not worthwhile to complicate writing
3313 of the target hook by such cases. */
3315 if (!memory_address_p (mode, x))
3316 return 1000;
3318 return targetm.address_cost (x);
3321 /* If the target doesn't override, compute the cost as with arithmetic. */
3324 default_address_cost (rtx x)
3326 return rtx_cost (x, MEM);
3330 unsigned HOST_WIDE_INT
3331 nonzero_bits (rtx x, enum machine_mode mode)
3333 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3336 unsigned int
3337 num_sign_bit_copies (rtx x, enum machine_mode mode)
3339 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3342 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3343 It avoids exponential behavior in nonzero_bits1 when X has
3344 identical subexpressions on the first or the second level. */
3346 static unsigned HOST_WIDE_INT
3347 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3348 enum machine_mode known_mode,
3349 unsigned HOST_WIDE_INT known_ret)
3351 if (x == known_x && mode == known_mode)
3352 return known_ret;
3354 /* Try to find identical subexpressions. If found call
3355 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3356 precomputed value for the subexpression as KNOWN_RET. */
3358 if (ARITHMETIC_P (x))
3360 rtx x0 = XEXP (x, 0);
3361 rtx x1 = XEXP (x, 1);
3363 /* Check the first level. */
3364 if (x0 == x1)
3365 return nonzero_bits1 (x, mode, x0, mode,
3366 cached_nonzero_bits (x0, mode, known_x,
3367 known_mode, known_ret));
3369 /* Check the second level. */
3370 if (ARITHMETIC_P (x0)
3371 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3372 return nonzero_bits1 (x, mode, x1, mode,
3373 cached_nonzero_bits (x1, mode, known_x,
3374 known_mode, known_ret));
3376 if (ARITHMETIC_P (x1)
3377 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3378 return nonzero_bits1 (x, mode, x0, mode,
3379 cached_nonzero_bits (x0, mode, known_x,
3380 known_mode, known_ret));
3383 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3386 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3387 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3388 is less useful. We can't allow both, because that results in exponential
3389 run time recursion. There is a nullstone testcase that triggered
3390 this. This macro avoids accidental uses of num_sign_bit_copies. */
3391 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3393 /* Given an expression, X, compute which bits in X can be nonzero.
3394 We don't care about bits outside of those defined in MODE.
3396 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
3397 an arithmetic operation, we can do better. */
3399 static unsigned HOST_WIDE_INT
3400 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3401 enum machine_mode known_mode,
3402 unsigned HOST_WIDE_INT known_ret)
3404 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3405 unsigned HOST_WIDE_INT inner_nz;
3406 enum rtx_code code;
3407 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3409 /* For floating-point values, assume all bits are needed. */
3410 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3411 return nonzero;
3413 /* If X is wider than MODE, use its mode instead. */
3414 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3416 mode = GET_MODE (x);
3417 nonzero = GET_MODE_MASK (mode);
3418 mode_width = GET_MODE_BITSIZE (mode);
3421 if (mode_width > HOST_BITS_PER_WIDE_INT)
3422 /* Our only callers in this case look for single bit values. So
3423 just return the mode mask. Those tests will then be false. */
3424 return nonzero;
3426 #ifndef WORD_REGISTER_OPERATIONS
3427 /* If MODE is wider than X, but both are a single word for both the host
3428 and target machines, we can compute this from which bits of the
3429 object might be nonzero in its own mode, taking into account the fact
3430 that on many CISC machines, accessing an object in a wider mode
3431 causes the high-order bits to become undefined. So they are
3432 not known to be zero. */
3434 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3435 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3436 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3437 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3439 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3440 known_x, known_mode, known_ret);
3441 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3442 return nonzero;
3444 #endif
3446 code = GET_CODE (x);
3447 switch (code)
3449 case REG:
3450 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3451 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3452 all the bits above ptr_mode are known to be zero. */
3453 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3454 && REG_POINTER (x))
3455 nonzero &= GET_MODE_MASK (ptr_mode);
3456 #endif
3458 /* Include declared information about alignment of pointers. */
3459 /* ??? We don't properly preserve REG_POINTER changes across
3460 pointer-to-integer casts, so we can't trust it except for
3461 things that we know must be pointers. See execute/960116-1.c. */
3462 if ((x == stack_pointer_rtx
3463 || x == frame_pointer_rtx
3464 || x == arg_pointer_rtx)
3465 && REGNO_POINTER_ALIGN (REGNO (x)))
3467 unsigned HOST_WIDE_INT alignment
3468 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3470 #ifdef PUSH_ROUNDING
3471 /* If PUSH_ROUNDING is defined, it is possible for the
3472 stack to be momentarily aligned only to that amount,
3473 so we pick the least alignment. */
3474 if (x == stack_pointer_rtx && PUSH_ARGS)
3475 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3476 alignment);
3477 #endif
3479 nonzero &= ~(alignment - 1);
3483 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3484 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3485 known_mode, known_ret,
3486 &nonzero_for_hook);
3488 if (new)
3489 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3490 known_mode, known_ret);
3492 return nonzero_for_hook;
3495 case CONST_INT:
3496 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3497 /* If X is negative in MODE, sign-extend the value. */
3498 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3499 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3500 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3501 #endif
3503 return INTVAL (x);
3505 case MEM:
3506 #ifdef LOAD_EXTEND_OP
3507 /* In many, if not most, RISC machines, reading a byte from memory
3508 zeros the rest of the register. Noticing that fact saves a lot
3509 of extra zero-extends. */
3510 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3511 nonzero &= GET_MODE_MASK (GET_MODE (x));
3512 #endif
3513 break;
3515 case EQ: case NE:
3516 case UNEQ: case LTGT:
3517 case GT: case GTU: case UNGT:
3518 case LT: case LTU: case UNLT:
3519 case GE: case GEU: case UNGE:
3520 case LE: case LEU: case UNLE:
3521 case UNORDERED: case ORDERED:
3522 /* If this produces an integer result, we know which bits are set.
3523 Code here used to clear bits outside the mode of X, but that is
3524 now done above. */
3525 /* Mind that MODE is the mode the caller wants to look at this
3526 operation in, and not the actual operation mode. We can wind
3527 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3528 that describes the results of a vector compare. */
3529 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3530 && mode_width <= HOST_BITS_PER_WIDE_INT)
3531 nonzero = STORE_FLAG_VALUE;
3532 break;
3534 case NEG:
3535 #if 0
3536 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3537 and num_sign_bit_copies. */
3538 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3539 == GET_MODE_BITSIZE (GET_MODE (x)))
3540 nonzero = 1;
3541 #endif
3543 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3544 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3545 break;
3547 case ABS:
3548 #if 0
3549 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3550 and num_sign_bit_copies. */
3551 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3552 == GET_MODE_BITSIZE (GET_MODE (x)))
3553 nonzero = 1;
3554 #endif
3555 break;
3557 case TRUNCATE:
3558 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3559 known_x, known_mode, known_ret)
3560 & GET_MODE_MASK (mode));
3561 break;
3563 case ZERO_EXTEND:
3564 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3565 known_x, known_mode, known_ret);
3566 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3567 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3568 break;
3570 case SIGN_EXTEND:
3571 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3572 Otherwise, show all the bits in the outer mode but not the inner
3573 may be nonzero. */
3574 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3575 known_x, known_mode, known_ret);
3576 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3578 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3579 if (inner_nz
3580 & (((HOST_WIDE_INT) 1
3581 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3582 inner_nz |= (GET_MODE_MASK (mode)
3583 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3586 nonzero &= inner_nz;
3587 break;
3589 case AND:
3590 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3591 known_x, known_mode, known_ret)
3592 & cached_nonzero_bits (XEXP (x, 1), mode,
3593 known_x, known_mode, known_ret);
3594 break;
3596 case XOR: case IOR:
3597 case UMIN: case UMAX: case SMIN: case SMAX:
3599 unsigned HOST_WIDE_INT nonzero0 =
3600 cached_nonzero_bits (XEXP (x, 0), mode,
3601 known_x, known_mode, known_ret);
3603 /* Don't call nonzero_bits for the second time if it cannot change
3604 anything. */
3605 if ((nonzero & nonzero0) != nonzero)
3606 nonzero &= nonzero0
3607 | cached_nonzero_bits (XEXP (x, 1), mode,
3608 known_x, known_mode, known_ret);
3610 break;
3612 case PLUS: case MINUS:
3613 case MULT:
3614 case DIV: case UDIV:
3615 case MOD: case UMOD:
3616 /* We can apply the rules of arithmetic to compute the number of
3617 high- and low-order zero bits of these operations. We start by
3618 computing the width (position of the highest-order nonzero bit)
3619 and the number of low-order zero bits for each value. */
3621 unsigned HOST_WIDE_INT nz0 =
3622 cached_nonzero_bits (XEXP (x, 0), mode,
3623 known_x, known_mode, known_ret);
3624 unsigned HOST_WIDE_INT nz1 =
3625 cached_nonzero_bits (XEXP (x, 1), mode,
3626 known_x, known_mode, known_ret);
3627 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3628 int width0 = floor_log2 (nz0) + 1;
3629 int width1 = floor_log2 (nz1) + 1;
3630 int low0 = floor_log2 (nz0 & -nz0);
3631 int low1 = floor_log2 (nz1 & -nz1);
3632 HOST_WIDE_INT op0_maybe_minusp
3633 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3634 HOST_WIDE_INT op1_maybe_minusp
3635 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3636 unsigned int result_width = mode_width;
3637 int result_low = 0;
3639 switch (code)
3641 case PLUS:
3642 result_width = MAX (width0, width1) + 1;
3643 result_low = MIN (low0, low1);
3644 break;
3645 case MINUS:
3646 result_low = MIN (low0, low1);
3647 break;
3648 case MULT:
3649 result_width = width0 + width1;
3650 result_low = low0 + low1;
3651 break;
3652 case DIV:
3653 if (width1 == 0)
3654 break;
3655 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3656 result_width = width0;
3657 break;
3658 case UDIV:
3659 if (width1 == 0)
3660 break;
3661 result_width = width0;
3662 break;
3663 case MOD:
3664 if (width1 == 0)
3665 break;
3666 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3667 result_width = MIN (width0, width1);
3668 result_low = MIN (low0, low1);
3669 break;
3670 case UMOD:
3671 if (width1 == 0)
3672 break;
3673 result_width = MIN (width0, width1);
3674 result_low = MIN (low0, low1);
3675 break;
3676 default:
3677 gcc_unreachable ();
3680 if (result_width < mode_width)
3681 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3683 if (result_low > 0)
3684 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3686 #ifdef POINTERS_EXTEND_UNSIGNED
3687 /* If pointers extend unsigned and this is an addition or subtraction
3688 to a pointer in Pmode, all the bits above ptr_mode are known to be
3689 zero. */
3690 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3691 && (code == PLUS || code == MINUS)
3692 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3693 nonzero &= GET_MODE_MASK (ptr_mode);
3694 #endif
3696 break;
3698 case ZERO_EXTRACT:
3699 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3700 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3701 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3702 break;
3704 case SUBREG:
3705 /* If this is a SUBREG formed for a promoted variable that has
3706 been zero-extended, we know that at least the high-order bits
3707 are zero, though others might be too. */
3709 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3710 nonzero = GET_MODE_MASK (GET_MODE (x))
3711 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3712 known_x, known_mode, known_ret);
3714 /* If the inner mode is a single word for both the host and target
3715 machines, we can compute this from which bits of the inner
3716 object might be nonzero. */
3717 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3718 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3719 <= HOST_BITS_PER_WIDE_INT))
3721 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3722 known_x, known_mode, known_ret);
3724 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3725 /* If this is a typical RISC machine, we only have to worry
3726 about the way loads are extended. */
3727 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3728 ? (((nonzero
3729 & (((unsigned HOST_WIDE_INT) 1
3730 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3731 != 0))
3732 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3733 || !MEM_P (SUBREG_REG (x)))
3734 #endif
3736 /* On many CISC machines, accessing an object in a wider mode
3737 causes the high-order bits to become undefined. So they are
3738 not known to be zero. */
3739 if (GET_MODE_SIZE (GET_MODE (x))
3740 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3741 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3742 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3745 break;
3747 case ASHIFTRT:
3748 case LSHIFTRT:
3749 case ASHIFT:
3750 case ROTATE:
3751 /* The nonzero bits are in two classes: any bits within MODE
3752 that aren't in GET_MODE (x) are always significant. The rest of the
3753 nonzero bits are those that are significant in the operand of
3754 the shift when shifted the appropriate number of bits. This
3755 shows that high-order bits are cleared by the right shift and
3756 low-order bits by left shifts. */
3757 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3758 && INTVAL (XEXP (x, 1)) >= 0
3759 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3761 enum machine_mode inner_mode = GET_MODE (x);
3762 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3763 int count = INTVAL (XEXP (x, 1));
3764 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3765 unsigned HOST_WIDE_INT op_nonzero =
3766 cached_nonzero_bits (XEXP (x, 0), mode,
3767 known_x, known_mode, known_ret);
3768 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3769 unsigned HOST_WIDE_INT outer = 0;
3771 if (mode_width > width)
3772 outer = (op_nonzero & nonzero & ~mode_mask);
3774 if (code == LSHIFTRT)
3775 inner >>= count;
3776 else if (code == ASHIFTRT)
3778 inner >>= count;
3780 /* If the sign bit may have been nonzero before the shift, we
3781 need to mark all the places it could have been copied to
3782 by the shift as possibly nonzero. */
3783 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3784 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3786 else if (code == ASHIFT)
3787 inner <<= count;
3788 else
3789 inner = ((inner << (count % width)
3790 | (inner >> (width - (count % width)))) & mode_mask);
3792 nonzero &= (outer | inner);
3794 break;
3796 case FFS:
3797 case POPCOUNT:
3798 /* This is at most the number of bits in the mode. */
3799 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3800 break;
3802 case CLZ:
3803 /* If CLZ has a known value at zero, then the nonzero bits are
3804 that value, plus the number of bits in the mode minus one. */
3805 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3806 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3807 else
3808 nonzero = -1;
3809 break;
3811 case CTZ:
3812 /* If CTZ has a known value at zero, then the nonzero bits are
3813 that value, plus the number of bits in the mode minus one. */
3814 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3815 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3816 else
3817 nonzero = -1;
3818 break;
3820 case PARITY:
3821 nonzero = 1;
3822 break;
3824 case IF_THEN_ELSE:
3826 unsigned HOST_WIDE_INT nonzero_true =
3827 cached_nonzero_bits (XEXP (x, 1), mode,
3828 known_x, known_mode, known_ret);
3830 /* Don't call nonzero_bits for the second time if it cannot change
3831 anything. */
3832 if ((nonzero & nonzero_true) != nonzero)
3833 nonzero &= nonzero_true
3834 | cached_nonzero_bits (XEXP (x, 2), mode,
3835 known_x, known_mode, known_ret);
3837 break;
3839 default:
3840 break;
3843 return nonzero;
3846 /* See the macro definition above. */
3847 #undef cached_num_sign_bit_copies
3850 /* The function cached_num_sign_bit_copies is a wrapper around
3851 num_sign_bit_copies1. It avoids exponential behavior in
3852 num_sign_bit_copies1 when X has identical subexpressions on the
3853 first or the second level. */
3855 static unsigned int
3856 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3857 enum machine_mode known_mode,
3858 unsigned int known_ret)
3860 if (x == known_x && mode == known_mode)
3861 return known_ret;
3863 /* Try to find identical subexpressions. If found call
3864 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3865 the precomputed value for the subexpression as KNOWN_RET. */
3867 if (ARITHMETIC_P (x))
3869 rtx x0 = XEXP (x, 0);
3870 rtx x1 = XEXP (x, 1);
3872 /* Check the first level. */
3873 if (x0 == x1)
3874 return
3875 num_sign_bit_copies1 (x, mode, x0, mode,
3876 cached_num_sign_bit_copies (x0, mode, known_x,
3877 known_mode,
3878 known_ret));
3880 /* Check the second level. */
3881 if (ARITHMETIC_P (x0)
3882 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3883 return
3884 num_sign_bit_copies1 (x, mode, x1, mode,
3885 cached_num_sign_bit_copies (x1, mode, known_x,
3886 known_mode,
3887 known_ret));
3889 if (ARITHMETIC_P (x1)
3890 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3891 return
3892 num_sign_bit_copies1 (x, mode, x0, mode,
3893 cached_num_sign_bit_copies (x0, mode, known_x,
3894 known_mode,
3895 known_ret));
3898 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
3901 /* Return the number of bits at the high-order end of X that are known to
3902 be equal to the sign bit. X will be used in mode MODE; if MODE is
3903 VOIDmode, X will be used in its own mode. The returned value will always
3904 be between 1 and the number of bits in MODE. */
3906 static unsigned int
3907 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
3908 enum machine_mode known_mode,
3909 unsigned int known_ret)
3911 enum rtx_code code = GET_CODE (x);
3912 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
3913 int num0, num1, result;
3914 unsigned HOST_WIDE_INT nonzero;
3916 /* If we weren't given a mode, use the mode of X. If the mode is still
3917 VOIDmode, we don't know anything. Likewise if one of the modes is
3918 floating-point. */
3920 if (mode == VOIDmode)
3921 mode = GET_MODE (x);
3923 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
3924 return 1;
3926 /* For a smaller object, just ignore the high bits. */
3927 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
3929 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
3930 known_x, known_mode, known_ret);
3931 return MAX (1,
3932 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
3935 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
3937 #ifndef WORD_REGISTER_OPERATIONS
3938 /* If this machine does not do all register operations on the entire
3939 register and MODE is wider than the mode of X, we can say nothing
3940 at all about the high-order bits. */
3941 return 1;
3942 #else
3943 /* Likewise on machines that do, if the mode of the object is smaller
3944 than a word and loads of that size don't sign extend, we can say
3945 nothing about the high order bits. */
3946 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
3947 #ifdef LOAD_EXTEND_OP
3948 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
3949 #endif
3951 return 1;
3952 #endif
3955 switch (code)
3957 case REG:
3959 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3960 /* If pointers extend signed and this is a pointer in Pmode, say that
3961 all the bits above ptr_mode are known to be sign bit copies. */
3962 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
3963 && REG_POINTER (x))
3964 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
3965 #endif
3968 unsigned int copies_for_hook = 1, copies = 1;
3969 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
3970 known_mode, known_ret,
3971 &copies_for_hook);
3973 if (new)
3974 copies = cached_num_sign_bit_copies (new, mode, known_x,
3975 known_mode, known_ret);
3977 if (copies > 1 || copies_for_hook > 1)
3978 return MAX (copies, copies_for_hook);
3980 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
3982 break;
3984 case MEM:
3985 #ifdef LOAD_EXTEND_OP
3986 /* Some RISC machines sign-extend all loads of smaller than a word. */
3987 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
3988 return MAX (1, ((int) bitwidth
3989 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
3990 #endif
3991 break;
3993 case CONST_INT:
3994 /* If the constant is negative, take its 1's complement and remask.
3995 Then see how many zero bits we have. */
3996 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
3997 if (bitwidth <= HOST_BITS_PER_WIDE_INT
3998 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
3999 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4001 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4003 case SUBREG:
4004 /* If this is a SUBREG for a promoted object that is sign-extended
4005 and we are looking at it in a wider mode, we know that at least the
4006 high-order bits are known to be sign bit copies. */
4008 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4010 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4011 known_x, known_mode, known_ret);
4012 return MAX ((int) bitwidth
4013 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4014 num0);
4017 /* For a smaller object, just ignore the high bits. */
4018 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4020 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4021 known_x, known_mode, known_ret);
4022 return MAX (1, (num0
4023 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4024 - bitwidth)));
4027 #ifdef WORD_REGISTER_OPERATIONS
4028 #ifdef LOAD_EXTEND_OP
4029 /* For paradoxical SUBREGs on machines where all register operations
4030 affect the entire register, just look inside. Note that we are
4031 passing MODE to the recursive call, so the number of sign bit copies
4032 will remain relative to that mode, not the inner mode. */
4034 /* This works only if loads sign extend. Otherwise, if we get a
4035 reload for the inner part, it may be loaded from the stack, and
4036 then we lose all sign bit copies that existed before the store
4037 to the stack. */
4039 if ((GET_MODE_SIZE (GET_MODE (x))
4040 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4041 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4042 && MEM_P (SUBREG_REG (x)))
4043 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4044 known_x, known_mode, known_ret);
4045 #endif
4046 #endif
4047 break;
4049 case SIGN_EXTRACT:
4050 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4051 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4052 break;
4054 case SIGN_EXTEND:
4055 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4056 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4057 known_x, known_mode, known_ret));
4059 case TRUNCATE:
4060 /* For a smaller object, just ignore the high bits. */
4061 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4062 known_x, known_mode, known_ret);
4063 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4064 - bitwidth)));
4066 case NOT:
4067 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4068 known_x, known_mode, known_ret);
4070 case ROTATE: case ROTATERT:
4071 /* If we are rotating left by a number of bits less than the number
4072 of sign bit copies, we can just subtract that amount from the
4073 number. */
4074 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4075 && INTVAL (XEXP (x, 1)) >= 0
4076 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4078 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4079 known_x, known_mode, known_ret);
4080 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4081 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4083 break;
4085 case NEG:
4086 /* In general, this subtracts one sign bit copy. But if the value
4087 is known to be positive, the number of sign bit copies is the
4088 same as that of the input. Finally, if the input has just one bit
4089 that might be nonzero, all the bits are copies of the sign bit. */
4090 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4091 known_x, known_mode, known_ret);
4092 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4093 return num0 > 1 ? num0 - 1 : 1;
4095 nonzero = nonzero_bits (XEXP (x, 0), mode);
4096 if (nonzero == 1)
4097 return bitwidth;
4099 if (num0 > 1
4100 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4101 num0--;
4103 return num0;
4105 case IOR: case AND: case XOR:
4106 case SMIN: case SMAX: case UMIN: case UMAX:
4107 /* Logical operations will preserve the number of sign-bit copies.
4108 MIN and MAX operations always return one of the operands. */
4109 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4110 known_x, known_mode, known_ret);
4111 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4112 known_x, known_mode, known_ret);
4113 return MIN (num0, num1);
4115 case PLUS: case MINUS:
4116 /* For addition and subtraction, we can have a 1-bit carry. However,
4117 if we are subtracting 1 from a positive number, there will not
4118 be such a carry. Furthermore, if the positive number is known to
4119 be 0 or 1, we know the result is either -1 or 0. */
4121 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4122 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4124 nonzero = nonzero_bits (XEXP (x, 0), mode);
4125 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4126 return (nonzero == 1 || nonzero == 0 ? bitwidth
4127 : bitwidth - floor_log2 (nonzero) - 1);
4130 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4131 known_x, known_mode, known_ret);
4132 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4133 known_x, known_mode, known_ret);
4134 result = MAX (1, MIN (num0, num1) - 1);
4136 #ifdef POINTERS_EXTEND_UNSIGNED
4137 /* If pointers extend signed and this is an addition or subtraction
4138 to a pointer in Pmode, all the bits above ptr_mode are known to be
4139 sign bit copies. */
4140 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4141 && (code == PLUS || code == MINUS)
4142 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4143 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4144 - GET_MODE_BITSIZE (ptr_mode) + 1),
4145 result);
4146 #endif
4147 return result;
4149 case MULT:
4150 /* The number of bits of the product is the sum of the number of
4151 bits of both terms. However, unless one of the terms if known
4152 to be positive, we must allow for an additional bit since negating
4153 a negative number can remove one sign bit copy. */
4155 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4156 known_x, known_mode, known_ret);
4157 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4158 known_x, known_mode, known_ret);
4160 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4161 if (result > 0
4162 && (bitwidth > HOST_BITS_PER_WIDE_INT
4163 || (((nonzero_bits (XEXP (x, 0), mode)
4164 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4165 && ((nonzero_bits (XEXP (x, 1), mode)
4166 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4167 result--;
4169 return MAX (1, result);
4171 case UDIV:
4172 /* The result must be <= the first operand. If the first operand
4173 has the high bit set, we know nothing about the number of sign
4174 bit copies. */
4175 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4176 return 1;
4177 else if ((nonzero_bits (XEXP (x, 0), mode)
4178 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4179 return 1;
4180 else
4181 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4182 known_x, known_mode, known_ret);
4184 case UMOD:
4185 /* The result must be <= the second operand. */
4186 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4187 known_x, known_mode, known_ret);
4189 case DIV:
4190 /* Similar to unsigned division, except that we have to worry about
4191 the case where the divisor is negative, in which case we have
4192 to add 1. */
4193 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4194 known_x, known_mode, known_ret);
4195 if (result > 1
4196 && (bitwidth > HOST_BITS_PER_WIDE_INT
4197 || (nonzero_bits (XEXP (x, 1), mode)
4198 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4199 result--;
4201 return result;
4203 case MOD:
4204 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4205 known_x, known_mode, known_ret);
4206 if (result > 1
4207 && (bitwidth > HOST_BITS_PER_WIDE_INT
4208 || (nonzero_bits (XEXP (x, 1), mode)
4209 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4210 result--;
4212 return result;
4214 case ASHIFTRT:
4215 /* Shifts by a constant add to the number of bits equal to the
4216 sign bit. */
4217 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4218 known_x, known_mode, known_ret);
4219 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4220 && INTVAL (XEXP (x, 1)) > 0)
4221 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4223 return num0;
4225 case ASHIFT:
4226 /* Left shifts destroy copies. */
4227 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4228 || INTVAL (XEXP (x, 1)) < 0
4229 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4230 return 1;
4232 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4233 known_x, known_mode, known_ret);
4234 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4236 case IF_THEN_ELSE:
4237 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4238 known_x, known_mode, known_ret);
4239 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4240 known_x, known_mode, known_ret);
4241 return MIN (num0, num1);
4243 case EQ: case NE: case GE: case GT: case LE: case LT:
4244 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4245 case GEU: case GTU: case LEU: case LTU:
4246 case UNORDERED: case ORDERED:
4247 /* If the constant is negative, take its 1's complement and remask.
4248 Then see how many zero bits we have. */
4249 nonzero = STORE_FLAG_VALUE;
4250 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4251 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4252 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4254 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4256 default:
4257 break;
4260 /* If we haven't been able to figure it out by one of the above rules,
4261 see if some of the high-order bits are known to be zero. If so,
4262 count those bits and return one less than that amount. If we can't
4263 safely compute the mask for this mode, always return BITWIDTH. */
4265 bitwidth = GET_MODE_BITSIZE (mode);
4266 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4267 return 1;
4269 nonzero = nonzero_bits (x, mode);
4270 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4271 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4274 /* Calculate the rtx_cost of a single instruction. A return value of
4275 zero indicates an instruction pattern without a known cost. */
4278 insn_rtx_cost (rtx pat)
4280 int i, cost;
4281 rtx set;
4283 /* Extract the single set rtx from the instruction pattern.
4284 We can't use single_set since we only have the pattern. */
4285 if (GET_CODE (pat) == SET)
4286 set = pat;
4287 else if (GET_CODE (pat) == PARALLEL)
4289 set = NULL_RTX;
4290 for (i = 0; i < XVECLEN (pat, 0); i++)
4292 rtx x = XVECEXP (pat, 0, i);
4293 if (GET_CODE (x) == SET)
4295 if (set)
4296 return 0;
4297 set = x;
4300 if (!set)
4301 return 0;
4303 else
4304 return 0;
4306 cost = rtx_cost (SET_SRC (set), SET);
4307 return cost > 0 ? cost : COSTS_N_INSNS (1);
4310 /* Given an insn INSN and condition COND, return the condition in a
4311 canonical form to simplify testing by callers. Specifically:
4313 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4314 (2) Both operands will be machine operands; (cc0) will have been replaced.
4315 (3) If an operand is a constant, it will be the second operand.
4316 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4317 for GE, GEU, and LEU.
4319 If the condition cannot be understood, or is an inequality floating-point
4320 comparison which needs to be reversed, 0 will be returned.
4322 If REVERSE is nonzero, then reverse the condition prior to canonizing it.
4324 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4325 insn used in locating the condition was found. If a replacement test
4326 of the condition is desired, it should be placed in front of that
4327 insn and we will be sure that the inputs are still valid.
4329 If WANT_REG is nonzero, we wish the condition to be relative to that
4330 register, if possible. Therefore, do not canonicalize the condition
4331 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4332 to be a compare to a CC mode register.
4334 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4335 and at INSN. */
4338 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4339 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4341 enum rtx_code code;
4342 rtx prev = insn;
4343 rtx set;
4344 rtx tem;
4345 rtx op0, op1;
4346 int reverse_code = 0;
4347 enum machine_mode mode;
4348 basic_block bb = BLOCK_FOR_INSN (insn);
4350 code = GET_CODE (cond);
4351 mode = GET_MODE (cond);
4352 op0 = XEXP (cond, 0);
4353 op1 = XEXP (cond, 1);
4355 if (reverse)
4356 code = reversed_comparison_code (cond, insn);
4357 if (code == UNKNOWN)
4358 return 0;
4360 if (earliest)
4361 *earliest = insn;
4363 /* If we are comparing a register with zero, see if the register is set
4364 in the previous insn to a COMPARE or a comparison operation. Perform
4365 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4366 in cse.c */
4368 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4369 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4370 && op1 == CONST0_RTX (GET_MODE (op0))
4371 && op0 != want_reg)
4373 /* Set nonzero when we find something of interest. */
4374 rtx x = 0;
4376 #ifdef HAVE_cc0
4377 /* If comparison with cc0, import actual comparison from compare
4378 insn. */
4379 if (op0 == cc0_rtx)
4381 if ((prev = prev_nonnote_insn (prev)) == 0
4382 || !NONJUMP_INSN_P (prev)
4383 || (set = single_set (prev)) == 0
4384 || SET_DEST (set) != cc0_rtx)
4385 return 0;
4387 op0 = SET_SRC (set);
4388 op1 = CONST0_RTX (GET_MODE (op0));
4389 if (earliest)
4390 *earliest = prev;
4392 #endif
4394 /* If this is a COMPARE, pick up the two things being compared. */
4395 if (GET_CODE (op0) == COMPARE)
4397 op1 = XEXP (op0, 1);
4398 op0 = XEXP (op0, 0);
4399 continue;
4401 else if (!REG_P (op0))
4402 break;
4404 /* Go back to the previous insn. Stop if it is not an INSN. We also
4405 stop if it isn't a single set or if it has a REG_INC note because
4406 we don't want to bother dealing with it. */
4408 if ((prev = prev_nonnote_insn (prev)) == 0
4409 || !NONJUMP_INSN_P (prev)
4410 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4411 /* In cfglayout mode, there do not have to be labels at the
4412 beginning of a block, or jumps at the end, so the previous
4413 conditions would not stop us when we reach bb boundary. */
4414 || BLOCK_FOR_INSN (prev) != bb)
4415 break;
4417 set = set_of (op0, prev);
4419 if (set
4420 && (GET_CODE (set) != SET
4421 || !rtx_equal_p (SET_DEST (set), op0)))
4422 break;
4424 /* If this is setting OP0, get what it sets it to if it looks
4425 relevant. */
4426 if (set)
4428 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4429 #ifdef FLOAT_STORE_FLAG_VALUE
4430 REAL_VALUE_TYPE fsfv;
4431 #endif
4433 /* ??? We may not combine comparisons done in a CCmode with
4434 comparisons not done in a CCmode. This is to aid targets
4435 like Alpha that have an IEEE compliant EQ instruction, and
4436 a non-IEEE compliant BEQ instruction. The use of CCmode is
4437 actually artificial, simply to prevent the combination, but
4438 should not affect other platforms.
4440 However, we must allow VOIDmode comparisons to match either
4441 CCmode or non-CCmode comparison, because some ports have
4442 modeless comparisons inside branch patterns.
4444 ??? This mode check should perhaps look more like the mode check
4445 in simplify_comparison in combine. */
4447 if ((GET_CODE (SET_SRC (set)) == COMPARE
4448 || (((code == NE
4449 || (code == LT
4450 && GET_MODE_CLASS (inner_mode) == MODE_INT
4451 && (GET_MODE_BITSIZE (inner_mode)
4452 <= HOST_BITS_PER_WIDE_INT)
4453 && (STORE_FLAG_VALUE
4454 & ((HOST_WIDE_INT) 1
4455 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4456 #ifdef FLOAT_STORE_FLAG_VALUE
4457 || (code == LT
4458 && SCALAR_FLOAT_MODE_P (inner_mode)
4459 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4460 REAL_VALUE_NEGATIVE (fsfv)))
4461 #endif
4463 && COMPARISON_P (SET_SRC (set))))
4464 && (((GET_MODE_CLASS (mode) == MODE_CC)
4465 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4466 || mode == VOIDmode || inner_mode == VOIDmode))
4467 x = SET_SRC (set);
4468 else if (((code == EQ
4469 || (code == GE
4470 && (GET_MODE_BITSIZE (inner_mode)
4471 <= HOST_BITS_PER_WIDE_INT)
4472 && GET_MODE_CLASS (inner_mode) == MODE_INT
4473 && (STORE_FLAG_VALUE
4474 & ((HOST_WIDE_INT) 1
4475 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4476 #ifdef FLOAT_STORE_FLAG_VALUE
4477 || (code == GE
4478 && SCALAR_FLOAT_MODE_P (inner_mode)
4479 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4480 REAL_VALUE_NEGATIVE (fsfv)))
4481 #endif
4483 && COMPARISON_P (SET_SRC (set))
4484 && (((GET_MODE_CLASS (mode) == MODE_CC)
4485 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4486 || mode == VOIDmode || inner_mode == VOIDmode))
4489 reverse_code = 1;
4490 x = SET_SRC (set);
4492 else
4493 break;
4496 else if (reg_set_p (op0, prev))
4497 /* If this sets OP0, but not directly, we have to give up. */
4498 break;
4500 if (x)
4502 /* If the caller is expecting the condition to be valid at INSN,
4503 make sure X doesn't change before INSN. */
4504 if (valid_at_insn_p)
4505 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4506 break;
4507 if (COMPARISON_P (x))
4508 code = GET_CODE (x);
4509 if (reverse_code)
4511 code = reversed_comparison_code (x, prev);
4512 if (code == UNKNOWN)
4513 return 0;
4514 reverse_code = 0;
4517 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4518 if (earliest)
4519 *earliest = prev;
4523 /* If constant is first, put it last. */
4524 if (CONSTANT_P (op0))
4525 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4527 /* If OP0 is the result of a comparison, we weren't able to find what
4528 was really being compared, so fail. */
4529 if (!allow_cc_mode
4530 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4531 return 0;
4533 /* Canonicalize any ordered comparison with integers involving equality
4534 if we can do computations in the relevant mode and we do not
4535 overflow. */
4537 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4538 && GET_CODE (op1) == CONST_INT
4539 && GET_MODE (op0) != VOIDmode
4540 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4542 HOST_WIDE_INT const_val = INTVAL (op1);
4543 unsigned HOST_WIDE_INT uconst_val = const_val;
4544 unsigned HOST_WIDE_INT max_val
4545 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4547 switch (code)
4549 case LE:
4550 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4551 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4552 break;
4554 /* When cross-compiling, const_val might be sign-extended from
4555 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
4556 case GE:
4557 if ((HOST_WIDE_INT) (const_val & max_val)
4558 != (((HOST_WIDE_INT) 1
4559 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4560 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4561 break;
4563 case LEU:
4564 if (uconst_val < max_val)
4565 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4566 break;
4568 case GEU:
4569 if (uconst_val != 0)
4570 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4571 break;
4573 default:
4574 break;
4578 /* Never return CC0; return zero instead. */
4579 if (CC0_P (op0))
4580 return 0;
4582 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4585 /* Given a jump insn JUMP, return the condition that will cause it to branch
4586 to its JUMP_LABEL. If the condition cannot be understood, or is an
4587 inequality floating-point comparison which needs to be reversed, 0 will
4588 be returned.
4590 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4591 insn used in locating the condition was found. If a replacement test
4592 of the condition is desired, it should be placed in front of that
4593 insn and we will be sure that the inputs are still valid. If EARLIEST
4594 is null, the returned condition will be valid at INSN.
4596 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4597 compare CC mode register.
4599 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4602 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4604 rtx cond;
4605 int reverse;
4606 rtx set;
4608 /* If this is not a standard conditional jump, we can't parse it. */
4609 if (!JUMP_P (jump)
4610 || ! any_condjump_p (jump))
4611 return 0;
4612 set = pc_set (jump);
4614 cond = XEXP (SET_SRC (set), 0);
4616 /* If this branches to JUMP_LABEL when the condition is false, reverse
4617 the condition. */
4618 reverse
4619 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4620 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4622 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4623 allow_cc_mode, valid_at_insn_p);
4626 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4627 TARGET_MODE_REP_EXTENDED.
4629 Note that we assume that the property of
4630 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
4631 narrower than mode B. I.e., if A is a mode narrower than B then in
4632 order to be able to operate on it in mode B, mode A needs to
4633 satisfy the requirements set by the representation of mode B. */
4635 static void
4636 init_num_sign_bit_copies_in_rep (void)
4638 enum machine_mode mode, in_mode;
4640 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4641 in_mode = GET_MODE_WIDER_MODE (mode))
4642 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4643 mode = GET_MODE_WIDER_MODE (mode))
4645 enum machine_mode i;
4647 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4648 extends to the next widest mode. */
4649 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4650 || GET_MODE_WIDER_MODE (mode) == in_mode);
4652 /* We are in in_mode. Count how many bits outside of mode
4653 have to be copies of the sign-bit. */
4654 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4656 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4658 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4659 /* We can only check sign-bit copies starting from the
4660 top-bit. In order to be able to check the bits we
4661 have already seen we pretend that subsequent bits
4662 have to be sign-bit copies too. */
4663 || num_sign_bit_copies_in_rep [in_mode][mode])
4664 num_sign_bit_copies_in_rep [in_mode][mode]
4665 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4670 /* Suppose that truncation from the machine mode of X to MODE is not a
4671 no-op. See if there is anything special about X so that we can
4672 assume it already contains a truncated value of MODE. */
4674 bool
4675 truncated_to_mode (enum machine_mode mode, rtx x)
4677 /* This register has already been used in MODE without explicit
4678 truncation. */
4679 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4680 return true;
4682 /* See if we already satisfy the requirements of MODE. If yes we
4683 can just switch to MODE. */
4684 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4685 && (num_sign_bit_copies (x, GET_MODE (x))
4686 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4687 return true;
4689 return false;
4692 /* Initialize non_rtx_starting_operands, which is used to speed up
4693 for_each_rtx. */
4694 void
4695 init_rtlanal (void)
4697 int i;
4698 for (i = 0; i < NUM_RTX_CODE; i++)
4700 const char *format = GET_RTX_FORMAT (i);
4701 const char *first = strpbrk (format, "eEV");
4702 non_rtx_starting_operands[i] = first ? first - format : -1;
4705 init_num_sign_bit_copies_in_rep ();
4708 /* Check whether this is a constant pool constant. */
4709 bool
4710 constant_pool_constant_p (rtx x)
4712 x = avoid_constant_pool_reference (x);
4713 return GET_CODE (x) == CONST_DOUBLE;