1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006 Free Software
4 Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
24 #include "config.h"
25 #include "system.h"
26 #include "coretypes.h"
27 #include "tm.h"
28 #include "toplev.h"
29 #include "rtl.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "target.h"
34 #include "output.h"
35 #include "tm_p.h"
36 #include "flags.h"
37 #include "real.h"
38 #include "regs.h"
39 #include "function.h"
41 /* Forward declarations */
42 static void set_of_1 (rtx, rtx, void *);
43 static bool covers_regno_p (rtx, unsigned int);
44 static bool covers_regno_no_parallel_p (rtx, unsigned int);
45 static int rtx_referenced_p_1 (rtx *, void *);
46 static int computed_jump_p_1 (rtx);
47 static void parms_set (rtx, rtx, void *);
49 static unsigned HOST_WIDE_INT cached_nonzero_bits (rtx, enum machine_mode,
50 rtx, enum machine_mode,
51 unsigned HOST_WIDE_INT);
52 static unsigned HOST_WIDE_INT nonzero_bits1 (rtx, enum machine_mode, rtx,
53 enum machine_mode,
54 unsigned HOST_WIDE_INT);
55 static unsigned int cached_num_sign_bit_copies (rtx, enum machine_mode, rtx,
56 enum machine_mode,
57 unsigned int);
58 static unsigned int num_sign_bit_copies1 (rtx, enum machine_mode, rtx,
59 enum machine_mode, unsigned int);
61 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
62 -1 if a code has no such operand. */
63 static int non_rtx_starting_operands[NUM_RTX_CODE];
65 /* Bit flags that specify the machine subtype we are compiling for.
66 Bits are tested using macros TARGET_... defined in the tm.h file
67 and set by `-m...' switches. Must be defined in rtlanal.c. */
69 int target_flags;
71 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
72 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
73 SIGN_EXTEND then while narrowing we also have to enforce the
74 representation and sign-extend the value to mode DESTINATION_REP.
76 If the value is already sign-extended to DESTINATION_REP mode we
77 can just switch to DESTINATION mode on it. For each pair of
78 integral modes SOURCE and DESTINATION, when truncating from SOURCE
79 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
80 contains the number of high-order bits in SOURCE that have to be
81 copies of the sign-bit so that we can do this mode-switch to
82 DESTINATION. */
84 static unsigned int
85 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
87 /* Return 1 if the value of X is unstable
88 (would be different at a different point in the program).
89 The frame pointer, arg pointer, etc. are considered stable
90 (within one function) and so is anything marked `unchanging'. */
92 int
93 rtx_unstable_p (rtx x)
95 RTX_CODE code = GET_CODE (x);
96 int i;
97 const char *fmt;
99 switch (code)
101 case MEM:
102 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
104 case CONST:
105 case CONST_INT:
106 case CONST_DOUBLE:
107 case CONST_VECTOR:
108 case SYMBOL_REF:
109 case LABEL_REF:
110 return 0;
112 case REG:
113 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
114 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
115 /* The arg pointer varies if it is not a fixed register. */
116 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
117 return 0;
118 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
119 /* ??? When call-clobbered, the value is stable modulo the restore
120 that must happen after a call. This currently screws up local-alloc
121 into believing that the restore is not needed. */
122 if (x == pic_offset_table_rtx)
123 return 0;
124 #endif
125 return 1;
127 case ASM_OPERANDS:
128 if (MEM_VOLATILE_P (x))
129 return 1;
131 /* Fall through. */
133 default:
134 break;
137 fmt = GET_RTX_FORMAT (code);
138 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
139 if (fmt[i] == 'e')
141 if (rtx_unstable_p (XEXP (x, i)))
142 return 1;
144 else if (fmt[i] == 'E')
146 int j;
147 for (j = 0; j < XVECLEN (x, i); j++)
148 if (rtx_unstable_p (XVECEXP (x, i, j)))
149 return 1;
152 return 0;
155 /* Return 1 if X has a value that can vary even between two
156 executions of the program. 0 means X can be compared reliably
157 against certain constants or near-constants.
158 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
159 zero, we are slightly more conservative.
160 The frame pointer and the arg pointer are considered constant. */
163 rtx_varies_p (rtx x, int for_alias)
165 RTX_CODE code;
166 int i;
167 const char *fmt;
169 if (!x)
170 return 0;
172 code = GET_CODE (x);
173 switch (code)
175 case MEM:
176 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
178 case CONST:
179 case CONST_INT:
180 case CONST_DOUBLE:
181 case CONST_VECTOR:
182 case SYMBOL_REF:
183 case LABEL_REF:
184 return 0;
186 case REG:
187 /* Note that we have to test for the actual rtx used for the frame
188 and arg pointers and not just the register number in case we have
189 eliminated the frame and/or arg pointer and are using it
190 for pseudos. */
191 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
192 /* The arg pointer varies if it is not a fixed register. */
193 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
194 return 0;
195 if (x == pic_offset_table_rtx
196 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
197 /* ??? When call-clobbered, the value is stable modulo the restore
198 that must happen after a call. This currently screws up
199 local-alloc into believing that the restore is not needed, so we
200 must return 0 only if we are called from alias analysis. */
201 && for_alias
202 #endif
204 return 0;
205 return 1;
207 case LO_SUM:
208 /* The operand 0 of a LO_SUM is considered constant
209 (in fact it is related specifically to operand 1)
210 during alias analysis. */
211 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
212 || rtx_varies_p (XEXP (x, 1), for_alias);
214 case ASM_OPERANDS:
215 if (MEM_VOLATILE_P (x))
216 return 1;
218 /* Fall through. */
220 default:
221 break;
224 fmt = GET_RTX_FORMAT (code);
225 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
226 if (fmt[i] == 'e')
228 if (rtx_varies_p (XEXP (x, i), for_alias))
229 return 1;
231 else if (fmt[i] == 'E')
233 int j;
234 for (j = 0; j < XVECLEN (x, i); j++)
235 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
236 return 1;
239 return 0;
242 /* Return nonzero if the use of X as an address in a MEM can cause a trap.
243 MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
244 whether nonzero is returned for unaligned memory accesses on strict
245 alignment machines. */
247 static int
248 rtx_addr_can_trap_p_1 (rtx x, enum machine_mode mode, bool unaligned_mems)
250 enum rtx_code code = GET_CODE (x);
252 switch (code)
254 case SYMBOL_REF:
255 return SYMBOL_REF_WEAK (x);
257 case LABEL_REF:
258 return 0;
260 case REG:
261 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
262 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
263 || x == stack_pointer_rtx
264 /* The arg pointer varies if it is not a fixed register. */
265 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
266 return 0;
267 /* All of the virtual frame registers are stack references. */
268 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
269 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
270 return 0;
271 return 1;
273 case CONST:
274 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
276 case PLUS:
277 /* An address is assumed not to trap if:
278 - it is an address that can't trap plus a constant integer,
279 with the proper remainder modulo the mode size if we are
280 considering unaligned memory references. */
281 if (!rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems)
282 && GET_CODE (XEXP (x, 1)) == CONST_INT)
284 HOST_WIDE_INT offset;
286 if (!STRICT_ALIGNMENT
287 || !unaligned_mems
288 || GET_MODE_SIZE (mode) == 0)
289 return 0;
291 offset = INTVAL (XEXP (x, 1));
293 #ifdef SPARC_STACK_BOUNDARY_HACK
294 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
295 the real alignment of %sp. However, when it does this, the
296 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
297 if (SPARC_STACK_BOUNDARY_HACK
298 && (XEXP (x, 0) == stack_pointer_rtx
299 || XEXP (x, 0) == hard_frame_pointer_rtx))
300 offset -= STACK_POINTER_OFFSET;
301 #endif
303 return offset % GET_MODE_SIZE (mode) != 0;
306 /* - or it is the pic register plus a constant. */
307 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
308 return 0;
310 return 1;
312 case LO_SUM:
313 case PRE_MODIFY:
314 return rtx_addr_can_trap_p_1 (XEXP (x, 1), mode, unaligned_mems);
316 case PRE_DEC:
317 case PRE_INC:
318 case POST_DEC:
319 case POST_INC:
320 case POST_MODIFY:
321 return rtx_addr_can_trap_p_1 (XEXP (x, 0), mode, unaligned_mems);
323 default:
324 break;
 327 /* If it isn't one of the cases above, it can cause a trap. */
328 return 1;
331 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
334 rtx_addr_can_trap_p (rtx x)
336 return rtx_addr_can_trap_p_1 (x, VOIDmode, false);
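/* Illustrative sketch (not part of the original file): a caller deciding
   whether a load can be speculated typically feeds the MEM's address to
   this predicate.  The MEM variable below is hypothetical.  */
#if 0
  if (MEM_P (mem) && !rtx_addr_can_trap_p (XEXP (mem, 0)))
    {
      /* The load cannot fault because of its address alone.  */
    }
#endif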
339 /* Return true if X is an address that is known to not be zero. */
341 bool
342 nonzero_address_p (rtx x)
344 enum rtx_code code = GET_CODE (x);
346 switch (code)
348 case SYMBOL_REF:
349 return !SYMBOL_REF_WEAK (x);
351 case LABEL_REF:
352 return true;
354 case REG:
355 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
356 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
357 || x == stack_pointer_rtx
358 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
359 return true;
360 /* All of the virtual frame registers are stack references. */
361 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
362 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
363 return true;
364 return false;
366 case CONST:
367 return nonzero_address_p (XEXP (x, 0));
369 case PLUS:
370 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
371 return nonzero_address_p (XEXP (x, 0));
372 /* Handle PIC references. */
373 else if (XEXP (x, 0) == pic_offset_table_rtx
374 && CONSTANT_P (XEXP (x, 1)))
375 return true;
376 return false;
378 case PRE_MODIFY:
379 /* Similar to the above; allow positive offsets. Further, since
380 auto-inc is only allowed in memories, the register must be a
381 pointer. */
382 if (GET_CODE (XEXP (x, 1)) == CONST_INT
383 && INTVAL (XEXP (x, 1)) > 0)
384 return true;
385 return nonzero_address_p (XEXP (x, 0));
387 case PRE_INC:
388 /* Similarly. Further, the offset is always positive. */
389 return true;
391 case PRE_DEC:
392 case POST_DEC:
393 case POST_INC:
394 case POST_MODIFY:
395 return nonzero_address_p (XEXP (x, 0));
397 case LO_SUM:
398 return nonzero_address_p (XEXP (x, 1));
400 default:
401 break;
 404 /* If it isn't one of the cases above, it might be zero. */
405 return false;
408 /* Return 1 if X refers to a memory location whose address
409 cannot be compared reliably with constant addresses,
410 or if X refers to a BLKmode memory object.
411 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
412 zero, we are slightly more conservative. */
415 rtx_addr_varies_p (rtx x, int for_alias)
417 enum rtx_code code;
418 int i;
419 const char *fmt;
421 if (x == 0)
422 return 0;
424 code = GET_CODE (x);
425 if (code == MEM)
426 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
428 fmt = GET_RTX_FORMAT (code);
429 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
430 if (fmt[i] == 'e')
432 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
433 return 1;
435 else if (fmt[i] == 'E')
437 int j;
438 for (j = 0; j < XVECLEN (x, i); j++)
439 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
440 return 1;
442 return 0;
445 /* Return the value of the integer term in X, if one is apparent;
446 otherwise return 0.
447 Only obvious integer terms are detected.
448 This is used in cse.c with the `related_value' field. */
450 HOST_WIDE_INT
451 get_integer_term (rtx x)
453 if (GET_CODE (x) == CONST)
454 x = XEXP (x, 0);
456 if (GET_CODE (x) == MINUS
457 && GET_CODE (XEXP (x, 1)) == CONST_INT)
458 return - INTVAL (XEXP (x, 1));
459 if (GET_CODE (x) == PLUS
460 && GET_CODE (XEXP (x, 1)) == CONST_INT)
461 return INTVAL (XEXP (x, 1));
462 return 0;
465 /* If X is a constant, return the value sans apparent integer term;
466 otherwise return 0.
467 Only obvious integer terms are detected. */
470 get_related_value (rtx x)
472 if (GET_CODE (x) != CONST)
473 return 0;
474 x = XEXP (x, 0);
475 if (GET_CODE (x) == PLUS
476 && GET_CODE (XEXP (x, 1)) == CONST_INT)
477 return XEXP (x, 0);
478 else if (GET_CODE (x) == MINUS
479 && GET_CODE (XEXP (x, 1)) == CONST_INT)
480 return XEXP (x, 0);
481 return 0;
484 /* Return the number of places FIND appears within X. If COUNT_DEST is
485 zero, we do not count occurrences inside the destination of a SET. */
488 count_occurrences (rtx x, rtx find, int count_dest)
490 int i, j;
491 enum rtx_code code;
492 const char *format_ptr;
493 int count;
495 if (x == find)
496 return 1;
498 code = GET_CODE (x);
500 switch (code)
502 case REG:
503 case CONST_INT:
504 case CONST_DOUBLE:
505 case CONST_VECTOR:
506 case SYMBOL_REF:
507 case CODE_LABEL:
508 case PC:
509 case CC0:
510 return 0;
512 case MEM:
513 if (MEM_P (find) && rtx_equal_p (x, find))
514 return 1;
515 break;
517 case SET:
518 if (SET_DEST (x) == find && ! count_dest)
519 return count_occurrences (SET_SRC (x), find, count_dest);
520 break;
522 default:
523 break;
526 format_ptr = GET_RTX_FORMAT (code);
527 count = 0;
529 for (i = 0; i < GET_RTX_LENGTH (code); i++)
531 switch (*format_ptr++)
533 case 'e':
534 count += count_occurrences (XEXP (x, i), find, count_dest);
535 break;
537 case 'E':
538 for (j = 0; j < XVECLEN (x, i); j++)
539 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
540 break;
543 return count;
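/* Illustrative sketch (not part of the original file): a pass might use
   count_occurrences to verify that a register is used exactly once outside
   of SET destinations before substituting its value.  The function name and
   its arguments are hypothetical.  */
#if 0
static bool
used_once_outside_dests (rtx insn, rtx reg)
{
  /* Pass 0 for COUNT_DEST so that stores into REG are not counted.  */
  return count_occurrences (PATTERN (insn), reg, 0) == 1;
}
#endif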
546 /* Nonzero if register REG appears somewhere within IN.
547 Also works if REG is not a register; in this case it checks
548 for a subexpression of IN that is Lisp "equal" to REG. */
551 reg_mentioned_p (rtx reg, rtx in)
553 const char *fmt;
554 int i;
555 enum rtx_code code;
557 if (in == 0)
558 return 0;
560 if (reg == in)
561 return 1;
563 if (GET_CODE (in) == LABEL_REF)
564 return reg == XEXP (in, 0);
566 code = GET_CODE (in);
568 switch (code)
570 /* Compare registers by number. */
571 case REG:
572 return REG_P (reg) && REGNO (in) == REGNO (reg);
574 /* These codes have no constituent expressions
575 and are unique. */
576 case SCRATCH:
577 case CC0:
578 case PC:
579 return 0;
581 case CONST_INT:
582 case CONST_VECTOR:
583 case CONST_DOUBLE:
584 /* These are kept unique for a given value. */
585 return 0;
587 default:
588 break;
591 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
592 return 1;
594 fmt = GET_RTX_FORMAT (code);
596 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
598 if (fmt[i] == 'E')
600 int j;
601 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
602 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
603 return 1;
605 else if (fmt[i] == 'e'
606 && reg_mentioned_p (reg, XEXP (in, i)))
607 return 1;
609 return 0;
612 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
613 no CODE_LABEL insn. */
616 no_labels_between_p (rtx beg, rtx end)
618 rtx p;
619 if (beg == end)
620 return 0;
621 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
622 if (LABEL_P (p))
623 return 0;
624 return 1;
627 /* Nonzero if register REG is used in an insn between
628 FROM_INSN and TO_INSN (exclusive of those two). */
631 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
633 rtx insn;
635 if (from_insn == to_insn)
636 return 0;
638 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
639 if (INSN_P (insn)
640 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
641 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
642 return 1;
643 return 0;
646 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
647 is entirely replaced by a new value and the only use is as a SET_DEST,
648 we do not consider it a reference. */
651 reg_referenced_p (rtx x, rtx body)
653 int i;
655 switch (GET_CODE (body))
657 case SET:
658 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
659 return 1;
661 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
662 of a REG that occupies all of the REG, the insn references X if
663 it is mentioned in the destination. */
664 if (GET_CODE (SET_DEST (body)) != CC0
665 && GET_CODE (SET_DEST (body)) != PC
666 && !REG_P (SET_DEST (body))
667 && ! (GET_CODE (SET_DEST (body)) == SUBREG
668 && REG_P (SUBREG_REG (SET_DEST (body)))
669 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
670 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
671 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
672 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
673 && reg_overlap_mentioned_p (x, SET_DEST (body)))
674 return 1;
675 return 0;
677 case ASM_OPERANDS:
678 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
679 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
680 return 1;
681 return 0;
683 case CALL:
684 case USE:
685 case IF_THEN_ELSE:
686 return reg_overlap_mentioned_p (x, body);
688 case TRAP_IF:
689 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
691 case PREFETCH:
692 return reg_overlap_mentioned_p (x, XEXP (body, 0));
694 case UNSPEC:
695 case UNSPEC_VOLATILE:
696 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
697 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
698 return 1;
699 return 0;
701 case PARALLEL:
702 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
703 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
704 return 1;
705 return 0;
707 case CLOBBER:
708 if (MEM_P (XEXP (body, 0)))
709 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
710 return 1;
711 return 0;
713 case COND_EXEC:
714 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
715 return 1;
716 return reg_referenced_p (x, COND_EXEC_CODE (body));
718 default:
719 return 0;
723 /* Nonzero if register REG is set or clobbered in an insn between
724 FROM_INSN and TO_INSN (exclusive of those two). */
727 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
729 rtx insn;
731 if (from_insn == to_insn)
732 return 0;
734 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
735 if (INSN_P (insn) && reg_set_p (reg, insn))
736 return 1;
737 return 0;
740 /* Internals of reg_set_between_p. */
742 reg_set_p (rtx reg, rtx insn)
744 /* We can be passed an insn or part of one. If we are passed an insn,
745 check if a side-effect of the insn clobbers REG. */
746 if (INSN_P (insn)
747 && (FIND_REG_INC_NOTE (insn, reg)
748 || (CALL_P (insn)
749 && ((REG_P (reg)
750 && REGNO (reg) < FIRST_PSEUDO_REGISTER
751 && TEST_HARD_REG_BIT (regs_invalidated_by_call,
752 REGNO (reg)))
753 || MEM_P (reg)
754 || find_reg_fusage (insn, CLOBBER, reg)))))
755 return 1;
757 return set_of (reg, insn) != NULL_RTX;
760 /* Similar to reg_set_between_p, but check all registers in X. Return 0
761 only if none of them are modified between START and END. Return 1 if
 762 X contains a MEM; this routine does use memory aliasing. */
765 modified_between_p (rtx x, rtx start, rtx end)
767 enum rtx_code code = GET_CODE (x);
768 const char *fmt;
769 int i, j;
770 rtx insn;
772 if (start == end)
773 return 0;
775 switch (code)
777 case CONST_INT:
778 case CONST_DOUBLE:
779 case CONST_VECTOR:
780 case CONST:
781 case SYMBOL_REF:
782 case LABEL_REF:
783 return 0;
785 case PC:
786 case CC0:
787 return 1;
789 case MEM:
790 if (modified_between_p (XEXP (x, 0), start, end))
791 return 1;
792 if (MEM_READONLY_P (x))
793 return 0;
794 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
795 if (memory_modified_in_insn_p (x, insn))
796 return 1;
797 return 0;
798 break;
800 case REG:
801 return reg_set_between_p (x, start, end);
803 default:
804 break;
807 fmt = GET_RTX_FORMAT (code);
808 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
810 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
811 return 1;
813 else if (fmt[i] == 'E')
814 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
815 if (modified_between_p (XVECEXP (x, i, j), start, end))
816 return 1;
819 return 0;
822 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
823 of them are modified in INSN. Return 1 if X contains a MEM; this routine
824 does use memory aliasing. */
827 modified_in_p (rtx x, rtx insn)
829 enum rtx_code code = GET_CODE (x);
830 const char *fmt;
831 int i, j;
833 switch (code)
835 case CONST_INT:
836 case CONST_DOUBLE:
837 case CONST_VECTOR:
838 case CONST:
839 case SYMBOL_REF:
840 case LABEL_REF:
841 return 0;
843 case PC:
844 case CC0:
845 return 1;
847 case MEM:
848 if (modified_in_p (XEXP (x, 0), insn))
849 return 1;
850 if (MEM_READONLY_P (x))
851 return 0;
852 if (memory_modified_in_insn_p (x, insn))
853 return 1;
854 return 0;
855 break;
857 case REG:
858 return reg_set_p (x, insn);
860 default:
861 break;
864 fmt = GET_RTX_FORMAT (code);
865 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
867 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
868 return 1;
870 else if (fmt[i] == 'E')
871 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
872 if (modified_in_p (XVECEXP (x, i, j), insn))
873 return 1;
876 return 0;
879 /* Helper function for set_of. */
880 struct set_of_data
882 rtx found;
883 rtx pat;
886 static void
887 set_of_1 (rtx x, rtx pat, void *data1)
889 struct set_of_data *data = (struct set_of_data *) (data1);
890 if (rtx_equal_p (x, data->pat)
891 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
892 data->found = pat;
 895 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
896 (either directly or via STRICT_LOW_PART and similar modifiers). */
898 set_of (rtx pat, rtx insn)
900 struct set_of_data data;
901 data.found = NULL_RTX;
902 data.pat = pat;
903 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
904 return data.found;
907 /* Given an INSN, return a SET expression if this insn has only a single SET.
 908 It may also have CLOBBERs, USEs, or SETs whose output
909 will not be used, which we ignore. */
912 single_set_2 (rtx insn, rtx pat)
914 rtx set = NULL;
915 int set_verified = 1;
916 int i;
918 if (GET_CODE (pat) == PARALLEL)
920 for (i = 0; i < XVECLEN (pat, 0); i++)
922 rtx sub = XVECEXP (pat, 0, i);
923 switch (GET_CODE (sub))
925 case USE:
926 case CLOBBER:
927 break;
929 case SET:
 930 /* We can consider insns having multiple sets, where all
 931 but one are dead, as single set insns.  In the common case
 932 only a single set is present in the pattern, so we want
 933 to avoid checking for REG_UNUSED notes unless necessary.
 935 When we reach a set the first time, we just expect it is
 936 the single set we are looking for; only when more
 937 sets are found in the insn do we check them. */
938 if (!set_verified)
940 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
941 && !side_effects_p (set))
942 set = NULL;
943 else
944 set_verified = 1;
946 if (!set)
947 set = sub, set_verified = 0;
948 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
949 || side_effects_p (sub))
950 return NULL_RTX;
951 break;
953 default:
954 return NULL_RTX;
958 return set;
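/* Illustrative sketch (not part of the original file): callers normally go
   through the single_set wrapper from rtl.h rather than single_set_2
   directly.  A typical use fetches the lone SET and inspects its operands;
   the surrounding INSN variable is hypothetical.  */
#if 0
  rtx set = single_set (insn);
  if (set != NULL_RTX
      && REG_P (SET_DEST (set))
      && !side_effects_p (SET_SRC (set)))
    {
      /* INSN is a simple assignment to a register; SET_SRC (set) can be
	 analyzed or substituted.  */
    }
#endif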
961 /* Given an INSN, return nonzero if it has more than one SET, else return
962 zero. */
965 multiple_sets (rtx insn)
967 int found;
968 int i;
970 /* INSN must be an insn. */
971 if (! INSN_P (insn))
972 return 0;
974 /* Only a PARALLEL can have multiple SETs. */
975 if (GET_CODE (PATTERN (insn)) == PARALLEL)
977 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
978 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
980 /* If we have already found a SET, then return now. */
981 if (found)
982 return 1;
983 else
984 found = 1;
988 /* Either zero or one SET. */
989 return 0;
992 /* Return nonzero if the destination of SET equals the source
993 and there are no side effects. */
996 set_noop_p (rtx set)
998 rtx src = SET_SRC (set);
999 rtx dst = SET_DEST (set);
1001 if (dst == pc_rtx && src == pc_rtx)
1002 return 1;
1004 if (MEM_P (dst) && MEM_P (src))
1005 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1007 if (GET_CODE (dst) == ZERO_EXTRACT)
1008 return rtx_equal_p (XEXP (dst, 0), src)
1009 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1010 && !side_effects_p (src);
1012 if (GET_CODE (dst) == STRICT_LOW_PART)
1013 dst = XEXP (dst, 0);
1015 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1017 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1018 return 0;
1019 src = SUBREG_REG (src);
1020 dst = SUBREG_REG (dst);
1023 return (REG_P (src) && REG_P (dst)
1024 && REGNO (src) == REGNO (dst));
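/* Illustrative sketch (not part of the original file): a cleanup pass can
   combine noop_move_p (below) with delete_insn to remove register
   self-copies.  The loop variable is hypothetical.  */
#if 0
  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && noop_move_p (insn))
      delete_insn (insn);
#endif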
1027 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1028 value to itself. */
1031 noop_move_p (rtx insn)
1033 rtx pat = PATTERN (insn);
1035 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1036 return 1;
1038 /* Insns carrying these notes are useful later on. */
1039 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1040 return 0;
 1042 /* For now treat an insn with a REG_RETVAL note as a
 1043 special insn which should not be considered a no-op. */
1044 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1045 return 0;
1047 if (GET_CODE (pat) == SET && set_noop_p (pat))
1048 return 1;
1050 if (GET_CODE (pat) == PARALLEL)
1052 int i;
1053 /* If nothing but SETs of registers to themselves,
1054 this insn can also be deleted. */
1055 for (i = 0; i < XVECLEN (pat, 0); i++)
1057 rtx tem = XVECEXP (pat, 0, i);
1059 if (GET_CODE (tem) == USE
1060 || GET_CODE (tem) == CLOBBER)
1061 continue;
1063 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1064 return 0;
1067 return 1;
1069 return 0;
1073 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1074 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1075 If the object was modified, if we hit a partial assignment to X, or hit a
1076 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1077 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1078 be the src. */
1081 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1083 rtx p;
1085 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1086 p = PREV_INSN (p))
1087 if (INSN_P (p))
1089 rtx set = single_set (p);
1090 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1092 if (set && rtx_equal_p (x, SET_DEST (set)))
1094 rtx src = SET_SRC (set);
1096 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1097 src = XEXP (note, 0);
1099 if ((valid_to == NULL_RTX
1100 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1101 /* Reject hard registers because we don't usually want
1102 to use them; we'd rather use a pseudo. */
1103 && (! (REG_P (src)
1104 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1106 *pinsn = p;
1107 return src;
1111 /* If set in non-simple way, we don't have a value. */
1112 if (reg_set_p (x, p))
1113 break;
1116 return x;
1119 /* Return nonzero if register in range [REGNO, ENDREGNO)
1120 appears either explicitly or implicitly in X
1121 other than being stored into.
1123 References contained within the substructure at LOC do not count.
1124 LOC may be zero, meaning don't ignore anything. */
1127 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1128 rtx *loc)
1130 int i;
1131 unsigned int x_regno;
1132 RTX_CODE code;
1133 const char *fmt;
1135 repeat:
 1136 /* The contents of a REG_NONNEG note are always zero, so we must come here
1137 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1138 if (x == 0)
1139 return 0;
1141 code = GET_CODE (x);
1143 switch (code)
1145 case REG:
1146 x_regno = REGNO (x);
 1148 /* If we are modifying the stack, frame, or argument pointer, it will
1149 clobber a virtual register. In fact, we could be more precise,
1150 but it isn't worth it. */
1151 if ((x_regno == STACK_POINTER_REGNUM
1152 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1153 || x_regno == ARG_POINTER_REGNUM
1154 #endif
1155 || x_regno == FRAME_POINTER_REGNUM)
1156 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1157 return 1;
1159 return (endregno > x_regno
1160 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1161 ? hard_regno_nregs[x_regno][GET_MODE (x)]
1162 : 1));
1164 case SUBREG:
1165 /* If this is a SUBREG of a hard reg, we can see exactly which
1166 registers are being modified. Otherwise, handle normally. */
1167 if (REG_P (SUBREG_REG (x))
1168 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1170 unsigned int inner_regno = subreg_regno (x);
1171 unsigned int inner_endregno
1172 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1173 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
1175 return endregno > inner_regno && regno < inner_endregno;
1177 break;
1179 case CLOBBER:
1180 case SET:
1181 if (&SET_DEST (x) != loc
1182 /* Note setting a SUBREG counts as referring to the REG it is in for
1183 a pseudo but not for hard registers since we can
1184 treat each word individually. */
1185 && ((GET_CODE (SET_DEST (x)) == SUBREG
1186 && loc != &SUBREG_REG (SET_DEST (x))
1187 && REG_P (SUBREG_REG (SET_DEST (x)))
1188 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1189 && refers_to_regno_p (regno, endregno,
1190 SUBREG_REG (SET_DEST (x)), loc))
1191 || (!REG_P (SET_DEST (x))
1192 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1193 return 1;
1195 if (code == CLOBBER || loc == &SET_SRC (x))
1196 return 0;
1197 x = SET_SRC (x);
1198 goto repeat;
1200 default:
1201 break;
1204 /* X does not match, so try its subexpressions. */
1206 fmt = GET_RTX_FORMAT (code);
1207 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1209 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1211 if (i == 0)
1213 x = XEXP (x, 0);
1214 goto repeat;
1216 else
1217 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1218 return 1;
1220 else if (fmt[i] == 'E')
1222 int j;
1223 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1224 if (loc != &XVECEXP (x, i, j)
1225 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1226 return 1;
1229 return 0;
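/* Illustrative sketch (not part of the original file): REGNO/ENDREGNO form
   a half-open range, so a caller asking whether any word of a (possibly
   multi-word) hard register X is referenced in IN computes the range as
   shown.  The X and IN variables are hypothetical.  */
#if 0
  unsigned int r = REGNO (x);
  unsigned int end = r + (r < FIRST_PSEUDO_REGISTER
			  ? hard_regno_nregs[r][GET_MODE (x)] : 1);
  if (refers_to_regno_p (r, end, in, NULL))
    {
      /* Some register in [r, end) is referenced in IN.  */
    }
#endif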
1232 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1233 we check if any register number in X conflicts with the relevant register
1234 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1235 contains a MEM (we don't bother checking for memory addresses that can't
 1236 conflict because we expect this to be a rare case). */
1239 reg_overlap_mentioned_p (rtx x, rtx in)
1241 unsigned int regno, endregno;
 1243 /* If either argument is a constant, then modifying X cannot
 1244 affect IN.  Here we look at IN; below we can profitably combine
 1245 CONSTANT_P (x) with the switch statement. */
1246 if (CONSTANT_P (in))
1247 return 0;
1249 recurse:
1250 switch (GET_CODE (x))
1252 case STRICT_LOW_PART:
1253 case ZERO_EXTRACT:
1254 case SIGN_EXTRACT:
1255 /* Overly conservative. */
1256 x = XEXP (x, 0);
1257 goto recurse;
1259 case SUBREG:
1260 regno = REGNO (SUBREG_REG (x));
1261 if (regno < FIRST_PSEUDO_REGISTER)
1262 regno = subreg_regno (x);
1263 goto do_reg;
1265 case REG:
1266 regno = REGNO (x);
1267 do_reg:
1268 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1269 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1270 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1272 case MEM:
1274 const char *fmt;
1275 int i;
1277 if (MEM_P (in))
1278 return 1;
1280 fmt = GET_RTX_FORMAT (GET_CODE (in));
1281 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1282 if (fmt[i] == 'e')
1284 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1285 return 1;
1287 else if (fmt[i] == 'E')
1289 int j;
1290 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1291 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1292 return 1;
1295 return 0;
1298 case SCRATCH:
1299 case PC:
1300 case CC0:
1301 return reg_mentioned_p (x, in);
1303 case PARALLEL:
1305 int i;
1307 /* If any register in here refers to it we return true. */
1308 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1309 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1310 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1311 return 1;
1312 return 0;
1315 default:
1316 gcc_assert (CONSTANT_P (x));
1317 return 0;
1321 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1322 (X would be the pattern of an insn).
1323 FUN receives two arguments:
1324 the REG, MEM, CC0 or PC being stored in or clobbered,
1325 the SET or CLOBBER rtx that does the store.
1327 If the item being stored in or clobbered is a SUBREG of a hard register,
1328 the SUBREG will be passed. */
1330 void
1331 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1333 int i;
1335 if (GET_CODE (x) == COND_EXEC)
1336 x = COND_EXEC_CODE (x);
1338 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1340 rtx dest = SET_DEST (x);
1342 while ((GET_CODE (dest) == SUBREG
1343 && (!REG_P (SUBREG_REG (dest))
1344 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1345 || GET_CODE (dest) == ZERO_EXTRACT
1346 || GET_CODE (dest) == STRICT_LOW_PART)
1347 dest = XEXP (dest, 0);
1349 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1350 each of whose first operand is a register. */
1351 if (GET_CODE (dest) == PARALLEL)
1353 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1354 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1355 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1357 else
1358 (*fun) (dest, x, data);
1361 else if (GET_CODE (x) == PARALLEL)
1362 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1363 note_stores (XVECEXP (x, 0, i), fun, data);
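/* Illustrative sketch (not part of the original file): the usual way to
   consume note_stores is a small callback plus a data pointer, in the same
   style as set_of_1 above.  The record_clobbered_reg name is
   hypothetical.  */
#if 0
static void
record_clobbered_reg (rtx dest, rtx setter ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *clobbered = (HARD_REG_SET *) data;
  if (REG_P (dest) && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    SET_HARD_REG_BIT (*clobbered, REGNO (dest));
}

/* ... note_stores (PATTERN (insn), record_clobbered_reg, &clobbered); ... */
#endif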
 1366 /* Like note_stores, but call FUN for each expression that is being
1367 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1368 FUN for each expression, not any interior subexpressions. FUN receives a
1369 pointer to the expression and the DATA passed to this function.
1371 Note that this is not quite the same test as that done in reg_referenced_p
1372 since that considers something as being referenced if it is being
1373 partially set, while we do not. */
1375 void
1376 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1378 rtx body = *pbody;
1379 int i;
1381 switch (GET_CODE (body))
1383 case COND_EXEC:
1384 (*fun) (&COND_EXEC_TEST (body), data);
1385 note_uses (&COND_EXEC_CODE (body), fun, data);
1386 return;
1388 case PARALLEL:
1389 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1390 note_uses (&XVECEXP (body, 0, i), fun, data);
1391 return;
1393 case USE:
1394 (*fun) (&XEXP (body, 0), data);
1395 return;
1397 case ASM_OPERANDS:
1398 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1399 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1400 return;
1402 case TRAP_IF:
1403 (*fun) (&TRAP_CONDITION (body), data);
1404 return;
1406 case PREFETCH:
1407 (*fun) (&XEXP (body, 0), data);
1408 return;
1410 case UNSPEC:
1411 case UNSPEC_VOLATILE:
1412 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1413 (*fun) (&XVECEXP (body, 0, i), data);
1414 return;
1416 case CLOBBER:
1417 if (MEM_P (XEXP (body, 0)))
1418 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1419 return;
1421 case SET:
1423 rtx dest = SET_DEST (body);
 1425 /* For sets we replace everything in the source plus registers in the
 1426 memory expression of the store and the operands of a ZERO_EXTRACT. */
1427 (*fun) (&SET_SRC (body), data);
1429 if (GET_CODE (dest) == ZERO_EXTRACT)
1431 (*fun) (&XEXP (dest, 1), data);
1432 (*fun) (&XEXP (dest, 2), data);
1435 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1436 dest = XEXP (dest, 0);
1438 if (MEM_P (dest))
1439 (*fun) (&XEXP (dest, 0), data);
1441 return;
1443 default:
1444 /* All the other possibilities never store. */
1445 (*fun) (pbody, data);
1446 return;
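/* Illustrative sketch (not part of the original file): note_uses hands the
   callback a pointer to each used expression, which also makes in-place
   rewriting convenient.  The count_reg_uses name is hypothetical.  */
#if 0
static void
count_reg_uses (rtx *use_p, void *data)
{
  int *n_uses = (int *) data;
  if (REG_P (*use_p))
    (*n_uses)++;
}

/* ... note_uses (&PATTERN (insn), count_reg_uses, &n_uses); ... */
#endif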
1450 /* Return nonzero if X's old contents don't survive after INSN.
1451 This will be true if X is (cc0) or if X is a register and
1452 X dies in INSN or because INSN entirely sets X.
1454 "Entirely set" means set directly and not through a SUBREG, or
1455 ZERO_EXTRACT, so no trace of the old contents remains.
1456 Likewise, REG_INC does not count.
1458 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1459 but for this use that makes no difference, since regs don't overlap
1460 during their lifetimes. Therefore, this function may be used
1461 at any time after deaths have been computed (in flow.c).
1463 If REG is a hard reg that occupies multiple machine registers, this
1464 function will only return 1 if each of those registers will be replaced
1465 by INSN. */
1468 dead_or_set_p (rtx insn, rtx x)
1470 unsigned int regno, last_regno;
1471 unsigned int i;
1473 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1474 if (GET_CODE (x) == CC0)
1475 return 1;
1477 gcc_assert (REG_P (x));
1479 regno = REGNO (x);
1480 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1481 : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
1483 for (i = regno; i <= last_regno; i++)
1484 if (! dead_or_set_regno_p (insn, i))
1485 return 0;
1487 return 1;
1490 /* Return TRUE iff DEST is a register or subreg of a register and
1491 doesn't change the number of words of the inner register, and any
1492 part of the register is TEST_REGNO. */
1494 static bool
1495 covers_regno_no_parallel_p (rtx dest, unsigned int test_regno)
1497 unsigned int regno, endregno;
1499 if (GET_CODE (dest) == SUBREG
1500 && (((GET_MODE_SIZE (GET_MODE (dest))
1501 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1502 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1503 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1504 dest = SUBREG_REG (dest);
1506 if (!REG_P (dest))
1507 return false;
1509 regno = REGNO (dest);
1510 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1511 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1512 return (test_regno >= regno && test_regno < endregno);
1515 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1516 any member matches the covers_regno_no_parallel_p criteria. */
1518 static bool
1519 covers_regno_p (rtx dest, unsigned int test_regno)
1521 if (GET_CODE (dest) == PARALLEL)
1523 /* Some targets place small structures in registers for return
1524 values of functions, and those registers are wrapped in
1525 PARALLELs that we may see as the destination of a SET. */
1526 int i;
1528 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1530 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1531 if (inner != NULL_RTX
1532 && covers_regno_no_parallel_p (inner, test_regno))
1533 return true;
1536 return false;
1538 else
1539 return covers_regno_no_parallel_p (dest, test_regno);
1542 /* Utility function for dead_or_set_p to check an individual register. Also
1543 called from flow.c. */
1546 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1548 rtx pattern;
1550 /* See if there is a death note for something that includes TEST_REGNO. */
1551 if (find_regno_note (insn, REG_DEAD, test_regno))
1552 return 1;
1554 if (CALL_P (insn)
1555 && find_regno_fusage (insn, CLOBBER, test_regno))
1556 return 1;
1558 pattern = PATTERN (insn);
1560 if (GET_CODE (pattern) == COND_EXEC)
1561 pattern = COND_EXEC_CODE (pattern);
1563 if (GET_CODE (pattern) == SET)
1564 return covers_regno_p (SET_DEST (pattern), test_regno);
1565 else if (GET_CODE (pattern) == PARALLEL)
1567 int i;
1569 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1571 rtx body = XVECEXP (pattern, 0, i);
1573 if (GET_CODE (body) == COND_EXEC)
1574 body = COND_EXEC_CODE (body);
1576 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1577 && covers_regno_p (SET_DEST (body), test_regno))
1578 return 1;
1582 return 0;
1585 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1586 If DATUM is nonzero, look for one whose datum is DATUM. */
1589 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1591 rtx link;
1593 gcc_assert (insn);
1595 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1596 if (! INSN_P (insn))
1597 return 0;
1598 if (datum == 0)
1600 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1601 if (REG_NOTE_KIND (link) == kind)
1602 return link;
1603 return 0;
1606 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1607 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1608 return link;
1609 return 0;
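/* Illustrative sketch (not part of the original file): a typical lookup of
   a REG_EQUAL note attached to an insn.  The INSN variable is
   hypothetical.  */
#if 0
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  if (note != NULL_RTX)
    {
      rtx equal_value = XEXP (note, 0);
      /* EQUAL_VALUE describes the value stored by the insn's single set.  */
    }
#endif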
1612 /* Return the reg-note of kind KIND in insn INSN which applies to register
1613 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1614 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1615 it might be the case that the note overlaps REGNO. */
1618 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1620 rtx link;
1622 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1623 if (! INSN_P (insn))
1624 return 0;
1626 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1627 if (REG_NOTE_KIND (link) == kind
1628 /* Verify that it is a register, so that scratch and MEM won't cause a
1629 problem here. */
1630 && REG_P (XEXP (link, 0))
1631 && REGNO (XEXP (link, 0)) <= regno
1632 && ((REGNO (XEXP (link, 0))
1633 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1634 : hard_regno_nregs[REGNO (XEXP (link, 0))]
1635 [GET_MODE (XEXP (link, 0))]))
1636 > regno))
1637 return link;
1638 return 0;
1641 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1642 has such a note. */
1645 find_reg_equal_equiv_note (rtx insn)
1647 rtx link;
1649 if (!INSN_P (insn))
1650 return 0;
1651 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1652 if (REG_NOTE_KIND (link) == REG_EQUAL
1653 || REG_NOTE_KIND (link) == REG_EQUIV)
1655 if (single_set (insn) == 0)
1656 return 0;
1657 return link;
1659 return NULL;
1662 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1663 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1666 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1668 /* If it's not a CALL_INSN, it can't possibly have a
1669 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1670 if (!CALL_P (insn))
1671 return 0;
1673 gcc_assert (datum);
1675 if (!REG_P (datum))
1677 rtx link;
1679 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1680 link;
1681 link = XEXP (link, 1))
1682 if (GET_CODE (XEXP (link, 0)) == code
1683 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1684 return 1;
1686 else
1688 unsigned int regno = REGNO (datum);
1690 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1691 to pseudo registers, so don't bother checking. */
1693 if (regno < FIRST_PSEUDO_REGISTER)
1695 unsigned int end_regno
1696 = regno + hard_regno_nregs[regno][GET_MODE (datum)];
1697 unsigned int i;
1699 for (i = regno; i < end_regno; i++)
1700 if (find_regno_fusage (insn, code, i))
1701 return 1;
1705 return 0;
1708 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1709 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1712 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
1714 rtx link;
1716 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1717 to pseudo registers, so don't bother checking. */
1719 if (regno >= FIRST_PSEUDO_REGISTER
1720 || !CALL_P (insn) )
1721 return 0;
1723 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1725 unsigned int regnote;
1726 rtx op, reg;
1728 if (GET_CODE (op = XEXP (link, 0)) == code
1729 && REG_P (reg = XEXP (op, 0))
1730 && (regnote = REGNO (reg)) <= regno
1731 && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
1732 return 1;
1735 return 0;
1738 /* Return true if INSN is a call to a pure function. */
1741 pure_call_p (rtx insn)
1743 rtx link;
1745 if (!CALL_P (insn) || ! CONST_OR_PURE_CALL_P (insn))
1746 return 0;
1748 /* Look for the note that differentiates const and pure functions. */
1749 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1751 rtx u, m;
1753 if (GET_CODE (u = XEXP (link, 0)) == USE
1754 && MEM_P (m = XEXP (u, 0)) && GET_MODE (m) == BLKmode
1755 && GET_CODE (XEXP (m, 0)) == SCRATCH)
1756 return 1;
1759 return 0;
1762 /* Remove register note NOTE from the REG_NOTES of INSN. */
1764 void
1765 remove_note (rtx insn, rtx note)
1767 rtx link;
1769 if (note == NULL_RTX)
1770 return;
1772 if (REG_NOTES (insn) == note)
1774 REG_NOTES (insn) = XEXP (note, 1);
1775 return;
1778 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1779 if (XEXP (link, 1) == note)
1781 XEXP (link, 1) = XEXP (note, 1);
1782 return;
1785 gcc_unreachable ();
1788 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1789 return 1 if it is found. A simple equality test is used to determine if
1790 NODE matches. */
1793 in_expr_list_p (rtx listp, rtx node)
1795 rtx x;
1797 for (x = listp; x; x = XEXP (x, 1))
1798 if (node == XEXP (x, 0))
1799 return 1;
1801 return 0;
1804 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1805 remove that entry from the list if it is found.
1807 A simple equality test is used to determine if NODE matches. */
1809 void
1810 remove_node_from_expr_list (rtx node, rtx *listp)
1812 rtx temp = *listp;
1813 rtx prev = NULL_RTX;
1815 while (temp)
1817 if (node == XEXP (temp, 0))
1819 /* Splice the node out of the list. */
1820 if (prev)
1821 XEXP (prev, 1) = XEXP (temp, 1);
1822 else
1823 *listp = XEXP (temp, 1);
1825 return;
1828 prev = temp;
1829 temp = XEXP (temp, 1);
1833 /* Nonzero if X contains any volatile instructions. These are instructions
 1834 which may cause unpredictable machine state, and thus no
1835 instructions should be moved or combined across them. This includes
1836 only volatile asms and UNSPEC_VOLATILE instructions. */
1839 volatile_insn_p (rtx x)
1841 RTX_CODE code;
1843 code = GET_CODE (x);
1844 switch (code)
1846 case LABEL_REF:
1847 case SYMBOL_REF:
1848 case CONST_INT:
1849 case CONST:
1850 case CONST_DOUBLE:
1851 case CONST_VECTOR:
1852 case CC0:
1853 case PC:
1854 case REG:
1855 case SCRATCH:
1856 case CLOBBER:
1857 case ADDR_VEC:
1858 case ADDR_DIFF_VEC:
1859 case CALL:
1860 case MEM:
1861 return 0;
1863 case UNSPEC_VOLATILE:
1864 /* case TRAP_IF: This isn't clear yet. */
1865 return 1;
1867 case ASM_INPUT:
1868 case ASM_OPERANDS:
1869 if (MEM_VOLATILE_P (x))
1870 return 1;
1872 default:
1873 break;
1876 /* Recursively scan the operands of this expression. */
1879 const char *fmt = GET_RTX_FORMAT (code);
1880 int i;
1882 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1884 if (fmt[i] == 'e')
1886 if (volatile_insn_p (XEXP (x, i)))
1887 return 1;
1889 else if (fmt[i] == 'E')
1891 int j;
1892 for (j = 0; j < XVECLEN (x, i); j++)
1893 if (volatile_insn_p (XVECEXP (x, i, j)))
1894 return 1;
1898 return 0;
 1901 /* Nonzero if X contains any volatile memory references,
 1902 UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions. */
1905 volatile_refs_p (rtx x)
1907 RTX_CODE code;
1909 code = GET_CODE (x);
1910 switch (code)
1912 case LABEL_REF:
1913 case SYMBOL_REF:
1914 case CONST_INT:
1915 case CONST:
1916 case CONST_DOUBLE:
1917 case CONST_VECTOR:
1918 case CC0:
1919 case PC:
1920 case REG:
1921 case SCRATCH:
1922 case CLOBBER:
1923 case ADDR_VEC:
1924 case ADDR_DIFF_VEC:
1925 return 0;
1927 case UNSPEC_VOLATILE:
1928 return 1;
1930 case MEM:
1931 case ASM_INPUT:
1932 case ASM_OPERANDS:
1933 if (MEM_VOLATILE_P (x))
1934 return 1;
1936 default:
1937 break;
1940 /* Recursively scan the operands of this expression. */
1943 const char *fmt = GET_RTX_FORMAT (code);
1944 int i;
1946 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1948 if (fmt[i] == 'e')
1950 if (volatile_refs_p (XEXP (x, i)))
1951 return 1;
1953 else if (fmt[i] == 'E')
1955 int j;
1956 for (j = 0; j < XVECLEN (x, i); j++)
1957 if (volatile_refs_p (XVECEXP (x, i, j)))
1958 return 1;
1962 return 0;
1965 /* Similar to above, except that it also rejects register pre- and post-
1966 incrementing. */
1969 side_effects_p (rtx x)
1971 RTX_CODE code;
1973 code = GET_CODE (x);
1974 switch (code)
1976 case LABEL_REF:
1977 case SYMBOL_REF:
1978 case CONST_INT:
1979 case CONST:
1980 case CONST_DOUBLE:
1981 case CONST_VECTOR:
1982 case CC0:
1983 case PC:
1984 case REG:
1985 case SCRATCH:
1986 case ADDR_VEC:
1987 case ADDR_DIFF_VEC:
1988 return 0;
1990 case CLOBBER:
1991 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
1992 when some combination can't be done. If we see one, don't think
1993 that we can simplify the expression. */
1994 return (GET_MODE (x) != VOIDmode);
1996 case PRE_INC:
1997 case PRE_DEC:
1998 case POST_INC:
1999 case POST_DEC:
2000 case PRE_MODIFY:
2001 case POST_MODIFY:
2002 case CALL:
2003 case UNSPEC_VOLATILE:
2004 /* case TRAP_IF: This isn't clear yet. */
2005 return 1;
2007 case MEM:
2008 case ASM_INPUT:
2009 case ASM_OPERANDS:
2010 if (MEM_VOLATILE_P (x))
2011 return 1;
2013 default:
2014 break;
2017 /* Recursively scan the operands of this expression. */
2020 const char *fmt = GET_RTX_FORMAT (code);
2021 int i;
2023 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2025 if (fmt[i] == 'e')
2027 if (side_effects_p (XEXP (x, i)))
2028 return 1;
2030 else if (fmt[i] == 'E')
2032 int j;
2033 for (j = 0; j < XVECLEN (x, i); j++)
2034 if (side_effects_p (XVECEXP (x, i, j)))
2035 return 1;
2039 return 0;
2042 enum may_trap_p_flags
2044 MTP_UNALIGNED_MEMS = 1,
2045 MTP_AFTER_MOVE = 2
2047 /* Return nonzero if evaluating rtx X might cause a trap.
2048 (FLAGS & MTP_UNALIGNED_MEMS) controls whether nonzero is returned for
2049 unaligned memory accesses on strict alignment machines. If
 2050 (FLAGS & MTP_AFTER_MOVE) is true, returns nonzero even if the expression
2051 cannot trap at its current location, but it might become trapping if moved
2052 elsewhere. */
2054 static int
2055 may_trap_p_1 (rtx x, unsigned flags)
2057 int i;
2058 enum rtx_code code;
2059 const char *fmt;
2060 bool unaligned_mems = (flags & MTP_UNALIGNED_MEMS) != 0;
2062 if (x == 0)
2063 return 0;
2064 code = GET_CODE (x);
2065 switch (code)
2067 /* Handle these cases quickly. */
2068 case CONST_INT:
2069 case CONST_DOUBLE:
2070 case CONST_VECTOR:
2071 case SYMBOL_REF:
2072 case LABEL_REF:
2073 case CONST:
2074 case PC:
2075 case CC0:
2076 case REG:
2077 case SCRATCH:
2078 return 0;
2080 case ASM_INPUT:
2081 case UNSPEC_VOLATILE:
2082 case TRAP_IF:
2083 return 1;
2085 case ASM_OPERANDS:
2086 return MEM_VOLATILE_P (x);
2088 /* Memory ref can trap unless it's a static var or a stack slot. */
2089 case MEM:
2090 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
 2091 reference; moving it out of a condition might cause its address
 2092 to become invalid. */
2093 !(flags & MTP_AFTER_MOVE)
2094 && MEM_NOTRAP_P (x)
2095 && (!STRICT_ALIGNMENT || !unaligned_mems))
2096 return 0;
2097 return
2098 rtx_addr_can_trap_p_1 (XEXP (x, 0), GET_MODE (x), unaligned_mems);
2100 /* Division by a non-constant might trap. */
2101 case DIV:
2102 case MOD:
2103 case UDIV:
2104 case UMOD:
2105 if (HONOR_SNANS (GET_MODE (x)))
2106 return 1;
2107 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2108 return flag_trapping_math;
2109 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2110 return 1;
2111 break;
2113 case EXPR_LIST:
2114 /* An EXPR_LIST is used to represent a function call. This
2115 certainly may trap. */
2116 return 1;
2118 case GE:
2119 case GT:
2120 case LE:
2121 case LT:
2122 case LTGT:
2123 case COMPARE:
2124 /* Some floating point comparisons may trap. */
2125 if (!flag_trapping_math)
2126 break;
2127 /* ??? There is no machine independent way to check for tests that trap
2128 when COMPARE is used, though many targets do make this distinction.
2129 For instance, sparc uses CCFPE for compares which generate exceptions
2130 and CCFP for compares which do not generate exceptions. */
2131 if (HONOR_NANS (GET_MODE (x)))
2132 return 1;
2133 /* But often the compare has some CC mode, so check operand
2134 modes as well. */
2135 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2136 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2137 return 1;
2138 break;
2140 case EQ:
2141 case NE:
2142 if (HONOR_SNANS (GET_MODE (x)))
2143 return 1;
2144 /* Often comparison is CC mode, so check operand modes. */
2145 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2146 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2147 return 1;
2148 break;
2150 case FIX:
2151 /* Conversion of floating point might trap. */
2152 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2153 return 1;
2154 break;
2156 case NEG:
2157 case ABS:
2158 case SUBREG:
2159 /* These operations don't trap even with floating point. */
2160 break;
2162 default:
2163 /* Any floating arithmetic may trap. */
2164 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2165 && flag_trapping_math)
2166 return 1;
2169 fmt = GET_RTX_FORMAT (code);
2170 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2172 if (fmt[i] == 'e')
2174 if (may_trap_p_1 (XEXP (x, i), flags))
2175 return 1;
2177 else if (fmt[i] == 'E')
2179 int j;
2180 for (j = 0; j < XVECLEN (x, i); j++)
2181 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2182 return 1;
2185 return 0;
2188 /* Return nonzero if evaluating rtx X might cause a trap. */
2191 may_trap_p (rtx x)
2193 return may_trap_p_1 (x, 0);
2196 /* Return nonzero if evaluating rtx X might cause a trap, when the expression
2197 is moved from its current location by some optimization. */
2200 may_trap_after_code_motion_p (rtx x)
2202 return may_trap_p_1 (x, MTP_AFTER_MOVE);
2205 /* Same as above, but additionally return nonzero if evaluating rtx X might
 2206 cause a fault.  We define a fault for the purpose of this function as an
2207 erroneous execution condition that cannot be encountered during the normal
2208 execution of a valid program; the typical example is an unaligned memory
2209 access on a strict alignment machine. The compiler guarantees that it
2210 doesn't generate code that will fault from a valid program, but this
2211 guarantee doesn't mean anything for individual instructions. Consider
2212 the following example:
2214 struct S { int d; union { char *cp; int *ip; }; };
2216 int foo(struct S *s)
2218 if (s->d == 1)
2219 return *s->ip;
2220 else
2221 return *s->cp;
2224 on a strict alignment machine. In a valid program, foo will never be
2225 invoked on a structure for which d is equal to 1 and the underlying
2226 unique field of the union not aligned on a 4-byte boundary, but the
2227 expression *s->ip might cause a fault if considered individually.
2229 At the RTL level, potentially problematic expressions will almost always
 2230 satisfy may_trap_p; for example, the above dereference can be emitted as
2231 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2232 However, suppose that foo is inlined in a caller that causes s->cp to
2233 point to a local character variable and guarantees that s->d is not set
2234 to 1; foo may have been effectively translated into pseudo-RTL as:
2236 if ((reg:SI) == 1)
2237 (set (reg:SI) (mem:SI (%fp - 7)))
2238 else
2239 (set (reg:QI) (mem:QI (%fp - 7)))
2241 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2242 memory reference to a stack slot, but it will certainly cause a fault
2243 on a strict alignment machine. */
2246 may_trap_or_fault_p (rtx x)
2248 return may_trap_p_1 (x, MTP_UNALIGNED_MEMS);
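/* Illustrative sketch (not part of the original file): an optimization that
   hoists an expression above a guarding condition should use this stricter
   predicate rather than may_trap_p.  The hoist_expression call is
   hypothetical.  */
#if 0
  if (!may_trap_or_fault_p (SET_SRC (set)))
    /* Safe to evaluate the expression speculatively, before the branch
       that used to guard it.  */
    hoist_expression (set);
#endif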
2251 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2252 i.e., an inequality. */
2255 inequality_comparisons_p (rtx x)
2257 const char *fmt;
2258 int len, i;
2259 enum rtx_code code = GET_CODE (x);
2261 switch (code)
2263 case REG:
2264 case SCRATCH:
2265 case PC:
2266 case CC0:
2267 case CONST_INT:
2268 case CONST_DOUBLE:
2269 case CONST_VECTOR:
2270 case CONST:
2271 case LABEL_REF:
2272 case SYMBOL_REF:
2273 return 0;
2275 case LT:
2276 case LTU:
2277 case GT:
2278 case GTU:
2279 case LE:
2280 case LEU:
2281 case GE:
2282 case GEU:
2283 return 1;
2285 default:
2286 break;
2289 len = GET_RTX_LENGTH (code);
2290 fmt = GET_RTX_FORMAT (code);
2292 for (i = 0; i < len; i++)
2294 if (fmt[i] == 'e')
2296 if (inequality_comparisons_p (XEXP (x, i)))
2297 return 1;
2299 else if (fmt[i] == 'E')
2301 int j;
2302 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2303 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2304 return 1;
2308 return 0;
2311 /* Replace any occurrence of FROM in X with TO. The function does
2312 not descend into CONST_DOUBLE expressions when doing the replacement.
2314 Note that copying is not done so X must not be shared unless all copies
2315 are to be modified. */
2318 replace_rtx (rtx x, rtx from, rtx to)
2320 int i, j;
2321 const char *fmt;
2323 /* The following prevents a loop when we change a MEM inside a
2324 CONST_DOUBLE into the same CONST_DOUBLE. */
2325 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2326 return x;
2328 if (x == from)
2329 return to;
2331 /* Allow this function to make replacements in EXPR_LISTs. */
2332 if (x == 0)
2333 return 0;
2335 if (GET_CODE (x) == SUBREG)
2337 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2339 if (GET_CODE (new) == CONST_INT)
2341 x = simplify_subreg (GET_MODE (x), new,
2342 GET_MODE (SUBREG_REG (x)),
2343 SUBREG_BYTE (x));
2344 gcc_assert (x);
2346 else
2347 SUBREG_REG (x) = new;
2349 return x;
2351 else if (GET_CODE (x) == ZERO_EXTEND)
2353 rtx new = replace_rtx (XEXP (x, 0), from, to);
2355 if (GET_CODE (new) == CONST_INT)
2357 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2358 new, GET_MODE (XEXP (x, 0)));
2359 gcc_assert (x);
2361 else
2362 XEXP (x, 0) = new;
2364 return x;
2367 fmt = GET_RTX_FORMAT (GET_CODE (x));
2368 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2370 if (fmt[i] == 'e')
2371 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2372 else if (fmt[i] == 'E')
2373 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2374 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2377 return x;
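/* Illustrative sketch, not part of rtlanal.c proper: assuming a pseudo REG
   and constants built with gen_rtx_REG, gen_rtx_PLUS and GEN_INT:

     rtx reg = gen_rtx_REG (SImode, 100);
     rtx sum = gen_rtx_PLUS (SImode, reg, GEN_INT (4));
     rtx res = replace_rtx (sum, reg, GEN_INT (8));

   RES is the very same object as SUM, now (plus:SI (const_int 8)
   (const_int 4)); no simplification is attempted and no copy is made,
   which is why X must not be shared.  */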
2380 /* Replace occurrences of the old label in *X with the new one.
2381 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2384 replace_label (rtx *x, void *data)
2386 rtx l = *x;
2387 rtx old_label = ((replace_label_data *) data)->r1;
2388 rtx new_label = ((replace_label_data *) data)->r2;
2389 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2391 if (l == NULL_RTX)
2392 return 0;
2394 if (GET_CODE (l) == SYMBOL_REF
2395 && CONSTANT_POOL_ADDRESS_P (l))
2397 rtx c = get_pool_constant (l);
2398 if (rtx_referenced_p (old_label, c))
2400 rtx new_c, new_l;
2401 replace_label_data *d = (replace_label_data *) data;
2403 /* Create a copy of constant C; replace the label inside
2404 but do not update LABEL_NUSES because uses in constant pool
2405 are not counted. */
2406 new_c = copy_rtx (c);
2407 d->update_label_nuses = false;
2408 for_each_rtx (&new_c, replace_label, data);
2409 d->update_label_nuses = update_label_nuses;
2411 /* Add the new constant NEW_C to constant pool and replace
2412 the old reference to constant by new reference. */
2413 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2414 *x = replace_rtx (l, l, new_l);
2416 return 0;
2419 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2420 field. This is not handled by for_each_rtx because it doesn't
2421 handle unprinted ('0') fields. */
2422 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2423 JUMP_LABEL (l) = new_label;
2425 if ((GET_CODE (l) == LABEL_REF
2426 || GET_CODE (l) == INSN_LIST)
2427 && XEXP (l, 0) == old_label)
2429 XEXP (l, 0) = new_label;
2430 if (update_label_nuses)
2432 ++LABEL_NUSES (new_label);
2433 --LABEL_NUSES (old_label);
2435 return 0;
2438 return 0;
2441 /* Return nonzero when *BODY is equal to X or X is directly referenced by
2442 *BODY, so that FOR_EACH_RTX stops traversing and also returns nonzero;
2443 otherwise FOR_EACH_RTX continues traversing *BODY. */
2445 static int
2446 rtx_referenced_p_1 (rtx *body, void *x)
2448 rtx y = (rtx) x;
2450 if (*body == NULL_RTX)
2451 return y == NULL_RTX;
2453 /* Return true if a label_ref *BODY refers to label Y. */
2454 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2455 return XEXP (*body, 0) == y;
2457 /* If *BODY is a reference to pool constant traverse the constant. */
2458 if (GET_CODE (*body) == SYMBOL_REF
2459 && CONSTANT_POOL_ADDRESS_P (*body))
2460 return rtx_referenced_p (y, get_pool_constant (*body));
2462 /* By default, compare the RTL expressions. */
2463 return rtx_equal_p (*body, y);
2466 /* Return true if X is referenced in BODY. */
2469 rtx_referenced_p (rtx x, rtx body)
2471 return for_each_rtx (&body, rtx_referenced_p_1, x);
2474 /* If INSN is a tablejump, return true and store the label (placed before the
2475 jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2477 bool
2478 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2480 rtx label, table;
2482 if (JUMP_P (insn)
2483 && (label = JUMP_LABEL (insn)) != NULL_RTX
2484 && (table = next_active_insn (label)) != NULL_RTX
2485 && JUMP_P (table)
2486 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2487 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2489 if (labelp)
2490 *labelp = label;
2491 if (tablep)
2492 *tablep = table;
2493 return true;
2495 return false;
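/* Illustrative sketch, not part of rtlanal.c proper: a typical caller that
   only needs the dispatch table, for some jump insn INSN, looks like

     rtx label, table;
     if (tablejump_p (insn, &label, &table))
       walk_cases (PATTERN (table));

   where PATTERN (table) is the ADDR_VEC or ADDR_DIFF_VEC holding the case
   labels (walk_cases is a hypothetical helper), and LABEL is the CODE_LABEL
   emitted just before the table.  */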
2498 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2499 constant that is not in the constant pool and not in the condition
2500 of an IF_THEN_ELSE. */
2502 static int
2503 computed_jump_p_1 (rtx x)
2505 enum rtx_code code = GET_CODE (x);
2506 int i, j;
2507 const char *fmt;
2509 switch (code)
2511 case LABEL_REF:
2512 case PC:
2513 return 0;
2515 case CONST:
2516 case CONST_INT:
2517 case CONST_DOUBLE:
2518 case CONST_VECTOR:
2519 case SYMBOL_REF:
2520 case REG:
2521 return 1;
2523 case MEM:
2524 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2525 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2527 case IF_THEN_ELSE:
2528 return (computed_jump_p_1 (XEXP (x, 1))
2529 || computed_jump_p_1 (XEXP (x, 2)));
2531 default:
2532 break;
2535 fmt = GET_RTX_FORMAT (code);
2536 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2538 if (fmt[i] == 'e'
2539 && computed_jump_p_1 (XEXP (x, i)))
2540 return 1;
2542 else if (fmt[i] == 'E')
2543 for (j = 0; j < XVECLEN (x, i); j++)
2544 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2545 return 1;
2548 return 0;
2551 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2553 Tablejumps and casesi insns are not considered indirect jumps;
2554 we can recognize them by a (use (label_ref)). */
2557 computed_jump_p (rtx insn)
2559 int i;
2560 if (JUMP_P (insn))
2562 rtx pat = PATTERN (insn);
2564 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2565 return 0;
2566 else if (GET_CODE (pat) == PARALLEL)
2568 int len = XVECLEN (pat, 0);
2569 int has_use_labelref = 0;
2571 for (i = len - 1; i >= 0; i--)
2572 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2573 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2574 == LABEL_REF))
2575 has_use_labelref = 1;
2577 if (! has_use_labelref)
2578 for (i = len - 1; i >= 0; i--)
2579 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2580 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2581 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2582 return 1;
2584 else if (GET_CODE (pat) == SET
2585 && SET_DEST (pat) == pc_rtx
2586 && computed_jump_p_1 (SET_SRC (pat)))
2587 return 1;
2589 return 0;
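/* Illustrative note, not part of rtlanal.c proper: for a plain indirect jump
   whose pattern is (set (pc) (reg:P r)), computed_jump_p returns 1 because
   the SET_SRC is a REG.  A casesi or tablejump instead expands to a PARALLEL
   that also contains (use (label_ref ...)), so the scan above sets
   HAS_USE_LABELREF and the function returns 0.  */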
2592 /* Optimized inner loop of for_each_rtx, trying to avoid unnecessary recursive
2593 calls. Processes the subexpressions of EXP and passes them to F. */
2594 static int
2595 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2597 int result, i, j;
2598 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2599 rtx *x;
2601 for (; format[n] != '\0'; n++)
2603 switch (format[n])
2605 case 'e':
2606 /* Call F on X. */
2607 x = &XEXP (exp, n);
2608 result = (*f) (x, data);
2609 if (result == -1)
2610 /* Do not traverse sub-expressions. */
2611 continue;
2612 else if (result != 0)
2613 /* Stop the traversal. */
2614 return result;
2616 if (*x == NULL_RTX)
2617 /* There are no sub-expressions. */
2618 continue;
2620 i = non_rtx_starting_operands[GET_CODE (*x)];
2621 if (i >= 0)
2623 result = for_each_rtx_1 (*x, i, f, data);
2624 if (result != 0)
2625 return result;
2627 break;
2629 case 'V':
2630 case 'E':
2631 if (XVEC (exp, n) == 0)
2632 continue;
2633 for (j = 0; j < XVECLEN (exp, n); ++j)
2635 /* Call F on X. */
2636 x = &XVECEXP (exp, n, j);
2637 result = (*f) (x, data);
2638 if (result == -1)
2639 /* Do not traverse sub-expressions. */
2640 continue;
2641 else if (result != 0)
2642 /* Stop the traversal. */
2643 return result;
2645 if (*x == NULL_RTX)
2646 /* There are no sub-expressions. */
2647 continue;
2649 i = non_rtx_starting_operands[GET_CODE (*x)];
2650 if (i >= 0)
2652 result = for_each_rtx_1 (*x, i, f, data);
2653 if (result != 0)
2654 return result;
2657 break;
2659 default:
2660 /* Nothing to do. */
2661 break;
2665 return 0;
2668 /* Traverse X via depth-first search, calling F for each
2669 sub-expression (including X itself). F is also passed the DATA.
2670 If F returns -1, do not traverse sub-expressions, but continue
2671 traversing the rest of the tree. If F ever returns any other
2672 nonzero value, stop the traversal, and return the value returned
2673 by F. Otherwise, return 0. This function does not traverse inside
2674 tree structure that contains RTX_EXPRs, or into sub-expressions
2675 whose format code is `0' since it is not known whether or not those
2676 codes are actually RTL.
2678 This routine is very general, and could (should?) be used to
2679 implement many of the other routines in this file. */
2682 for_each_rtx (rtx *x, rtx_function f, void *data)
2684 int result;
2685 int i;
2687 /* Call F on X. */
2688 result = (*f) (x, data);
2689 if (result == -1)
2690 /* Do not traverse sub-expressions. */
2691 return 0;
2692 else if (result != 0)
2693 /* Stop the traversal. */
2694 return result;
2696 if (*x == NULL_RTX)
2697 /* There are no sub-expressions. */
2698 return 0;
2700 i = non_rtx_starting_operands[GET_CODE (*x)];
2701 if (i < 0)
2702 return 0;
2704 return for_each_rtx_1 (*x, i, f, data);
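/* Illustrative sketch, not part of rtlanal.c proper: a callback obeying the
   protocol documented above (return -1 to skip children, any other nonzero
   value to abort the walk), here simply counting REG sub-expressions:

     static int
     count_regs_1 (rtx *loc, void *data)
     {
       if (*loc != NULL_RTX && REG_P (*loc))
         ++*(int *) data;
       return 0;
     }

     int nregs = 0;
     for_each_rtx (&x, count_regs_1, &nregs);

   where X is any rtx of interest, e.g. PATTERN (insn).  */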
2708 /* Searches X for any reference to REGNO, returning the rtx of the
2709 reference found if any. Otherwise, returns NULL_RTX. */
2712 regno_use_in (unsigned int regno, rtx x)
2714 const char *fmt;
2715 int i, j;
2716 rtx tem;
2718 if (REG_P (x) && REGNO (x) == regno)
2719 return x;
2721 fmt = GET_RTX_FORMAT (GET_CODE (x));
2722 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2724 if (fmt[i] == 'e')
2726 if ((tem = regno_use_in (regno, XEXP (x, i))))
2727 return tem;
2729 else if (fmt[i] == 'E')
2730 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2731 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2732 return tem;
2735 return NULL_RTX;
2738 /* Return a value indicating whether OP, an operand of a commutative
2739 operation, is preferred as the first or second operand. The higher
2740 the value, the stronger the preference for being the first operand.
2741 We use negative values to indicate a preference for the first operand
2742 and positive values for the second operand. */
2745 commutative_operand_precedence (rtx op)
2747 enum rtx_code code = GET_CODE (op);
2749 /* Constants always become the second operand. Prefer "nice" constants. */
2750 if (code == CONST_INT)
2751 return -7;
2752 if (code == CONST_DOUBLE)
2753 return -6;
2754 op = avoid_constant_pool_reference (op);
2755 code = GET_CODE (op);
2757 switch (GET_RTX_CLASS (code))
2759 case RTX_CONST_OBJ:
2760 if (code == CONST_INT)
2761 return -5;
2762 if (code == CONST_DOUBLE)
2763 return -4;
2764 return -3;
2766 case RTX_EXTRA:
2767 /* SUBREGs of objects should come second. */
2768 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
2769 return -2;
2771 if (!CONSTANT_P (op))
2772 return 0;
2773 else
2774 /* As for RTX_CONST_OBJ. */
2775 return -3;
2777 case RTX_OBJ:
2778 /* Complex expressions should come first, so decrease the priority
2779 of objects. */
2780 return -1;
2782 case RTX_COMM_ARITH:
2783 /* Prefer operands that are themselves commutative to be first.
2784 This helps to make things linear. In particular,
2785 (and (and (reg) (reg)) (not (reg))) is canonical. */
2786 return 4;
2788 case RTX_BIN_ARITH:
2789 /* If only one operand is a binary expression, it will be the first
2790 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
2791 is canonical, although it will usually be further simplified. */
2792 return 2;
2794 case RTX_UNARY:
2795 /* Then prefer NEG and NOT. */
2796 if (code == NEG || code == NOT)
2797 return 1;
2799 default:
2800 return 0;
2804 /* Return 1 iff it is necessary to swap the operands of a commutative
2805 operation in order to canonicalize the expression. */
2808 swap_commutative_operands_p (rtx x, rtx y)
2810 return (commutative_operand_precedence (x)
2811 < commutative_operand_precedence (y));
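/* Illustrative sketch, not part of rtlanal.c proper: the usual idiom in
   callers that build a commutative operation is

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   For example, with OP0 a CONST_INT (precedence -7) and OP1 a REG (an
   RTX_OBJ, precedence -1) the operands are swapped, so the constant ends
   up second, as in the canonical (plus (reg) (const_int 4)).  */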
2814 /* Return 1 if X is an autoincrement side effect and the register is
2815 not the stack pointer. */
2817 auto_inc_p (rtx x)
2819 switch (GET_CODE (x))
2821 case PRE_INC:
2822 case POST_INC:
2823 case PRE_DEC:
2824 case POST_DEC:
2825 case PRE_MODIFY:
2826 case POST_MODIFY:
2827 /* There are no REG_INC notes for SP. */
2828 if (XEXP (x, 0) != stack_pointer_rtx)
2829 return 1;
2830 default:
2831 break;
2833 return 0;
2836 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
2838 loc_mentioned_in_p (rtx *loc, rtx in)
2840 enum rtx_code code;
2841 const char *fmt;
2842 int i, j;
2844 if (!in)
2845 return 0;
2847 code = GET_CODE (in);
2848 fmt = GET_RTX_FORMAT (code);
2849 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2851 if (loc == &in->u.fld[i].rt_rtx)
2852 return 1;
2853 if (fmt[i] == 'e')
2855 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2856 return 1;
2858 else if (fmt[i] == 'E')
2859 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2860 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2861 return 1;
2863 return 0;
2866 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
2867 and SUBREG_BYTE, return the bit offset where the subreg begins
2868 (counting from the least significant bit of the operand). */
2870 unsigned int
2871 subreg_lsb_1 (enum machine_mode outer_mode,
2872 enum machine_mode inner_mode,
2873 unsigned int subreg_byte)
2875 unsigned int bitpos;
2876 unsigned int byte;
2877 unsigned int word;
2879 /* A paradoxical subreg begins at bit position 0. */
2880 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
2881 return 0;
2883 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2884 /* If the subreg crosses a word boundary ensure that
2885 it also begins and ends on a word boundary. */
2886 gcc_assert (!((subreg_byte % UNITS_PER_WORD
2887 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
2888 && (subreg_byte % UNITS_PER_WORD
2889 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
2891 if (WORDS_BIG_ENDIAN)
2892 word = (GET_MODE_SIZE (inner_mode)
2893 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
2894 else
2895 word = subreg_byte / UNITS_PER_WORD;
2896 bitpos = word * BITS_PER_WORD;
2898 if (BYTES_BIG_ENDIAN)
2899 byte = (GET_MODE_SIZE (inner_mode)
2900 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
2901 else
2902 byte = subreg_byte % UNITS_PER_WORD;
2903 bitpos += byte * BITS_PER_UNIT;
2905 return bitpos;
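/* Worked example, not part of rtlanal.c proper: on a little-endian target
   with 32-bit words (UNITS_PER_WORD == 4, BITS_PER_WORD == 32),
   (subreg:SI (reg:DI) 4) has subreg_byte 4, so subreg_lsb_1 (SImode,
   DImode, 4) computes word = 4 / 4 = 1, byte = 4 % 4 = 0 and returns
   1 * 32 + 0 * 8 = 32: the subreg names the high half of the DImode
   value.  */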
2908 /* Given a subreg X, return the bit offset where the subreg begins
2909 (counting from the least significant bit of the reg). */
2911 unsigned int
2912 subreg_lsb (rtx x)
2914 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
2915 SUBREG_BYTE (x));
2918 /* This function returns the regno offset of a subreg expression.
2919 xregno - A regno of an inner hard subreg_reg (or what will become one).
2920 xmode - The mode of xregno.
2921 offset - The byte offset.
2922 ymode - The mode of a top level SUBREG (or what may become one).
2923 RETURN - The regno offset which would be used. */
2924 unsigned int
2925 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
2926 unsigned int offset, enum machine_mode ymode)
2928 int nregs_xmode, nregs_ymode, nregs_xmode_unit_int;
2929 int mode_multiple, nregs_multiple;
2930 int y_offset;
2931 enum machine_mode xmode_unit, xmode_unit_int;
2933 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
2935 if (GET_MODE_INNER (xmode) == VOIDmode)
2936 xmode_unit = xmode;
2937 else
2938 xmode_unit = GET_MODE_INNER (xmode);
2940 if (FLOAT_MODE_P (xmode_unit))
2942 xmode_unit_int = int_mode_for_mode (xmode_unit);
2943 if (xmode_unit_int == BLKmode)
2944 /* It's probably bad to be here; a port should have an integer mode
2945 that's the same size as anything of which it takes a SUBREG. */
2946 xmode_unit_int = xmode_unit;
2948 else
2949 xmode_unit_int = xmode_unit;
2951 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
2953 /* Adjust nregs_xmode to allow for 'holes'. */
2954 if (nregs_xmode_unit_int != hard_regno_nregs[xregno][xmode_unit])
2955 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
2956 else
2957 nregs_xmode = hard_regno_nregs[xregno][xmode];
2959 nregs_ymode = hard_regno_nregs[xregno][ymode];
2961 /* If this is a big endian paradoxical subreg, which uses more actual
2962 hard registers than the original register, we must return a negative
2963 offset so that we find the proper highpart of the register. */
2964 if (offset == 0
2965 && nregs_ymode > nregs_xmode
2966 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
2967 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
2968 return nregs_xmode - nregs_ymode;
2970 if (offset == 0 || nregs_xmode == nregs_ymode)
2971 return 0;
2973 /* Size of ymode must not be greater than the size of xmode. */
2974 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
2975 gcc_assert (mode_multiple != 0);
2977 y_offset = offset / GET_MODE_SIZE (ymode);
2978 nregs_multiple = nregs_xmode / nregs_ymode;
2979 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
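/* Worked example, not part of rtlanal.c proper: on a target where DImode
   occupies two 32-bit hard registers and SImode one, consider
   subreg_regno_offset (R, DImode, 4, SImode).  Then nregs_xmode = 2,
   nregs_ymode = 1, mode_multiple = 8 / 4 = 2, nregs_multiple = 2 / 1 = 2
   and y_offset = 4 / 4 = 1, so the result is (1 / (2 / 2)) * 1 = 1: the
   subreg occupies hard register R + 1.  */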
2982 /* This function returns true when the offset is representable via
2983 subreg_offset in the given regno.
2984 xregno - A regno of an inner hard subreg_reg (or what will become one).
2985 xmode - The mode of xregno.
2986 offset - The byte offset.
2987 ymode - The mode of a top level SUBREG (or what may become one).
2988 RETURN - Whether the offset is representable. */
2989 bool
2990 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
2991 unsigned int offset, enum machine_mode ymode)
2993 int nregs_xmode, nregs_ymode, nregs_xmode_unit, nregs_xmode_unit_int;
2994 int mode_multiple, nregs_multiple;
2995 int y_offset;
2996 enum machine_mode xmode_unit, xmode_unit_int;
2998 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3000 if (GET_MODE_INNER (xmode) == VOIDmode)
3001 xmode_unit = xmode;
3002 else
3003 xmode_unit = GET_MODE_INNER (xmode);
3005 if (FLOAT_MODE_P (xmode_unit))
3007 xmode_unit_int = int_mode_for_mode (xmode_unit);
3008 if (xmode_unit_int == BLKmode)
3009 /* It's probably bad to be here; a port should have an integer mode
3010 that's the same size as anything of which it takes a SUBREG. */
3011 xmode_unit_int = xmode_unit;
3013 else
3014 xmode_unit_int = xmode_unit;
3016 nregs_xmode_unit = hard_regno_nregs[xregno][xmode_unit];
3017 nregs_xmode_unit_int = hard_regno_nregs[xregno][xmode_unit_int];
3019 /* If there are holes in a non-scalar mode in registers, we expect
3020 that it is made up of its units concatenated together. */
3021 if (nregs_xmode_unit != nregs_xmode_unit_int)
3023 gcc_assert (nregs_xmode_unit * GET_MODE_NUNITS (xmode)
3024 == hard_regno_nregs[xregno][xmode]);
3026 /* You can only ask for a SUBREG of a value with holes in the middle
3027 if you don't cross the holes. (Such a SUBREG should be done by
3028 picking a different register class, or doing it in memory if
3029 necessary.) An example of a value with holes is XCmode on 32-bit
3030 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3031 3 for each part, but in memory it's two 128-bit parts.
3032 Padding is assumed to be at the end (not necessarily the 'high part')
3033 of each unit. */
3034 if (nregs_xmode_unit != nregs_xmode_unit_int
3035 && (offset / GET_MODE_SIZE (xmode_unit_int) + 1
3036 < GET_MODE_NUNITS (xmode))
3037 && (offset / GET_MODE_SIZE (xmode_unit_int)
3038 != ((offset + GET_MODE_SIZE (ymode) - 1)
3039 / GET_MODE_SIZE (xmode_unit_int))))
3040 return false;
3042 nregs_xmode = nregs_xmode_unit_int * GET_MODE_NUNITS (xmode);
3044 else
3045 nregs_xmode = hard_regno_nregs[xregno][xmode];
3047 nregs_ymode = hard_regno_nregs[xregno][ymode];
3049 /* Paradoxical subregs are otherwise valid. */
3050 if (offset == 0
3051 && nregs_ymode > nregs_xmode
3052 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3053 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3054 return true;
3056 /* Lowpart subregs are otherwise valid. */
3057 if (offset == subreg_lowpart_offset (ymode, xmode))
3058 return true;
3060 /* This should always pass, otherwise we don't know how to verify
3061 the constraint. These conditions may be relaxed but
3062 subreg_regno_offset would need to be redesigned. */
3063 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3064 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3066 /* The XMODE value can be seen as a vector of NREGS_XMODE
3067 values. The subreg must represent a lowpart of a given field.
3068 Compute what field it is. */
3069 offset -= subreg_lowpart_offset (ymode,
3070 mode_for_size (GET_MODE_BITSIZE (xmode)
3071 / nregs_xmode,
3072 MODE_INT, 0));
3074 /* Size of ymode must not be greater than the size of xmode. */
3075 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3076 gcc_assert (mode_multiple != 0);
3078 y_offset = offset / GET_MODE_SIZE (ymode);
3079 nregs_multiple = nregs_xmode / nregs_ymode;
3081 gcc_assert ((offset % GET_MODE_SIZE (ymode)) == 0);
3082 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3084 return (!(y_offset % (mode_multiple / nregs_multiple)));
3087 /* Return the final regno that a subreg expression refers to. */
3088 unsigned int
3089 subreg_regno (rtx x)
3091 unsigned int ret;
3092 rtx subreg = SUBREG_REG (x);
3093 int regno = REGNO (subreg);
3095 ret = regno + subreg_regno_offset (regno,
3096 GET_MODE (subreg),
3097 SUBREG_BYTE (x),
3098 GET_MODE (x));
3099 return ret;
3102 struct parms_set_data
3104 int nregs;
3105 HARD_REG_SET regs;
3108 /* Helper function for noticing stores to parameter registers. */
3109 static void
3110 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3112 struct parms_set_data *d = data;
3113 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3114 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3116 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3117 d->nregs--;
3121 /* Look backward for the first parameter to be loaded.
3122 Note that loads of all parameters will not necessarily be
3123 found if CSE has eliminated some of them (e.g., an argument
3124 to the outer function is passed down as a parameter).
3125 Do not skip BOUNDARY. */
3127 find_first_parameter_load (rtx call_insn, rtx boundary)
3129 struct parms_set_data parm;
3130 rtx p, before, first_set;
3132 /* Since different machines initialize their parameter registers
3133 in different orders, assume nothing. Collect the set of all
3134 parameter registers. */
3135 CLEAR_HARD_REG_SET (parm.regs);
3136 parm.nregs = 0;
3137 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3138 if (GET_CODE (XEXP (p, 0)) == USE
3139 && REG_P (XEXP (XEXP (p, 0), 0)))
3141 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3143 /* We only care about registers which can hold function
3144 arguments. */
3145 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3146 continue;
3148 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3149 parm.nregs++;
3151 before = call_insn;
3152 first_set = call_insn;
3154 /* Search backward for the first set of a register in this set. */
3155 while (parm.nregs && before != boundary)
3157 before = PREV_INSN (before);
3159 /* It is possible that some loads got CSEed from one call to
3160 another. Stop in that case. */
3161 if (CALL_P (before))
3162 break;
3164 /* Our caller must either ensure that we will find all sets
3165 (in case the code has not been optimized yet), or guard against
3166 any intervening labels by setting BOUNDARY to the preceding
3167 CODE_LABEL. */
3168 if (LABEL_P (before))
3170 gcc_assert (before == boundary);
3171 break;
3174 if (INSN_P (before))
3176 int nregs_old = parm.nregs;
3177 note_stores (PATTERN (before), parms_set, &parm);
3178 /* If we found something that did not set a parameter reg,
3179 we're done. Do not keep going, as that might result
3180 in hoisting an insn before the setting of a pseudo
3181 that is used by the hoisted insn. */
3182 if (nregs_old != parm.nregs)
3183 first_set = before;
3184 else
3185 break;
3188 return first_set;
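/* Illustrative sketch, not part of rtlanal.c proper: a caller that wants to
   emit SEQ "before the call" without separating the argument loads from it
   might do

     rtx first = find_first_parameter_load (call_insn, BB_HEAD (bb));
     emit_insn_before (seq, first);

   BB_HEAD and emit_insn_before are assumed from the surrounding RTL
   infrastructure; the point is that SEQ ends up before the first argument
   load rather than between the loads and CALL_INSN.  */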
3191 /* Return true if we should avoid inserting code between INSN and the
3192 preceding call instruction. */
3194 bool
3195 keep_with_call_p (rtx insn)
3197 rtx set;
3199 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3201 if (REG_P (SET_DEST (set))
3202 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3203 && fixed_regs[REGNO (SET_DEST (set))]
3204 && general_operand (SET_SRC (set), VOIDmode))
3205 return true;
3206 if (REG_P (SET_SRC (set))
3207 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3208 && REG_P (SET_DEST (set))
3209 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3210 return true;
3211 /* There may be a stack pop just after the call and before the store
3212 of the return register. Search for the actual store when deciding
3213 if we can break or not. */
3214 if (SET_DEST (set) == stack_pointer_rtx)
3216 rtx i2 = next_nonnote_insn (insn);
3217 if (i2 && keep_with_call_p (i2))
3218 return true;
3221 return false;
3224 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3225 to non-complex jumps. That is, direct unconditional, conditional,
3226 and tablejumps, but not computed jumps or returns. It also does
3227 not apply to the fallthru case of a conditional jump. */
3229 bool
3230 label_is_jump_target_p (rtx label, rtx jump_insn)
3232 rtx tmp = JUMP_LABEL (jump_insn);
3234 if (label == tmp)
3235 return true;
3237 if (tablejump_p (jump_insn, NULL, &tmp))
3239 rtvec vec = XVEC (PATTERN (tmp),
3240 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3241 int i, veclen = GET_NUM_ELEM (vec);
3243 for (i = 0; i < veclen; ++i)
3244 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3245 return true;
3248 return false;
3252 /* Return an estimate of the cost of computing rtx X.
3253 One use is in cse, to decide which expression to keep in the hash table.
3254 Another is in rtl generation, to pick the cheapest way to multiply.
3255 Other uses like the latter are expected in the future. */
3258 rtx_cost (rtx x, enum rtx_code outer_code ATTRIBUTE_UNUSED)
3260 int i, j;
3261 enum rtx_code code;
3262 const char *fmt;
3263 int total;
3265 if (x == 0)
3266 return 0;
3268 /* Compute the default costs of certain things.
3269 Note that targetm.rtx_costs can override the defaults. */
3271 code = GET_CODE (x);
3272 switch (code)
3274 case MULT:
3275 total = COSTS_N_INSNS (5);
3276 break;
3277 case DIV:
3278 case UDIV:
3279 case MOD:
3280 case UMOD:
3281 total = COSTS_N_INSNS (7);
3282 break;
3283 case USE:
3284 /* Used in combine.c as a marker. */
3285 total = 0;
3286 break;
3287 default:
3288 total = COSTS_N_INSNS (1);
3291 switch (code)
3293 case REG:
3294 return 0;
3296 case SUBREG:
3297 total = 0;
3298 /* If we can't tie these modes, make this expensive. The larger
3299 the mode, the more expensive it is. */
3300 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3301 return COSTS_N_INSNS (2
3302 + GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD);
3303 break;
3305 default:
3306 if (targetm.rtx_costs (x, code, outer_code, &total))
3307 return total;
3308 break;
3311 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3312 which is already in total. */
3314 fmt = GET_RTX_FORMAT (code);
3315 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3316 if (fmt[i] == 'e')
3317 total += rtx_cost (XEXP (x, i), code);
3318 else if (fmt[i] == 'E')
3319 for (j = 0; j < XVECLEN (x, i); j++)
3320 total += rtx_cost (XVECEXP (x, i, j), code);
3322 return total;
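/* Worked example, not part of rtlanal.c proper: with the default costs above
   and a target whose rtx_costs hook declines to override them,

     rtx_cost (gen_rtx_PLUS (Pmode, reg, GEN_INT (4)), MEM)

   (REG being some register operand) comes out as COSTS_N_INSNS (1) for the
   PLUS, 0 for the REG and COSTS_N_INSNS (1) for the CONST_INT, i.e.
   COSTS_N_INSNS (2) in total; real targets usually make small constants
   cheaper via the hook.  */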
3325 /* Return the cost of address expression X.
3326 Expect that X is a properly formed address reference. */
3329 address_cost (rtx x, enum machine_mode mode)
3331 /* We may be asked for the cost of various unusual addresses, such as the
3332 operands of a push instruction. It is not worthwhile to complicate the
3333 target hook for such cases. */
3335 if (!memory_address_p (mode, x))
3336 return 1000;
3338 return targetm.address_cost (x);
3341 /* If the target doesn't override, compute the cost as with arithmetic. */
3344 default_address_cost (rtx x)
3346 return rtx_cost (x, MEM);
3350 unsigned HOST_WIDE_INT
3351 nonzero_bits (rtx x, enum machine_mode mode)
3353 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3356 unsigned int
3357 num_sign_bit_copies (rtx x, enum machine_mode mode)
3359 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
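/* Illustrative sketch, not part of rtlanal.c proper: two typical queries,
   assuming a 32-bit SImode and pseudos about which nothing further is known:

     nonzero_bits (gen_rtx_AND (SImode, reg_si, GEN_INT (0xff)), SImode)

   returns 0xff, since only the low byte can be nonzero, and

     num_sign_bit_copies (gen_rtx_SIGN_EXTEND (SImode, reg_qi), SImode)

   returns 25 (= 32 - 8 + 1), since the top 25 bits of a sign-extended
   QImode value are copies of its sign bit.  REG_SI and REG_QI stand for
   SImode and QImode REGs respectively.  */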
3362 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3363 It avoids exponential behavior in nonzero_bits1 when X has
3364 identical subexpressions on the first or the second level. */
3366 static unsigned HOST_WIDE_INT
3367 cached_nonzero_bits (rtx x, enum machine_mode mode, rtx known_x,
3368 enum machine_mode known_mode,
3369 unsigned HOST_WIDE_INT known_ret)
3371 if (x == known_x && mode == known_mode)
3372 return known_ret;
3374 /* Try to find identical subexpressions. If found call
3375 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3376 precomputed value for the subexpression as KNOWN_RET. */
3378 if (ARITHMETIC_P (x))
3380 rtx x0 = XEXP (x, 0);
3381 rtx x1 = XEXP (x, 1);
3383 /* Check the first level. */
3384 if (x0 == x1)
3385 return nonzero_bits1 (x, mode, x0, mode,
3386 cached_nonzero_bits (x0, mode, known_x,
3387 known_mode, known_ret));
3389 /* Check the second level. */
3390 if (ARITHMETIC_P (x0)
3391 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3392 return nonzero_bits1 (x, mode, x1, mode,
3393 cached_nonzero_bits (x1, mode, known_x,
3394 known_mode, known_ret));
3396 if (ARITHMETIC_P (x1)
3397 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3398 return nonzero_bits1 (x, mode, x0, mode,
3399 cached_nonzero_bits (x0, mode, known_x,
3400 known_mode, known_ret));
3403 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3406 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3407 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3408 is less useful. We can't allow both, because that results in exponential
3409 run time recursion. There is a nullstone testcase that triggered
3410 this. This macro avoids accidental uses of num_sign_bit_copies. */
3411 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3413 /* Given an expression, X, compute which bits in X can be nonzero.
3414 We don't care about bits outside of those defined in MODE.
3416 For most X this is simply GET_MODE_MASK (MODE), but if X is
3417 an arithmetic operation, we can do better. */
3419 static unsigned HOST_WIDE_INT
3420 nonzero_bits1 (rtx x, enum machine_mode mode, rtx known_x,
3421 enum machine_mode known_mode,
3422 unsigned HOST_WIDE_INT known_ret)
3424 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3425 unsigned HOST_WIDE_INT inner_nz;
3426 enum rtx_code code;
3427 unsigned int mode_width = GET_MODE_BITSIZE (mode);
3429 /* For floating-point values, assume all bits are needed. */
3430 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode))
3431 return nonzero;
3433 /* If X is wider than MODE, use its mode instead. */
3434 if (GET_MODE_BITSIZE (GET_MODE (x)) > mode_width)
3436 mode = GET_MODE (x);
3437 nonzero = GET_MODE_MASK (mode);
3438 mode_width = GET_MODE_BITSIZE (mode);
3441 if (mode_width > HOST_BITS_PER_WIDE_INT)
3442 /* Our only callers in this case look for single bit values. So
3443 just return the mode mask. Those tests will then be false. */
3444 return nonzero;
3446 #ifndef WORD_REGISTER_OPERATIONS
3447 /* If MODE is wider than X, but both are a single word for both the host
3448 and target machines, we can compute this from which bits of the
3449 object might be nonzero in its own mode, taking into account the fact
3450 that on many CISC machines, accessing an object in a wider mode
3451 causes the high-order bits to become undefined. So they are
3452 not known to be zero. */
3454 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3455 && GET_MODE_BITSIZE (GET_MODE (x)) <= BITS_PER_WORD
3456 && GET_MODE_BITSIZE (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3457 && GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (GET_MODE (x)))
3459 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3460 known_x, known_mode, known_ret);
3461 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3462 return nonzero;
3464 #endif
3466 code = GET_CODE (x);
3467 switch (code)
3469 case REG:
3470 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3471 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3472 all the bits above ptr_mode are known to be zero. */
3473 if (POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3474 && REG_POINTER (x))
3475 nonzero &= GET_MODE_MASK (ptr_mode);
3476 #endif
3478 /* Include declared information about alignment of pointers. */
3479 /* ??? We don't properly preserve REG_POINTER changes across
3480 pointer-to-integer casts, so we can't trust it except for
3481 things that we know must be pointers. See execute/960116-1.c. */
3482 if ((x == stack_pointer_rtx
3483 || x == frame_pointer_rtx
3484 || x == arg_pointer_rtx)
3485 && REGNO_POINTER_ALIGN (REGNO (x)))
3487 unsigned HOST_WIDE_INT alignment
3488 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
3490 #ifdef PUSH_ROUNDING
3491 /* If PUSH_ROUNDING is defined, it is possible for the
3492 stack to be momentarily aligned only to that amount,
3493 so we pick the least alignment. */
3494 if (x == stack_pointer_rtx && PUSH_ARGS)
3495 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
3496 alignment);
3497 #endif
3499 nonzero &= ~(alignment - 1);
3503 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
3504 rtx new = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
3505 known_mode, known_ret,
3506 &nonzero_for_hook);
3508 if (new)
3509 nonzero_for_hook &= cached_nonzero_bits (new, mode, known_x,
3510 known_mode, known_ret);
3512 return nonzero_for_hook;
3515 case CONST_INT:
3516 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
3517 /* If X is negative in MODE, sign-extend the value. */
3518 if (INTVAL (x) > 0 && mode_width < BITS_PER_WORD
3519 && 0 != (INTVAL (x) & ((HOST_WIDE_INT) 1 << (mode_width - 1))))
3520 return (INTVAL (x) | ((HOST_WIDE_INT) (-1) << mode_width));
3521 #endif
3523 return INTVAL (x);
3525 case MEM:
3526 #ifdef LOAD_EXTEND_OP
3527 /* In many, if not most, RISC machines, reading a byte from memory
3528 zeros the rest of the register. Noticing that fact saves a lot
3529 of extra zero-extends. */
3530 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
3531 nonzero &= GET_MODE_MASK (GET_MODE (x));
3532 #endif
3533 break;
3535 case EQ: case NE:
3536 case UNEQ: case LTGT:
3537 case GT: case GTU: case UNGT:
3538 case LT: case LTU: case UNLT:
3539 case GE: case GEU: case UNGE:
3540 case LE: case LEU: case UNLE:
3541 case UNORDERED: case ORDERED:
3542 /* If this produces an integer result, we know which bits are set.
3543 Code here used to clear bits outside the mode of X, but that is
3544 now done above. */
3545 /* Mind that MODE is the mode the caller wants to look at this
3546 operation in, and not the actual operation mode. We can wind
3547 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
3548 that describes the results of a vector compare. */
3549 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
3550 && mode_width <= HOST_BITS_PER_WIDE_INT)
3551 nonzero = STORE_FLAG_VALUE;
3552 break;
3554 case NEG:
3555 #if 0
3556 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3557 and num_sign_bit_copies. */
3558 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3559 == GET_MODE_BITSIZE (GET_MODE (x)))
3560 nonzero = 1;
3561 #endif
3563 if (GET_MODE_SIZE (GET_MODE (x)) < mode_width)
3564 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
3565 break;
3567 case ABS:
3568 #if 0
3569 /* Disabled to avoid exponential mutual recursion between nonzero_bits
3570 and num_sign_bit_copies. */
3571 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
3572 == GET_MODE_BITSIZE (GET_MODE (x)))
3573 nonzero = 1;
3574 #endif
3575 break;
3577 case TRUNCATE:
3578 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
3579 known_x, known_mode, known_ret)
3580 & GET_MODE_MASK (mode));
3581 break;
3583 case ZERO_EXTEND:
3584 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3585 known_x, known_mode, known_ret);
3586 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3587 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3588 break;
3590 case SIGN_EXTEND:
3591 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
3592 Otherwise, show all the bits in the outer mode but not the inner
3593 may be nonzero. */
3594 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
3595 known_x, known_mode, known_ret);
3596 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
3598 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
3599 if (inner_nz
3600 & (((HOST_WIDE_INT) 1
3601 << (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0))) - 1))))
3602 inner_nz |= (GET_MODE_MASK (mode)
3603 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
3606 nonzero &= inner_nz;
3607 break;
3609 case AND:
3610 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
3611 known_x, known_mode, known_ret)
3612 & cached_nonzero_bits (XEXP (x, 1), mode,
3613 known_x, known_mode, known_ret);
3614 break;
3616 case XOR: case IOR:
3617 case UMIN: case UMAX: case SMIN: case SMAX:
3619 unsigned HOST_WIDE_INT nonzero0 =
3620 cached_nonzero_bits (XEXP (x, 0), mode,
3621 known_x, known_mode, known_ret);
3623 /* Don't call nonzero_bits for the second time if it cannot change
3624 anything. */
3625 if ((nonzero & nonzero0) != nonzero)
3626 nonzero &= nonzero0
3627 | cached_nonzero_bits (XEXP (x, 1), mode,
3628 known_x, known_mode, known_ret);
3630 break;
3632 case PLUS: case MINUS:
3633 case MULT:
3634 case DIV: case UDIV:
3635 case MOD: case UMOD:
3636 /* We can apply the rules of arithmetic to compute the number of
3637 high- and low-order zero bits of these operations. We start by
3638 computing the width (position of the highest-order nonzero bit)
3639 and the number of low-order zero bits for each value. */
3641 unsigned HOST_WIDE_INT nz0 =
3642 cached_nonzero_bits (XEXP (x, 0), mode,
3643 known_x, known_mode, known_ret);
3644 unsigned HOST_WIDE_INT nz1 =
3645 cached_nonzero_bits (XEXP (x, 1), mode,
3646 known_x, known_mode, known_ret);
3647 int sign_index = GET_MODE_BITSIZE (GET_MODE (x)) - 1;
3648 int width0 = floor_log2 (nz0) + 1;
3649 int width1 = floor_log2 (nz1) + 1;
3650 int low0 = floor_log2 (nz0 & -nz0);
3651 int low1 = floor_log2 (nz1 & -nz1);
3652 HOST_WIDE_INT op0_maybe_minusp
3653 = (nz0 & ((HOST_WIDE_INT) 1 << sign_index));
3654 HOST_WIDE_INT op1_maybe_minusp
3655 = (nz1 & ((HOST_WIDE_INT) 1 << sign_index));
3656 unsigned int result_width = mode_width;
3657 int result_low = 0;
3659 switch (code)
3661 case PLUS:
3662 result_width = MAX (width0, width1) + 1;
3663 result_low = MIN (low0, low1);
3664 break;
3665 case MINUS:
3666 result_low = MIN (low0, low1);
3667 break;
3668 case MULT:
3669 result_width = width0 + width1;
3670 result_low = low0 + low1;
3671 break;
3672 case DIV:
3673 if (width1 == 0)
3674 break;
3675 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3676 result_width = width0;
3677 break;
3678 case UDIV:
3679 if (width1 == 0)
3680 break;
3681 result_width = width0;
3682 break;
3683 case MOD:
3684 if (width1 == 0)
3685 break;
3686 if (! op0_maybe_minusp && ! op1_maybe_minusp)
3687 result_width = MIN (width0, width1);
3688 result_low = MIN (low0, low1);
3689 break;
3690 case UMOD:
3691 if (width1 == 0)
3692 break;
3693 result_width = MIN (width0, width1);
3694 result_low = MIN (low0, low1);
3695 break;
3696 default:
3697 gcc_unreachable ();
3700 if (result_width < mode_width)
3701 nonzero &= ((HOST_WIDE_INT) 1 << result_width) - 1;
3703 if (result_low > 0)
3704 nonzero &= ~(((HOST_WIDE_INT) 1 << result_low) - 1);
3706 #ifdef POINTERS_EXTEND_UNSIGNED
3707 /* If pointers extend unsigned and this is an addition or subtraction
3708 to a pointer in Pmode, all the bits above ptr_mode are known to be
3709 zero. */
3710 if (POINTERS_EXTEND_UNSIGNED > 0 && GET_MODE (x) == Pmode
3711 && (code == PLUS || code == MINUS)
3712 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
3713 nonzero &= GET_MODE_MASK (ptr_mode);
3714 #endif
3716 break;
3718 case ZERO_EXTRACT:
3719 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3720 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3721 nonzero &= ((HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
3722 break;
3724 case SUBREG:
3725 /* If this is a SUBREG formed for a promoted variable that has
3726 been zero-extended, we know that at least the high-order bits
3727 are zero, though others might be too. */
3729 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
3730 nonzero = GET_MODE_MASK (GET_MODE (x))
3731 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
3732 known_x, known_mode, known_ret);
3734 /* If the inner mode is a single word for both the host and target
3735 machines, we can compute this from which bits of the inner
3736 object might be nonzero. */
3737 if (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) <= BITS_PER_WORD
3738 && (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
3739 <= HOST_BITS_PER_WIDE_INT))
3741 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
3742 known_x, known_mode, known_ret);
3744 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
3745 /* If this is a typical RISC machine, we only have to worry
3746 about the way loads are extended. */
3747 if ((LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
3748 ? (((nonzero
3749 & (((unsigned HOST_WIDE_INT) 1
3750 << (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))) - 1))))
3751 != 0))
3752 : LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) != ZERO_EXTEND)
3753 || !MEM_P (SUBREG_REG (x)))
3754 #endif
3756 /* On many CISC machines, accessing an object in a wider mode
3757 causes the high-order bits to become undefined. So they are
3758 not known to be zero. */
3759 if (GET_MODE_SIZE (GET_MODE (x))
3760 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
3761 nonzero |= (GET_MODE_MASK (GET_MODE (x))
3762 & ~GET_MODE_MASK (GET_MODE (SUBREG_REG (x))));
3765 break;
3767 case ASHIFTRT:
3768 case LSHIFTRT:
3769 case ASHIFT:
3770 case ROTATE:
3771 /* The nonzero bits are in two classes: any bits within MODE
3772 that aren't in GET_MODE (x) are always significant. The rest of the
3773 nonzero bits are those that are significant in the operand of
3774 the shift when shifted the appropriate number of bits. This
3775 shows that high-order bits are cleared by the right shift and
3776 low-order bits by left shifts. */
3777 if (GET_CODE (XEXP (x, 1)) == CONST_INT
3778 && INTVAL (XEXP (x, 1)) >= 0
3779 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
3781 enum machine_mode inner_mode = GET_MODE (x);
3782 unsigned int width = GET_MODE_BITSIZE (inner_mode);
3783 int count = INTVAL (XEXP (x, 1));
3784 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
3785 unsigned HOST_WIDE_INT op_nonzero =
3786 cached_nonzero_bits (XEXP (x, 0), mode,
3787 known_x, known_mode, known_ret);
3788 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
3789 unsigned HOST_WIDE_INT outer = 0;
3791 if (mode_width > width)
3792 outer = (op_nonzero & nonzero & ~mode_mask);
3794 if (code == LSHIFTRT)
3795 inner >>= count;
3796 else if (code == ASHIFTRT)
3798 inner >>= count;
3800 /* If the sign bit may have been nonzero before the shift, we
3801 need to mark all the places it could have been copied to
3802 by the shift as possibly nonzero. */
3803 if (inner & ((HOST_WIDE_INT) 1 << (width - 1 - count)))
3804 inner |= (((HOST_WIDE_INT) 1 << count) - 1) << (width - count);
3806 else if (code == ASHIFT)
3807 inner <<= count;
3808 else
3809 inner = ((inner << (count % width)
3810 | (inner >> (width - (count % width)))) & mode_mask);
3812 nonzero &= (outer | inner);
3814 break;
3816 case FFS:
3817 case POPCOUNT:
3818 /* This is at most the number of bits in the mode. */
3819 nonzero = ((HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
3820 break;
3822 case CLZ:
3823 /* If CLZ has a known value at zero, then the nonzero bits are
3824 that value, plus the number of bits in the mode minus one. */
3825 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3826 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3827 else
3828 nonzero = -1;
3829 break;
3831 case CTZ:
3832 /* If CTZ has a known value at zero, then the nonzero bits are
3833 that value, plus the number of bits in the mode minus one. */
3834 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
3835 nonzero |= ((HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
3836 else
3837 nonzero = -1;
3838 break;
3840 case PARITY:
3841 nonzero = 1;
3842 break;
3844 case IF_THEN_ELSE:
3846 unsigned HOST_WIDE_INT nonzero_true =
3847 cached_nonzero_bits (XEXP (x, 1), mode,
3848 known_x, known_mode, known_ret);
3850 /* Don't call nonzero_bits for the second time if it cannot change
3851 anything. */
3852 if ((nonzero & nonzero_true) != nonzero)
3853 nonzero &= nonzero_true
3854 | cached_nonzero_bits (XEXP (x, 2), mode,
3855 known_x, known_mode, known_ret);
3857 break;
3859 default:
3860 break;
3863 return nonzero;
3866 /* See the macro definition above. */
3867 #undef cached_num_sign_bit_copies
3870 /* The function cached_num_sign_bit_copies is a wrapper around
3871 num_sign_bit_copies1. It avoids exponential behavior in
3872 num_sign_bit_copies1 when X has identical subexpressions on the
3873 first or the second level. */
3875 static unsigned int
3876 cached_num_sign_bit_copies (rtx x, enum machine_mode mode, rtx known_x,
3877 enum machine_mode known_mode,
3878 unsigned int known_ret)
3880 if (x == known_x && mode == known_mode)
3881 return known_ret;
3883 /* Try to find identical subexpressions. If found call
3884 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
3885 the precomputed value for the subexpression as KNOWN_RET. */
3887 if (ARITHMETIC_P (x))
3889 rtx x0 = XEXP (x, 0);
3890 rtx x1 = XEXP (x, 1);
3892 /* Check the first level. */
3893 if (x0 == x1)
3894 return
3895 num_sign_bit_copies1 (x, mode, x0, mode,
3896 cached_num_sign_bit_copies (x0, mode, known_x,
3897 known_mode,
3898 known_ret));
3900 /* Check the second level. */
3901 if (ARITHMETIC_P (x0)
3902 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3903 return
3904 num_sign_bit_copies1 (x, mode, x1, mode,
3905 cached_num_sign_bit_copies (x1, mode, known_x,
3906 known_mode,
3907 known_ret));
3909 if (ARITHMETIC_P (x1)
3910 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3911 return
3912 num_sign_bit_copies1 (x, mode, x0, mode,
3913 cached_num_sign_bit_copies (x0, mode, known_x,
3914 known_mode,
3915 known_ret));
3918 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
3921 /* Return the number of bits at the high-order end of X that are known to
3922 be equal to the sign bit. X will be used in mode MODE; if MODE is
3923 VOIDmode, X will be used in its own mode. The returned value will always
3924 be between 1 and the number of bits in MODE. */
3926 static unsigned int
3927 num_sign_bit_copies1 (rtx x, enum machine_mode mode, rtx known_x,
3928 enum machine_mode known_mode,
3929 unsigned int known_ret)
3931 enum rtx_code code = GET_CODE (x);
3932 unsigned int bitwidth = GET_MODE_BITSIZE (mode);
3933 int num0, num1, result;
3934 unsigned HOST_WIDE_INT nonzero;
3936 /* If we weren't given a mode, use the mode of X. If the mode is still
3937 VOIDmode, we don't know anything. Likewise if one of the modes is
3938 floating-point. */
3940 if (mode == VOIDmode)
3941 mode = GET_MODE (x);
3943 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x)))
3944 return 1;
3946 /* For a smaller object, just ignore the high bits. */
3947 if (bitwidth < GET_MODE_BITSIZE (GET_MODE (x)))
3949 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
3950 known_x, known_mode, known_ret);
3951 return MAX (1,
3952 num0 - (int) (GET_MODE_BITSIZE (GET_MODE (x)) - bitwidth));
3955 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_BITSIZE (GET_MODE (x)))
3957 #ifndef WORD_REGISTER_OPERATIONS
3958 /* If this machine does not do all register operations on the entire
3959 register and MODE is wider than the mode of X, we can say nothing
3960 at all about the high-order bits. */
3961 return 1;
3962 #else
3963 /* Likewise on machines that do, if the mode of the object is smaller
3964 than a word and loads of that size don't sign extend, we can say
3965 nothing about the high order bits. */
3966 if (GET_MODE_BITSIZE (GET_MODE (x)) < BITS_PER_WORD
3967 #ifdef LOAD_EXTEND_OP
3968 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
3969 #endif
3971 return 1;
3972 #endif
3975 switch (code)
3977 case REG:
3979 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3980 /* If pointers extend signed and this is a pointer in Pmode, say that
3981 all the bits above ptr_mode are known to be sign bit copies. */
3982 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode && mode == Pmode
3983 && REG_POINTER (x))
3984 return GET_MODE_BITSIZE (Pmode) - GET_MODE_BITSIZE (ptr_mode) + 1;
3985 #endif
3988 unsigned int copies_for_hook = 1, copies = 1;
3989 rtx new = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
3990 known_mode, known_ret,
3991 &copies_for_hook);
3993 if (new)
3994 copies = cached_num_sign_bit_copies (new, mode, known_x,
3995 known_mode, known_ret);
3997 if (copies > 1 || copies_for_hook > 1)
3998 return MAX (copies, copies_for_hook);
4000 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4002 break;
4004 case MEM:
4005 #ifdef LOAD_EXTEND_OP
4006 /* Some RISC machines sign-extend all loads of smaller than a word. */
4007 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4008 return MAX (1, ((int) bitwidth
4009 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1));
4010 #endif
4011 break;
4013 case CONST_INT:
4014 /* If the constant is negative, take its 1's complement and remask.
4015 Then see how many zero bits we have. */
4016 nonzero = INTVAL (x) & GET_MODE_MASK (mode);
4017 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4018 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4019 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4021 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4023 case SUBREG:
4024 /* If this is a SUBREG for a promoted object that is sign-extended
4025 and we are looking at it in a wider mode, we know that at least the
4026 high-order bits are sign bit copies. */
4028 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4030 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4031 known_x, known_mode, known_ret);
4032 return MAX ((int) bitwidth
4033 - (int) GET_MODE_BITSIZE (GET_MODE (x)) + 1,
4034 num0);
4037 /* For a smaller object, just ignore the high bits. */
4038 if (bitwidth <= GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
4040 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4041 known_x, known_mode, known_ret);
4042 return MAX (1, (num0
4043 - (int) (GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x)))
4044 - bitwidth)));
4047 #ifdef WORD_REGISTER_OPERATIONS
4048 #ifdef LOAD_EXTEND_OP
4049 /* For paradoxical SUBREGs on machines where all register operations
4050 affect the entire register, just look inside. Note that we are
4051 passing MODE to the recursive call, so the number of sign bit copies
4052 will remain relative to that mode, not the inner mode. */
4054 /* This works only if loads sign extend. Otherwise, if we get a
4055 reload for the inner part, it may be loaded from the stack, and
4056 then we lose all sign bit copies that existed before the store
4057 to the stack. */
4059 if ((GET_MODE_SIZE (GET_MODE (x))
4060 > GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))))
4061 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4062 && MEM_P (SUBREG_REG (x)))
4063 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4064 known_x, known_mode, known_ret);
4065 #endif
4066 #endif
4067 break;
4069 case SIGN_EXTRACT:
4070 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
4071 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4072 break;
4074 case SIGN_EXTEND:
4075 return (bitwidth - GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4076 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4077 known_x, known_mode, known_ret));
4079 case TRUNCATE:
4080 /* For a smaller object, just ignore the high bits. */
4081 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4082 known_x, known_mode, known_ret);
4083 return MAX (1, (num0 - (int) (GET_MODE_BITSIZE (GET_MODE (XEXP (x, 0)))
4084 - bitwidth)));
4086 case NOT:
4087 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4088 known_x, known_mode, known_ret);
4090 case ROTATE: case ROTATERT:
4091 /* If we are rotating left by a number of bits less than the number
4092 of sign bit copies, we can just subtract that amount from the
4093 number. */
4094 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4095 && INTVAL (XEXP (x, 1)) >= 0
4096 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4098 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4099 known_x, known_mode, known_ret);
4100 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4101 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4103 break;
4105 case NEG:
4106 /* In general, this subtracts one sign bit copy. But if the value
4107 is known to be positive, the number of sign bit copies is the
4108 same as that of the input. Finally, if the input has just one bit
4109 that might be nonzero, all the bits are copies of the sign bit. */
4110 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4111 known_x, known_mode, known_ret);
4112 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4113 return num0 > 1 ? num0 - 1 : 1;
4115 nonzero = nonzero_bits (XEXP (x, 0), mode);
4116 if (nonzero == 1)
4117 return bitwidth;
4119 if (num0 > 1
4120 && (((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4121 num0--;
4123 return num0;
4125 case IOR: case AND: case XOR:
4126 case SMIN: case SMAX: case UMIN: case UMAX:
4127 /* Logical operations will preserve the number of sign-bit copies.
4128 MIN and MAX operations always return one of the operands. */
4129 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4130 known_x, known_mode, known_ret);
4131 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4132 known_x, known_mode, known_ret);
4133 return MIN (num0, num1);
4135 case PLUS: case MINUS:
4136 /* For addition and subtraction, we can have a 1-bit carry. However,
4137 if we are subtracting 1 from a positive number, there will not
4138 be such a carry. Furthermore, if the positive number is known to
4139 be 0 or 1, we know the result is either -1 or 0. */
4141 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4142 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4144 nonzero = nonzero_bits (XEXP (x, 0), mode);
4145 if ((((HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4146 return (nonzero == 1 || nonzero == 0 ? bitwidth
4147 : bitwidth - floor_log2 (nonzero) - 1);
4150 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4151 known_x, known_mode, known_ret);
4152 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4153 known_x, known_mode, known_ret);
4154 result = MAX (1, MIN (num0, num1) - 1);
4156 #ifdef POINTERS_EXTEND_UNSIGNED
4157 /* If pointers extend signed and this is an addition or subtraction
4158 to a pointer in Pmode, all the bits above ptr_mode are known to be
4159 sign bit copies. */
4160 if (! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4161 && (code == PLUS || code == MINUS)
4162 && REG_P (XEXP (x, 0)) && REG_POINTER (XEXP (x, 0)))
4163 result = MAX ((int) (GET_MODE_BITSIZE (Pmode)
4164 - GET_MODE_BITSIZE (ptr_mode) + 1),
4165 result);
4166 #endif
4167 return result;
4169 case MULT:
4170 /* The number of bits of the product is the sum of the number of
4171 bits of both terms. However, unless one of the terms is known
4172 to be positive, we must allow for an additional bit since negating
4173 a negative number can remove one sign bit copy. */
4175 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4176 known_x, known_mode, known_ret);
4177 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4178 known_x, known_mode, known_ret);
4180 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4181 if (result > 0
4182 && (bitwidth > HOST_BITS_PER_WIDE_INT
4183 || (((nonzero_bits (XEXP (x, 0), mode)
4184 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4185 && ((nonzero_bits (XEXP (x, 1), mode)
4186 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))))
4187 result--;
4189 return MAX (1, result);
4191 case UDIV:
4192 /* The result must be <= the first operand. If the first operand
4193 has the high bit set, we know nothing about the number of sign
4194 bit copies. */
4195 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4196 return 1;
4197 else if ((nonzero_bits (XEXP (x, 0), mode)
4198 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4199 return 1;
4200 else
4201 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4202 known_x, known_mode, known_ret);
4204 case UMOD:
4205 /* The result must be <= the second operand. */
4206 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4207 known_x, known_mode, known_ret);
4209 case DIV:
4210 /* Similar to unsigned division, except that we have to worry about
4211 the case where the divisor is negative: e.g. dividing by -1 negates
4212 the dividend and so can cost one sign bit copy. */
4213 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4214 known_x, known_mode, known_ret);
4215 if (result > 1
4216 && (bitwidth > HOST_BITS_PER_WIDE_INT
4217 || (nonzero_bits (XEXP (x, 1), mode)
4218 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4219 result--;
4221 return result;
4223 case MOD:
4224 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4225 known_x, known_mode, known_ret);
4226 if (result > 1
4227 && (bitwidth > HOST_BITS_PER_WIDE_INT
4228 || (nonzero_bits (XEXP (x, 1), mode)
4229 & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4230 result--;
4232 return result;
4234 case ASHIFTRT:
4235 /* An arithmetic right shift by a constant adds that many bits that
4236 are equal to the sign bit. */
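/* E.g. (ashiftrt:SI X (const_int 4)) has at least four more sign bit copies
   than X, capped at the mode width.  */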
4237 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4238 known_x, known_mode, known_ret);
4239 if (GET_CODE (XEXP (x, 1)) == CONST_INT
4240 && INTVAL (XEXP (x, 1)) > 0)
4241 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4243 return num0;
4245 case ASHIFT:
4246 /* Left shifts destroy copies. */
4247 if (GET_CODE (XEXP (x, 1)) != CONST_INT
4248 || INTVAL (XEXP (x, 1)) < 0
4249 || INTVAL (XEXP (x, 1)) >= (int) bitwidth)
4250 return 1;
4252 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4253 known_x, known_mode, known_ret);
4254 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4256 case IF_THEN_ELSE:
4257 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4258 known_x, known_mode, known_ret);
4259 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4260 known_x, known_mode, known_ret);
4261 return MIN (num0, num1);
4263 case EQ: case NE: case GE: case GT: case LE: case LT:
4264 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4265 case GEU: case GTU: case LEU: case LTU:
4266 case UNORDERED: case ORDERED:
4267 /* If STORE_FLAG_VALUE is negative, take its 1's complement and remask.
4268 Then see how many leading zero bits we have. */
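/* E.g. with STORE_FLAG_VALUE == 1 the result is 0 or 1, giving BITWIDTH - 1
   sign bit copies; with STORE_FLAG_VALUE == -1 it is 0 or -1, giving
   BITWIDTH copies.  */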
4269 nonzero = STORE_FLAG_VALUE;
4270 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4271 && (nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4272 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4274 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4276 default:
4277 break;
4280 /* If we haven't been able to figure it out by one of the above rules,
4281 see if some of the high-order bits are known to be zero. If so,
4282 count those zero bits and return that amount. If we can't
4283 safely compute the mask for this mode, just return 1. */
4285 bitwidth = GET_MODE_BITSIZE (mode);
4286 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4287 return 1;
4289 nonzero = nonzero_bits (x, mode);
4290 return nonzero & ((HOST_WIDE_INT) 1 << (bitwidth - 1))
4291 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4294 /* Calculate the rtx_cost of a single instruction. A return value of
4295 zero indicates an instruction pattern without a known cost. */
4298 insn_rtx_cost (rtx pat)
4300 int i, cost;
4301 rtx set;
4303 /* Extract the single set rtx from the instruction pattern.
4304 We can't use single_set since we only have the pattern. */
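/* A PARALLEL containing a single SET plus CLOBBERs is costed from that SET;
   a pattern with two or more SETs yields zero (unknown cost).  */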
4305 if (GET_CODE (pat) == SET)
4306 set = pat;
4307 else if (GET_CODE (pat) == PARALLEL)
4309 set = NULL_RTX;
4310 for (i = 0; i < XVECLEN (pat, 0); i++)
4312 rtx x = XVECEXP (pat, 0, i);
4313 if (GET_CODE (x) == SET)
4315 if (set)
4316 return 0;
4317 set = x;
4320 if (!set)
4321 return 0;
4323 else
4324 return 0;
4326 cost = rtx_cost (SET_SRC (set), SET);
4327 return cost > 0 ? cost : COSTS_N_INSNS (1);
4330 /* Given an insn INSN and condition COND, return the condition in a
4331 canonical form to simplify testing by callers. Specifically:
4333 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4334 (2) Both operands will be machine operands; (cc0) will have been replaced.
4335 (3) If an operand is a constant, it will be the second operand.
4336 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4337 for GE, GEU, and LEU.
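For example, (le X (const_int 4)) is returned as (lt X (const_int 5)), and
(geu X (const_int 1)) as (gtu X (const_int 0)).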
4339 If the condition cannot be understood, or is an inequality floating-point
4340 comparison which needs to be reversed, 0 will be returned.
4342 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4344 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4345 insn used in locating the condition was found. If a replacement test
4346 of the condition is desired, it should be placed in front of that
4347 insn and we will be sure that the inputs are still valid.
4349 If WANT_REG is nonzero, we wish the condition to be relative to that
4350 register, if possible. Therefore, do not canonicalize the condition
4351 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4352 to be a compare to a CC mode register.
4354 If VALID_AT_INSN_P is nonzero, the condition must be valid both at
4355 *EARLIEST and at INSN. */
4358 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4359 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4361 enum rtx_code code;
4362 rtx prev = insn;
4363 rtx set;
4364 rtx tem;
4365 rtx op0, op1;
4366 int reverse_code = 0;
4367 enum machine_mode mode;
4368 basic_block bb = BLOCK_FOR_INSN (insn);
4370 code = GET_CODE (cond);
4371 mode = GET_MODE (cond);
4372 op0 = XEXP (cond, 0);
4373 op1 = XEXP (cond, 1);
4375 if (reverse)
4376 code = reversed_comparison_code (cond, insn);
4377 if (code == UNKNOWN)
4378 return 0;
4380 if (earliest)
4381 *earliest = insn;
4383 /* If we are comparing a register with zero, see if the register is set
4384 in the previous insn to a COMPARE or a comparison operation. Perform
4385 the same tests, as a function of STORE_FLAG_VALUE, that
4386 find_comparison_args in cse.c does. */
4388 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4389 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4390 && op1 == CONST0_RTX (GET_MODE (op0))
4391 && op0 != want_reg)
4393 /* Set nonzero when we find something of interest. */
4394 rtx x = 0;
4396 #ifdef HAVE_cc0
4397 /* If comparison with cc0, import actual comparison from compare
4398 insn. */
4399 if (op0 == cc0_rtx)
4401 if ((prev = prev_nonnote_insn (prev)) == 0
4402 || !NONJUMP_INSN_P (prev)
4403 || (set = single_set (prev)) == 0
4404 || SET_DEST (set) != cc0_rtx)
4405 return 0;
4407 op0 = SET_SRC (set);
4408 op1 = CONST0_RTX (GET_MODE (op0));
4409 if (earliest)
4410 *earliest = prev;
4412 #endif
4414 /* If this is a COMPARE, pick up the two things being compared. */
4415 if (GET_CODE (op0) == COMPARE)
4417 op1 = XEXP (op0, 1);
4418 op0 = XEXP (op0, 0);
4419 continue;
4421 else if (!REG_P (op0))
4422 break;
4424 /* Go back to the previous insn. Stop if it is not an INSN. We also
4425 stop if it isn't a single set or if it has a REG_INC note because
4426 we don't want to bother dealing with it. */
4428 if ((prev = prev_nonnote_insn (prev)) == 0
4429 || !NONJUMP_INSN_P (prev)
4430 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4431 /* In cfglayout mode, there do not have to be labels at the
4432 beginning of a block, or jumps at the end, so the previous
4433 conditions would not stop us when we reach the bb boundary. */
4434 || BLOCK_FOR_INSN (prev) != bb)
4435 break;
4437 set = set_of (op0, prev);
4439 if (set
4440 && (GET_CODE (set) != SET
4441 || !rtx_equal_p (SET_DEST (set), op0)))
4442 break;
4444 /* If this is setting OP0, get what it sets it to if it looks
4445 relevant. */
4446 if (set)
4448 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4449 #ifdef FLOAT_STORE_FLAG_VALUE
4450 REAL_VALUE_TYPE fsfv;
4451 #endif
4453 /* ??? We may not combine comparisons done in a CCmode with
4454 comparisons not done in a CCmode. This is to aid targets
4455 like Alpha that have an IEEE compliant EQ instruction, and
4456 a non-IEEE compliant BEQ instruction. The use of CCmode is
4457 actually artificial, simply to prevent the combination, but
4458 should not affect other platforms.
4460 However, we must allow VOIDmode comparisons to match either
4461 CCmode or non-CCmode comparison, because some ports have
4462 modeless comparisons inside branch patterns.
4464 ??? This mode check should perhaps look more like the mode check
4465 in simplify_comparison in combine. */
4467 if ((GET_CODE (SET_SRC (set)) == COMPARE
4468 || (((code == NE
4469 || (code == LT
4470 && GET_MODE_CLASS (inner_mode) == MODE_INT
4471 && (GET_MODE_BITSIZE (inner_mode)
4472 <= HOST_BITS_PER_WIDE_INT)
4473 && (STORE_FLAG_VALUE
4474 & ((HOST_WIDE_INT) 1
4475 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4476 #ifdef FLOAT_STORE_FLAG_VALUE
4477 || (code == LT
4478 && SCALAR_FLOAT_MODE_P (inner_mode)
4479 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4480 REAL_VALUE_NEGATIVE (fsfv)))
4481 #endif
4483 && COMPARISON_P (SET_SRC (set))))
4484 && (((GET_MODE_CLASS (mode) == MODE_CC)
4485 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4486 || mode == VOIDmode || inner_mode == VOIDmode))
4487 x = SET_SRC (set);
4488 else if (((code == EQ
4489 || (code == GE
4490 && (GET_MODE_BITSIZE (inner_mode)
4491 <= HOST_BITS_PER_WIDE_INT)
4492 && GET_MODE_CLASS (inner_mode) == MODE_INT
4493 && (STORE_FLAG_VALUE
4494 & ((HOST_WIDE_INT) 1
4495 << (GET_MODE_BITSIZE (inner_mode) - 1))))
4496 #ifdef FLOAT_STORE_FLAG_VALUE
4497 || (code == GE
4498 && SCALAR_FLOAT_MODE_P (inner_mode)
4499 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
4500 REAL_VALUE_NEGATIVE (fsfv)))
4501 #endif
4503 && COMPARISON_P (SET_SRC (set))
4504 && (((GET_MODE_CLASS (mode) == MODE_CC)
4505 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
4506 || mode == VOIDmode || inner_mode == VOIDmode))
4509 reverse_code = 1;
4510 x = SET_SRC (set);
4512 else
4513 break;
4516 else if (reg_set_p (op0, prev))
4517 /* If this sets OP0, but not directly, we have to give up. */
4518 break;
4520 if (x)
4522 /* If the caller is expecting the condition to be valid at INSN,
4523 make sure X doesn't change before INSN. */
4524 if (valid_at_insn_p)
4525 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
4526 break;
4527 if (COMPARISON_P (x))
4528 code = GET_CODE (x);
4529 if (reverse_code)
4531 code = reversed_comparison_code (x, prev);
4532 if (code == UNKNOWN)
4533 return 0;
4534 reverse_code = 0;
4537 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
4538 if (earliest)
4539 *earliest = prev;
4543 /* If constant is first, put it last. */
4544 if (CONSTANT_P (op0))
4545 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
4547 /* If OP0 is the result of a comparison, we weren't able to find what
4548 was really being compared, so fail. */
4549 if (!allow_cc_mode
4550 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
4551 return 0;
4553 /* Canonicalize any ordered integer comparison that includes equality
4554 (LE, GE, LEU, GEU) into a strict one, provided we can do the
4555 computation in the relevant mode without overflow. */
4557 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
4558 && GET_CODE (op1) == CONST_INT
4559 && GET_MODE (op0) != VOIDmode
4560 && GET_MODE_BITSIZE (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
4562 HOST_WIDE_INT const_val = INTVAL (op1);
4563 unsigned HOST_WIDE_INT uconst_val = const_val;
4564 unsigned HOST_WIDE_INT max_val
4565 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
4567 switch (code)
4569 case LE:
4570 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
4571 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
4572 break;
4574 /* When cross-compiling, const_val might be sign-extended from
4575 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT. */
4576 case GE:
4577 if ((HOST_WIDE_INT) (const_val & max_val)
4578 != (((HOST_WIDE_INT) 1
4579 << (GET_MODE_BITSIZE (GET_MODE (op0)) - 1))))
4580 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
4581 break;
4583 case LEU:
4584 if (uconst_val < max_val)
4585 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
4586 break;
4588 case GEU:
4589 if (uconst_val != 0)
4590 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
4591 break;
4593 default:
4594 break;
4598 /* Never return CC0; return zero instead. */
4599 if (CC0_P (op0))
4600 return 0;
4602 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
4605 /* Given a jump insn JUMP, return the condition that will cause it to branch
4606 to its JUMP_LABEL. If the condition cannot be understood, or is an
4607 inequality floating-point comparison which needs to be reversed, 0 will
4608 be returned.
4610 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4611 insn used in locating the condition was found. If a replacement test
4612 of the condition is desired, it should be placed in front of that
4613 insn and we will be sure that the inputs are still valid. If EARLIEST
4614 is null, the returned condition will be valid at INSN.
4616 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
4617 compare to a CC mode register.
4619 VALID_AT_INSN_P is the same as for canonicalize_condition. */
4622 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
4624 rtx cond;
4625 int reverse;
4626 rtx set;
4628 /* If this is not a standard conditional jump, we can't parse it. */
4629 if (!JUMP_P (jump)
4630 || ! any_condjump_p (jump))
4631 return 0;
4632 set = pc_set (jump);
4634 cond = XEXP (SET_SRC (set), 0);
4636 /* If this branches to JUMP_LABEL when the condition is false, reverse
4637 the condition. */
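/* That is, for (set (pc) (if_then_else COND (pc) (label_ref L))) where L is
   JUMP_LABEL, the branch is taken when COND is false, so COND is reversed.  */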
4638 reverse
4639 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
4640 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
4642 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
4643 allow_cc_mode, valid_at_insn_p);
4646 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
4647 TARGET_MODE_REP_EXTENDED.
4649 Note that we assume that the property of
4650 TARGET_MODE_REP_EXTENDED (B, C) carries over to the integral modes
4651 narrower than mode B. I.e., if A is a mode narrower than B, then in
4652 order to be able to operate on it in mode B, mode A needs to
4653 satisfy the requirements set by the representation of mode B. */
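/* For example, on a hypothetical target where QImode is 8 bits wide, HImode
   is 16 bits wide and TARGET_MODE_REP_EXTENDED (QImode, HImode) is
   SIGN_EXTEND, num_sign_bit_copies_in_rep[HImode][QImode] ends up as 8.  */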
4655 static void
4656 init_num_sign_bit_copies_in_rep (void)
4658 enum machine_mode mode, in_mode;
4660 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
4661 in_mode = GET_MODE_WIDER_MODE (mode))
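/* When the inner loop below terminates, MODE equals IN_MODE, so the
   GET_MODE_WIDER_MODE (mode) step above does advance IN_MODE.  */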
4662 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
4663 mode = GET_MODE_WIDER_MODE (mode))
4665 enum machine_mode i;
4667 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
4668 extends to the next widest mode. */
4669 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
4670 || GET_MODE_WIDER_MODE (mode) == in_mode);
4672 /* We are in in_mode. Count how many bits outside of mode
4673 have to be copies of the sign-bit. */
4674 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
4676 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
4678 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
4679 /* We can only check sign-bit copies starting from the
4680 top-bit. In order to be able to check the bits we
4681 have already seen we pretend that subsequent bits
4682 have to be sign-bit copies too. */
4683 || num_sign_bit_copies_in_rep [in_mode][mode])
4684 num_sign_bit_copies_in_rep [in_mode][mode]
4685 += GET_MODE_BITSIZE (wider) - GET_MODE_BITSIZE (i);
4690 /* Suppose that truncation from the machine mode of X to MODE is not a
4691 no-op. See if there is anything special about X so that we can
4692 assume it already contains a truncated value of MODE. */
4694 bool
4695 truncated_to_mode (enum machine_mode mode, rtx x)
4697 /* This register has already been used in MODE without explicit
4698 truncation. */
4699 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
4700 return true;
4702 /* See if we already satisfy the requirements of MODE. If yes we
4703 can just switch to MODE. */
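/* The + 1 is needed because the required high-order bits must be copies of
   the bit just below them (the sign bit of the represented value), so the
   run of identical top bits must be at least that long plus one.  */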
4704 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
4705 && (num_sign_bit_copies (x, GET_MODE (x))
4706 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
4707 return true;
4709 return false;
4712 /* Initialize non_rtx_starting_operands, which is used to speed up
4713 for_each_rtx. */
4714 void
4715 init_rtlanal (void)
4717 int i;
4718 for (i = 0; i < NUM_RTX_CODE; i++)
4720 const char *format = GET_RTX_FORMAT (i);
4721 const char *first = strpbrk (format, "eEV");
4722 non_rtx_starting_operands[i] = first ? first - format : -1;
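/* E.g. a code whose format is "ee" gets 0, while a code with no rtx operands
   (such as CONST_INT, format "w") gets -1.  */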
4725 init_num_sign_bit_copies_in_rep ();
4728 /* Check whether this is a constant pool constant. */
4729 bool
4730 constant_pool_constant_p (rtx x)
4732 x = avoid_constant_pool_reference (x);
4733 return GET_CODE (x) == CONST_DOUBLE;