gcc/rtlanal.c
1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "toplev.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "tm_p.h"
31 #include "flags.h"
33 /* Forward declarations */
34 static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
35 static void set_of_1 PARAMS ((rtx, rtx, void *));
36 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
37 static int computed_jump_p_1 PARAMS ((rtx));
38 static void parms_set PARAMS ((rtx, rtx, void *));
40 /* Bit flags that specify the machine subtype we are compiling for.
41 Bits are tested using macros TARGET_... defined in the tm.h file
42 and set by `-m...' switches. Must be defined in rtlanal.c. */
44 int target_flags;
46 /* Return 1 if the value of X is unstable
47 (would be different at a different point in the program).
48 The frame pointer, arg pointer, etc. are considered stable
49 (within one function) and so is anything marked `unchanging'. */
51 int
52 rtx_unstable_p (x)
53 rtx x;
55 RTX_CODE code = GET_CODE (x);
56 int i;
57 const char *fmt;
59 switch (code)
61 case MEM:
62 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
64 case QUEUED:
65 return 1;
67 case ADDRESSOF:
68 case CONST:
69 case CONST_INT:
70 case CONST_DOUBLE:
71 case CONST_VECTOR:
72 case SYMBOL_REF:
73 case LABEL_REF:
74 return 0;
76 case REG:
77 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
78 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
79 /* The arg pointer varies if it is not a fixed register. */
80 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
81 || RTX_UNCHANGING_P (x))
82 return 0;
83 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
84 /* ??? When call-clobbered, the value is stable modulo the restore
85 that must happen after a call. This currently screws up local-alloc
86 into believing that the restore is not needed. */
87 if (x == pic_offset_table_rtx)
88 return 0;
89 #endif
90 return 1;
92 case ASM_OPERANDS:
93 if (MEM_VOLATILE_P (x))
94 return 1;
96 /* FALLTHROUGH */
98 default:
99 break;
102 fmt = GET_RTX_FORMAT (code);
103 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
104 if (fmt[i] == 'e')
106 if (rtx_unstable_p (XEXP (x, i)))
107 return 1;
109 else if (fmt[i] == 'E')
111 int j;
112 for (j = 0; j < XVECLEN (x, i); j++)
113 if (rtx_unstable_p (XVECEXP (x, i, j)))
114 return 1;
117 return 0;
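/* The loop above over GET_RTX_FORMAT, recursing into 'e' (expression) and
   'E' (expression vector) operands, is the standard traversal idiom used
   throughout this file.  The helper below is a minimal illustration of that
   idiom in isolation; it is not part of the original file and its name is
   made up.  It counts every REG appearing anywhere inside X.  */

static int
count_regs_in_rtx (x)
     rtx x;
{
  RTX_CODE code = GET_CODE (x);
  const char *fmt = GET_RTX_FORMAT (code);
  int i, count = (code == REG ? 1 : 0);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      count += count_regs_in_rtx (XEXP (x, i));
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          count += count_regs_in_rtx (XVECEXP (x, i, j));
      }

  return count;
}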
120 /* Return 1 if X has a value that can vary even between two
121 executions of the program. 0 means X can be compared reliably
122 against certain constants or near-constants.
123 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
124 zero, we are slightly more conservative.
125 The frame pointer and the arg pointer are considered constant. */
128 rtx_varies_p (x, for_alias)
129 rtx x;
130 int for_alias;
132 RTX_CODE code = GET_CODE (x);
133 int i;
134 const char *fmt;
136 switch (code)
138 case MEM:
139 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
141 case QUEUED:
142 return 1;
144 case CONST:
145 case CONST_INT:
146 case CONST_DOUBLE:
147 case CONST_VECTOR:
148 case SYMBOL_REF:
149 case LABEL_REF:
150 return 0;
152 case REG:
153 /* Note that we have to test for the actual rtx used for the frame
154 and arg pointers and not just the register number in case we have
155 eliminated the frame and/or arg pointer and are using it
156 for pseudos. */
157 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
158 /* The arg pointer varies if it is not a fixed register. */
159 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
160 return 0;
161 if (x == pic_offset_table_rtx
162 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
163 /* ??? When call-clobbered, the value is stable modulo the restore
164 that must happen after a call. This currently screws up
165 local-alloc into believing that the restore is not needed, so we
166 must return 0 only if we are called from alias analysis. */
167 && for_alias
168 #endif
170 return 0;
171 return 1;
173 case LO_SUM:
174 /* The operand 0 of a LO_SUM is considered constant
175 (in fact it is related specifically to operand 1)
176 during alias analysis. */
177 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
178 || rtx_varies_p (XEXP (x, 1), for_alias);
180 case ASM_OPERANDS:
181 if (MEM_VOLATILE_P (x))
182 return 1;
184 /* FALLTHROUGH */
186 default:
187 break;
190 fmt = GET_RTX_FORMAT (code);
191 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
192 if (fmt[i] == 'e')
194 if (rtx_varies_p (XEXP (x, i), for_alias))
195 return 1;
197 else if (fmt[i] == 'E')
199 int j;
200 for (j = 0; j < XVECLEN (x, i); j++)
201 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
202 return 1;
205 return 0;
208 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
211 rtx_addr_can_trap_p (x)
212 rtx x;
214 enum rtx_code code = GET_CODE (x);
216 switch (code)
218 case SYMBOL_REF:
219 return SYMBOL_REF_WEAK (x);
221 case LABEL_REF:
222 return 0;
224 case REG:
225 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
226 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
227 || x == stack_pointer_rtx
228 /* The arg pointer varies if it is not a fixed register. */
229 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
230 return 0;
231 /* All of the virtual frame registers are stack references. */
232 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
233 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
234 return 0;
235 return 1;
237 case CONST:
238 return rtx_addr_can_trap_p (XEXP (x, 0));
240 case PLUS:
241 /* An address is assumed not to trap if it is an address that can't
242 trap plus a constant integer or it is the pic register plus a
243 constant. */
244 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
245 && GET_CODE (XEXP (x, 1)) == CONST_INT)
246 || (XEXP (x, 0) == pic_offset_table_rtx
247 && CONSTANT_P (XEXP (x, 1))));
249 case LO_SUM:
250 case PRE_MODIFY:
251 return rtx_addr_can_trap_p (XEXP (x, 1));
253 case PRE_DEC:
254 case PRE_INC:
255 case POST_DEC:
256 case POST_INC:
257 case POST_MODIFY:
258 return rtx_addr_can_trap_p (XEXP (x, 0));
260 default:
261 break;
264 /* If it isn't one of the cases above, it can cause a trap. */
265 return 1;
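/* Minimal usage sketch (hypothetical helper, not part of the original
   file): combine the predicates above to ask whether a MEM has an address
   that is both stable within the function and guaranteed not to trap.  */

static int
mem_address_stable_and_safe_p (mem)
     rtx mem;
{
  if (GET_CODE (mem) != MEM)
    return 0;
  return ! rtx_unstable_p (XEXP (mem, 0))
         && ! rtx_addr_can_trap_p (XEXP (mem, 0));
}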
268 /* Return 1 if X refers to a memory location whose address
269 cannot be compared reliably with constant addresses,
270 or if X refers to a BLKmode memory object.
271 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
272 zero, we are slightly more conservative. */
275 rtx_addr_varies_p (x, for_alias)
276 rtx x;
277 int for_alias;
279 enum rtx_code code;
280 int i;
281 const char *fmt;
283 if (x == 0)
284 return 0;
286 code = GET_CODE (x);
287 if (code == MEM)
288 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
290 fmt = GET_RTX_FORMAT (code);
291 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
292 if (fmt[i] == 'e')
294 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
295 return 1;
297 else if (fmt[i] == 'E')
299 int j;
300 for (j = 0; j < XVECLEN (x, i); j++)
301 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
302 return 1;
304 return 0;
307 /* Return the value of the integer term in X, if one is apparent;
308 otherwise return 0.
309 Only obvious integer terms are detected.
310 This is used in cse.c with the `related_value' field. */
312 HOST_WIDE_INT
313 get_integer_term (x)
314 rtx x;
316 if (GET_CODE (x) == CONST)
317 x = XEXP (x, 0);
319 if (GET_CODE (x) == MINUS
320 && GET_CODE (XEXP (x, 1)) == CONST_INT)
321 return - INTVAL (XEXP (x, 1));
322 if (GET_CODE (x) == PLUS
323 && GET_CODE (XEXP (x, 1)) == CONST_INT)
324 return INTVAL (XEXP (x, 1));
325 return 0;
328 /* If X is a constant, return the value sans apparent integer term;
329 otherwise return 0.
330 Only obvious integer terms are detected. */
333 get_related_value (x)
334 rtx x;
336 if (GET_CODE (x) != CONST)
337 return 0;
338 x = XEXP (x, 0);
339 if (GET_CODE (x) == PLUS
340 && GET_CODE (XEXP (x, 1)) == CONST_INT)
341 return XEXP (x, 0);
342 else if (GET_CODE (x) == MINUS
343 && GET_CODE (XEXP (x, 1)) == CONST_INT)
344 return XEXP (x, 0);
345 return 0;
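/* Usage sketch (hypothetical, not part of the original file): decompose a
   constant address such as (const (plus (symbol_ref "x") (const_int 8)))
   into its related base and integer term using the two routines above, the
   same decomposition cse.c performs for its `related_value' field.  */

static void
split_constant_address (addr, base, offset)
     rtx addr;
     rtx *base;
     HOST_WIDE_INT *offset;
{
  *offset = get_integer_term (addr);   /* 8 in the example, 0 if none.  */
  *base = get_related_value (addr);    /* The SYMBOL_REF, or 0 if ADDR is
                                          not a CONST of the right shape.  */
}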
348 /* Given a tablejump insn INSN, return the RTL expression for the offset
349 into the jump table. If the offset cannot be determined, then return
350 NULL_RTX.
352 If EARLIEST is non-zero, it is a pointer to a place where the earliest
353 insn used in locating the offset was found. */
356 get_jump_table_offset (insn, earliest)
357 rtx insn;
358 rtx *earliest;
360 rtx label;
361 rtx table;
362 rtx set;
363 rtx old_insn;
364 rtx x;
365 rtx old_x;
366 rtx y;
367 rtx old_y;
368 int i;
370 if (GET_CODE (insn) != JUMP_INSN
371 || ! (label = JUMP_LABEL (insn))
372 || ! (table = NEXT_INSN (label))
373 || GET_CODE (table) != JUMP_INSN
374 || (GET_CODE (PATTERN (table)) != ADDR_VEC
375 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
376 || ! (set = single_set (insn)))
377 return NULL_RTX;
379 x = SET_SRC (set);
381 /* Some targets (e.g., ARM) emit a tablejump that also
382 contains the out-of-range target. */
383 if (GET_CODE (x) == IF_THEN_ELSE
384 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
385 x = XEXP (x, 1);
387 /* Search backwards and locate the expression stored in X. */
388 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
389 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
392 /* If X is an expression using a relative address then strip
393 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
394 or the jump table label. */
395 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
396 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
398 for (i = 0; i < 2; i++)
400 old_insn = insn;
401 y = XEXP (x, i);
403 if (y == pc_rtx || y == pic_offset_table_rtx)
404 break;
406 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
407 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
410 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
411 break;
414 if (i >= 2)
415 return NULL_RTX;
417 x = XEXP (x, 1 - i);
419 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
420 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
424 /* Strip off any sign or zero extension. */
425 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
427 x = XEXP (x, 0);
429 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
430 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
434 /* If X isn't a MEM then this isn't a tablejump we understand. */
435 if (GET_CODE (x) != MEM)
436 return NULL_RTX;
438 /* Strip off the MEM. */
439 x = XEXP (x, 0);
441 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
442 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
445 /* If X isn't a PLUS then this isn't a tablejump we understand. */
446 if (GET_CODE (x) != PLUS)
447 return NULL_RTX;
449 /* At this point we should have an expression representing the jump table
450 plus an offset. Examine each operand in order to determine which one
451 represents the jump table. Knowing that tells us that the other operand
452 must represent the offset. */
453 for (i = 0; i < 2; i++)
455 old_insn = insn;
456 y = XEXP (x, i);
458 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
459 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
462 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
463 && reg_mentioned_p (label, y))
464 break;
467 if (i >= 2)
468 return NULL_RTX;
470 x = XEXP (x, 1 - i);
472 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
473 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
474 for (i = 0; i < 2; i++)
475 if (XEXP (x, i) == pic_offset_table_rtx)
477 x = XEXP (x, 1 - i);
478 break;
481 if (earliest)
482 *earliest = insn;
484 /* Return the RTL expression representing the offset. */
485 return x;
488 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
489 a global register. */
491 static int
492 global_reg_mentioned_p_1 (loc, data)
493 rtx *loc;
494 void *data ATTRIBUTE_UNUSED;
496 int regno;
497 rtx x = *loc;
499 if (! x)
500 return 0;
502 switch (GET_CODE (x))
504 case SUBREG:
505 if (GET_CODE (SUBREG_REG (x)) == REG)
507 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
508 && global_regs[subreg_regno (x)])
509 return 1;
510 return 0;
512 break;
514 case REG:
515 regno = REGNO (x);
516 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
517 return 1;
518 return 0;
520 case SCRATCH:
521 case PC:
522 case CC0:
523 case CONST_INT:
524 case CONST_DOUBLE:
525 case CONST:
526 case LABEL_REF:
527 return 0;
529 case CALL:
530 /* A non-constant call might use a global register. */
531 return 1;
533 default:
534 break;
537 return 0;
540 /* Returns nonzero if X mentions a global register. */
543 global_reg_mentioned_p (x)
544 rtx x;
547 if (INSN_P (x))
549 if (GET_CODE (x) == CALL_INSN)
551 if (! CONST_OR_PURE_CALL_P (x))
552 return 1;
553 x = CALL_INSN_FUNCTION_USAGE (x);
554 if (x == 0)
555 return 0;
557 else
558 x = PATTERN (x);
561 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
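/* Sketch of the for_each_rtx callback pattern used above (hypothetical,
   not part of the original file): count the MEMs contained anywhere in an
   expression.  The callback receives a pointer to each subexpression and
   the DATA pointer given to for_each_rtx; returning 0 continues the walk.  */

static int
count_mems_1 (loc, data)
     rtx *loc;
     void *data;
{
  if (*loc && GET_CODE (*loc) == MEM)
    ++*(int *) data;
  return 0;
}

/* Called as:  nmems = 0;  for_each_rtx (&x, count_mems_1, &nmems);  */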
564 /* Return the number of places FIND appears within X. If COUNT_DEST is
565 zero, we do not count occurrences inside the destination of a SET. */
568 count_occurrences (x, find, count_dest)
569 rtx x, find;
570 int count_dest;
572 int i, j;
573 enum rtx_code code;
574 const char *format_ptr;
575 int count;
577 if (x == find)
578 return 1;
580 code = GET_CODE (x);
582 switch (code)
584 case REG:
585 case CONST_INT:
586 case CONST_DOUBLE:
587 case CONST_VECTOR:
588 case SYMBOL_REF:
589 case CODE_LABEL:
590 case PC:
591 case CC0:
592 return 0;
594 case MEM:
595 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
596 return 1;
597 break;
599 case SET:
600 if (SET_DEST (x) == find && ! count_dest)
601 return count_occurrences (SET_SRC (x), find, count_dest);
602 break;
604 default:
605 break;
608 format_ptr = GET_RTX_FORMAT (code);
609 count = 0;
611 for (i = 0; i < GET_RTX_LENGTH (code); i++)
613 switch (*format_ptr++)
615 case 'e':
616 count += count_occurrences (XEXP (x, i), find, count_dest);
617 break;
619 case 'E':
620 for (j = 0; j < XVECLEN (x, i); j++)
621 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
622 break;
625 return count;
628 /* Nonzero if register REG appears somewhere within IN.
629 Also works if REG is not a register; in this case it checks
630 for a subexpression of IN that is Lisp "equal" to REG. */
633 reg_mentioned_p (reg, in)
634 rtx reg, in;
636 const char *fmt;
637 int i;
638 enum rtx_code code;
640 if (in == 0)
641 return 0;
643 if (reg == in)
644 return 1;
646 if (GET_CODE (in) == LABEL_REF)
647 return reg == XEXP (in, 0);
649 code = GET_CODE (in);
651 switch (code)
653 /* Compare registers by number. */
654 case REG:
655 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
657 /* These codes have no constituent expressions
658 and are unique. */
659 case SCRATCH:
660 case CC0:
661 case PC:
662 return 0;
664 case CONST_INT:
665 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
667 case CONST_VECTOR:
668 case CONST_DOUBLE:
669 /* These are kept unique for a given value. */
670 return 0;
672 default:
673 break;
676 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
677 return 1;
679 fmt = GET_RTX_FORMAT (code);
681 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
683 if (fmt[i] == 'E')
685 int j;
686 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
687 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
688 return 1;
690 else if (fmt[i] == 'e'
691 && reg_mentioned_p (reg, XEXP (in, i)))
692 return 1;
694 return 0;
697 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
698 no CODE_LABEL insn. */
701 no_labels_between_p (beg, end)
702 rtx beg, end;
704 rtx p;
705 if (beg == end)
706 return 0;
707 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
708 if (GET_CODE (p) == CODE_LABEL)
709 return 0;
710 return 1;
713 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
714 no JUMP_INSN insn. */
717 no_jumps_between_p (beg, end)
718 rtx beg, end;
720 rtx p;
721 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
722 if (GET_CODE (p) == JUMP_INSN)
723 return 0;
724 return 1;
727 /* Nonzero if register REG is used in an insn between
728 FROM_INSN and TO_INSN (exclusive of those two). */
731 reg_used_between_p (reg, from_insn, to_insn)
732 rtx reg, from_insn, to_insn;
734 rtx insn;
736 if (from_insn == to_insn)
737 return 0;
739 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
740 if (INSN_P (insn)
741 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
742 || (GET_CODE (insn) == CALL_INSN
743 && (find_reg_fusage (insn, USE, reg)
744 || find_reg_fusage (insn, CLOBBER, reg)))))
745 return 1;
746 return 0;
749 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
750 is entirely replaced by a new value and the only use is as a SET_DEST,
751 we do not consider it a reference. */
754 reg_referenced_p (x, body)
755 rtx x;
756 rtx body;
758 int i;
760 switch (GET_CODE (body))
762 case SET:
763 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
764 return 1;
766 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
767 of a REG that occupies all of the REG, the insn references X if
768 it is mentioned in the destination. */
769 if (GET_CODE (SET_DEST (body)) != CC0
770 && GET_CODE (SET_DEST (body)) != PC
771 && GET_CODE (SET_DEST (body)) != REG
772 && ! (GET_CODE (SET_DEST (body)) == SUBREG
773 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
774 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
775 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
776 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
777 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
778 && reg_overlap_mentioned_p (x, SET_DEST (body)))
779 return 1;
780 return 0;
782 case ASM_OPERANDS:
783 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
784 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
785 return 1;
786 return 0;
788 case CALL:
789 case USE:
790 case IF_THEN_ELSE:
791 return reg_overlap_mentioned_p (x, body);
793 case TRAP_IF:
794 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
796 case PREFETCH:
797 return reg_overlap_mentioned_p (x, XEXP (body, 0));
799 case UNSPEC:
800 case UNSPEC_VOLATILE:
801 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
802 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
803 return 1;
804 return 0;
806 case PARALLEL:
807 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
808 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
809 return 1;
810 return 0;
812 case CLOBBER:
813 if (GET_CODE (XEXP (body, 0)) == MEM)
814 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
815 return 1;
816 return 0;
818 case COND_EXEC:
819 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
820 return 1;
821 return reg_referenced_p (x, COND_EXEC_CODE (body));
823 default:
824 return 0;
828 /* Nonzero if register REG is referenced in an insn between
829 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
830 not count. */
833 reg_referenced_between_p (reg, from_insn, to_insn)
834 rtx reg, from_insn, to_insn;
836 rtx insn;
838 if (from_insn == to_insn)
839 return 0;
841 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
842 if (INSN_P (insn)
843 && (reg_referenced_p (reg, PATTERN (insn))
844 || (GET_CODE (insn) == CALL_INSN
845 && find_reg_fusage (insn, USE, reg))))
846 return 1;
847 return 0;
850 /* Nonzero if register REG is set or clobbered in an insn between
851 FROM_INSN and TO_INSN (exclusive of those two). */
854 reg_set_between_p (reg, from_insn, to_insn)
855 rtx reg, from_insn, to_insn;
857 rtx insn;
859 if (from_insn == to_insn)
860 return 0;
862 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
863 if (INSN_P (insn) && reg_set_p (reg, insn))
864 return 1;
865 return 0;
868 /* Internals of reg_set_between_p. */
870 reg_set_p (reg, insn)
871 rtx reg, insn;
873 rtx body = insn;
875 /* We can be passed an insn or part of one. If we are passed an insn,
876 check if a side-effect of the insn clobbers REG. */
877 if (INSN_P (insn))
879 if (FIND_REG_INC_NOTE (insn, reg)
880 || (GET_CODE (insn) == CALL_INSN
881 /* We'd like to test call_used_regs here, but rtlanal.c can't
882 reference that variable due to its use in genattrtab. So
883 we'll just be more conservative.
885 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
886 information holds all clobbered registers. */
887 && ((GET_CODE (reg) == REG
888 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
889 || GET_CODE (reg) == MEM
890 || find_reg_fusage (insn, CLOBBER, reg))))
891 return 1;
893 body = PATTERN (insn);
896 return set_of (reg, insn) != NULL_RTX;
899 /* Similar to reg_set_between_p, but check all registers in X. Return 0
900 only if none of them are modified between START and END. Do not
901 consider non-registers one way or the other. */
904 regs_set_between_p (x, start, end)
905 rtx x;
906 rtx start, end;
908 enum rtx_code code = GET_CODE (x);
909 const char *fmt;
910 int i, j;
912 switch (code)
914 case CONST_INT:
915 case CONST_DOUBLE:
916 case CONST_VECTOR:
917 case CONST:
918 case SYMBOL_REF:
919 case LABEL_REF:
920 case PC:
921 case CC0:
922 return 0;
924 case REG:
925 return reg_set_between_p (x, start, end);
927 default:
928 break;
931 fmt = GET_RTX_FORMAT (code);
932 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
934 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
935 return 1;
937 else if (fmt[i] == 'E')
938 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
939 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
940 return 1;
943 return 0;
946 /* Similar to reg_set_between_p, but check all registers in X. Return 0
947 only if none of them are modified between START and END. Return 1 if
948 X contains a MEM; this routine does not perform any memory aliasing. */
951 modified_between_p (x, start, end)
952 rtx x;
953 rtx start, end;
955 enum rtx_code code = GET_CODE (x);
956 const char *fmt;
957 int i, j;
959 switch (code)
961 case CONST_INT:
962 case CONST_DOUBLE:
963 case CONST_VECTOR:
964 case CONST:
965 case SYMBOL_REF:
966 case LABEL_REF:
967 return 0;
969 case PC:
970 case CC0:
971 return 1;
973 case MEM:
974 /* If the memory is not constant, assume it is modified. If it is
975 constant, we still have to check the address. */
976 if (! RTX_UNCHANGING_P (x))
977 return 1;
978 break;
980 case REG:
981 return reg_set_between_p (x, start, end);
983 default:
984 break;
987 fmt = GET_RTX_FORMAT (code);
988 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
990 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
991 return 1;
993 else if (fmt[i] == 'E')
994 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
995 if (modified_between_p (XVECEXP (x, i, j), start, end))
996 return 1;
999 return 0;
1002 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1003 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1004 does not perform any memory aliasing. */
1007 modified_in_p (x, insn)
1008 rtx x;
1009 rtx insn;
1011 enum rtx_code code = GET_CODE (x);
1012 const char *fmt;
1013 int i, j;
1015 switch (code)
1017 case CONST_INT:
1018 case CONST_DOUBLE:
1019 case CONST_VECTOR:
1020 case CONST:
1021 case SYMBOL_REF:
1022 case LABEL_REF:
1023 return 0;
1025 case PC:
1026 case CC0:
1027 return 1;
1029 case MEM:
1030 /* If the memory is not constant, assume it is modified. If it is
1031 constant, we still have to check the address. */
1032 if (! RTX_UNCHANGING_P (x))
1033 return 1;
1034 break;
1036 case REG:
1037 return reg_set_p (x, insn);
1039 default:
1040 break;
1043 fmt = GET_RTX_FORMAT (code);
1044 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1046 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1047 return 1;
1049 else if (fmt[i] == 'E')
1050 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1051 if (modified_in_p (XVECEXP (x, i, j), insn))
1052 return 1;
1055 return 0;
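/* Sketch (hypothetical, not part of the original file): the predicates
   above combine into the usual legality test for hoisting a single_set
   insn INSN so that it executes just after insn FROM.  The source must not
   be modified between FROM and INSN, and the destination must be neither
   used nor set in that range.  This ignores memory aliasing beyond what
   modified_between_p itself does.  */

static int
can_hoist_after_p (insn, from)
     rtx insn, from;
{
  rtx set = single_set (insn);

  if (set == 0 || side_effects_p (PATTERN (insn)))
    return 0;

  return ! modified_between_p (SET_SRC (set), from, insn)
         && ! reg_used_between_p (SET_DEST (set), from, insn)
         && ! reg_set_between_p (SET_DEST (set), from, insn);
}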
1058 /* Return true if anything in insn X is (anti,output,true) dependent on
1059 anything in insn Y. */
1062 insn_dependent_p (x, y)
1063 rtx x, y;
1065 rtx tmp;
1067 if (! INSN_P (x) || ! INSN_P (y))
1068 abort ();
1070 tmp = PATTERN (y);
1071 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1072 if (tmp == NULL_RTX)
1073 return 1;
1075 tmp = PATTERN (x);
1076 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1077 if (tmp == NULL_RTX)
1078 return 1;
1080 return 0;
1083 /* A helper routine for insn_dependent_p called through note_stores. */
1085 static void
1086 insn_dependent_p_1 (x, pat, data)
1087 rtx x;
1088 rtx pat ATTRIBUTE_UNUSED;
1089 void *data;
1091 rtx * pinsn = (rtx *) data;
1093 if (*pinsn && reg_mentioned_p (x, *pinsn))
1094 *pinsn = NULL_RTX;
1097 /* Helper function for set_of. */
1098 struct set_of_data
1100 rtx found;
1101 rtx pat;
1104 static void
1105 set_of_1 (x, pat, data1)
1106 rtx x;
1107 rtx pat;
1108 void *data1;
1110 struct set_of_data *data = (struct set_of_data *) (data1);
1111 if (rtx_equal_p (x, data->pat)
1112 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1113 data->found = pat;
1116 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1117 (either directly or via STRICT_LOW_PART and similar modifiers). */
1119 set_of (pat, insn)
1120 rtx pat, insn;
1122 struct set_of_data data;
1123 data.found = NULL_RTX;
1124 data.pat = pat;
1125 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1126 return data.found;
1129 /* Given an INSN, return a SET expression if this insn has only a single SET.
1130 It may also have CLOBBERs, USEs, or SETs whose output
1131 will not be used, which we ignore. */
1134 single_set_2 (insn, pat)
1135 rtx insn, pat;
1137 rtx set = NULL;
1138 int set_verified = 1;
1139 int i;
1141 if (GET_CODE (pat) == PARALLEL)
1143 for (i = 0; i < XVECLEN (pat, 0); i++)
1145 rtx sub = XVECEXP (pat, 0, i);
1146 switch (GET_CODE (sub))
1148 case USE:
1149 case CLOBBER:
1150 break;
1152 case SET:
1153 /* We can consider an insn with multiple sets, where all but
1154 one are dead, to be a single set insn. In the common case only
1155 a single set is present in the pattern, so we want to avoid
1156 checking for REG_UNUSED notes unless necessary.
1158 When we reach a set for the first time, we just assume it is
1159 the single set we are looking for; only when more sets are
1160 found in the insn do we check them. */
1161 if (!set_verified)
1163 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1164 && !side_effects_p (set))
1165 set = NULL;
1166 else
1167 set_verified = 1;
1169 if (!set)
1170 set = sub, set_verified = 0;
1171 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1172 || side_effects_p (sub))
1173 return NULL_RTX;
1174 break;
1176 default:
1177 return NULL_RTX;
1181 return set;
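/* Usage sketch (hypothetical, not part of the original file): single_set
   is the usual way to ask whether an insn is, for optimization purposes,
   one assignment.  Recognizing a plain register-to-register copy looks
   like this.  */

static int
simple_reg_copy_p (insn)
     rtx insn;
{
  rtx set = single_set (insn);

  return set != 0
         && GET_CODE (SET_DEST (set)) == REG
         && GET_CODE (SET_SRC (set)) == REG;
}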
1184 /* Given an INSN, return nonzero if it has more than one SET, else return
1185 zero. */
1188 multiple_sets (insn)
1189 rtx insn;
1191 int found;
1192 int i;
1194 /* INSN must be an insn. */
1195 if (! INSN_P (insn))
1196 return 0;
1198 /* Only a PARALLEL can have multiple SETs. */
1199 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1201 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1202 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1204 /* If we have already found a SET, then return now. */
1205 if (found)
1206 return 1;
1207 else
1208 found = 1;
1212 /* Either zero or one SET. */
1213 return 0;
1216 /* Return nonzero if the destination of SET equals the source
1217 and there are no side effects. */
1220 set_noop_p (set)
1221 rtx set;
1223 rtx src = SET_SRC (set);
1224 rtx dst = SET_DEST (set);
1226 if (side_effects_p (src) || side_effects_p (dst))
1227 return 0;
1229 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1230 return rtx_equal_p (dst, src);
1232 if (dst == pc_rtx && src == pc_rtx)
1233 return 1;
1235 if (GET_CODE (dst) == SIGN_EXTRACT
1236 || GET_CODE (dst) == ZERO_EXTRACT)
1237 return rtx_equal_p (XEXP (dst, 0), src)
1238 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1240 if (GET_CODE (dst) == STRICT_LOW_PART)
1241 dst = XEXP (dst, 0);
1243 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1245 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1246 return 0;
1247 src = SUBREG_REG (src);
1248 dst = SUBREG_REG (dst);
1251 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1252 && REGNO (src) == REGNO (dst));
1255 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1256 value to itself. */
1259 noop_move_p (insn)
1260 rtx insn;
1262 rtx pat = PATTERN (insn);
1264 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1265 return 1;
1267 /* Insns carrying these notes are useful later on. */
1268 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1269 return 0;
1271 /* For now treat an insn with a REG_RETVAL note as
1272 a special insn which should not be considered a no-op. */
1273 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1274 return 0;
1276 if (GET_CODE (pat) == SET && set_noop_p (pat))
1277 return 1;
1279 if (GET_CODE (pat) == PARALLEL)
1281 int i;
1282 /* If nothing but SETs of registers to themselves,
1283 this insn can also be deleted. */
1284 for (i = 0; i < XVECLEN (pat, 0); i++)
1286 rtx tem = XVECEXP (pat, 0, i);
1288 if (GET_CODE (tem) == USE
1289 || GET_CODE (tem) == CLOBBER)
1290 continue;
1292 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1293 return 0;
1296 return 1;
1298 return 0;
1302 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1303 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1304 If the object was modified, if we hit a partial assignment to X, or hit a
1305 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1306 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1307 be the src. */
1310 find_last_value (x, pinsn, valid_to, allow_hwreg)
1311 rtx x;
1312 rtx *pinsn;
1313 rtx valid_to;
1314 int allow_hwreg;
1316 rtx p;
1318 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1319 p = PREV_INSN (p))
1320 if (INSN_P (p))
1322 rtx set = single_set (p);
1323 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1325 if (set && rtx_equal_p (x, SET_DEST (set)))
1327 rtx src = SET_SRC (set);
1329 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1330 src = XEXP (note, 0);
1332 if ((valid_to == NULL_RTX
1333 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1334 /* Reject hard registers because we don't usually want
1335 to use them; we'd rather use a pseudo. */
1336 && (! (GET_CODE (src) == REG
1337 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1339 *pinsn = p;
1340 return src;
1344 /* If set in non-simple way, we don't have a value. */
1345 if (reg_set_p (x, p))
1346 break;
1349 return x;
1352 /* Return nonzero if register in range [REGNO, ENDREGNO)
1353 appears either explicitly or implicitly in X
1354 other than being stored into.
1356 References contained within the substructure at LOC do not count.
1357 LOC may be zero, meaning don't ignore anything. */
1360 refers_to_regno_p (regno, endregno, x, loc)
1361 unsigned int regno, endregno;
1362 rtx x;
1363 rtx *loc;
1365 int i;
1366 unsigned int x_regno;
1367 RTX_CODE code;
1368 const char *fmt;
1370 repeat:
1371 /* The contents of a REG_NONNEG note is always zero, so we must come here
1372 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1373 if (x == 0)
1374 return 0;
1376 code = GET_CODE (x);
1378 switch (code)
1380 case REG:
1381 x_regno = REGNO (x);
1383 /* If we are modifying the stack, frame, or argument pointer, it will
1384 clobber a virtual register. In fact, we could be more precise,
1385 but it isn't worth it. */
1386 if ((x_regno == STACK_POINTER_REGNUM
1387 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1388 || x_regno == ARG_POINTER_REGNUM
1389 #endif
1390 || x_regno == FRAME_POINTER_REGNUM)
1391 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1392 return 1;
1394 return (endregno > x_regno
1395 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1396 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1397 : 1));
1399 case SUBREG:
1400 /* If this is a SUBREG of a hard reg, we can see exactly which
1401 registers are being modified. Otherwise, handle normally. */
1402 if (GET_CODE (SUBREG_REG (x)) == REG
1403 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1405 unsigned int inner_regno = subreg_regno (x);
1406 unsigned int inner_endregno
1407 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1408 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1410 return endregno > inner_regno && regno < inner_endregno;
1412 break;
1414 case CLOBBER:
1415 case SET:
1416 if (&SET_DEST (x) != loc
1417 /* Note setting a SUBREG counts as referring to the REG it is in for
1418 a pseudo but not for hard registers since we can
1419 treat each word individually. */
1420 && ((GET_CODE (SET_DEST (x)) == SUBREG
1421 && loc != &SUBREG_REG (SET_DEST (x))
1422 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1423 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1424 && refers_to_regno_p (regno, endregno,
1425 SUBREG_REG (SET_DEST (x)), loc))
1426 || (GET_CODE (SET_DEST (x)) != REG
1427 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1428 return 1;
1430 if (code == CLOBBER || loc == &SET_SRC (x))
1431 return 0;
1432 x = SET_SRC (x);
1433 goto repeat;
1435 default:
1436 break;
1439 /* X does not match, so try its subexpressions. */
1441 fmt = GET_RTX_FORMAT (code);
1442 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1444 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1446 if (i == 0)
1448 x = XEXP (x, 0);
1449 goto repeat;
1451 else
1452 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1453 return 1;
1455 else if (fmt[i] == 'E')
1457 int j;
1458 for (j = XVECLEN (x, i) - 1; j >=0; j--)
1459 if (loc != &XVECEXP (x, i, j)
1460 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1461 return 1;
1464 return 0;
1467 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1468 we check if any register number in X conflicts with the relevant register
1469 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1470 contains a MEM (we don't bother checking for memory addresses that can't
1471 conflict because we expect this to be a rare case). */
1474 reg_overlap_mentioned_p (x, in)
1475 rtx x, in;
1477 unsigned int regno, endregno;
1479 /* Overly conservative. */
1480 if (GET_CODE (x) == STRICT_LOW_PART)
1481 x = XEXP (x, 0);
1483 /* If either argument is a constant, then modifying X can not affect IN. */
1484 if (CONSTANT_P (x) || CONSTANT_P (in))
1485 return 0;
1487 switch (GET_CODE (x))
1489 case SUBREG:
1490 regno = REGNO (SUBREG_REG (x));
1491 if (regno < FIRST_PSEUDO_REGISTER)
1492 regno = subreg_regno (x);
1493 goto do_reg;
1495 case REG:
1496 regno = REGNO (x);
1497 do_reg:
1498 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1499 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1500 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1502 case MEM:
1504 const char *fmt;
1505 int i;
1507 if (GET_CODE (in) == MEM)
1508 return 1;
1510 fmt = GET_RTX_FORMAT (GET_CODE (in));
1511 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1512 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1513 return 1;
1515 return 0;
1518 case SCRATCH:
1519 case PC:
1520 case CC0:
1521 return reg_mentioned_p (x, in);
1523 case PARALLEL:
1525 int i;
1527 /* If any register in here refers to it we return true. */
1528 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1529 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1530 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1531 return 1;
1532 return 0;
1535 default:
1536 break;
1539 abort ();
1542 /* Return the last value to which REG was set prior to INSN. If we can't
1543 find it easily, return 0.
1545 We only return a REG, SUBREG, or constant because it is too hard to
1546 check if a MEM remains unchanged. */
1549 reg_set_last (x, insn)
1550 rtx x;
1551 rtx insn;
1553 rtx orig_insn = insn;
1555 /* Scan backwards from INSN. Stop when we reach a CODE_LABEL, or, if X
1556 is a hard register, when we reach a CALL_INSN (calls may clobber
1557 hard registers).
1559 If we find a set of X, ensure that its SET_SRC remains unchanged
1560 between that set and the original INSN. */
1561 /* REGNO (x) <= FIRST_PSEUDO_REGISTER below is a (slightly conservative)
1562 test for X being a hard register. */
1563 for (;
1564 insn && GET_CODE (insn) != CODE_LABEL
1565 && ! (GET_CODE (insn) == CALL_INSN
1566 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1567 insn = PREV_INSN (insn))
1568 if (INSN_P (insn))
1570 rtx set = set_of (x, insn);
1571 /* OK, this insn modifies our register. See if we understand it. */
1572 if (set)
1574 rtx last_value;
1575 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1576 return 0;
1577 last_value = SET_SRC (set);
1578 if (CONSTANT_P (last_value)
1579 || ((GET_CODE (last_value) == REG
1580 || GET_CODE (last_value) == SUBREG)
1581 && ! reg_set_between_p (last_value,
1582 insn, orig_insn)))
1583 return last_value;
1584 else
1585 return 0;
1589 return 0;
1592 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1593 (X would be the pattern of an insn).
1594 FUN receives two arguments:
1595 the REG, MEM, CC0 or PC being stored in or clobbered,
1596 the SET or CLOBBER rtx that does the store.
1598 If the item being stored in or clobbered is a SUBREG of a hard register,
1599 the SUBREG will be passed. */
1601 void
1602 note_stores (x, fun, data)
1603 rtx x;
1604 void (*fun) PARAMS ((rtx, rtx, void *));
1605 void *data;
1607 int i;
1609 if (GET_CODE (x) == COND_EXEC)
1610 x = COND_EXEC_CODE (x);
1612 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1614 rtx dest = SET_DEST (x);
1616 while ((GET_CODE (dest) == SUBREG
1617 && (GET_CODE (SUBREG_REG (dest)) != REG
1618 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1619 || GET_CODE (dest) == ZERO_EXTRACT
1620 || GET_CODE (dest) == SIGN_EXTRACT
1621 || GET_CODE (dest) == STRICT_LOW_PART)
1622 dest = XEXP (dest, 0);
1624 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1625 each of whose first operand is a register. We can't know what
1626 precisely is being set in these cases, so make up a CLOBBER to pass
1627 to the function. */
1628 if (GET_CODE (dest) == PARALLEL)
1630 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1631 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1632 (*fun) (XEXP (XVECEXP (dest, 0, i), 0),
1633 gen_rtx_CLOBBER (VOIDmode,
1634 XEXP (XVECEXP (dest, 0, i), 0)),
1635 data);
1637 else
1638 (*fun) (dest, x, data);
1641 else if (GET_CODE (x) == PARALLEL)
1642 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1643 note_stores (XVECEXP (x, 0, i), fun, data);
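/* Usage sketch (hypothetical, not part of the original file): a typical
   note_stores callback.  It records in a HARD_REG_SET, passed through
   DATA, every hard register that an insn stores into or clobbers.  For
   simplicity SUBREGs are ignored and only the first register of a
   multi-word value is marked.  */

static void
record_hard_reg_stores (x, pat, data)
     rtx x;
     rtx pat ATTRIBUTE_UNUSED;
     void *data;
{
  HARD_REG_SET *pset = (HARD_REG_SET *) data;

  if (GET_CODE (x) == REG && REGNO (x) < FIRST_PSEUDO_REGISTER)
    SET_HARD_REG_BIT (*pset, REGNO (x));
}

/* Called as:  note_stores (PATTERN (insn), record_hard_reg_stores, &set);  */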
1646 /* Like note_stores, but call FUN for each expression that is being
1647 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1648 FUN for each expression, not any interior subexpressions. FUN receives a
1649 pointer to the expression and the DATA passed to this function.
1651 Note that this is not quite the same test as that done in reg_referenced_p
1652 since that considers something as being referenced if it is being
1653 partially set, while we do not. */
1655 void
1656 note_uses (pbody, fun, data)
1657 rtx *pbody;
1658 void (*fun) PARAMS ((rtx *, void *));
1659 void *data;
1661 rtx body = *pbody;
1662 int i;
1664 switch (GET_CODE (body))
1666 case COND_EXEC:
1667 (*fun) (&COND_EXEC_TEST (body), data);
1668 note_uses (&COND_EXEC_CODE (body), fun, data);
1669 return;
1671 case PARALLEL:
1672 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1673 note_uses (&XVECEXP (body, 0, i), fun, data);
1674 return;
1676 case USE:
1677 (*fun) (&XEXP (body, 0), data);
1678 return;
1680 case ASM_OPERANDS:
1681 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1682 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1683 return;
1685 case TRAP_IF:
1686 (*fun) (&TRAP_CONDITION (body), data);
1687 return;
1689 case PREFETCH:
1690 (*fun) (&XEXP (body, 0), data);
1691 return;
1693 case UNSPEC:
1694 case UNSPEC_VOLATILE:
1695 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1696 (*fun) (&XVECEXP (body, 0, i), data);
1697 return;
1699 case CLOBBER:
1700 if (GET_CODE (XEXP (body, 0)) == MEM)
1701 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1702 return;
1704 case SET:
1706 rtx dest = SET_DEST (body);
1708 /* For a SET we visit the entire source, plus the address of a MEM
1709 destination and the operands of a ZERO_EXTRACT destination. */
1710 (*fun) (&SET_SRC (body), data);
1712 if (GET_CODE (dest) == ZERO_EXTRACT)
1714 (*fun) (&XEXP (dest, 1), data);
1715 (*fun) (&XEXP (dest, 2), data);
1718 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1719 dest = XEXP (dest, 0);
1721 if (GET_CODE (dest) == MEM)
1722 (*fun) (&XEXP (dest, 0), data);
1724 return;
1726 default:
1727 /* All the other possibilities never store. */
1728 (*fun) (pbody, data);
1729 return;
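/* Usage sketch (hypothetical, not part of the original file): a note_uses
   callback.  Unlike the recursive walkers above, it sees only the
   outermost used expressions, so this simply counts how many of those are
   MEMs; DATA points to an int counter.  */

static void
count_mem_uses (px, data)
     rtx *px;
     void *data;
{
  if (GET_CODE (*px) == MEM)
    ++*(int *) data;
}

/* Called as:  note_uses (&PATTERN (insn), count_mem_uses, &nmems);  */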
1733 /* Return nonzero if X's old contents don't survive after INSN.
1734 This will be true if X is (cc0) or if X is a register and
1735 X dies in INSN or because INSN entirely sets X.
1737 "Entirely set" means set directly and not through a SUBREG,
1738 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1739 Likewise, REG_INC does not count.
1741 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1742 but for this use that makes no difference, since regs don't overlap
1743 during their lifetimes. Therefore, this function may be used
1744 at any time after deaths have been computed (in flow.c).
1746 If REG is a hard reg that occupies multiple machine registers, this
1747 function will only return 1 if each of those registers will be replaced
1748 by INSN. */
1751 dead_or_set_p (insn, x)
1752 rtx insn;
1753 rtx x;
1755 unsigned int regno, last_regno;
1756 unsigned int i;
1758 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1759 if (GET_CODE (x) == CC0)
1760 return 1;
1762 if (GET_CODE (x) != REG)
1763 abort ();
1765 regno = REGNO (x);
1766 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1767 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1769 for (i = regno; i <= last_regno; i++)
1770 if (! dead_or_set_regno_p (insn, i))
1771 return 0;
1773 return 1;
1776 /* Utility function for dead_or_set_p to check an individual register. Also
1777 called from flow.c. */
1780 dead_or_set_regno_p (insn, test_regno)
1781 rtx insn;
1782 unsigned int test_regno;
1784 unsigned int regno, endregno;
1785 rtx pattern;
1787 /* See if there is a death note for something that includes TEST_REGNO. */
1788 if (find_regno_note (insn, REG_DEAD, test_regno))
1789 return 1;
1791 if (GET_CODE (insn) == CALL_INSN
1792 && find_regno_fusage (insn, CLOBBER, test_regno))
1793 return 1;
1795 pattern = PATTERN (insn);
1797 if (GET_CODE (pattern) == COND_EXEC)
1798 pattern = COND_EXEC_CODE (pattern);
1800 if (GET_CODE (pattern) == SET)
1802 rtx dest = SET_DEST (pattern);
1804 /* A value is totally replaced if it is the destination or the
1805 destination is a SUBREG of REGNO that does not change the number of
1806 words in it. */
1807 if (GET_CODE (dest) == SUBREG
1808 && (((GET_MODE_SIZE (GET_MODE (dest))
1809 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1810 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1811 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1812 dest = SUBREG_REG (dest);
1814 if (GET_CODE (dest) != REG)
1815 return 0;
1817 regno = REGNO (dest);
1818 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1819 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1821 return (test_regno >= regno && test_regno < endregno);
1823 else if (GET_CODE (pattern) == PARALLEL)
1825 int i;
1827 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1829 rtx body = XVECEXP (pattern, 0, i);
1831 if (GET_CODE (body) == COND_EXEC)
1832 body = COND_EXEC_CODE (body);
1834 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1836 rtx dest = SET_DEST (body);
1838 if (GET_CODE (dest) == SUBREG
1839 && (((GET_MODE_SIZE (GET_MODE (dest))
1840 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1841 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1842 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1843 dest = SUBREG_REG (dest);
1845 if (GET_CODE (dest) != REG)
1846 continue;
1848 regno = REGNO (dest);
1849 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1850 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1852 if (test_regno >= regno && test_regno < endregno)
1853 return 1;
1858 return 0;
1861 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1862 If DATUM is nonzero, look for one whose datum is DATUM. */
1865 find_reg_note (insn, kind, datum)
1866 rtx insn;
1867 enum reg_note kind;
1868 rtx datum;
1870 rtx link;
1872 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1873 if (! INSN_P (insn))
1874 return 0;
1876 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1877 if (REG_NOTE_KIND (link) == kind
1878 && (datum == 0 || datum == XEXP (link, 0)))
1879 return link;
1880 return 0;
1883 /* Return the reg-note of kind KIND in insn INSN which applies to register
1884 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1885 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1886 it might be the case that the note overlaps REGNO. */
1889 find_regno_note (insn, kind, regno)
1890 rtx insn;
1891 enum reg_note kind;
1892 unsigned int regno;
1894 rtx link;
1896 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1897 if (! INSN_P (insn))
1898 return 0;
1900 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1901 if (REG_NOTE_KIND (link) == kind
1902 /* Verify that it is a register, so that scratch and MEM won't cause a
1903 problem here. */
1904 && GET_CODE (XEXP (link, 0)) == REG
1905 && REGNO (XEXP (link, 0)) <= regno
1906 && ((REGNO (XEXP (link, 0))
1907 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1908 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1909 GET_MODE (XEXP (link, 0)))))
1910 > regno))
1911 return link;
1912 return 0;
1915 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1916 has such a note. */
1919 find_reg_equal_equiv_note (insn)
1920 rtx insn;
1922 rtx note;
1924 if (single_set (insn) == 0)
1925 return 0;
1926 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1927 return note;
1928 else
1929 return find_reg_note (insn, REG_EQUAL, NULL_RTX);
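/* Usage sketch (hypothetical, not part of the original file): the typical
   way callers recover the "known value" computed by a single-set insn,
   preferring a REG_EQUIV/REG_EQUAL note over the literal SET_SRC.  */

static rtx
known_value_of_single_set (insn)
     rtx insn;
{
  rtx set = single_set (insn);
  rtx note;

  if (set == 0)
    return NULL_RTX;

  note = find_reg_equal_equiv_note (insn);
  return note ? XEXP (note, 0) : SET_SRC (set);
}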
1932 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1933 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1936 find_reg_fusage (insn, code, datum)
1937 rtx insn;
1938 enum rtx_code code;
1939 rtx datum;
1941 /* If it's not a CALL_INSN, it can't possibly have a
1942 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1943 if (GET_CODE (insn) != CALL_INSN)
1944 return 0;
1946 if (! datum)
1947 abort ();
1949 if (GET_CODE (datum) != REG)
1951 rtx link;
1953 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1954 link;
1955 link = XEXP (link, 1))
1956 if (GET_CODE (XEXP (link, 0)) == code
1957 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1958 return 1;
1960 else
1962 unsigned int regno = REGNO (datum);
1964 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1965 to pseudo registers, so don't bother checking. */
1967 if (regno < FIRST_PSEUDO_REGISTER)
1969 unsigned int end_regno
1970 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1971 unsigned int i;
1973 for (i = regno; i < end_regno; i++)
1974 if (find_regno_fusage (insn, code, i))
1975 return 1;
1979 return 0;
1982 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1983 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1986 find_regno_fusage (insn, code, regno)
1987 rtx insn;
1988 enum rtx_code code;
1989 unsigned int regno;
1991 rtx link;
1993 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1994 to pseudo registers, so don't bother checking. */
1996 if (regno >= FIRST_PSEUDO_REGISTER
1997 || GET_CODE (insn) != CALL_INSN )
1998 return 0;
2000 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2002 unsigned int regnote;
2003 rtx op, reg;
2005 if (GET_CODE (op = XEXP (link, 0)) == code
2006 && GET_CODE (reg = XEXP (op, 0)) == REG
2007 && (regnote = REGNO (reg)) <= regno
2008 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2009 return 1;
2012 return 0;
2015 /* Remove register note NOTE from the REG_NOTES of INSN. */
2017 void
2018 remove_note (insn, note)
2019 rtx insn;
2020 rtx note;
2022 rtx link;
2024 if (note == NULL_RTX)
2025 return;
2027 if (REG_NOTES (insn) == note)
2029 REG_NOTES (insn) = XEXP (note, 1);
2030 return;
2033 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2034 if (XEXP (link, 1) == note)
2036 XEXP (link, 1) = XEXP (note, 1);
2037 return;
2040 abort ();
2043 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2044 return 1 if it is found. A simple equality test is used to determine if
2045 NODE matches. */
2048 in_expr_list_p (listp, node)
2049 rtx listp;
2050 rtx node;
2052 rtx x;
2054 for (x = listp; x; x = XEXP (x, 1))
2055 if (node == XEXP (x, 0))
2056 return 1;
2058 return 0;
2061 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2062 remove that entry from the list if it is found.
2064 A simple equality test is used to determine if NODE matches. */
2066 void
2067 remove_node_from_expr_list (node, listp)
2068 rtx node;
2069 rtx *listp;
2071 rtx temp = *listp;
2072 rtx prev = NULL_RTX;
2074 while (temp)
2076 if (node == XEXP (temp, 0))
2078 /* Splice the node out of the list. */
2079 if (prev)
2080 XEXP (prev, 1) = XEXP (temp, 1);
2081 else
2082 *listp = XEXP (temp, 1);
2084 return;
2087 prev = temp;
2088 temp = XEXP (temp, 1);
2092 /* Nonzero if X contains any volatile instructions. These are instructions
2093 which may cause unpredictable machine state, and thus no
2094 instructions should be moved or combined across them. This includes
2095 only volatile asms and UNSPEC_VOLATILE instructions. */
2098 volatile_insn_p (x)
2099 rtx x;
2101 RTX_CODE code;
2103 code = GET_CODE (x);
2104 switch (code)
2106 case LABEL_REF:
2107 case SYMBOL_REF:
2108 case CONST_INT:
2109 case CONST:
2110 case CONST_DOUBLE:
2111 case CONST_VECTOR:
2112 case CC0:
2113 case PC:
2114 case REG:
2115 case SCRATCH:
2116 case CLOBBER:
2117 case ASM_INPUT:
2118 case ADDR_VEC:
2119 case ADDR_DIFF_VEC:
2120 case CALL:
2121 case MEM:
2122 return 0;
2124 case UNSPEC_VOLATILE:
2125 /* case TRAP_IF: This isn't clear yet. */
2126 return 1;
2128 case ASM_OPERANDS:
2129 if (MEM_VOLATILE_P (x))
2130 return 1;
2132 default:
2133 break;
2136 /* Recursively scan the operands of this expression. */
2139 const char *fmt = GET_RTX_FORMAT (code);
2140 int i;
2142 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2144 if (fmt[i] == 'e')
2146 if (volatile_insn_p (XEXP (x, i)))
2147 return 1;
2149 else if (fmt[i] == 'E')
2151 int j;
2152 for (j = 0; j < XVECLEN (x, i); j++)
2153 if (volatile_insn_p (XVECEXP (x, i, j)))
2154 return 1;
2158 return 0;
2161 /* Nonzero if X contains any volatile memory references
2162 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2165 volatile_refs_p (x)
2166 rtx x;
2168 RTX_CODE code;
2170 code = GET_CODE (x);
2171 switch (code)
2173 case LABEL_REF:
2174 case SYMBOL_REF:
2175 case CONST_INT:
2176 case CONST:
2177 case CONST_DOUBLE:
2178 case CONST_VECTOR:
2179 case CC0:
2180 case PC:
2181 case REG:
2182 case SCRATCH:
2183 case CLOBBER:
2184 case ASM_INPUT:
2185 case ADDR_VEC:
2186 case ADDR_DIFF_VEC:
2187 return 0;
2189 case CALL:
2190 case UNSPEC_VOLATILE:
2191 /* case TRAP_IF: This isn't clear yet. */
2192 return 1;
2194 case MEM:
2195 case ASM_OPERANDS:
2196 if (MEM_VOLATILE_P (x))
2197 return 1;
2199 default:
2200 break;
2203 /* Recursively scan the operands of this expression. */
2206 const char *fmt = GET_RTX_FORMAT (code);
2207 int i;
2209 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2211 if (fmt[i] == 'e')
2213 if (volatile_refs_p (XEXP (x, i)))
2214 return 1;
2216 else if (fmt[i] == 'E')
2218 int j;
2219 for (j = 0; j < XVECLEN (x, i); j++)
2220 if (volatile_refs_p (XVECEXP (x, i, j)))
2221 return 1;
2225 return 0;
2228 /* Similar to above, except that it also rejects register pre- and post-
2229 incrementing. */
2232 side_effects_p (x)
2233 rtx x;
2235 RTX_CODE code;
2237 code = GET_CODE (x);
2238 switch (code)
2240 case LABEL_REF:
2241 case SYMBOL_REF:
2242 case CONST_INT:
2243 case CONST:
2244 case CONST_DOUBLE:
2245 case CONST_VECTOR:
2246 case CC0:
2247 case PC:
2248 case REG:
2249 case SCRATCH:
2250 case ASM_INPUT:
2251 case ADDR_VEC:
2252 case ADDR_DIFF_VEC:
2253 return 0;
2255 case CLOBBER:
2256 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2257 when some combination can't be done. If we see one, don't think
2258 that we can simplify the expression. */
2259 return (GET_MODE (x) != VOIDmode);
2261 case PRE_INC:
2262 case PRE_DEC:
2263 case POST_INC:
2264 case POST_DEC:
2265 case PRE_MODIFY:
2266 case POST_MODIFY:
2267 case CALL:
2268 case UNSPEC_VOLATILE:
2269 /* case TRAP_IF: This isn't clear yet. */
2270 return 1;
2272 case MEM:
2273 case ASM_OPERANDS:
2274 if (MEM_VOLATILE_P (x))
2275 return 1;
2277 default:
2278 break;
2281 /* Recursively scan the operands of this expression. */
2284 const char *fmt = GET_RTX_FORMAT (code);
2285 int i;
2287 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2289 if (fmt[i] == 'e')
2291 if (side_effects_p (XEXP (x, i)))
2292 return 1;
2294 else if (fmt[i] == 'E')
2296 int j;
2297 for (j = 0; j < XVECLEN (x, i); j++)
2298 if (side_effects_p (XVECEXP (x, i, j)))
2299 return 1;
2303 return 0;
2306 /* Return nonzero if evaluating rtx X might cause a trap. */
2309 may_trap_p (x)
2310 rtx x;
2312 int i;
2313 enum rtx_code code;
2314 const char *fmt;
2316 if (x == 0)
2317 return 0;
2318 code = GET_CODE (x);
2319 switch (code)
2321 /* Handle these cases quickly. */
2322 case CONST_INT:
2323 case CONST_DOUBLE:
2324 case CONST_VECTOR:
2325 case SYMBOL_REF:
2326 case LABEL_REF:
2327 case CONST:
2328 case PC:
2329 case CC0:
2330 case REG:
2331 case SCRATCH:
2332 return 0;
2334 case ASM_INPUT:
2335 case UNSPEC_VOLATILE:
2336 case TRAP_IF:
2337 return 1;
2339 case ASM_OPERANDS:
2340 return MEM_VOLATILE_P (x);
2342 /* Memory ref can trap unless it's a static var or a stack slot. */
2343 case MEM:
2344 return rtx_addr_can_trap_p (XEXP (x, 0));
2346 /* Division by a non-constant might trap. */
2347 case DIV:
2348 case MOD:
2349 case UDIV:
2350 case UMOD:
2351 if (! CONSTANT_P (XEXP (x, 1))
2352 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2353 && flag_trapping_math))
2354 return 1;
2355 /* This was const0_rtx, but by not using that,
2356 we can link this file into other programs. */
2357 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2358 return 1;
2359 break;
2361 case EXPR_LIST:
2362 /* An EXPR_LIST is used to represent a function call. This
2363 certainly may trap. */
2364 return 1;
2366 case GE:
2367 case GT:
2368 case LE:
2369 case LT:
2370 case COMPARE:
2371 /* Some floating point comparisons may trap. */
2372 if (!flag_trapping_math)
2373 break;
2374 /* ??? There is no machine independent way to check for tests that trap
2375 when COMPARE is used, though many targets do make this distinction.
2376 For instance, sparc uses CCFPE for compares which generate exceptions
2377 and CCFP for compares which do not generate exceptions. */
2378 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2379 return 1;
2380 /* But often the compare has some CC mode, so check operand
2381 modes as well. */
2382 if (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_FLOAT
2383 || GET_MODE_CLASS (GET_MODE (XEXP (x, 1))) == MODE_FLOAT)
2384 return 1;
2385 break;
2387 case NEG:
2388 case ABS:
2389 /* These operations don't trap even with floating point. */
2390 break;
2392 default:
2393 /* Any floating arithmetic may trap. */
2394 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2395 && flag_trapping_math)
2396 return 1;
2399 fmt = GET_RTX_FORMAT (code);
2400 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2402 if (fmt[i] == 'e')
2404 if (may_trap_p (XEXP (x, i)))
2405 return 1;
2407 else if (fmt[i] == 'E')
2409 int j;
2410 for (j = 0; j < XVECLEN (x, i); j++)
2411 if (may_trap_p (XVECEXP (x, i, j)))
2412 return 1;
2415 return 0;
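/* Usage sketch (illustrative addition): before executing an insn
   speculatively, e.g. hoisting it above a conditional branch, a pass would
   typically require that its source can neither trap nor have side
   effects.  SET is assumed to be the single_set of the insn, and
   hoist_insn_above_branch is a hypothetical helper.

     if (! may_trap_p (SET_SRC (set)) && ! side_effects_p (SET_SRC (set)))
       hoist_insn_above_branch (insn);  */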
2418 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2419 i.e., an inequality. */
2421 int
2422 inequality_comparisons_p (x)
2423 rtx x;
2425 const char *fmt;
2426 int len, i;
2427 enum rtx_code code = GET_CODE (x);
2429 switch (code)
2431 case REG:
2432 case SCRATCH:
2433 case PC:
2434 case CC0:
2435 case CONST_INT:
2436 case CONST_DOUBLE:
2437 case CONST_VECTOR:
2438 case CONST:
2439 case LABEL_REF:
2440 case SYMBOL_REF:
2441 return 0;
2443 case LT:
2444 case LTU:
2445 case GT:
2446 case GTU:
2447 case LE:
2448 case LEU:
2449 case GE:
2450 case GEU:
2451 return 1;
2453 default:
2454 break;
2457 len = GET_RTX_LENGTH (code);
2458 fmt = GET_RTX_FORMAT (code);
2460 for (i = 0; i < len; i++)
2462 if (fmt[i] == 'e')
2464 if (inequality_comparisons_p (XEXP (x, i)))
2465 return 1;
2467 else if (fmt[i] == 'E')
2469 int j;
2470 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2471 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2472 return 1;
2476 return 0;
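/* Usage sketch (illustrative addition): a transformation that preserves
   equality with zero but not ordering could be guarded by this predicate;
   apply_sign_insensitive_transform is a hypothetical helper and PAT is
   assumed caller context.

     if (! inequality_comparisons_p (pat))
       apply_sign_insensitive_transform (pat);  */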
2479 /* Replace any occurrence of FROM in X with TO. The function does
2480 not descend into CONST_DOUBLEs when doing the replacement.
2482 Note that copying is not done so X must not be shared unless all copies
2483 are to be modified. */
2485 rtx
2486 replace_rtx (x, from, to)
2487 rtx x, from, to;
2489 int i, j;
2490 const char *fmt;
2492 /* The following prevents infinite loops when we replace a MEM inside
2493 a CONST_DOUBLE with the same CONST_DOUBLE. */
2494 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2495 return x;
2497 if (x == from)
2498 return to;
2500 /* Allow this function to make replacements in EXPR_LISTs. */
2501 if (x == 0)
2502 return 0;
2504 if (GET_CODE (x) == SUBREG)
2506 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2508 if (GET_CODE (new) == CONST_INT)
2510 x = simplify_subreg (GET_MODE (x), new,
2511 GET_MODE (SUBREG_REG (x)),
2512 SUBREG_BYTE (x));
2513 if (! x)
2514 abort ();
2516 else
2517 SUBREG_REG (x) = new;
2519 return x;
2521 else if (GET_CODE (x) == ZERO_EXTEND)
2523 rtx new = replace_rtx (XEXP (x, 0), from, to);
2525 if (GET_CODE (new) == CONST_INT)
2527 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2528 new, GET_MODE (XEXP (x, 0)));
2529 if (! x)
2530 abort ();
2532 else
2533 XEXP (x, 0) = new;
2535 return x;
2538 fmt = GET_RTX_FORMAT (GET_CODE (x));
2539 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2541 if (fmt[i] == 'e')
2542 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2543 else if (fmt[i] == 'E')
2544 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2545 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2548 return x;
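/* Usage sketch (illustrative addition): because replace_rtx modifies X in
   place, a caller that must preserve the original usually copies it first.
   OLD_REG and NEW_REG are assumed caller variables.

     rtx copy = copy_rtx (PATTERN (insn));
     copy = replace_rtx (copy, old_reg, new_reg);  */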
2551 /* Throughout the rtx X, replace many registers according to REG_MAP.
2552 Return the replacement for X (which may be X with altered contents).
2553 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2554 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2556 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2557 should not be mapped to pseudos or vice versa since validate_change
2558 is not called.
2560 If REPLACE_DEST is 1, replacements are also done in destinations;
2561 otherwise, only sources are replaced. */
2563 rtx
2564 replace_regs (x, reg_map, nregs, replace_dest)
2565 rtx x;
2566 rtx *reg_map;
2567 unsigned int nregs;
2568 int replace_dest;
2570 enum rtx_code code;
2571 int i;
2572 const char *fmt;
2574 if (x == 0)
2575 return x;
2577 code = GET_CODE (x);
2578 switch (code)
2580 case SCRATCH:
2581 case PC:
2582 case CC0:
2583 case CONST_INT:
2584 case CONST_DOUBLE:
2585 case CONST_VECTOR:
2586 case CONST:
2587 case SYMBOL_REF:
2588 case LABEL_REF:
2589 return x;
2591 case REG:
2592 /* Verify that the register has an entry before trying to access it. */
2593 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2595 /* SUBREGs can't be shared. Always return a copy to ensure that if
2596 this replacement occurs more than once then each instance will
2597 get distinct rtx. */
2598 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2599 return copy_rtx (reg_map[REGNO (x)]);
2600 return reg_map[REGNO (x)];
2602 return x;
2604 case SUBREG:
2605 /* Prevent making nested SUBREGs. */
2606 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2607 && reg_map[REGNO (SUBREG_REG (x))] != 0
2608 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2610 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2611 return simplify_gen_subreg (GET_MODE (x), map_val,
2612 GET_MODE (SUBREG_REG (x)),
2613 SUBREG_BYTE (x));
2615 break;
2617 case SET:
2618 if (replace_dest)
2619 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2621 else if (GET_CODE (SET_DEST (x)) == MEM
2622 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2623 /* Even if we are not to replace destinations, replace register if it
2624 is CONTAINED in destination (destination is memory or
2625 STRICT_LOW_PART). */
2626 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2627 reg_map, nregs, 0);
2628 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2629 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2630 break;
2632 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2633 return x;
2635 default:
2636 break;
2639 fmt = GET_RTX_FORMAT (code);
2640 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2642 if (fmt[i] == 'e')
2643 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2644 else if (fmt[i] == 'E')
2646 int j;
2647 for (j = 0; j < XVECLEN (x, i); j++)
2648 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2649 nregs, replace_dest);
2652 return x;
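/* Usage sketch (illustrative addition): REG_MAP is indexed by register
   number, so a caller typically allocates max_reg_num () entries, fills
   in only the registers it wants rewritten, and leaves the rest zero.
   OLD_REG, NEW_REG and PAT are assumed caller context.

     rtx *reg_map = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
     reg_map[REGNO (old_reg)] = new_reg;
     pat = replace_regs (pat, reg_map, max_reg_num (), 1);  */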
2655 /* A subroutine of computed_jump_p. Return 1 if X contains a REG or MEM or
2656 constant that is not in the constant pool and not in the condition
2657 of an IF_THEN_ELSE. */
2659 static int
2660 computed_jump_p_1 (x)
2661 rtx x;
2663 enum rtx_code code = GET_CODE (x);
2664 int i, j;
2665 const char *fmt;
2667 switch (code)
2669 case LABEL_REF:
2670 case PC:
2671 return 0;
2673 case CONST:
2674 case CONST_INT:
2675 case CONST_DOUBLE:
2676 case CONST_VECTOR:
2677 case SYMBOL_REF:
2678 case REG:
2679 return 1;
2681 case MEM:
2682 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2683 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2685 case IF_THEN_ELSE:
2686 return (computed_jump_p_1 (XEXP (x, 1))
2687 || computed_jump_p_1 (XEXP (x, 2)));
2689 default:
2690 break;
2693 fmt = GET_RTX_FORMAT (code);
2694 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2696 if (fmt[i] == 'e'
2697 && computed_jump_p_1 (XEXP (x, i)))
2698 return 1;
2700 else if (fmt[i] == 'E')
2701 for (j = 0; j < XVECLEN (x, i); j++)
2702 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2703 return 1;
2706 return 0;
2709 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2711 Tablejumps and casesi insns are not considered indirect jumps;
2712 we can recognize them by a (use (label_ref)). */
2714 int
2715 computed_jump_p (insn)
2716 rtx insn;
2718 int i;
2719 if (GET_CODE (insn) == JUMP_INSN)
2721 rtx pat = PATTERN (insn);
2723 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2724 return 0;
2725 else if (GET_CODE (pat) == PARALLEL)
2727 int len = XVECLEN (pat, 0);
2728 int has_use_labelref = 0;
2730 for (i = len - 1; i >= 0; i--)
2731 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2732 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2733 == LABEL_REF))
2734 has_use_labelref = 1;
2736 if (! has_use_labelref)
2737 for (i = len - 1; i >= 0; i--)
2738 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2739 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2740 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2741 return 1;
2743 else if (GET_CODE (pat) == SET
2744 && SET_DEST (pat) == pc_rtx
2745 && computed_jump_p_1 (SET_SRC (pat)))
2746 return 1;
2748 return 0;
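/* Usage sketch (illustrative addition): passes that cannot redirect
   arbitrary jumps often simply skip computed jumps while scanning insns.

     if (GET_CODE (insn) == JUMP_INSN && computed_jump_p (insn))
       continue;  */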
2751 /* Traverse X via depth-first search, calling F for each
2752 sub-expression (including X itself). F is also passed the DATA.
2753 If F returns -1, do not traverse sub-expressions, but continue
2754 traversing the rest of the tree. If F ever returns any other
2755 non-zero value, stop the traversal, and return the value returned
2756 by F. Otherwise, return 0. This function does not traverse inside
2757 tree structure that contains RTX_EXPRs, or into sub-expressions
2758 whose format code is `0' since it is not known whether or not those
2759 codes are actually RTL.
2761 This routine is very general, and could (should?) be used to
2762 implement many of the other routines in this file. */
2764 int
2765 for_each_rtx (x, f, data)
2766 rtx *x;
2767 rtx_function f;
2768 void *data;
2770 int result;
2771 int length;
2772 const char *format;
2773 int i;
2775 /* Call F on X. */
2776 result = (*f) (x, data);
2777 if (result == -1)
2778 /* Do not traverse sub-expressions. */
2779 return 0;
2780 else if (result != 0)
2781 /* Stop the traversal. */
2782 return result;
2784 if (*x == NULL_RTX)
2785 /* There are no sub-expressions. */
2786 return 0;
2788 length = GET_RTX_LENGTH (GET_CODE (*x));
2789 format = GET_RTX_FORMAT (GET_CODE (*x));
2791 for (i = 0; i < length; ++i)
2793 switch (format[i])
2795 case 'e':
2796 result = for_each_rtx (&XEXP (*x, i), f, data);
2797 if (result != 0)
2798 return result;
2799 break;
2801 case 'V':
2802 case 'E':
2803 if (XVEC (*x, i) != 0)
2805 int j;
2806 for (j = 0; j < XVECLEN (*x, i); ++j)
2808 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2809 if (result != 0)
2810 return result;
2813 break;
2815 default:
2816 /* Nothing to do. */
2817 break;
2822 return 0;
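/* Usage sketch (illustrative addition, not part of the original source):
   a minimal callback in the style expected by for_each_rtx that counts
   MEM sub-expressions; count_mems_1 is hypothetical.  Returning 0 makes
   the traversal continue into every operand.

     static int
     count_mems_1 (x, data)
          rtx *x;
          void *data;
     {
       if (*x != 0 && GET_CODE (*x) == MEM)
         ++*(int *) data;
       return 0;
     }

   A caller would then do, for some insn INSN:

     int n_mems = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &n_mems);  */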
2825 /* Searches X for any reference to REGNO, returning the rtx of the
2826 reference found if any. Otherwise, returns NULL_RTX. */
2828 rtx
2829 regno_use_in (regno, x)
2830 unsigned int regno;
2831 rtx x;
2833 const char *fmt;
2834 int i, j;
2835 rtx tem;
2837 if (GET_CODE (x) == REG && REGNO (x) == regno)
2838 return x;
2840 fmt = GET_RTX_FORMAT (GET_CODE (x));
2841 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2843 if (fmt[i] == 'e')
2845 if ((tem = regno_use_in (regno, XEXP (x, i))))
2846 return tem;
2848 else if (fmt[i] == 'E')
2849 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2850 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2851 return tem;
2854 return NULL_RTX;
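/* Usage sketch (illustrative addition): locate a reference to a particular
   register inside an insn pattern; record_use is a hypothetical helper.

     rtx use = regno_use_in (REGNO (reg), PATTERN (insn));
     if (use != 0)
       record_use (insn, use);  */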
2857 /* Return a value indicating whether OP, an operand of a commutative
2858 operation, is preferred as the first or second operand. The higher
2859 the value, the stronger the preference for being the first operand.
2860 We use negative values to indicate a preference for the first operand
2861 and positive values for the second operand. */
2863 int
2864 commutative_operand_precedence (op)
2865 rtx op;
2867 /* Constants always come second. Prefer "nice" constants. */
2868 if (GET_CODE (op) == CONST_INT)
2869 return -5;
2870 if (GET_CODE (op) == CONST_DOUBLE)
2871 return -4;
2872 if (CONSTANT_P (op))
2873 return -3;
2875 /* SUBREGs of objects should come second. */
2876 if (GET_CODE (op) == SUBREG
2877 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
2878 return -2;
2880 /* If only one operand is a `neg', `not',
2881 `mult', `plus', or `minus' expression, it will be the first
2882 operand. */
2883 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
2884 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
2885 || GET_CODE (op) == MINUS)
2886 return 2;
2888 /* Complex expressions should come first, so decrease the priority
2889 of objects. */
2890 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
2891 return -1;
2892 return 0;
2895 /* Return 1 iff it is necessary to swap the operands of a commutative
2896 operation in order to canonicalize the expression. */
2898 int
2899 swap_commutative_operands_p (x, y)
2900 rtx x, y;
2902 return (commutative_operand_precedence (x)
2903 < commutative_operand_precedence (y));
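/* Usage sketch (illustrative addition): the usual way to canonicalize the
   operands of a commutative operation before building it.

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }  */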
2906 /* Return 1 if X is an autoincrement side effect and the register is
2907 not the stack pointer. */
2908 int
2909 auto_inc_p (x)
2910 rtx x;
2912 switch (GET_CODE (x))
2914 case PRE_INC:
2915 case POST_INC:
2916 case PRE_DEC:
2917 case POST_DEC:
2918 case PRE_MODIFY:
2919 case POST_MODIFY:
2920 /* There are no REG_INC notes for SP. */
2921 if (XEXP (x, 0) != stack_pointer_rtx)
2922 return 1;
2923 default:
2924 break;
2926 return 0;
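/* Usage sketch (illustrative addition): check whether the address of a MEM
   is an auto-increment that will need a REG_INC note; record_auto_inc is a
   hypothetical helper and MEM is assumed caller context.

     if (auto_inc_p (XEXP (mem, 0)))
       record_auto_inc (insn, XEXP (XEXP (mem, 0), 0));  */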
2929 /* Return 1 if the sequence of instructions beginning with FROM and up
2930 to and including TO is safe to move. If NEW_TO is non-NULL, and
2931 the sequence is not already safe to move, but can be easily
2932 extended to a sequence which is safe, then NEW_TO will point to the
2933 end of the extended sequence.
2935 For now, this function only checks that the region contains whole
2936 exception regions, but it could be extended to check additional
2937 conditions as well. */
2939 int
2940 insns_safe_to_move_p (from, to, new_to)
2941 rtx from;
2942 rtx to;
2943 rtx *new_to;
2945 int eh_region_count = 0;
2946 int past_to_p = 0;
2947 rtx r = from;
2949 /* By default, assume the end of the region will be what was
2950 suggested. */
2951 if (new_to)
2952 *new_to = to;
2954 while (r)
2956 if (GET_CODE (r) == NOTE)
2958 switch (NOTE_LINE_NUMBER (r))
2960 case NOTE_INSN_EH_REGION_BEG:
2961 ++eh_region_count;
2962 break;
2964 case NOTE_INSN_EH_REGION_END:
2965 if (eh_region_count == 0)
2966 /* This sequence of instructions contains the end of
2967 an exception region, but not the beginning. Moving
2968 it will cause chaos. */
2969 return 0;
2971 --eh_region_count;
2972 break;
2974 default:
2975 break;
2978 else if (past_to_p)
2979 /* If we've passed TO, and we see a non-note instruction, we
2980 can't extend the sequence to a movable sequence. */
2981 return 0;
2983 if (r == to)
2985 if (!new_to)
2986 /* It's OK to move the sequence if there were matched sets of
2987 exception region notes. */
2988 return eh_region_count == 0;
2990 past_to_p = 1;
2993 /* It's OK to move the sequence if there were matched sets of
2994 exception region notes. */
2995 if (past_to_p && eh_region_count == 0)
2997 *new_to = r;
2998 return 1;
3001 /* Go to the next instruction. */
3002 r = NEXT_INSN (r);
3005 return 0;
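/* Usage sketch (illustrative addition): a code motion pass can ask for a
   possibly extended end point instead of giving up when TO would split an
   exception region; move_insn_range is a hypothetical helper.

     rtx new_to;
     if (insns_safe_to_move_p (from, to, &new_to))
       move_insn_range (from, new_to);  */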
3008 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3009 int
3010 loc_mentioned_in_p (loc, in)
3011 rtx *loc, in;
3013 enum rtx_code code = GET_CODE (in);
3014 const char *fmt = GET_RTX_FORMAT (code);
3015 int i, j;
3017 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3019 if (loc == &in->fld[i].rtx)
3020 return 1;
3021 if (fmt[i] == 'e')
3023 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3024 return 1;
3026 else if (fmt[i] == 'E')
3027 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3028 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3029 return 1;
3031 return 0;
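/* Usage sketch (illustrative addition): LOC is compared by address, so the
   caller passes a pointer to the operand slot of interest, for example the
   address slot of a MEM; FOUND and MEM are assumed caller context.

     if (loc_mentioned_in_p (&XEXP (mem, 0), PATTERN (insn)))
       found = 1;  */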
3034 /* Given a subreg X, return the bit offset where the subreg begins
3035 (counting from the least significant bit of the reg). */
3037 unsigned int
3038 subreg_lsb (x)
3039 rtx x;
3041 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3042 enum machine_mode mode = GET_MODE (x);
3043 unsigned int bitpos;
3044 unsigned int byte;
3045 unsigned int word;
3047 /* A paradoxical subreg begins at bit position 0. */
3048 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3049 return 0;
3051 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3052 /* If the subreg crosses a word boundary ensure that
3053 it also begins and ends on a word boundary. */
3054 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3055 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3056 && (SUBREG_BYTE (x) % UNITS_PER_WORD
3057 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3058 abort ();
3060 if (WORDS_BIG_ENDIAN)
3061 word = (GET_MODE_SIZE (inner_mode)
3062 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3063 else
3064 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3065 bitpos = word * BITS_PER_WORD;
3067 if (BYTES_BIG_ENDIAN)
3068 byte = (GET_MODE_SIZE (inner_mode)
3069 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3070 else
3071 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3072 bitpos += byte * BITS_PER_UNIT;
3074 return bitpos;
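/* Worked example (illustrative addition, assuming a little-endian target
   with 4-byte words): for (subreg:QI (reg:SI x) 3) we get word 0 and byte
   3, so the subreg starts at bit 24 of the SImode register.  On a
   big-endian target the same subreg byte selects bit 0 instead.  */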
3077 /* This function returns the regno offset of a subreg expression.
3078 xregno - A regno of an inner hard subreg_reg (or what will become one).
3079 xmode - The mode of xregno.
3080 offset - The byte offset.
3081 ymode - The mode of a top level SUBREG (or what may become one).
3082 RETURN - The regno offset which would be used. */
3083 unsigned int
3084 subreg_regno_offset (xregno, xmode, offset, ymode)
3085 unsigned int xregno;
3086 enum machine_mode xmode;
3087 unsigned int offset;
3088 enum machine_mode ymode;
3090 int nregs_xmode, nregs_ymode;
3091 int mode_multiple, nregs_multiple;
3092 int y_offset;
3094 if (xregno >= FIRST_PSEUDO_REGISTER)
3095 abort ();
3097 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3098 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3099 if (offset == 0 || nregs_xmode == nregs_ymode)
3100 return 0;
3102 /* The size of ymode must not be greater than the size of xmode. */
3103 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3104 if (mode_multiple == 0)
3105 abort ();
3107 y_offset = offset / GET_MODE_SIZE (ymode);
3108 nregs_multiple = nregs_xmode / nregs_ymode;
3109 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
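/* Worked example (illustrative addition, assuming a target where SImode
   occupies one hard register and DImode two, with 4-byte SImode): for
   xmode = DImode, ymode = SImode, offset = 4 we have nregs_xmode = 2,
   nregs_ymode = 1, mode_multiple = 2, y_offset = 1 and nregs_multiple = 2,
   so the function returns a regno offset of 1, i.e. the second of the two
   hard registers.  */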
3112 /* Return the final regno that a subreg expression refers to. */
3113 unsigned int
3114 subreg_regno (x)
3115 rtx x;
3117 unsigned int ret;
3118 rtx subreg = SUBREG_REG (x);
3119 int regno = REGNO (subreg);
3121 ret = regno + subreg_regno_offset (regno,
3122 GET_MODE (subreg),
3123 SUBREG_BYTE (x),
3124 GET_MODE (x));
3125 return ret;
3128 struct parms_set_data
3130 int nregs;
3131 HARD_REG_SET regs;
3134 /* Helper function for noticing stores to parameter registers. */
3135 static void
3136 parms_set (x, pat, data)
3137 rtx x, pat ATTRIBUTE_UNUSED;
3138 void *data;
3140 struct parms_set_data *d = data;
3141 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3142 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3144 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3145 d->nregs--;
3149 /* Look backward for first parameter to be loaded.
3150 Do not skip BOUNDARY. */
3151 rtx
3152 find_first_parameter_load (call_insn, boundary)
3153 rtx call_insn, boundary;
3155 struct parms_set_data parm;
3156 rtx p, before;
3158 /* Since different machines initialize their parameter registers
3159 in different orders, assume nothing. Collect the set of all
3160 parameter registers. */
3161 CLEAR_HARD_REG_SET (parm.regs);
3162 parm.nregs = 0;
3163 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3164 if (GET_CODE (XEXP (p, 0)) == USE
3165 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3167 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3168 abort ();
3170 /* We only care about registers which can hold function
3171 arguments. */
3172 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3173 continue;
3175 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3176 parm.nregs++;
3178 before = call_insn;
3180 /* Search backward for the first set of a register in this set. */
3181 while (parm.nregs && before != boundary)
3183 before = PREV_INSN (before);
3185 /* It is possible that some loads got CSEed from one call to
3186 another. Stop in that case. */
3187 if (GET_CODE (before) == CALL_INSN)
3188 break;
3190 /* Our caller must either ensure that we will find all sets
3191 (in case the code has not been optimized yet), or take care
3192 of possible labels by setting BOUNDARY to the preceding
3193 CODE_LABEL. */
3194 if (GET_CODE (before) == CODE_LABEL)
3196 if (before != boundary)
3197 abort ();
3198 break;
3201 if (INSN_P (before))
3202 note_stores (PATTERN (before), parms_set, &parm);
3204 return before;
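/* Usage sketch (illustrative addition): the returned insn is typically
   used as the point before which no new code should be emitted, e.g. when
   inserting instrumentation ahead of a call.  BOUNDARY_INSN and SEQ are
   assumed caller context.

     rtx first = find_first_parameter_load (call_insn, boundary_insn);
     emit_insn_before (seq, first);  */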
3207 /* Return true if we should avoid inserting code between INSN and the
3208 preceding call instruction. */
3210 bool
3211 keep_with_call_p (insn)
3212 rtx insn;
3214 rtx set;
3216 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3218 if (GET_CODE (SET_DEST (set)) == REG
3219 && fixed_regs[REGNO (SET_DEST (set))]
3220 && general_operand (SET_SRC (set), VOIDmode))
3221 return true;
3222 if (GET_CODE (SET_SRC (set)) == REG
3223 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3224 && GET_CODE (SET_DEST (set)) == REG
3225 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3226 return true;
3227 /* There may be a stack pop just after the call and before the store
3228 of the return register. Search for the actual store when deciding
3229 if we can break or not. */
3230 if (SET_DEST (set) == stack_pointer_rtx)
3232 rtx i2 = next_nonnote_insn (insn);
3233 if (i2 && keep_with_call_p (i2))
3234 return true;
3237 return false;
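/* Usage sketch (illustrative addition): to find the first point after
   CALL_INSN where new code may safely be inserted, skip the insns that
   must stay glued to the call.

     rtx insn = NEXT_INSN (call_insn);
     while (insn != 0 && keep_with_call_p (insn))
       insn = NEXT_INSN (insn);  */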