1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "toplev.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "tm_p.h"
30 /* Forward declarations */
31 static void set_of_1 PARAMS ((rtx, rtx, void *));
32 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
33 static int computed_jump_p_1 PARAMS ((rtx));
34 static void parms_set PARAMS ((rtx, rtx, void *));
36 /* Bit flags that specify the machine subtype we are compiling for.
37 Bits are tested using macros TARGET_... defined in the tm.h file
38 and set by `-m...' switches. Must be defined in rtlanal.c. */
40 int target_flags;
42 /* Return 1 if the value of X is unstable
43 (would be different at a different point in the program).
44 The frame pointer, arg pointer, etc. are considered stable
45 (within one function) and so is anything marked `unchanging'. */
47 int
48 rtx_unstable_p (x)
49 rtx x;
51 RTX_CODE code = GET_CODE (x);
52 int i;
53 const char *fmt;
55 switch (code)
57 case MEM:
58 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
60 case QUEUED:
61 return 1;
63 case ADDRESSOF:
64 case CONST:
65 case CONST_INT:
66 case CONST_DOUBLE:
67 case CONST_VECTOR:
68 case SYMBOL_REF:
69 case LABEL_REF:
70 return 0;
72 case REG:
73 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
74 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
75 /* The arg pointer varies if it is not a fixed register. */
76 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
77 || RTX_UNCHANGING_P (x))
78 return 0;
79 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
80 /* ??? When call-clobbered, the value is stable modulo the restore
81 that must happen after a call. This currently screws up local-alloc
82 into believing that the restore is not needed. */
83 if (x == pic_offset_table_rtx)
84 return 0;
85 #endif
86 return 1;
88 case ASM_OPERANDS:
89 if (MEM_VOLATILE_P (x))
90 return 1;
92 /* FALLTHROUGH */
94 default:
95 break;
98 fmt = GET_RTX_FORMAT (code);
99 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
100 if (fmt[i] == 'e')
102 if (rtx_unstable_p (XEXP (x, i)))
103 return 1;
105 else if (fmt[i] == 'E')
107 int j;
108 for (j = 0; j < XVECLEN (x, i); j++)
109 if (rtx_unstable_p (XVECEXP (x, i, j)))
110 return 1;
113 return 0;
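/* Illustrative sketch, not part of the original file: how a caller might
   use rtx_unstable_p.  An unchanging MEM whose address is the frame
   pointer plus a constant is considered stable, while a MEM addressed
   through an arbitrary pseudo register is not.  The pseudo-register
   number 100 below is made up for the example.

     rtx stable = gen_rtx_MEM (SImode,
			       plus_constant (frame_pointer_rtx, 8));
     RTX_UNCHANGING_P (stable) = 1;
     rtx shifting = gen_rtx_MEM (SImode, gen_rtx_REG (SImode, 100));

     rtx_unstable_p (stable);     returns 0: address and contents are fixed
     rtx_unstable_p (shifting);   returns 1: the contents may change
*/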
116 /* Return 1 if X has a value that can vary even between two
117 executions of the program. 0 means X can be compared reliably
118 against certain constants or near-constants.
119 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
120 zero, we are slightly more conservative.
121 The frame pointer and the arg pointer are considered constant. */
124 rtx_varies_p (x, for_alias)
125 rtx x;
126 int for_alias;
128 RTX_CODE code = GET_CODE (x);
129 int i;
130 const char *fmt;
132 switch (code)
134 case MEM:
135 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
137 case QUEUED:
138 return 1;
140 case CONST:
141 case CONST_INT:
142 case CONST_DOUBLE:
143 case CONST_VECTOR:
144 case SYMBOL_REF:
145 case LABEL_REF:
146 return 0;
148 case REG:
149 /* Note that we have to test for the actual rtx used for the frame
150 and arg pointers and not just the register number in case we have
151 eliminated the frame and/or arg pointer and are using it
152 for pseudos. */
153 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
154 /* The arg pointer varies if it is not a fixed register. */
155 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
156 return 0;
157 if (x == pic_offset_table_rtx
158 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
159 /* ??? When call-clobbered, the value is stable modulo the restore
160 that must happen after a call. This currently screws up
161 local-alloc into believing that the restore is not needed, so we
162 must return 0 only if we are called from alias analysis. */
163 && for_alias
164 #endif
166 return 0;
167 return 1;
169 case LO_SUM:
170 /* The operand 0 of a LO_SUM is considered constant
171 (in fact it is related specifically to operand 1)
172 during alias analysis. */
173 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
174 || rtx_varies_p (XEXP (x, 1), for_alias);
176 case ASM_OPERANDS:
177 if (MEM_VOLATILE_P (x))
178 return 1;
180 /* FALLTHROUGH */
182 default:
183 break;
186 fmt = GET_RTX_FORMAT (code);
187 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
188 if (fmt[i] == 'e')
190 if (rtx_varies_p (XEXP (x, i), for_alias))
191 return 1;
193 else if (fmt[i] == 'E')
195 int j;
196 for (j = 0; j < XVECLEN (x, i); j++)
197 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
198 return 1;
201 return 0;
204 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
207 rtx_addr_can_trap_p (x)
208 rtx x;
210 enum rtx_code code = GET_CODE (x);
212 switch (code)
214 case SYMBOL_REF:
215 return SYMBOL_REF_WEAK (x);
217 case LABEL_REF:
218 return 0;
220 case REG:
221 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
222 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
223 || x == stack_pointer_rtx
224 /* The arg pointer varies if it is not a fixed register. */
225 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
226 return 0;
227 /* All of the virtual frame registers are stack references. */
228 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
229 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
230 return 0;
231 return 1;
233 case CONST:
234 return rtx_addr_can_trap_p (XEXP (x, 0));
236 case PLUS:
237 /* An address is assumed not to trap if it is an address that can't
238 trap plus a constant integer or it is the pic register plus a
239 constant. */
240 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
241 && GET_CODE (XEXP (x, 1)) == CONST_INT)
242 || (XEXP (x, 0) == pic_offset_table_rtx
243 && CONSTANT_P (XEXP (x, 1))));
245 case LO_SUM:
246 case PRE_MODIFY:
247 return rtx_addr_can_trap_p (XEXP (x, 1));
249 case PRE_DEC:
250 case PRE_INC:
251 case POST_DEC:
252 case POST_INC:
253 case POST_MODIFY:
254 return rtx_addr_can_trap_p (XEXP (x, 0));
256 default:
257 break;
260 /* If it isn't one of the cases above, it can cause a trap. */
261 return 1;
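/* Illustrative sketch, not part of the original file: typical results
   from rtx_addr_can_trap_p.  A stack-slot style address cannot trap,
   while a weak symbol's address or an arbitrary pseudo register can
   (the pseudo-register number 100 is made up).

     rtx_addr_can_trap_p (plus_constant (stack_pointer_rtx, 16));   is 0
     rtx_addr_can_trap_p (gen_rtx_REG (Pmode, 100));                is 1

   A SYMBOL_REF with SYMBOL_REF_WEAK set likewise yields 1, since the
   symbol may be absent at run time.
*/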
264 /* Return 1 if X refers to a memory location whose address
265 cannot be compared reliably with constant addresses,
266 or if X refers to a BLKmode memory object.
267 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
268 zero, we are slightly more conservative. */
271 rtx_addr_varies_p (x, for_alias)
272 rtx x;
273 int for_alias;
275 enum rtx_code code;
276 int i;
277 const char *fmt;
279 if (x == 0)
280 return 0;
282 code = GET_CODE (x);
283 if (code == MEM)
284 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
286 fmt = GET_RTX_FORMAT (code);
287 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
288 if (fmt[i] == 'e')
290 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
291 return 1;
293 else if (fmt[i] == 'E')
295 int j;
296 for (j = 0; j < XVECLEN (x, i); j++)
297 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
298 return 1;
300 return 0;
303 /* Return the value of the integer term in X, if one is apparent;
304 otherwise return 0.
305 Only obvious integer terms are detected.
306 This is used in cse.c with the `related_value' field. */
308 HOST_WIDE_INT
309 get_integer_term (x)
310 rtx x;
312 if (GET_CODE (x) == CONST)
313 x = XEXP (x, 0);
315 if (GET_CODE (x) == MINUS
316 && GET_CODE (XEXP (x, 1)) == CONST_INT)
317 return - INTVAL (XEXP (x, 1));
318 if (GET_CODE (x) == PLUS
319 && GET_CODE (XEXP (x, 1)) == CONST_INT)
320 return INTVAL (XEXP (x, 1));
321 return 0;
324 /* If X is a constant, return the value sans apparent integer term;
325 otherwise return 0.
326 Only obvious integer terms are detected. */
329 get_related_value (x)
330 rtx x;
332 if (GET_CODE (x) != CONST)
333 return 0;
334 x = XEXP (x, 0);
335 if (GET_CODE (x) == PLUS
336 && GET_CODE (XEXP (x, 1)) == CONST_INT)
337 return XEXP (x, 0);
338 else if (GET_CODE (x) == MINUS
339 && GET_CODE (XEXP (x, 1)) == CONST_INT)
340 return XEXP (x, 0);
341 return 0;
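/* Illustrative sketch, not part of the original file: on the address
   constant (const (plus (symbol_ref "tbl") (const_int 4))),
   get_integer_term returns 4 and get_related_value returns the
   SYMBOL_REF, so cse.c can treat "tbl" and "tbl+4" as related values.
   The symbol name "tbl" is made up for the example.

     rtx sym = gen_rtx_SYMBOL_REF (Pmode, "tbl");
     rtx addr = gen_rtx_CONST (Pmode,
			       gen_rtx_PLUS (Pmode, sym, GEN_INT (4)));

     get_integer_term (addr);     returns 4
     get_related_value (addr);    returns sym
*/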
344 /* Given a tablejump insn INSN, return the RTL expression for the offset
345 into the jump table. If the offset cannot be determined, then return
346 NULL_RTX.
348 If EARLIEST is non-zero, it is a pointer to a place where the earliest
349 insn used in locating the offset was found. */
352 get_jump_table_offset (insn, earliest)
353 rtx insn;
354 rtx *earliest;
356 rtx label;
357 rtx table;
358 rtx set;
359 rtx old_insn;
360 rtx x;
361 rtx old_x;
362 rtx y;
363 rtx old_y;
364 int i;
366 if (GET_CODE (insn) != JUMP_INSN
367 || ! (label = JUMP_LABEL (insn))
368 || ! (table = NEXT_INSN (label))
369 || GET_CODE (table) != JUMP_INSN
370 || (GET_CODE (PATTERN (table)) != ADDR_VEC
371 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
372 || ! (set = single_set (insn)))
373 return NULL_RTX;
375 x = SET_SRC (set);
377 /* Some targets (e.g., ARM) emit a tablejump that also
378 contains the out-of-range target. */
379 if (GET_CODE (x) == IF_THEN_ELSE
380 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
381 x = XEXP (x, 1);
383 /* Search backwards and locate the expression stored in X. */
384 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
385 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
388 /* If X is an expression using a relative address then strip
389 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
390 or the jump table label. */
391 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
392 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
394 for (i = 0; i < 2; i++)
396 old_insn = insn;
397 y = XEXP (x, i);
399 if (y == pc_rtx || y == pic_offset_table_rtx)
400 break;
402 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
403 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
406 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
407 break;
410 if (i >= 2)
411 return NULL_RTX;
413 x = XEXP (x, 1 - i);
415 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
416 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
420 /* Strip off any sign or zero extension. */
421 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
423 x = XEXP (x, 0);
425 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
426 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
430 /* If X isn't a MEM then this isn't a tablejump we understand. */
431 if (GET_CODE (x) != MEM)
432 return NULL_RTX;
434 /* Strip off the MEM. */
435 x = XEXP (x, 0);
437 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
438 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
441 /* If X isn't a PLUS then this isn't a tablejump we understand. */
442 if (GET_CODE (x) != PLUS)
443 return NULL_RTX;
445 /* At this point we should have an expression representing the jump table
446 plus an offset. Examine each operand in order to determine which one
447 represents the jump table. Knowing that tells us that the other operand
448 must represent the offset. */
449 for (i = 0; i < 2; i++)
451 old_insn = insn;
452 y = XEXP (x, i);
454 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
455 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
458 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
459 && reg_mentioned_p (label, y))
460 break;
463 if (i >= 2)
464 return NULL_RTX;
466 x = XEXP (x, 1 - i);
468 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
469 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
470 for (i = 0; i < 2; i++)
471 if (XEXP (x, i) == pic_offset_table_rtx)
473 x = XEXP (x, 1 - i);
474 break;
477 if (earliest)
478 *earliest = insn;
480 /* Return the RTL expression representing the offset. */
481 return x;
484 /* Return the number of places FIND appears within X. If COUNT_DEST is
485 zero, we do not count occurrences inside the destination of a SET. */
488 count_occurrences (x, find, count_dest)
489 rtx x, find;
490 int count_dest;
492 int i, j;
493 enum rtx_code code;
494 const char *format_ptr;
495 int count;
497 if (x == find)
498 return 1;
500 code = GET_CODE (x);
502 switch (code)
504 case REG:
505 case CONST_INT:
506 case CONST_DOUBLE:
507 case CONST_VECTOR:
508 case SYMBOL_REF:
509 case CODE_LABEL:
510 case PC:
511 case CC0:
512 return 0;
514 case MEM:
515 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
516 return 1;
517 break;
519 case SET:
520 if (SET_DEST (x) == find && ! count_dest)
521 return count_occurrences (SET_SRC (x), find, count_dest);
522 break;
524 default:
525 break;
528 format_ptr = GET_RTX_FORMAT (code);
529 count = 0;
531 for (i = 0; i < GET_RTX_LENGTH (code); i++)
533 switch (*format_ptr++)
535 case 'e':
536 count += count_occurrences (XEXP (x, i), find, count_dest);
537 break;
539 case 'E':
540 for (j = 0; j < XVECLEN (x, i); j++)
541 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
542 break;
545 return count;
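/* Illustrative sketch, not part of the original file: counting the uses
   of a register inside a pattern.  With COUNT_DEST zero, a register
   that only appears as the destination of a SET is not counted.  The
   register numbers are made up for the example.

     rtx r1 = gen_rtx_REG (SImode, 101);
     rtx r2 = gen_rtx_REG (SImode, 102);
     rtx pat = gen_rtx_SET (VOIDmode, r1, gen_rtx_PLUS (SImode, r2, r2));

     count_occurrences (pat, r2, 0);   returns 2
     count_occurrences (pat, r1, 0);   returns 0 (only used as SET_DEST)
*/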
548 /* Nonzero if register REG appears somewhere within IN.
549 Also works if REG is not a register; in this case it checks
550 for a subexpression of IN that is Lisp "equal" to REG. */
553 reg_mentioned_p (reg, in)
554 rtx reg, in;
556 const char *fmt;
557 int i;
558 enum rtx_code code;
560 if (in == 0)
561 return 0;
563 if (reg == in)
564 return 1;
566 if (GET_CODE (in) == LABEL_REF)
567 return reg == XEXP (in, 0);
569 code = GET_CODE (in);
571 switch (code)
573 /* Compare registers by number. */
574 case REG:
575 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
577 /* These codes have no constituent expressions
578 and are unique. */
579 case SCRATCH:
580 case CC0:
581 case PC:
582 return 0;
584 case CONST_INT:
585 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
587 case CONST_VECTOR:
588 case CONST_DOUBLE:
589 /* These are kept unique for a given value. */
590 return 0;
592 default:
593 break;
596 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
597 return 1;
599 fmt = GET_RTX_FORMAT (code);
601 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
603 if (fmt[i] == 'E')
605 int j;
606 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
607 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
608 return 1;
610 else if (fmt[i] == 'e'
611 && reg_mentioned_p (reg, XEXP (in, i)))
612 return 1;
614 return 0;
617 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
618 no CODE_LABEL insn. */
621 no_labels_between_p (beg, end)
622 rtx beg, end;
624 rtx p;
625 if (beg == end)
626 return 0;
627 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
628 if (GET_CODE (p) == CODE_LABEL)
629 return 0;
630 return 1;
633 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
634 no JUMP_INSN insn. */
637 no_jumps_between_p (beg, end)
638 rtx beg, end;
640 rtx p;
641 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
642 if (GET_CODE (p) == JUMP_INSN)
643 return 0;
644 return 1;
647 /* Nonzero if register REG is used in an insn between
648 FROM_INSN and TO_INSN (exclusive of those two). */
651 reg_used_between_p (reg, from_insn, to_insn)
652 rtx reg, from_insn, to_insn;
654 rtx insn;
656 if (from_insn == to_insn)
657 return 0;
659 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
660 if (INSN_P (insn)
661 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
662 || (GET_CODE (insn) == CALL_INSN
663 && (find_reg_fusage (insn, USE, reg)
664 || find_reg_fusage (insn, CLOBBER, reg)))))
665 return 1;
666 return 0;
669 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
670 is entirely replaced by a new value and the only use is as a SET_DEST,
671 we do not consider it a reference. */
674 reg_referenced_p (x, body)
675 rtx x;
676 rtx body;
678 int i;
680 switch (GET_CODE (body))
682 case SET:
683 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
684 return 1;
686 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
687 of a REG that occupies all of the REG, the insn references X if
688 it is mentioned in the destination. */
689 if (GET_CODE (SET_DEST (body)) != CC0
690 && GET_CODE (SET_DEST (body)) != PC
691 && GET_CODE (SET_DEST (body)) != REG
692 && ! (GET_CODE (SET_DEST (body)) == SUBREG
693 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
694 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
695 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
696 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
697 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
698 && reg_overlap_mentioned_p (x, SET_DEST (body)))
699 return 1;
700 return 0;
702 case ASM_OPERANDS:
703 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
704 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
705 return 1;
706 return 0;
708 case CALL:
709 case USE:
710 case IF_THEN_ELSE:
711 return reg_overlap_mentioned_p (x, body);
713 case TRAP_IF:
714 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
716 case PREFETCH:
717 return reg_overlap_mentioned_p (x, XEXP (body, 0));
719 case UNSPEC:
720 case UNSPEC_VOLATILE:
721 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
722 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
723 return 1;
724 return 0;
726 case PARALLEL:
727 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
728 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
729 return 1;
730 return 0;
732 case CLOBBER:
733 if (GET_CODE (XEXP (body, 0)) == MEM)
734 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
735 return 1;
736 return 0;
738 case COND_EXEC:
739 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
740 return 1;
741 return reg_referenced_p (x, COND_EXEC_CODE (body));
743 default:
744 return 0;
748 /* Nonzero if register REG is referenced in an insn between
749 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
750 not count. */
753 reg_referenced_between_p (reg, from_insn, to_insn)
754 rtx reg, from_insn, to_insn;
756 rtx insn;
758 if (from_insn == to_insn)
759 return 0;
761 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
762 if (INSN_P (insn)
763 && (reg_referenced_p (reg, PATTERN (insn))
764 || (GET_CODE (insn) == CALL_INSN
765 && find_reg_fusage (insn, USE, reg))))
766 return 1;
767 return 0;
770 /* Nonzero if register REG is set or clobbered in an insn between
771 FROM_INSN and TO_INSN (exclusive of those two). */
774 reg_set_between_p (reg, from_insn, to_insn)
775 rtx reg, from_insn, to_insn;
777 rtx insn;
779 if (from_insn == to_insn)
780 return 0;
782 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
783 if (INSN_P (insn) && reg_set_p (reg, insn))
784 return 1;
785 return 0;
788 /* Internals of reg_set_between_p. */
790 reg_set_p (reg, insn)
791 rtx reg, insn;
793 rtx body = insn;
795 /* We can be passed an insn or part of one. If we are passed an insn,
796 check if a side-effect of the insn clobbers REG. */
797 if (INSN_P (insn))
799 if (FIND_REG_INC_NOTE (insn, reg)
800 || (GET_CODE (insn) == CALL_INSN
801 /* We'd like to test call_used_regs here, but rtlanal.c can't
802 reference that variable due to its use in genattrtab. So
803 we'll just be more conservative.
805 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
806 information holds all clobbered registers. */
807 && ((GET_CODE (reg) == REG
808 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
809 || GET_CODE (reg) == MEM
810 || find_reg_fusage (insn, CLOBBER, reg))))
811 return 1;
813 body = PATTERN (insn);
816 return set_of (reg, insn) != NULL_RTX;
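/* Illustrative sketch, not part of the original file: a pass that wants
   to know whether INSN writes REG can ask either function; set_of also
   says how.  The names `reg' and `insn' are placeholders.

     rtx setter = set_of (reg, insn);
     if (setter != NULL_RTX)
       {
	 if (GET_CODE (setter) == CLOBBER)
	   ... REG is clobbered, its new value is unknown ...
	 else
	   ... SET_SRC (setter) computes the new value ...
       }

   reg_set_p (reg, insn) additionally accounts for call clobbers and
   auto-increment (REG_INC) side effects when INSN is a real insn.
*/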
819 /* Similar to reg_set_between_p, but check all registers in X. Return 0
820 only if none of them are modified between START and END. Do not
821 consider non-registers one way or the other. */
824 regs_set_between_p (x, start, end)
825 rtx x;
826 rtx start, end;
828 enum rtx_code code = GET_CODE (x);
829 const char *fmt;
830 int i, j;
832 switch (code)
834 case CONST_INT:
835 case CONST_DOUBLE:
836 case CONST_VECTOR:
837 case CONST:
838 case SYMBOL_REF:
839 case LABEL_REF:
840 case PC:
841 case CC0:
842 return 0;
844 case REG:
845 return reg_set_between_p (x, start, end);
847 default:
848 break;
851 fmt = GET_RTX_FORMAT (code);
852 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
854 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
855 return 1;
857 else if (fmt[i] == 'E')
858 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
859 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
860 return 1;
863 return 0;
866 /* Similar to reg_set_between_p, but check all registers in X. Return 0
867 only if none of them are modified between START and END. Return 1 if
868 X contains a MEM; this routine does not perform any memory aliasing. */
871 modified_between_p (x, start, end)
872 rtx x;
873 rtx start, end;
875 enum rtx_code code = GET_CODE (x);
876 const char *fmt;
877 int i, j;
879 switch (code)
881 case CONST_INT:
882 case CONST_DOUBLE:
883 case CONST_VECTOR:
884 case CONST:
885 case SYMBOL_REF:
886 case LABEL_REF:
887 return 0;
889 case PC:
890 case CC0:
891 return 1;
893 case MEM:
894 /* If the memory is not constant, assume it is modified. If it is
895 constant, we still have to check the address. */
896 if (! RTX_UNCHANGING_P (x))
897 return 1;
898 break;
900 case REG:
901 return reg_set_between_p (x, start, end);
903 default:
904 break;
907 fmt = GET_RTX_FORMAT (code);
908 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
910 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
911 return 1;
913 else if (fmt[i] == 'E')
914 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
915 if (modified_between_p (XVECEXP (x, i, j), start, end))
916 return 1;
919 return 0;
922 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
923 of them are modified in INSN. Return 1 if X contains a MEM; this routine
924 does not perform any memory aliasing. */
927 modified_in_p (x, insn)
928 rtx x;
929 rtx insn;
931 enum rtx_code code = GET_CODE (x);
932 const char *fmt;
933 int i, j;
935 switch (code)
937 case CONST_INT:
938 case CONST_DOUBLE:
939 case CONST_VECTOR:
940 case CONST:
941 case SYMBOL_REF:
942 case LABEL_REF:
943 return 0;
945 case PC:
946 case CC0:
947 return 1;
949 case MEM:
950 /* If the memory is not constant, assume it is modified. If it is
951 constant, we still have to check the address. */
952 if (! RTX_UNCHANGING_P (x))
953 return 1;
954 break;
956 case REG:
957 return reg_set_p (x, insn);
959 default:
960 break;
963 fmt = GET_RTX_FORMAT (code);
964 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
966 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
967 return 1;
969 else if (fmt[i] == 'E')
970 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
971 if (modified_in_p (XVECEXP (x, i, j), insn))
972 return 1;
975 return 0;
978 /* Return true if anything in insn X is (anti,output,true) dependent on
979 anything in insn Y. */
982 insn_dependent_p (x, y)
983 rtx x, y;
985 rtx tmp;
987 if (! INSN_P (x) || ! INSN_P (y))
988 abort ();
990 tmp = PATTERN (y);
991 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
992 if (tmp == NULL_RTX)
993 return 1;
995 tmp = PATTERN (x);
996 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
997 if (tmp == NULL_RTX)
998 return 1;
1000 return 0;
1003 /* A helper routine for insn_dependent_p called through note_stores. */
1005 static void
1006 insn_dependent_p_1 (x, pat, data)
1007 rtx x;
1008 rtx pat ATTRIBUTE_UNUSED;
1009 void *data;
1011 rtx * pinsn = (rtx *) data;
1013 if (*pinsn && reg_mentioned_p (x, *pinsn))
1014 *pinsn = NULL_RTX;
1017 /* Helper function for set_of. */
1018 struct set_of_data
1020 rtx found;
1021 rtx pat;
1024 static void
1025 set_of_1 (x, pat, data1)
1026 rtx x;
1027 rtx pat;
1028 void *data1;
1030 struct set_of_data *data = (struct set_of_data *) (data1);
1031 if (rtx_equal_p (x, data->pat)
1032 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1033 data->found = pat;
1036 /* Given an INSN, return a SET or CLOBBER expression that modifies PAT
1037 (either directly or via STRICT_LOW_PART and similar modifiers). */
1039 set_of (pat, insn)
1040 rtx pat, insn;
1042 struct set_of_data data;
1043 data.found = NULL_RTX;
1044 data.pat = pat;
1045 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1046 return data.found;
1049 /* Given an INSN, return a SET expression if this insn has only a single SET.
1050 It may also have CLOBBERs, USEs, or SETs whose output
1051 will not be used, which we ignore. */
1054 single_set_2 (insn, pat)
1055 rtx insn, pat;
1057 rtx set = NULL;
1058 int set_verified = 1;
1059 int i;
1061 if (GET_CODE (pat) == PARALLEL)
1063 for (i = 0; i < XVECLEN (pat, 0); i++)
1065 rtx sub = XVECEXP (pat, 0, i);
1066 switch (GET_CODE (sub))
1068 case USE:
1069 case CLOBBER:
1070 break;
1072 case SET:
1073 /* We can consider insns having multiple sets, where all
1074 but one are dead, as single set insns. In the common case
1075 only a single set is present in the pattern, so we want
1076 to avoid checking for REG_UNUSED notes unless necessary.
1078 When we reach a set for the first time, we just expect it to be
1079 the single set we are looking for; only when more
1080 sets are found in the insn do we check them. */
1081 if (!set_verified)
1083 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1084 && !side_effects_p (set))
1085 set = NULL;
1086 else
1087 set_verified = 1;
1089 if (!set)
1090 set = sub, set_verified = 0;
1091 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1092 || side_effects_p (sub))
1093 return NULL_RTX;
1094 break;
1096 default:
1097 return NULL_RTX;
1101 return set;
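/* Illustrative sketch, not part of the original file: single_set (the
   public wrapper around single_set_2) looks through surrounding USEs
   and CLOBBERs.  For an insn whose pattern is

     (parallel [(set (reg 101) (plus (reg 102) (reg 103)))
		(clobber (reg:CC 17))])

   it returns the inner SET, whereas a pattern containing two live SETs
   makes it return NULL_RTX.  The register numbers and the CC register
   are made up for the example.
*/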
1104 /* Given an INSN, return nonzero if it has more than one SET, else return
1105 zero. */
1108 multiple_sets (insn)
1109 rtx insn;
1111 int found;
1112 int i;
1114 /* INSN must be an insn. */
1115 if (! INSN_P (insn))
1116 return 0;
1118 /* Only a PARALLEL can have multiple SETs. */
1119 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1121 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1122 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1124 /* If we have already found a SET, then return now. */
1125 if (found)
1126 return 1;
1127 else
1128 found = 1;
1132 /* Either zero or one SET. */
1133 return 0;
1136 /* Return nonzero if the destination of SET equals the source
1137 and there are no side effects. */
1140 set_noop_p (set)
1141 rtx set;
1143 rtx src = SET_SRC (set);
1144 rtx dst = SET_DEST (set);
1146 if (side_effects_p (src) || side_effects_p (dst))
1147 return 0;
1149 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1150 return rtx_equal_p (dst, src);
1152 if (dst == pc_rtx && src == pc_rtx)
1153 return 1;
1155 if (GET_CODE (dst) == SIGN_EXTRACT
1156 || GET_CODE (dst) == ZERO_EXTRACT)
1157 return rtx_equal_p (XEXP (dst, 0), src)
1158 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1160 if (GET_CODE (dst) == STRICT_LOW_PART)
1161 dst = XEXP (dst, 0);
1163 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1165 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1166 return 0;
1167 src = SUBREG_REG (src);
1168 dst = SUBREG_REG (dst);
1171 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1172 && REGNO (src) == REGNO (dst));
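/* Illustrative sketch, not part of the original file:
   (set (reg 101) (reg 101)) is a no-op, so set_noop_p returns 1 for it,
   while (set (reg 101) (reg 102)) and any SET whose source or
   destination has side effects (e.g. a volatile MEM) yield 0.
   noop_move_p below applies the same test to a whole insn, including a
   PARALLEL of such sets.  The register numbers are made up.
*/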
1175 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1176 value to itself. */
1179 noop_move_p (insn)
1180 rtx insn;
1182 rtx pat = PATTERN (insn);
1184 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1185 return 1;
1187 /* Insns carrying these notes are useful later on. */
1188 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1189 return 0;
1191 /* For now treat an insn with a REG_RETVAL note as a
1192 special insn which should not be considered a no-op. */
1193 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1194 return 0;
1196 if (GET_CODE (pat) == SET && set_noop_p (pat))
1197 return 1;
1199 if (GET_CODE (pat) == PARALLEL)
1201 int i;
1202 /* If nothing but SETs of registers to themselves,
1203 this insn can also be deleted. */
1204 for (i = 0; i < XVECLEN (pat, 0); i++)
1206 rtx tem = XVECEXP (pat, 0, i);
1208 if (GET_CODE (tem) == USE
1209 || GET_CODE (tem) == CLOBBER)
1210 continue;
1212 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1213 return 0;
1216 return 1;
1218 return 0;
1222 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1223 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1224 If the object was modified, if we hit a partial assignment to X, or hit a
1225 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1226 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1227 be the src. */
1230 find_last_value (x, pinsn, valid_to, allow_hwreg)
1231 rtx x;
1232 rtx *pinsn;
1233 rtx valid_to;
1234 int allow_hwreg;
1236 rtx p;
1238 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1239 p = PREV_INSN (p))
1240 if (INSN_P (p))
1242 rtx set = single_set (p);
1243 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1245 if (set && rtx_equal_p (x, SET_DEST (set)))
1247 rtx src = SET_SRC (set);
1249 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1250 src = XEXP (note, 0);
1252 if ((valid_to == NULL_RTX
1253 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1254 /* Reject hard registers because we don't usually want
1255 to use them; we'd rather use a pseudo. */
1256 && (! (GET_CODE (src) == REG
1257 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1259 *pinsn = p;
1260 return src;
1264 /* If set in non-simple way, we don't have a value. */
1265 if (reg_set_p (x, p))
1266 break;
1269 return x;
1272 /* Return nonzero if register in range [REGNO, ENDREGNO)
1273 appears either explicitly or implicitly in X
1274 other than being stored into.
1276 References contained within the substructure at LOC do not count.
1277 LOC may be zero, meaning don't ignore anything. */
1280 refers_to_regno_p (regno, endregno, x, loc)
1281 unsigned int regno, endregno;
1282 rtx x;
1283 rtx *loc;
1285 int i;
1286 unsigned int x_regno;
1287 RTX_CODE code;
1288 const char *fmt;
1290 repeat:
1291 /* The contents of a REG_NONNEG note is always zero, so we must come here
1292 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1293 if (x == 0)
1294 return 0;
1296 code = GET_CODE (x);
1298 switch (code)
1300 case REG:
1301 x_regno = REGNO (x);
1303 /* If we are modifying the stack, frame, or argument pointer, it will
1304 clobber a virtual register. In fact, we could be more precise,
1305 but it isn't worth it. */
1306 if ((x_regno == STACK_POINTER_REGNUM
1307 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1308 || x_regno == ARG_POINTER_REGNUM
1309 #endif
1310 || x_regno == FRAME_POINTER_REGNUM)
1311 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1312 return 1;
1314 return (endregno > x_regno
1315 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1316 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1317 : 1));
1319 case SUBREG:
1320 /* If this is a SUBREG of a hard reg, we can see exactly which
1321 registers are being modified. Otherwise, handle normally. */
1322 if (GET_CODE (SUBREG_REG (x)) == REG
1323 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1325 unsigned int inner_regno = subreg_regno (x);
1326 unsigned int inner_endregno
1327 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1328 ? HARD_REGNO_NREGS (inner_regno, GET_MODE (x)) : 1);
1330 return endregno > inner_regno && regno < inner_endregno;
1332 break;
1334 case CLOBBER:
1335 case SET:
1336 if (&SET_DEST (x) != loc
1337 /* Note setting a SUBREG counts as referring to the REG it is in for
1338 a pseudo but not for hard registers since we can
1339 treat each word individually. */
1340 && ((GET_CODE (SET_DEST (x)) == SUBREG
1341 && loc != &SUBREG_REG (SET_DEST (x))
1342 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1343 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1344 && refers_to_regno_p (regno, endregno,
1345 SUBREG_REG (SET_DEST (x)), loc))
1346 || (GET_CODE (SET_DEST (x)) != REG
1347 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1348 return 1;
1350 if (code == CLOBBER || loc == &SET_SRC (x))
1351 return 0;
1352 x = SET_SRC (x);
1353 goto repeat;
1355 default:
1356 break;
1359 /* X does not match, so try its subexpressions. */
1361 fmt = GET_RTX_FORMAT (code);
1362 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1364 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1366 if (i == 0)
1368 x = XEXP (x, 0);
1369 goto repeat;
1371 else
1372 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1373 return 1;
1375 else if (fmt[i] == 'E')
1377 int j;
1378 for (j = XVECLEN (x, i) - 1; j >=0; j--)
1379 if (loc != &XVECEXP (x, i, j)
1380 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1381 return 1;
1384 return 0;
1387 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1388 we check if any register number in X conflicts with the relevant register
1389 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1390 contains a MEM (we don't bother checking for memory addresses that can't
1391 conflict because we expect this to be a rare case). */
1394 reg_overlap_mentioned_p (x, in)
1395 rtx x, in;
1397 unsigned int regno, endregno;
1399 /* Overly conservative. */
1400 if (GET_CODE (x) == STRICT_LOW_PART)
1401 x = XEXP (x, 0);
1403 /* If either argument is a constant, then modifying X can not affect IN. */
1404 if (CONSTANT_P (x) || CONSTANT_P (in))
1405 return 0;
1407 switch (GET_CODE (x))
1409 case SUBREG:
1410 regno = REGNO (SUBREG_REG (x));
1411 if (regno < FIRST_PSEUDO_REGISTER)
1412 regno = subreg_regno (x);
1413 goto do_reg;
1415 case REG:
1416 regno = REGNO (x);
1417 do_reg:
1418 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1419 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1420 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1422 case MEM:
1424 const char *fmt;
1425 int i;
1427 if (GET_CODE (in) == MEM)
1428 return 1;
1430 fmt = GET_RTX_FORMAT (GET_CODE (in));
1431 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1432 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1433 return 1;
1435 return 0;
1438 case SCRATCH:
1439 case PC:
1440 case CC0:
1441 return reg_mentioned_p (x, in);
1443 case PARALLEL:
1445 int i;
1447 /* If any register in here refers to it we return true. */
1448 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1449 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1450 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1451 return 1;
1452 return 0;
1455 default:
1456 break;
1459 abort ();
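/* Illustrative sketch, not part of the original file: unlike
   reg_mentioned_p, this function understands that a multi-word hard
   register overlaps each of the word-sized registers it occupies.
   Assuming (reg:DI 0) occupies hard registers 0 and 1 on the target,

     reg_overlap_mentioned_p (gen_rtx_REG (DImode, 0),
			      gen_rtx_REG (SImode, 1))

   is nonzero even though the two rtx'es are not equal.  The register
   numbers and modes here are illustrative and target-dependent.
*/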
1462 /* Return the last value to which REG was set prior to INSN. If we can't
1463 find it easily, return 0.
1465 We only return a REG, SUBREG, or constant because it is too hard to
1466 check if a MEM remains unchanged. */
1469 reg_set_last (x, insn)
1470 rtx x;
1471 rtx insn;
1473 rtx orig_insn = insn;
1475 /* Scan backwards from INSN. Stop when we reach a CODE_LABEL, or, if X
1476 is a hard register, when we reach a CALL_INSN.
1479 If we find a set of X, ensure that its SET_SRC remains unchanged
1482 between that set and the original INSN. */
1483 for (;
1484 insn && GET_CODE (insn) != CODE_LABEL
1485 && ! (GET_CODE (insn) == CALL_INSN
1486 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1487 insn = PREV_INSN (insn))
1488 if (INSN_P (insn))
1490 rtx set = set_of (x, insn);
1491 /* OK, this insn modifies our register. See if we understand it. */
1492 if (set)
1494 rtx last_value;
1495 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1496 return 0;
1497 last_value = SET_SRC (set);
1498 if (CONSTANT_P (last_value)
1499 || ((GET_CODE (last_value) == REG
1500 || GET_CODE (last_value) == SUBREG)
1501 && ! reg_set_between_p (last_value,
1502 insn, orig_insn)))
1503 return last_value;
1504 else
1505 return 0;
1509 return 0;
1512 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1513 (X would be the pattern of an insn).
1514 FUN receives two arguments:
1515 the REG, MEM, CC0 or PC being stored in or clobbered,
1516 the SET or CLOBBER rtx that does the store.
1518 If the item being stored in or clobbered is a SUBREG of a hard register,
1519 the SUBREG will be passed. */
1521 void
1522 note_stores (x, fun, data)
1523 rtx x;
1524 void (*fun) PARAMS ((rtx, rtx, void *));
1525 void *data;
1527 int i;
1529 if (GET_CODE (x) == COND_EXEC)
1530 x = COND_EXEC_CODE (x);
1532 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1534 rtx dest = SET_DEST (x);
1536 while ((GET_CODE (dest) == SUBREG
1537 && (GET_CODE (SUBREG_REG (dest)) != REG
1538 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1539 || GET_CODE (dest) == ZERO_EXTRACT
1540 || GET_CODE (dest) == SIGN_EXTRACT
1541 || GET_CODE (dest) == STRICT_LOW_PART)
1542 dest = XEXP (dest, 0);
1544 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1545 each of whose first operand is a register. We can't know what
1546 precisely is being set in these cases, so make up a CLOBBER to pass
1547 to the function. */
1548 if (GET_CODE (dest) == PARALLEL)
1550 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1551 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1552 (*fun) (XEXP (XVECEXP (dest, 0, i), 0),
1553 gen_rtx_CLOBBER (VOIDmode,
1554 XEXP (XVECEXP (dest, 0, i), 0)),
1555 data);
1557 else
1558 (*fun) (dest, x, data);
1561 else if (GET_CODE (x) == PARALLEL)
1562 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1563 note_stores (XVECEXP (x, 0, i), fun, data);
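/* Illustrative sketch, not part of the original file: collecting every
   hard register stored or clobbered by an insn.  The callback name
   `record_hard_reg_store' and the variable names are made up; the
   callback signature is the one documented above.

     static void
     record_hard_reg_store (dest, setter, data)
	  rtx dest;
	  rtx setter ATTRIBUTE_UNUSED;
	  void *data;
     {
       HARD_REG_SET *stored = (HARD_REG_SET *) data;

       /* A SUBREG of a hard register may be passed; look inside it.  */
       if (GET_CODE (dest) == SUBREG)
	 dest = SUBREG_REG (dest);
       if (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
	 SET_HARD_REG_BIT (*stored, REGNO (dest));
     }

     HARD_REG_SET stored;
     CLEAR_HARD_REG_SET (stored);
     note_stores (PATTERN (insn), record_hard_reg_store, &stored);
*/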
1566 /* Like note_stores, but call FUN for each expression that is being
1567 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1568 FUN for each expression, not any interior subexpressions. FUN receives a
1569 pointer to the expression and the DATA passed to this function.
1571 Note that this is not quite the same test as that done in reg_referenced_p
1572 since that considers something as being referenced if it is being
1573 partially set, while we do not. */
1575 void
1576 note_uses (pbody, fun, data)
1577 rtx *pbody;
1578 void (*fun) PARAMS ((rtx *, void *));
1579 void *data;
1581 rtx body = *pbody;
1582 int i;
1584 switch (GET_CODE (body))
1586 case COND_EXEC:
1587 (*fun) (&COND_EXEC_TEST (body), data);
1588 note_uses (&COND_EXEC_CODE (body), fun, data);
1589 return;
1591 case PARALLEL:
1592 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1593 note_uses (&XVECEXP (body, 0, i), fun, data);
1594 return;
1596 case USE:
1597 (*fun) (&XEXP (body, 0), data);
1598 return;
1600 case ASM_OPERANDS:
1601 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1602 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1603 return;
1605 case TRAP_IF:
1606 (*fun) (&TRAP_CONDITION (body), data);
1607 return;
1609 case PREFETCH:
1610 (*fun) (&XEXP (body, 0), data);
1611 return;
1613 case UNSPEC:
1614 case UNSPEC_VOLATILE:
1615 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1616 (*fun) (&XVECEXP (body, 0, i), data);
1617 return;
1619 case CLOBBER:
1620 if (GET_CODE (XEXP (body, 0)) == MEM)
1621 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1622 return;
1624 case SET:
1626 rtx dest = SET_DEST (body);
1628 /* For a SET we process everything in the source, plus any registers used
1629 in a memory-expression destination and the operands of a ZERO_EXTRACT. */
1630 (*fun) (&SET_SRC (body), data);
1632 if (GET_CODE (dest) == ZERO_EXTRACT)
1634 (*fun) (&XEXP (dest, 1), data);
1635 (*fun) (&XEXP (dest, 2), data);
1638 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1639 dest = XEXP (dest, 0);
1641 if (GET_CODE (dest) == MEM)
1642 (*fun) (&XEXP (dest, 0), data);
1644 return;
1646 default:
1647 /* All the other possibilities never store. */
1648 (*fun) (pbody, data);
1649 return;
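/* Illustrative sketch, not part of the original file: note_uses hands
   the callback a pointer to each used expression, so it is convenient
   both for inspection and for in-place rewriting.  The callback and
   variable names below are made up.

     static void
     check_for_volatile (px, data)
	  rtx *px;
	  void *data;
     {
       if (volatile_refs_p (*px))
	 *(int *) data = 1;
     }

     int has_volatile = 0;
     note_uses (&PATTERN (insn), check_for_volatile, &has_volatile);
*/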
1653 /* Return nonzero if X's old contents don't survive after INSN.
1654 This will be true if X is (cc0) or if X is a register and
1655 X dies in INSN or because INSN entirely sets X.
1657 "Entirely set" means set directly and not through a SUBREG,
1658 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1659 Likewise, REG_INC does not count.
1661 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1662 but for this use that makes no difference, since regs don't overlap
1663 during their lifetimes. Therefore, this function may be used
1664 at any time after deaths have been computed (in flow.c).
1666 If REG is a hard reg that occupies multiple machine registers, this
1667 function will only return 1 if each of those registers will be replaced
1668 by INSN. */
1671 dead_or_set_p (insn, x)
1672 rtx insn;
1673 rtx x;
1675 unsigned int regno, last_regno;
1676 unsigned int i;
1678 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1679 if (GET_CODE (x) == CC0)
1680 return 1;
1682 if (GET_CODE (x) != REG)
1683 abort ();
1685 regno = REGNO (x);
1686 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1687 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1689 for (i = regno; i <= last_regno; i++)
1690 if (! dead_or_set_regno_p (insn, i))
1691 return 0;
1693 return 1;
1696 /* Utility function for dead_or_set_p to check an individual register. Also
1697 called from flow.c. */
1700 dead_or_set_regno_p (insn, test_regno)
1701 rtx insn;
1702 unsigned int test_regno;
1704 unsigned int regno, endregno;
1705 rtx pattern;
1707 /* See if there is a death note for something that includes TEST_REGNO. */
1708 if (find_regno_note (insn, REG_DEAD, test_regno))
1709 return 1;
1711 if (GET_CODE (insn) == CALL_INSN
1712 && find_regno_fusage (insn, CLOBBER, test_regno))
1713 return 1;
1715 pattern = PATTERN (insn);
1717 if (GET_CODE (pattern) == COND_EXEC)
1718 pattern = COND_EXEC_CODE (pattern);
1720 if (GET_CODE (pattern) == SET)
1722 rtx dest = SET_DEST (pattern);
1724 /* A value is totally replaced if it is the destination or the
1725 destination is a SUBREG of REGNO that does not change the number of
1726 words in it. */
1727 if (GET_CODE (dest) == SUBREG
1728 && (((GET_MODE_SIZE (GET_MODE (dest))
1729 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1730 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1731 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1732 dest = SUBREG_REG (dest);
1734 if (GET_CODE (dest) != REG)
1735 return 0;
1737 regno = REGNO (dest);
1738 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1739 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1741 return (test_regno >= regno && test_regno < endregno);
1743 else if (GET_CODE (pattern) == PARALLEL)
1745 int i;
1747 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1749 rtx body = XVECEXP (pattern, 0, i);
1751 if (GET_CODE (body) == COND_EXEC)
1752 body = COND_EXEC_CODE (body);
1754 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1756 rtx dest = SET_DEST (body);
1758 if (GET_CODE (dest) == SUBREG
1759 && (((GET_MODE_SIZE (GET_MODE (dest))
1760 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1761 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1762 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1763 dest = SUBREG_REG (dest);
1765 if (GET_CODE (dest) != REG)
1766 continue;
1768 regno = REGNO (dest);
1769 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1770 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1772 if (test_regno >= regno && test_regno < endregno)
1773 return 1;
1778 return 0;
1781 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1782 If DATUM is nonzero, look for one whose datum is DATUM. */
1785 find_reg_note (insn, kind, datum)
1786 rtx insn;
1787 enum reg_note kind;
1788 rtx datum;
1790 rtx link;
1792 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1793 if (! INSN_P (insn))
1794 return 0;
1796 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1797 if (REG_NOTE_KIND (link) == kind
1798 && (datum == 0 || datum == XEXP (link, 0)))
1799 return link;
1800 return 0;
1803 /* Return the reg-note of kind KIND in insn INSN which applies to register
1804 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1805 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1806 it might be the case that the note overlaps REGNO. */
1809 find_regno_note (insn, kind, regno)
1810 rtx insn;
1811 enum reg_note kind;
1812 unsigned int regno;
1814 rtx link;
1816 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1817 if (! INSN_P (insn))
1818 return 0;
1820 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1821 if (REG_NOTE_KIND (link) == kind
1822 /* Verify that it is a register, so that scratch and MEM won't cause a
1823 problem here. */
1824 && GET_CODE (XEXP (link, 0)) == REG
1825 && REGNO (XEXP (link, 0)) <= regno
1826 && ((REGNO (XEXP (link, 0))
1827 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1828 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1829 GET_MODE (XEXP (link, 0)))))
1830 > regno))
1831 return link;
1832 return 0;
1835 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1836 has such a note. */
1839 find_reg_equal_equiv_note (insn)
1840 rtx insn;
1842 rtx note;
1844 if (single_set (insn) == 0)
1845 return 0;
1846 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1847 return note;
1848 else
1849 return find_reg_note (insn, REG_EQUAL, NULL_RTX);
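/* Illustrative sketch, not part of the original file: a typical caller
   prefers the note value over the SET_SRC when simplifying.  The
   variable names are placeholders.

     rtx note = find_reg_equal_equiv_note (insn);
     rtx src = note ? XEXP (note, 0) : SET_SRC (single_set (insn));

   The caller must already know that single_set (insn) is nonnull;
   find_reg_equal_equiv_note itself returns 0 otherwise.
*/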
1852 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1853 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1856 find_reg_fusage (insn, code, datum)
1857 rtx insn;
1858 enum rtx_code code;
1859 rtx datum;
1861 /* If it's not a CALL_INSN, it can't possibly have a
1862 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1863 if (GET_CODE (insn) != CALL_INSN)
1864 return 0;
1866 if (! datum)
1867 abort ();
1869 if (GET_CODE (datum) != REG)
1871 rtx link;
1873 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1874 link;
1875 link = XEXP (link, 1))
1876 if (GET_CODE (XEXP (link, 0)) == code
1877 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1878 return 1;
1880 else
1882 unsigned int regno = REGNO (datum);
1884 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1885 to pseudo registers, so don't bother checking. */
1887 if (regno < FIRST_PSEUDO_REGISTER)
1889 unsigned int end_regno
1890 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1891 unsigned int i;
1893 for (i = regno; i < end_regno; i++)
1894 if (find_regno_fusage (insn, code, i))
1895 return 1;
1899 return 0;
1902 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1903 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1906 find_regno_fusage (insn, code, regno)
1907 rtx insn;
1908 enum rtx_code code;
1909 unsigned int regno;
1911 rtx link;
1913 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1914 to pseudo registers, so don't bother checking. */
1916 if (regno >= FIRST_PSEUDO_REGISTER
1917 || GET_CODE (insn) != CALL_INSN )
1918 return 0;
1920 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1922 unsigned int regnote;
1923 rtx op, reg;
1925 if (GET_CODE (op = XEXP (link, 0)) == code
1926 && GET_CODE (reg = XEXP (op, 0)) == REG
1927 && (regnote = REGNO (reg)) <= regno
1928 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
1929 return 1;
1932 return 0;
1935 /* Remove register note NOTE from the REG_NOTES of INSN. */
1937 void
1938 remove_note (insn, note)
1939 rtx insn;
1940 rtx note;
1942 rtx link;
1944 if (note == NULL_RTX)
1945 return;
1947 if (REG_NOTES (insn) == note)
1949 REG_NOTES (insn) = XEXP (note, 1);
1950 return;
1953 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1954 if (XEXP (link, 1) == note)
1956 XEXP (link, 1) = XEXP (note, 1);
1957 return;
1960 abort ();
1963 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1964 return 1 if it is found. A simple equality test is used to determine if
1965 NODE matches. */
1968 in_expr_list_p (listp, node)
1969 rtx listp;
1970 rtx node;
1972 rtx x;
1974 for (x = listp; x; x = XEXP (x, 1))
1975 if (node == XEXP (x, 0))
1976 return 1;
1978 return 0;
1981 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
1982 remove that entry from the list if it is found.
1984 A simple equality test is used to determine if NODE matches. */
1986 void
1987 remove_node_from_expr_list (node, listp)
1988 rtx node;
1989 rtx *listp;
1991 rtx temp = *listp;
1992 rtx prev = NULL_RTX;
1994 while (temp)
1996 if (node == XEXP (temp, 0))
1998 /* Splice the node out of the list. */
1999 if (prev)
2000 XEXP (prev, 1) = XEXP (temp, 1);
2001 else
2002 *listp = XEXP (temp, 1);
2004 return;
2007 prev = temp;
2008 temp = XEXP (temp, 1);
2012 /* Nonzero if X contains any volatile instructions. These are instructions
2013 which may cause unpredictable machine state, and thus no other
2014 instructions should be moved or combined across them. This includes
2015 only volatile asms and UNSPEC_VOLATILE instructions. */
2018 volatile_insn_p (x)
2019 rtx x;
2021 RTX_CODE code;
2023 code = GET_CODE (x);
2024 switch (code)
2026 case LABEL_REF:
2027 case SYMBOL_REF:
2028 case CONST_INT:
2029 case CONST:
2030 case CONST_DOUBLE:
2031 case CONST_VECTOR:
2032 case CC0:
2033 case PC:
2034 case REG:
2035 case SCRATCH:
2036 case CLOBBER:
2037 case ASM_INPUT:
2038 case ADDR_VEC:
2039 case ADDR_DIFF_VEC:
2040 case CALL:
2041 case MEM:
2042 return 0;
2044 case UNSPEC_VOLATILE:
2045 /* case TRAP_IF: This isn't clear yet. */
2046 return 1;
2048 case ASM_OPERANDS:
2049 if (MEM_VOLATILE_P (x))
2050 return 1;
2052 default:
2053 break;
2056 /* Recursively scan the operands of this expression. */
2059 const char *fmt = GET_RTX_FORMAT (code);
2060 int i;
2062 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2064 if (fmt[i] == 'e')
2066 if (volatile_insn_p (XEXP (x, i)))
2067 return 1;
2069 else if (fmt[i] == 'E')
2071 int j;
2072 for (j = 0; j < XVECLEN (x, i); j++)
2073 if (volatile_insn_p (XVECEXP (x, i, j)))
2074 return 1;
2078 return 0;
2081 /* Nonzero if X contains any volatile memory references
2082 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2085 volatile_refs_p (x)
2086 rtx x;
2088 RTX_CODE code;
2090 code = GET_CODE (x);
2091 switch (code)
2093 case LABEL_REF:
2094 case SYMBOL_REF:
2095 case CONST_INT:
2096 case CONST:
2097 case CONST_DOUBLE:
2098 case CONST_VECTOR:
2099 case CC0:
2100 case PC:
2101 case REG:
2102 case SCRATCH:
2103 case CLOBBER:
2104 case ASM_INPUT:
2105 case ADDR_VEC:
2106 case ADDR_DIFF_VEC:
2107 return 0;
2109 case CALL:
2110 case UNSPEC_VOLATILE:
2111 /* case TRAP_IF: This isn't clear yet. */
2112 return 1;
2114 case MEM:
2115 case ASM_OPERANDS:
2116 if (MEM_VOLATILE_P (x))
2117 return 1;
2119 default:
2120 break;
2123 /* Recursively scan the operands of this expression. */
2126 const char *fmt = GET_RTX_FORMAT (code);
2127 int i;
2129 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2131 if (fmt[i] == 'e')
2133 if (volatile_refs_p (XEXP (x, i)))
2134 return 1;
2136 else if (fmt[i] == 'E')
2138 int j;
2139 for (j = 0; j < XVECLEN (x, i); j++)
2140 if (volatile_refs_p (XVECEXP (x, i, j)))
2141 return 1;
2145 return 0;
2148 /* Similar to above, except that it also rejects register pre- and post-
2149 incrementing. */
2152 side_effects_p (x)
2153 rtx x;
2155 RTX_CODE code;
2157 code = GET_CODE (x);
2158 switch (code)
2160 case LABEL_REF:
2161 case SYMBOL_REF:
2162 case CONST_INT:
2163 case CONST:
2164 case CONST_DOUBLE:
2165 case CONST_VECTOR:
2166 case CC0:
2167 case PC:
2168 case REG:
2169 case SCRATCH:
2170 case ASM_INPUT:
2171 case ADDR_VEC:
2172 case ADDR_DIFF_VEC:
2173 return 0;
2175 case CLOBBER:
2176 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2177 when some combination can't be done. If we see one, don't think
2178 that we can simplify the expression. */
2179 return (GET_MODE (x) != VOIDmode);
2181 case PRE_INC:
2182 case PRE_DEC:
2183 case POST_INC:
2184 case POST_DEC:
2185 case PRE_MODIFY:
2186 case POST_MODIFY:
2187 case CALL:
2188 case UNSPEC_VOLATILE:
2189 /* case TRAP_IF: This isn't clear yet. */
2190 return 1;
2192 case MEM:
2193 case ASM_OPERANDS:
2194 if (MEM_VOLATILE_P (x))
2195 return 1;
2197 default:
2198 break;
2201 /* Recursively scan the operands of this expression. */
2204 const char *fmt = GET_RTX_FORMAT (code);
2205 int i;
2207 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2209 if (fmt[i] == 'e')
2211 if (side_effects_p (XEXP (x, i)))
2212 return 1;
2214 else if (fmt[i] == 'E')
2216 int j;
2217 for (j = 0; j < XVECLEN (x, i); j++)
2218 if (side_effects_p (XVECEXP (x, i, j)))
2219 return 1;
2223 return 0;
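/* Illustrative sketch, not part of the original file: the three
   predicates above form a hierarchy.  For a volatile memory reference
   with a post-increment address, say (mem/v (post_inc (reg 101))),
   volatile_insn_p is 0 (there is no volatile asm or UNSPEC_VOLATILE),
   volatile_refs_p is 1 (the MEM itself is volatile), and side_effects_p
   is 1 as well (both the volatile MEM and the POST_INC count).  The
   register number is made up.
*/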
2226 /* Return nonzero if evaluating rtx X might cause a trap. */
2229 may_trap_p (x)
2230 rtx x;
2232 int i;
2233 enum rtx_code code;
2234 const char *fmt;
2236 if (x == 0)
2237 return 0;
2238 code = GET_CODE (x);
2239 switch (code)
2241 /* Handle these cases quickly. */
2242 case CONST_INT:
2243 case CONST_DOUBLE:
2244 case CONST_VECTOR:
2245 case SYMBOL_REF:
2246 case LABEL_REF:
2247 case CONST:
2248 case PC:
2249 case CC0:
2250 case REG:
2251 case SCRATCH:
2252 return 0;
2254 case ASM_INPUT:
2255 case UNSPEC_VOLATILE:
2256 case TRAP_IF:
2257 return 1;
2259 case ASM_OPERANDS:
2260 return MEM_VOLATILE_P (x);
2262 /* Memory ref can trap unless it's a static var or a stack slot. */
2263 case MEM:
2264 return rtx_addr_can_trap_p (XEXP (x, 0));
2266 /* Division by a non-constant might trap. */
2267 case DIV:
2268 case MOD:
2269 case UDIV:
2270 case UMOD:
2271 if (! CONSTANT_P (XEXP (x, 1))
2272 || GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2273 return 1;
2274 /* This was const0_rtx, but by not using that,
2275 we can link this file into other programs. */
2276 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2277 return 1;
2278 break;
2280 case EXPR_LIST:
2281 /* An EXPR_LIST is used to represent a function call. This
2282 certainly may trap. */
2283 return 1;
2285 case GE:
2286 case GT:
2287 case LE:
2288 case LT:
2289 case COMPARE:
2290 /* Some floating point comparisons may trap. */
2291 /* ??? There is no machine independent way to check for tests that trap
2292 when COMPARE is used, though many targets do make this distinction.
2293 For instance, sparc uses CCFPE for compares which generate exceptions
2294 and CCFP for compares which do not generate exceptions. */
2295 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2296 return 1;
2297 /* But often the compare has some CC mode, so check operand
2298 modes as well. */
2299 if (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_FLOAT
2300 || GET_MODE_CLASS (GET_MODE (XEXP (x, 1))) == MODE_FLOAT)
2301 return 1;
2302 break;
2304 case NEG:
2305 case ABS:
2306 /* These operations don't trap even with floating point. */
2307 break;
2309 default:
2310 /* Any floating arithmetic may trap. */
2311 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2312 return 1;
2315 fmt = GET_RTX_FORMAT (code);
2316 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2318 if (fmt[i] == 'e')
2320 if (may_trap_p (XEXP (x, i)))
2321 return 1;
2323 else if (fmt[i] == 'E')
2325 int j;
2326 for (j = 0; j < XVECLEN (x, i); j++)
2327 if (may_trap_p (XVECEXP (x, i, j)))
2328 return 1;
2331 return 0;
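/* Illustrative sketch, not part of the original file: may_trap_p is the
   usual test when deciding whether an expression may be speculated.
   Here `reg' and `other_reg' stand for ordinary pseudo-register REG
   rtx'es (not the frame or stack pointer); all names are made up.

     may_trap_p (gen_rtx_PLUS (SImode, reg, GEN_INT (4)));     is 0
     may_trap_p (gen_rtx_MEM (SImode, reg));                   is 1
     may_trap_p (gen_rtx_DIV (SImode, reg, GEN_INT (8)));      is 0
     may_trap_p (gen_rtx_DIV (SImode, reg, other_reg));        is 1
*/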
2334 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2335 i.e., an inequality. */
2338 inequality_comparisons_p (x)
2339 rtx x;
2341 const char *fmt;
2342 int len, i;
2343 enum rtx_code code = GET_CODE (x);
2345 switch (code)
2347 case REG:
2348 case SCRATCH:
2349 case PC:
2350 case CC0:
2351 case CONST_INT:
2352 case CONST_DOUBLE:
2353 case CONST_VECTOR:
2354 case CONST:
2355 case LABEL_REF:
2356 case SYMBOL_REF:
2357 return 0;
2359 case LT:
2360 case LTU:
2361 case GT:
2362 case GTU:
2363 case LE:
2364 case LEU:
2365 case GE:
2366 case GEU:
2367 return 1;
2369 default:
2370 break;
2373 len = GET_RTX_LENGTH (code);
2374 fmt = GET_RTX_FORMAT (code);
2376 for (i = 0; i < len; i++)
2378 if (fmt[i] == 'e')
2380 if (inequality_comparisons_p (XEXP (x, i)))
2381 return 1;
2383 else if (fmt[i] == 'E')
2385 int j;
2386 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2387 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2388 return 1;
2392 return 0;
2395 /* Replace any occurrence of FROM in X with TO.  The function does
2396 not descend into CONST_DOUBLE when doing the replacement.
2398 Note that no copying is done, so X must not be shared unless all
2399 copies are to be modified. */
2402 replace_rtx (x, from, to)
2403 rtx x, from, to;
2405 int i, j;
2406 const char *fmt;
2408 /* The following prevents a loop from occurring when we change the MEM
2409 inside a CONST_DOUBLE to the same CONST_DOUBLE. */
2410 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2411 return x;
2413 if (x == from)
2414 return to;
2416 /* Allow this function to make replacements in EXPR_LISTs. */
2417 if (x == 0)
2418 return 0;
2420 fmt = GET_RTX_FORMAT (GET_CODE (x));
2421 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2423 if (fmt[i] == 'e')
2424 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2425 else if (fmt[i] == 'E')
2426 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2427 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2430 return x;
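/* Editor's note: an illustrative sketch, not from the original file;
   `substitute_reg_with_const' is a hypothetical name.  Because
   replace_rtx does no copying, a caller that wants to preserve the
   original pattern copies it first.  copy_rtx shares REG nodes, so the
   pointer comparison inside replace_rtx still finds REG in the copy.  */

static rtx
substitute_reg_with_const (pat, reg, value)
     rtx pat, reg;
     HOST_WIDE_INT value;
{
  rtx copy = copy_rtx (pat);
  return replace_rtx (copy, reg, GEN_INT (value));
}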
2433 /* Throughout the rtx X, replace many registers according to REG_MAP.
2434 Return the replacement for X (which may be X with altered contents).
2435 REG_MAP[R] is the replacement for register R, or 0 if it should not be replaced.
2436 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2438 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2439 should not be mapped to pseudos or vice versa since validate_change
2440 is not called.
2442 If REPLACE_DEST is 1, replacements are also done in destinations;
2443 otherwise, only sources are replaced. */
2446 replace_regs (x, reg_map, nregs, replace_dest)
2447 rtx x;
2448 rtx *reg_map;
2449 unsigned int nregs;
2450 int replace_dest;
2452 enum rtx_code code;
2453 int i;
2454 const char *fmt;
2456 if (x == 0)
2457 return x;
2459 code = GET_CODE (x);
2460 switch (code)
2462 case SCRATCH:
2463 case PC:
2464 case CC0:
2465 case CONST_INT:
2466 case CONST_DOUBLE:
2467 case CONST_VECTOR:
2468 case CONST:
2469 case SYMBOL_REF:
2470 case LABEL_REF:
2471 return x;
2473 case REG:
2474 /* Verify that the register has an entry before trying to access it. */
2475 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2477 /* SUBREGs can't be shared.  Always return a copy to ensure that if
2478 this replacement occurs more than once, each instance will
2479 get a distinct rtx. */
2480 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2481 return copy_rtx (reg_map[REGNO (x)]);
2482 return reg_map[REGNO (x)];
2484 return x;
2486 case SUBREG:
2487 /* Prevent making nested SUBREGs. */
2488 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2489 && reg_map[REGNO (SUBREG_REG (x))] != 0
2490 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2492 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2493 return simplify_gen_subreg (GET_MODE (x), map_val,
2494 GET_MODE (SUBREG_REG (x)),
2495 SUBREG_BYTE (x));
2497 break;
2499 case SET:
2500 if (replace_dest)
2501 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2503 else if (GET_CODE (SET_DEST (x)) == MEM
2504 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2505 /* Even if we are not to replace destinations, replace a register if it
2506 is CONTAINED in the destination (i.e. the destination is a MEM or a
2507 STRICT_LOW_PART). */
2508 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2509 reg_map, nregs, 0);
2510 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2511 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2512 break;
2514 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2515 return x;
2517 default:
2518 break;
2521 fmt = GET_RTX_FORMAT (code);
2522 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2524 if (fmt[i] == 'e')
2525 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2526 else if (fmt[i] == 'E')
2528 int j;
2529 for (j = 0; j < XVECLEN (x, i); j++)
2530 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2531 nregs, replace_dest);
2534 return x;
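/* Editor's note: an illustrative sketch with hypothetical names.  To
   rename a single pseudo register OLD_REG to NEW_REG (both pseudos of
   the same mode) throughout PAT, a caller builds a sparse map indexed
   by register number.  Note that replace_regs modifies PAT in place.  */

static rtx
rename_pseudo (pat, old_reg, new_reg)
     rtx pat, old_reg, new_reg;
{
  unsigned int nregs = REGNO (old_reg) + 1;
  rtx *map = (rtx *) xcalloc (nregs, sizeof (rtx));
  rtx result;

  map[REGNO (old_reg)] = new_reg;
  result = replace_regs (pat, map, nregs, 1);
  free (map);
  return result;
}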
2537 /* A subroutine of computed_jump_p.  Return 1 if X contains a REG or MEM or
2538 constant that is not in the constant pool and is not in the condition
2539 of an IF_THEN_ELSE. */
2541 static int
2542 computed_jump_p_1 (x)
2543 rtx x;
2545 enum rtx_code code = GET_CODE (x);
2546 int i, j;
2547 const char *fmt;
2549 switch (code)
2551 case LABEL_REF:
2552 case PC:
2553 return 0;
2555 case CONST:
2556 case CONST_INT:
2557 case CONST_DOUBLE:
2558 case CONST_VECTOR:
2559 case SYMBOL_REF:
2560 case REG:
2561 return 1;
2563 case MEM:
2564 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2565 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2567 case IF_THEN_ELSE:
2568 return (computed_jump_p_1 (XEXP (x, 1))
2569 || computed_jump_p_1 (XEXP (x, 2)));
2571 default:
2572 break;
2575 fmt = GET_RTX_FORMAT (code);
2576 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2578 if (fmt[i] == 'e'
2579 && computed_jump_p_1 (XEXP (x, i)))
2580 return 1;
2582 else if (fmt[i] == 'E')
2583 for (j = 0; j < XVECLEN (x, i); j++)
2584 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2585 return 1;
2588 return 0;
2591 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2593 Tablejumps and casesi insns are not considered indirect jumps;
2594 we can recognize them by a (use (label_ref)). */
2597 computed_jump_p (insn)
2598 rtx insn;
2600 int i;
2601 if (GET_CODE (insn) == JUMP_INSN)
2603 rtx pat = PATTERN (insn);
2605 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2606 return 0;
2607 else if (GET_CODE (pat) == PARALLEL)
2609 int len = XVECLEN (pat, 0);
2610 int has_use_labelref = 0;
2612 for (i = len - 1; i >= 0; i--)
2613 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2614 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2615 == LABEL_REF))
2616 has_use_labelref = 1;
2618 if (! has_use_labelref)
2619 for (i = len - 1; i >= 0; i--)
2620 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2621 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2622 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2623 return 1;
2625 else if (GET_CODE (pat) == SET
2626 && SET_DEST (pat) == pc_rtx
2627 && computed_jump_p_1 (SET_SRC (pat)))
2628 return 1;
2630 return 0;
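/* Editor's note: a small usage sketch, not in the original source;
   `count_computed_jumps' is a hypothetical name.  It walks the insn
   chain starting at FIRST and counts the indirect jumps recognized by
   computed_jump_p.  */

static int
count_computed_jumps (first)
     rtx first;
{
  rtx insn;
  int count = 0;

  for (insn = first; insn; insn = NEXT_INSN (insn))
    if (computed_jump_p (insn))
      count++;
  return count;
}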
2633 /* Traverse X via depth-first search, calling F for each
2634 sub-expression (including X itself). F is also passed the DATA.
2635 If F returns -1, do not traverse sub-expressions, but continue
2636 traversing the rest of the tree. If F ever returns any other
2637 non-zero value, stop the traversal, and return the value returned
2638 by F.  Otherwise, return 0.  This function does not traverse inside
2639 tree structure that contains RTX_EXPRs, nor into sub-expressions
2640 whose format code is `0', since it is not known whether those
2641 slots actually hold RTL.
2643 This routine is very general, and could (should?) be used to
2644 implement many of the other routines in this file. */
2647 for_each_rtx (x, f, data)
2648 rtx *x;
2649 rtx_function f;
2650 void *data;
2652 int result;
2653 int length;
2654 const char *format;
2655 int i;
2657 /* Call F on X. */
2658 result = (*f) (x, data);
2659 if (result == -1)
2660 /* Do not traverse sub-expressions. */
2661 return 0;
2662 else if (result != 0)
2663 /* Stop the traversal. */
2664 return result;
2666 if (*x == NULL_RTX)
2667 /* There are no sub-expressions. */
2668 return 0;
2670 length = GET_RTX_LENGTH (GET_CODE (*x));
2671 format = GET_RTX_FORMAT (GET_CODE (*x));
2673 for (i = 0; i < length; ++i)
2675 switch (format[i])
2677 case 'e':
2678 result = for_each_rtx (&XEXP (*x, i), f, data);
2679 if (result != 0)
2680 return result;
2681 break;
2683 case 'V':
2684 case 'E':
2685 if (XVEC (*x, i) != 0)
2687 int j;
2688 for (j = 0; j < XVECLEN (*x, i); ++j)
2690 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2691 if (result != 0)
2692 return result;
2695 break;
2697 default:
2698 /* Nothing to do. */
2699 break;
2704 return 0;
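/* Editor's note: an illustrative sketch of the callback protocol
   described above; the names are hypothetical.  The callback receives
   a pointer to each sub-expression (possibly a null rtx) and here
   simply counts MEM references, returning 0 so that traversal also
   continues into nested addresses.  */

static int
count_mems_1 (x, data)
     rtx *x;
     void *data;
{
  if (*x != 0 && GET_CODE (*x) == MEM)
    ++*(int *) data;
  return 0;
}

static int
count_mems (x)
     rtx x;
{
  int count = 0;

  for_each_rtx (&x, count_mems_1, &count);
  return count;
}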
2707 /* Search X for any reference to REGNO, returning the rtx of the
2708 reference if one is found; otherwise return NULL_RTX. */
2711 regno_use_in (regno, x)
2712 unsigned int regno;
2713 rtx x;
2715 const char *fmt;
2716 int i, j;
2717 rtx tem;
2719 if (GET_CODE (x) == REG && REGNO (x) == regno)
2720 return x;
2722 fmt = GET_RTX_FORMAT (GET_CODE (x));
2723 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2725 if (fmt[i] == 'e')
2727 if ((tem = regno_use_in (regno, XEXP (x, i))))
2728 return tem;
2730 else if (fmt[i] == 'E')
2731 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2732 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2733 return tem;
2736 return NULL_RTX;
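/* Editor's note: a small usage sketch; `insn_mentions_regno_p' is a
   hypothetical name.  It answers whether INSN still refers to register
   number REGNO anywhere in its pattern.  */

static int
insn_mentions_regno_p (insn, regno)
     rtx insn;
     unsigned int regno;
{
  return INSN_P (insn) && regno_use_in (regno, PATTERN (insn)) != NULL_RTX;
}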
2739 /* Return a value indicating whether OP, an operand of a commutative
2740 operation, is preferred as the first or second operand.  The higher
2741 the value, the stronger the preference for being the first operand;
2742 negative values therefore indicate a preference for being the second
2743 operand, positive values a preference for being the first. */
2746 commutative_operand_precedence (op)
2747 rtx op;
2749 /* Constants always go in the second operand position.  Prefer "nice" constants. */
2750 if (GET_CODE (op) == CONST_INT)
2751 return -5;
2752 if (GET_CODE (op) == CONST_DOUBLE)
2753 return -4;
2754 if (CONSTANT_P (op))
2755 return -3;
2757 /* SUBREGs of objects should come second. */
2758 if (GET_CODE (op) == SUBREG
2759 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
2760 return -2;
2762 /* If only one operand is a `neg', `not',
2763 `mult', `plus', or `minus' expression, it will be the first
2764 operand. */
2765 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
2766 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
2767 || GET_CODE (op) == MINUS)
2768 return 2;
2770 /* Complex expressions should come first, so decrease the priority
2771 of objects. */
2772 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
2773 return -1;
2774 return 0;
2777 /* Return 1 iff it is necessary to swap the operands of a commutative
2778 operation in order to canonicalize the expression. */
2781 swap_commutative_operands_p (x, y)
2782 rtx x, y;
2784 return (commutative_operand_precedence (x)
2785 < commutative_operand_precedence (y));
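/* Editor's note: a hedged sketch of how the predicate above is
   typically consumed; `canonicalize_commutative_operands' is a
   hypothetical name (the real canonicalization lives in the
   simplifiers).  It reorders *OP0 and *OP1 so that the operand with
   the higher precedence comes first.  */

static void
canonicalize_commutative_operands (op0, op1)
     rtx *op0, *op1;
{
  if (swap_commutative_operands_p (*op0, *op1))
    {
      rtx tem = *op0;
      *op0 = *op1;
      *op1 = tem;
    }
}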
2788 /* Return 1 if X is an autoincrement side effect and the register is
2789 not the stack pointer. */
2791 auto_inc_p (x)
2792 rtx x;
2794 switch (GET_CODE (x))
2796 case PRE_INC:
2797 case POST_INC:
2798 case PRE_DEC:
2799 case POST_DEC:
2800 case PRE_MODIFY:
2801 case POST_MODIFY:
2802 /* There are no REG_INC notes for SP. */
2803 if (XEXP (x, 0) != stack_pointer_rtx)
2804 return 1;
2805 default:
2806 break;
2808 return 0;
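/* Editor's note: an illustrative combination of auto_inc_p with
   for_each_rtx above; the names are hypothetical.  It reports whether X
   contains any auto-increment side effect other than on the stack
   pointer.  */

static int
contains_auto_inc_1 (x, data)
     rtx *x;
     void *data ATTRIBUTE_UNUSED;
{
  return *x != 0 && auto_inc_p (*x);
}

static int
contains_auto_inc_p (x)
     rtx x;
{
  return for_each_rtx (&x, contains_auto_inc_1, NULL) != 0;
}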
2811 /* Return 1 if the sequence of instructions beginning with FROM and up
2812 to and including TO is safe to move. If NEW_TO is non-NULL, and
2813 the sequence is not already safe to move, but can be easily
2814 extended to a sequence which is safe, then NEW_TO will point to the
2815 end of the extended sequence.
2817 For now, this function only checks that the region contains whole
2818 exception regions, but it could be extended to check additional
2819 conditions as well. */
2822 insns_safe_to_move_p (from, to, new_to)
2823 rtx from;
2824 rtx to;
2825 rtx *new_to;
2827 int eh_region_count = 0;
2828 int past_to_p = 0;
2829 rtx r = from;
2831 /* By default, assume the end of the region will be what was
2832 suggested. */
2833 if (new_to)
2834 *new_to = to;
2836 while (r)
2838 if (GET_CODE (r) == NOTE)
2840 switch (NOTE_LINE_NUMBER (r))
2842 case NOTE_INSN_EH_REGION_BEG:
2843 ++eh_region_count;
2844 break;
2846 case NOTE_INSN_EH_REGION_END:
2847 if (eh_region_count == 0)
2848 /* This sequence of instructions contains the end of
2849 an exception region, but not the beginning.  Moving
2850 it will cause chaos. */
2851 return 0;
2853 --eh_region_count;
2854 break;
2856 default:
2857 break;
2860 else if (past_to_p)
2861 /* If we've passed TO, and we see a non-note instruction, we
2862 can't extend the sequence to a movable sequence. */
2863 return 0;
2865 if (r == to)
2867 if (!new_to)
2868 /* It's OK to move the sequence if there were matched sets of
2869 exception region notes. */
2870 return eh_region_count == 0;
2872 past_to_p = 1;
2875 /* It's OK to move the sequence if there were matched sets of
2876 exception region notes. */
2877 if (past_to_p && eh_region_count == 0)
2879 *new_to = r;
2880 return 1;
2883 /* Go to the next instruction. */
2884 r = NEXT_INSN (r);
2887 return 0;
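/* Editor's note: an illustrative caller, not from the original file;
   `movable_range_end' is a hypothetical name.  It returns TO itself
   when the range [FROM, TO] is already safe to move, a later note/insn
   when the range had to be extended to cover whole exception regions,
   or NULL_RTX when the range cannot be moved at all.  */

static rtx
movable_range_end (from, to)
     rtx from, to;
{
  rtx new_to;

  if (insns_safe_to_move_p (from, to, &new_to))
    return new_to;
  return NULL_RTX;
}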
2890 /* Return nonzero if IN contains a piece of rtl whose address is LOC. */
2892 loc_mentioned_in_p (loc, in)
2893 rtx *loc, in;
2895 enum rtx_code code = GET_CODE (in);
2896 const char *fmt = GET_RTX_FORMAT (code);
2897 int i, j;
2899 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2901 if (loc == &in->fld[i].rtx)
2902 return 1;
2903 if (fmt[i] == 'e')
2905 if (loc_mentioned_in_p (loc, XEXP (in, i)))
2906 return 1;
2908 else if (fmt[i] == 'E')
2909 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
2910 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
2911 return 1;
2913 return 0;
2916 /* Given a subreg X, return the bit offset where the subreg begins
2917 (counting from the least significant bit of the reg). */
2919 unsigned int
2920 subreg_lsb (x)
2921 rtx x;
2923 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
2924 enum machine_mode mode = GET_MODE (x);
2925 unsigned int bitpos;
2926 unsigned int byte;
2927 unsigned int word;
2929 /* A paradoxical subreg begins at bit position 0. */
2930 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
2931 return 0;
2933 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
2934 /* If the subreg crosses a word boundary, ensure that
2935 it also begins and ends on a word boundary. */
2936 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
2937 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
2938 && (SUBREG_BYTE (x) % UNITS_PER_WORD
2939 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
2940 abort ();
2942 if (WORDS_BIG_ENDIAN)
2943 word = (GET_MODE_SIZE (inner_mode)
2944 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
2945 else
2946 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
2947 bitpos = word * BITS_PER_WORD;
2949 if (BYTES_BIG_ENDIAN)
2950 byte = (GET_MODE_SIZE (inner_mode)
2951 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
2952 else
2953 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
2954 bitpos += byte * BITS_PER_UNIT;
2956 return bitpos;
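/* Editor's note: a worked example under assumed target parameters
   (UNITS_PER_WORD == 4, BITS_PER_WORD == 32, 4-byte SImode inside an
   8-byte DImode).  For (subreg:SI (reg:DI N) 4):

     little endian:  word = 4 / 4 = 1, byte = 4 % 4 = 0,
                     so subreg_lsb returns 32 (the high half);
     big endian:     word = (8 - (4 + 4)) / 4 = 0, byte = 0,
                     so subreg_lsb returns 0 (the low-order bits).  */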
2959 /* This function returns the regno offset of a subreg expression.
2960 xregno - A regno of an inner hard subreg_reg (or what will become one).
2961 xmode - The mode of xregno.
2962 offset - The byte offset.
2963 ymode - The mode of a top level SUBREG (or what may become one).
2964 RETURN - The regno offset which would be used. */
2965 unsigned int
2966 subreg_regno_offset (xregno, xmode, offset, ymode)
2967 unsigned int xregno;
2968 enum machine_mode xmode;
2969 unsigned int offset;
2970 enum machine_mode ymode;
2972 int nregs_xmode, nregs_ymode;
2973 int mode_multiple, nregs_multiple;
2974 int y_offset;
2976 if (xregno >= FIRST_PSEUDO_REGISTER)
2977 abort ();
2979 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
2980 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
2981 if (offset == 0 || nregs_xmode == nregs_ymode)
2982 return 0;
2984 /* The size of ymode must not be greater than the size of xmode. */
2985 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
2986 if (mode_multiple == 0)
2987 abort ();
2989 y_offset = offset / GET_MODE_SIZE (ymode);
2990 nregs_multiple = nregs_xmode / nregs_ymode;
2991 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
2994 /* Return the final regno that a subreg expression refers to. */
2995 unsigned int
2996 subreg_regno (x)
2997 rtx x;
2999 unsigned int ret;
3000 rtx subreg = SUBREG_REG (x);
3001 int regno = REGNO (subreg);
3003 ret = regno + subreg_regno_offset (regno,
3004 GET_MODE (subreg),
3005 SUBREG_BYTE (x),
3006 GET_MODE (x));
3007 return ret;
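/* Editor's note: a worked example under assumed target parameters.
   Suppose hard registers are word-sized, HARD_REGNO_NREGS gives 2
   registers for DImode and 1 for SImode, and hard register 10 holds a
   DImode value.  For (subreg:SI (reg:DI 10) 4):

     nregs_xmode = 2, nregs_ymode = 1,
     mode_multiple = 8 / 4 = 2, y_offset = 4 / 4 = 1,
     nregs_multiple = 2 / 1 = 2,
     subreg_regno_offset = (1 / (2 / 2)) * 1 = 1,

   so subreg_regno returns hard register 11.  */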
3010 struct parms_set_data
3012 int nregs;
3013 HARD_REG_SET regs;
3016 /* Helper function for noticing stores to parameter registers. */
3017 static void
3018 parms_set (x, pat, data)
3019 rtx x, pat ATTRIBUTE_UNUSED;
3020 void *data;
3022 struct parms_set_data *d = data;
3023 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3024 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3026 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3027 d->nregs--;
3031 /* Look backward for the first parameter to be loaded.
3032 Do not skip BOUNDARY. */
3034 find_first_parameter_load (call_insn, boundary)
3035 rtx call_insn, boundary;
3037 struct parms_set_data parm;
3038 rtx p, before;
3040 /* Since different machines initialize their parameter registers
3041 in different orders, assume nothing. Collect the set of all
3042 parameter registers. */
3043 CLEAR_HARD_REG_SET (parm.regs);
3044 parm.nregs = 0;
3045 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3046 if (GET_CODE (XEXP (p, 0)) == USE
3047 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3049 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3050 abort ();
3052 /* We only care about registers which can hold function
3053 arguments. */
3054 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3055 continue;
3057 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3058 parm.nregs++;
3060 before = call_insn;
3062 /* Search backward for the first set of a register in this set. */
3063 while (parm.nregs && before != boundary)
3065 before = PREV_INSN (before);
3067 /* It is possible that some loads got CSEed from one call to
3068 another. Stop in that case. */
3069 if (GET_CODE (before) == CALL_INSN)
3070 break;
3072 /* Our caller must either ensure that we will find all sets
3073 (in case the code has not been optimized yet), or take care
3074 of possible labels by setting BOUNDARY to the preceding
3075 CODE_LABEL. */
3076 if (GET_CODE (before) == CODE_LABEL)
3078 if (before != boundary)
3079 abort ();
3080 break;
3083 if (INSN_P (before))
3084 note_stores (PATTERN (before), parms_set, &parm);
3086 return before;
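/* Editor's note: an illustrative caller, not part of the original file;
   `emit_before_call_args' is a hypothetical name.  To emit PAT so that
   it executes before the argument registers of CALL_INSN are set up,
   place it before the insn returned above.  PREV_LABEL must be the
   nearest preceding CODE_LABEL (or another safe boundary), as the
   comment above requires.  */

static void
emit_before_call_args (pat, call_insn, prev_label)
     rtx pat, call_insn, prev_label;
{
  rtx first_load = find_first_parameter_load (call_insn, prev_label);

  emit_insn_before (pat, first_load);
}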