gcc/rtlanal.c
1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "toplev.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "tm_p.h"
31 #include "flags.h"
32 #include "basic-block.h"
33 #include "real.h"
35 /* Forward declarations */
36 static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
37 static void set_of_1 PARAMS ((rtx, rtx, void *));
38 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
39 static int computed_jump_p_1 PARAMS ((rtx));
40 static void parms_set PARAMS ((rtx, rtx, void *));
41 static bool hoist_test_store PARAMS ((rtx, rtx, regset));
42 static void hoist_update_store PARAMS ((rtx, rtx *, rtx, rtx));
44 /* Bit flags that specify the machine subtype we are compiling for.
45 Bits are tested using macros TARGET_... defined in the tm.h file
46 and set by `-m...' switches. Must be defined in rtlanal.c. */
48 int target_flags;
50 /* Return 1 if the value of X is unstable
51 (would be different at a different point in the program).
52 The frame pointer, arg pointer, etc. are considered stable
53 (within one function) and so is anything marked `unchanging'. */
55 int
56 rtx_unstable_p (x)
57 rtx x;
59 RTX_CODE code = GET_CODE (x);
60 int i;
61 const char *fmt;
63 switch (code)
65 case MEM:
66 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
68 case QUEUED:
69 return 1;
71 case ADDRESSOF:
72 case CONST:
73 case CONST_INT:
74 case CONST_DOUBLE:
75 case CONST_VECTOR:
76 case SYMBOL_REF:
77 case LABEL_REF:
78 return 0;
80 case REG:
81 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
82 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
83 /* The arg pointer varies if it is not a fixed register. */
84 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
85 || RTX_UNCHANGING_P (x))
86 return 0;
87 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
88 /* ??? When call-clobbered, the value is stable modulo the restore
89 that must happen after a call. This currently screws up local-alloc
90 into believing that the restore is not needed. */
91 if (x == pic_offset_table_rtx)
92 return 0;
93 #endif
94 return 1;
96 case ASM_OPERANDS:
97 if (MEM_VOLATILE_P (x))
98 return 1;
100 /* FALLTHROUGH */
102 default:
103 break;
106 fmt = GET_RTX_FORMAT (code);
107 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
108 if (fmt[i] == 'e')
110 if (rtx_unstable_p (XEXP (x, i)))
111 return 1;
113 else if (fmt[i] == 'E')
115 int j;
116 for (j = 0; j < XVECLEN (x, i); j++)
117 if (rtx_unstable_p (XVECEXP (x, i, j)))
118 return 1;
121 return 0;
124 /* Return 1 if X has a value that can vary even between two
125 executions of the program. 0 means X can be compared reliably
126 against certain constants or near-constants.
127 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
128 zero, we are slightly more conservative.
129 The frame pointer and the arg pointer are considered constant. */
132 rtx_varies_p (x, for_alias)
133 rtx x;
134 int for_alias;
136 RTX_CODE code = GET_CODE (x);
137 int i;
138 const char *fmt;
140 switch (code)
142 case MEM:
143 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
145 case QUEUED:
146 return 1;
148 case CONST:
149 case CONST_INT:
150 case CONST_DOUBLE:
151 case CONST_VECTOR:
152 case SYMBOL_REF:
153 case LABEL_REF:
154 return 0;
156 case REG:
157 /* Note that we have to test for the actual rtx used for the frame
158 and arg pointers and not just the register number in case we have
159 eliminated the frame and/or arg pointer and are using it
160 for pseudos. */
161 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
162 /* The arg pointer varies if it is not a fixed register. */
163 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
164 return 0;
165 if (x == pic_offset_table_rtx
166 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
167 /* ??? When call-clobbered, the value is stable modulo the restore
168 that must happen after a call. This currently screws up
169 local-alloc into believing that the restore is not needed, so we
170 must return 0 only if we are called from alias analysis. */
171 && for_alias
172 #endif
174 return 0;
175 return 1;
177 case LO_SUM:
178 /* The operand 0 of a LO_SUM is considered constant
179 (in fact it is related specifically to operand 1)
180 during alias analysis. */
181 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
182 || rtx_varies_p (XEXP (x, 1), for_alias);
184 case ASM_OPERANDS:
185 if (MEM_VOLATILE_P (x))
186 return 1;
188 /* FALLTHROUGH */
190 default:
191 break;
194 fmt = GET_RTX_FORMAT (code);
195 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
196 if (fmt[i] == 'e')
198 if (rtx_varies_p (XEXP (x, i), for_alias))
199 return 1;
201 else if (fmt[i] == 'E')
203 int j;
204 for (j = 0; j < XVECLEN (x, i); j++)
205 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
206 return 1;
209 return 0;
212 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
215 rtx_addr_can_trap_p (x)
216 rtx x;
218 enum rtx_code code = GET_CODE (x);
220 switch (code)
222 case SYMBOL_REF:
223 return SYMBOL_REF_WEAK (x);
225 case LABEL_REF:
226 return 0;
228 case REG:
229 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
230 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
231 || x == stack_pointer_rtx
232 /* The arg pointer varies if it is not a fixed register. */
233 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
234 return 0;
235 /* All of the virtual frame registers are stack references. */
236 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
237 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
238 return 0;
239 return 1;
241 case CONST:
242 return rtx_addr_can_trap_p (XEXP (x, 0));
244 case PLUS:
245 /* An address is assumed not to trap if it is an address that can't
246 trap plus a constant integer or it is the pic register plus a
247 constant. */
248 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
249 && GET_CODE (XEXP (x, 1)) == CONST_INT)
250 || (XEXP (x, 0) == pic_offset_table_rtx
251 && CONSTANT_P (XEXP (x, 1))));
253 case LO_SUM:
254 case PRE_MODIFY:
255 return rtx_addr_can_trap_p (XEXP (x, 1));
257 case PRE_DEC:
258 case PRE_INC:
259 case POST_DEC:
260 case POST_INC:
261 case POST_MODIFY:
262 return rtx_addr_can_trap_p (XEXP (x, 0));
264 default:
265 break;
268 /* If it isn't one of the cases above, it can cause a trap. */
269 return 1;
272 /* Return 1 if X refers to a memory location whose address
273 cannot be compared reliably with constant addresses,
274 or if X refers to a BLKmode memory object.
275 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
276 zero, we are slightly more conservative. */
279 rtx_addr_varies_p (x, for_alias)
280 rtx x;
281 int for_alias;
283 enum rtx_code code;
284 int i;
285 const char *fmt;
287 if (x == 0)
288 return 0;
290 code = GET_CODE (x);
291 if (code == MEM)
292 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
294 fmt = GET_RTX_FORMAT (code);
295 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
296 if (fmt[i] == 'e')
298 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
299 return 1;
301 else if (fmt[i] == 'E')
303 int j;
304 for (j = 0; j < XVECLEN (x, i); j++)
305 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
306 return 1;
308 return 0;
311 /* Return the value of the integer term in X, if one is apparent;
312 otherwise return 0.
313 Only obvious integer terms are detected.
314 This is used in cse.c with the `related_value' field. */
316 HOST_WIDE_INT
317 get_integer_term (x)
318 rtx x;
320 if (GET_CODE (x) == CONST)
321 x = XEXP (x, 0);
323 if (GET_CODE (x) == MINUS
324 && GET_CODE (XEXP (x, 1)) == CONST_INT)
325 return - INTVAL (XEXP (x, 1));
326 if (GET_CODE (x) == PLUS
327 && GET_CODE (XEXP (x, 1)) == CONST_INT)
328 return INTVAL (XEXP (x, 1));
329 return 0;
332 /* If X is a constant, return the value sans apparent integer term;
333 otherwise return 0.
334 Only obvious integer terms are detected. */
337 get_related_value (x)
338 rtx x;
340 if (GET_CODE (x) != CONST)
341 return 0;
342 x = XEXP (x, 0);
343 if (GET_CODE (x) == PLUS
344 && GET_CODE (XEXP (x, 1)) == CONST_INT)
345 return XEXP (x, 0);
346 else if (GET_CODE (x) == MINUS
347 && GET_CODE (XEXP (x, 1)) == CONST_INT)
348 return XEXP (x, 0);
349 return 0;
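/* Illustrative sketch, not part of the original file: for an address of the
   form (const (plus (symbol_ref "x") (const_int 8))), these two routines
   split the constant into its symbolic part and its integer offset, which is
   how cse.c ties together constants that differ only by an offset.  The
   variable name below is hypothetical.

     rtx addr = gen_rtx_CONST (Pmode,
                               gen_rtx_PLUS (Pmode,
                                             gen_rtx_SYMBOL_REF (Pmode, "x"),
                                             GEN_INT (8)));

   get_integer_term (addr) returns 8, and get_related_value (addr) returns
   the first operand of the inner PLUS, i.e. the SYMBOL_REF.  */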
352 /* Given a tablejump insn INSN, return the RTL expression for the offset
353 into the jump table. If the offset cannot be determined, then return
354 NULL_RTX.
356 If EARLIEST is nonzero, it is a pointer to a place where the earliest
357 insn used in locating the offset was found. */
360 get_jump_table_offset (insn, earliest)
361 rtx insn;
362 rtx *earliest;
364 rtx label;
365 rtx table;
366 rtx set;
367 rtx old_insn;
368 rtx x;
369 rtx old_x;
370 rtx y;
371 rtx old_y;
372 int i;
374 if (GET_CODE (insn) != JUMP_INSN
375 || ! (label = JUMP_LABEL (insn))
376 || ! (table = NEXT_INSN (label))
377 || GET_CODE (table) != JUMP_INSN
378 || (GET_CODE (PATTERN (table)) != ADDR_VEC
379 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
380 || ! (set = single_set (insn)))
381 return NULL_RTX;
383 x = SET_SRC (set);
385 /* Some targets (e.g., ARM) emit a tablejump that also
386 contains the out-of-range target. */
387 if (GET_CODE (x) == IF_THEN_ELSE
388 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
389 x = XEXP (x, 1);
391 /* Search backwards and locate the expression stored in X. */
392 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
393 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
396 /* If X is an expression using a relative address then strip
397 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
398 or the jump table label. */
399 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
400 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
402 for (i = 0; i < 2; i++)
404 old_insn = insn;
405 y = XEXP (x, i);
407 if (y == pc_rtx || y == pic_offset_table_rtx)
408 break;
410 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
411 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
414 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
415 break;
418 if (i >= 2)
419 return NULL_RTX;
421 x = XEXP (x, 1 - i);
423 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
424 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
428 /* Strip off any sign or zero extension. */
429 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
431 x = XEXP (x, 0);
433 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
434 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
438 /* If X isn't a MEM then this isn't a tablejump we understand. */
439 if (GET_CODE (x) != MEM)
440 return NULL_RTX;
442 /* Strip off the MEM. */
443 x = XEXP (x, 0);
445 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
446 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
449 /* If X isn't a PLUS, then this isn't a tablejump we understand. */
450 if (GET_CODE (x) != PLUS)
451 return NULL_RTX;
453 /* At this point we should have an expression representing the jump table
454 plus an offset. Examine each operand in order to determine which one
455 represents the jump table. Knowing that tells us that the other operand
456 must represent the offset. */
457 for (i = 0; i < 2; i++)
459 old_insn = insn;
460 y = XEXP (x, i);
462 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
463 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
466 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
467 && reg_mentioned_p (label, y))
468 break;
471 if (i >= 2)
472 return NULL_RTX;
474 x = XEXP (x, 1 - i);
476 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
477 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
478 for (i = 0; i < 2; i++)
479 if (XEXP (x, i) == pic_offset_table_rtx)
481 x = XEXP (x, 1 - i);
482 break;
485 if (earliest)
486 *earliest = insn;
488 /* Return the RTL expression representing the offset. */
489 return x;
492 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
493 a global register. */
495 static int
496 global_reg_mentioned_p_1 (loc, data)
497 rtx *loc;
498 void *data ATTRIBUTE_UNUSED;
500 int regno;
501 rtx x = *loc;
503 if (! x)
504 return 0;
506 switch (GET_CODE (x))
508 case SUBREG:
509 if (GET_CODE (SUBREG_REG (x)) == REG)
511 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
512 && global_regs[subreg_regno (x)])
513 return 1;
514 return 0;
516 break;
518 case REG:
519 regno = REGNO (x);
520 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
521 return 1;
522 return 0;
524 case SCRATCH:
525 case PC:
526 case CC0:
527 case CONST_INT:
528 case CONST_DOUBLE:
529 case CONST:
530 case LABEL_REF:
531 return 0;
533 case CALL:
534 /* A non-constant call might use a global register. */
535 return 1;
537 default:
538 break;
541 return 0;
544 /* Returns nonzero if X mentions a global register. */
547 global_reg_mentioned_p (x)
548 rtx x;
551 if (INSN_P (x))
553 if (GET_CODE (x) == CALL_INSN)
555 if (! CONST_OR_PURE_CALL_P (x))
556 return 1;
557 x = CALL_INSN_FUNCTION_USAGE (x);
558 if (x == 0)
559 return 0;
561 else
562 x = PATTERN (x);
565 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
568 /* Return the number of places FIND appears within X. If COUNT_DEST is
569 zero, we do not count occurrences inside the destination of a SET. */
572 count_occurrences (x, find, count_dest)
573 rtx x, find;
574 int count_dest;
576 int i, j;
577 enum rtx_code code;
578 const char *format_ptr;
579 int count;
581 if (x == find)
582 return 1;
584 code = GET_CODE (x);
586 switch (code)
588 case REG:
589 case CONST_INT:
590 case CONST_DOUBLE:
591 case CONST_VECTOR:
592 case SYMBOL_REF:
593 case CODE_LABEL:
594 case PC:
595 case CC0:
596 return 0;
598 case MEM:
599 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
600 return 1;
601 break;
603 case SET:
604 if (SET_DEST (x) == find && ! count_dest)
605 return count_occurrences (SET_SRC (x), find, count_dest);
606 break;
608 default:
609 break;
612 format_ptr = GET_RTX_FORMAT (code);
613 count = 0;
615 for (i = 0; i < GET_RTX_LENGTH (code); i++)
617 switch (*format_ptr++)
619 case 'e':
620 count += count_occurrences (XEXP (x, i), find, count_dest);
621 break;
623 case 'E':
624 for (j = 0; j < XVECLEN (x, i); j++)
625 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
626 break;
629 return count;
632 /* Nonzero if register REG appears somewhere within IN.
633 Also works if REG is not a register; in this case it checks
634 for a subexpression of IN that is Lisp "equal" to REG. */
637 reg_mentioned_p (reg, in)
638 rtx reg, in;
640 const char *fmt;
641 int i;
642 enum rtx_code code;
644 if (in == 0)
645 return 0;
647 if (reg == in)
648 return 1;
650 if (GET_CODE (in) == LABEL_REF)
651 return reg == XEXP (in, 0);
653 code = GET_CODE (in);
655 switch (code)
657 /* Compare registers by number. */
658 case REG:
659 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
661 /* These codes have no constituent expressions
662 and are unique. */
663 case SCRATCH:
664 case CC0:
665 case PC:
666 return 0;
668 case CONST_INT:
669 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
671 case CONST_VECTOR:
672 case CONST_DOUBLE:
673 /* These are kept unique for a given value. */
674 return 0;
676 default:
677 break;
680 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
681 return 1;
683 fmt = GET_RTX_FORMAT (code);
685 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
687 if (fmt[i] == 'E')
689 int j;
690 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
691 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
692 return 1;
694 else if (fmt[i] == 'e'
695 && reg_mentioned_p (reg, XEXP (in, i)))
696 return 1;
698 return 0;
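/* Illustrative sketch, not part of the original file: a pass that wants to
   know whether a candidate register is read or written anywhere in an insn
   usually just tests the insn's pattern.  The variable names are
   hypothetical.

     if (INSN_P (insn) && reg_mentioned_p (reg, PATTERN (insn)))
       ... INSN uses or sets REG somewhere ...  */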
701 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
702 no CODE_LABEL insn. */
705 no_labels_between_p (beg, end)
706 rtx beg, end;
708 rtx p;
709 if (beg == end)
710 return 0;
711 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
712 if (GET_CODE (p) == CODE_LABEL)
713 return 0;
714 return 1;
717 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
718 no JUMP_INSN insn. */
721 no_jumps_between_p (beg, end)
722 rtx beg, end;
724 rtx p;
725 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
726 if (GET_CODE (p) == JUMP_INSN)
727 return 0;
728 return 1;
731 /* Nonzero if register REG is used in an insn between
732 FROM_INSN and TO_INSN (exclusive of those two). */
735 reg_used_between_p (reg, from_insn, to_insn)
736 rtx reg, from_insn, to_insn;
738 rtx insn;
740 if (from_insn == to_insn)
741 return 0;
743 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
744 if (INSN_P (insn)
745 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
746 || (GET_CODE (insn) == CALL_INSN
747 && (find_reg_fusage (insn, USE, reg)
748 || find_reg_fusage (insn, CLOBBER, reg)))))
749 return 1;
750 return 0;
753 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
754 is entirely replaced by a new value and the only use is as a SET_DEST,
755 we do not consider it a reference. */
758 reg_referenced_p (x, body)
759 rtx x;
760 rtx body;
762 int i;
764 switch (GET_CODE (body))
766 case SET:
767 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
768 return 1;
770 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
771 of a REG that occupies all of the REG, the insn references X if
772 it is mentioned in the destination. */
773 if (GET_CODE (SET_DEST (body)) != CC0
774 && GET_CODE (SET_DEST (body)) != PC
775 && GET_CODE (SET_DEST (body)) != REG
776 && ! (GET_CODE (SET_DEST (body)) == SUBREG
777 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
778 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
779 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
780 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
781 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
782 && reg_overlap_mentioned_p (x, SET_DEST (body)))
783 return 1;
784 return 0;
786 case ASM_OPERANDS:
787 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
788 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
789 return 1;
790 return 0;
792 case CALL:
793 case USE:
794 case IF_THEN_ELSE:
795 return reg_overlap_mentioned_p (x, body);
797 case TRAP_IF:
798 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
800 case PREFETCH:
801 return reg_overlap_mentioned_p (x, XEXP (body, 0));
803 case UNSPEC:
804 case UNSPEC_VOLATILE:
805 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
806 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
807 return 1;
808 return 0;
810 case PARALLEL:
811 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
812 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
813 return 1;
814 return 0;
816 case CLOBBER:
817 if (GET_CODE (XEXP (body, 0)) == MEM)
818 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
819 return 1;
820 return 0;
822 case COND_EXEC:
823 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
824 return 1;
825 return reg_referenced_p (x, COND_EXEC_CODE (body));
827 default:
828 return 0;
832 /* Nonzero if register REG is referenced in an insn between
833 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
834 not count. */
837 reg_referenced_between_p (reg, from_insn, to_insn)
838 rtx reg, from_insn, to_insn;
840 rtx insn;
842 if (from_insn == to_insn)
843 return 0;
845 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
846 if (INSN_P (insn)
847 && (reg_referenced_p (reg, PATTERN (insn))
848 || (GET_CODE (insn) == CALL_INSN
849 && find_reg_fusage (insn, USE, reg))))
850 return 1;
851 return 0;
854 /* Nonzero if register REG is set or clobbered in an insn between
855 FROM_INSN and TO_INSN (exclusive of those two). */
858 reg_set_between_p (reg, from_insn, to_insn)
859 rtx reg, from_insn, to_insn;
861 rtx insn;
863 if (from_insn == to_insn)
864 return 0;
866 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
867 if (INSN_P (insn) && reg_set_p (reg, insn))
868 return 1;
869 return 0;
872 /* Internals of reg_set_between_p. */
874 reg_set_p (reg, insn)
875 rtx reg, insn;
877 rtx body = insn;
879 /* We can be passed an insn or part of one. If we are passed an insn,
880 check if a side-effect of the insn clobbers REG. */
881 if (INSN_P (insn))
883 if (FIND_REG_INC_NOTE (insn, reg)
884 || (GET_CODE (insn) == CALL_INSN
885 /* We'd like to test call_used_regs here, but rtlanal.c can't
886 reference that variable due to its use in genattrtab. So
887 we'll just be more conservative.
889 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
890 information holds all clobbered registers. */
891 && ((GET_CODE (reg) == REG
892 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
893 || GET_CODE (reg) == MEM
894 || find_reg_fusage (insn, CLOBBER, reg))))
895 return 1;
897 body = PATTERN (insn);
900 return set_of (reg, insn) != NULL_RTX;
903 /* Similar to reg_set_between_p, but check all registers in X. Return 0
904 only if none of them are modified between START and END. Do not
905 consider non-registers one way or the other. */
908 regs_set_between_p (x, start, end)
909 rtx x;
910 rtx start, end;
912 enum rtx_code code = GET_CODE (x);
913 const char *fmt;
914 int i, j;
916 switch (code)
918 case CONST_INT:
919 case CONST_DOUBLE:
920 case CONST_VECTOR:
921 case CONST:
922 case SYMBOL_REF:
923 case LABEL_REF:
924 case PC:
925 case CC0:
926 return 0;
928 case REG:
929 return reg_set_between_p (x, start, end);
931 default:
932 break;
935 fmt = GET_RTX_FORMAT (code);
936 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
938 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
939 return 1;
941 else if (fmt[i] == 'E')
942 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
943 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
944 return 1;
947 return 0;
950 /* Similar to reg_set_between_p, but check all registers in X. Return 0
951 only if none of them are modified between START and END. Return 1 if
952 X contains a MEM; this routine does not perform any memory aliasing. */
955 modified_between_p (x, start, end)
956 rtx x;
957 rtx start, end;
959 enum rtx_code code = GET_CODE (x);
960 const char *fmt;
961 int i, j;
963 switch (code)
965 case CONST_INT:
966 case CONST_DOUBLE:
967 case CONST_VECTOR:
968 case CONST:
969 case SYMBOL_REF:
970 case LABEL_REF:
971 return 0;
973 case PC:
974 case CC0:
975 return 1;
977 case MEM:
978 /* If the memory is not constant, assume it is modified. If it is
979 constant, we still have to check the address. */
980 if (! RTX_UNCHANGING_P (x))
981 return 1;
982 break;
984 case REG:
985 return reg_set_between_p (x, start, end);
987 default:
988 break;
991 fmt = GET_RTX_FORMAT (code);
992 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
994 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
995 return 1;
997 else if (fmt[i] == 'E')
998 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
999 if (modified_between_p (XVECEXP (x, i, j), start, end))
1000 return 1;
1003 return 0;
1006 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1007 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1008 does not perform any memory aliasing. */
1011 modified_in_p (x, insn)
1012 rtx x;
1013 rtx insn;
1015 enum rtx_code code = GET_CODE (x);
1016 const char *fmt;
1017 int i, j;
1019 switch (code)
1021 case CONST_INT:
1022 case CONST_DOUBLE:
1023 case CONST_VECTOR:
1024 case CONST:
1025 case SYMBOL_REF:
1026 case LABEL_REF:
1027 return 0;
1029 case PC:
1030 case CC0:
1031 return 1;
1033 case MEM:
1034 /* If the memory is not constant, assume it is modified. If it is
1035 constant, we still have to check the address. */
1036 if (! RTX_UNCHANGING_P (x))
1037 return 1;
1038 break;
1040 case REG:
1041 return reg_set_p (x, insn);
1043 default:
1044 break;
1047 fmt = GET_RTX_FORMAT (code);
1048 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1050 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1051 return 1;
1053 else if (fmt[i] == 'E')
1054 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1055 if (modified_in_p (XVECEXP (x, i, j), insn))
1056 return 1;
1059 return 0;
1062 /* Return true if anything in insn X is (anti,output,true) dependent on
1063 anything in insn Y. */
1066 insn_dependent_p (x, y)
1067 rtx x, y;
1069 rtx tmp;
1071 if (! INSN_P (x) || ! INSN_P (y))
1072 abort ();
1074 tmp = PATTERN (y);
1075 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1076 if (tmp == NULL_RTX)
1077 return 1;
1079 tmp = PATTERN (x);
1080 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1081 if (tmp == NULL_RTX)
1082 return 1;
1084 return 0;
1087 /* A helper routine for insn_dependent_p called through note_stores. */
1089 static void
1090 insn_dependent_p_1 (x, pat, data)
1091 rtx x;
1092 rtx pat ATTRIBUTE_UNUSED;
1093 void *data;
1095 rtx * pinsn = (rtx *) data;
1097 if (*pinsn && reg_mentioned_p (x, *pinsn))
1098 *pinsn = NULL_RTX;
1101 /* Helper function for set_of. */
1102 struct set_of_data
1104 rtx found;
1105 rtx pat;
1108 static void
1109 set_of_1 (x, pat, data1)
1110 rtx x;
1111 rtx pat;
1112 void *data1;
1114 struct set_of_data *data = (struct set_of_data *) (data1);
1115 if (rtx_equal_p (x, data->pat)
1116 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1117 data->found = pat;
1120 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1121 (either directly or via STRICT_LOW_PART and similar modifiers). */
1123 set_of (pat, insn)
1124 rtx pat, insn;
1126 struct set_of_data data;
1127 data.found = NULL_RTX;
1128 data.pat = pat;
1129 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1130 return data.found;
1133 /* Given an INSN, return a SET expression if this insn has only a single SET.
1134 It may also have CLOBBERs, USEs, or SETs whose output
1135 will not be used, which we ignore. */
1138 single_set_2 (insn, pat)
1139 rtx insn, pat;
1141 rtx set = NULL;
1142 int set_verified = 1;
1143 int i;
1145 if (GET_CODE (pat) == PARALLEL)
1147 for (i = 0; i < XVECLEN (pat, 0); i++)
1149 rtx sub = XVECEXP (pat, 0, i);
1150 switch (GET_CODE (sub))
1152 case USE:
1153 case CLOBBER:
1154 break;
1156 case SET:
1157 /* We can consider insns having multiple sets, where all
1158 but one are dead, as single set insns. In the common case
1159 only a single set is present in the pattern, so we want
1160 to avoid checking for REG_UNUSED notes unless necessary.
1162 When we reach a set for the first time, we just expect it is
1163 the single set we are looking for; only when more
1164 sets are found in the insn do we check them. */
1165 if (!set_verified)
1167 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1168 && !side_effects_p (set))
1169 set = NULL;
1170 else
1171 set_verified = 1;
1173 if (!set)
1174 set = sub, set_verified = 0;
1175 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1176 || side_effects_p (sub))
1177 return NULL_RTX;
1178 break;
1180 default:
1181 return NULL_RTX;
1185 return set;
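/* Illustrative sketch, not part of the original file: most callers reach
   this function through the single_set wrapper in rtl.h and then read the
   two halves of the SET.  The variable names are hypothetical.

     rtx set = single_set (insn);
     if (set != 0)
       {
         rtx src  = SET_SRC (set);
         rtx dest = SET_DEST (set);
         ...
       }  */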
1188 /* Given an INSN, return nonzero if it has more than one SET, else return
1189 zero. */
1192 multiple_sets (insn)
1193 rtx insn;
1195 int found;
1196 int i;
1198 /* INSN must be an insn. */
1199 if (! INSN_P (insn))
1200 return 0;
1202 /* Only a PARALLEL can have multiple SETs. */
1203 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1205 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1206 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1208 /* If we have already found a SET, then return now. */
1209 if (found)
1210 return 1;
1211 else
1212 found = 1;
1216 /* Either zero or one SET. */
1217 return 0;
1220 /* Return nonzero if the destination of SET equals the source
1221 and there are no side effects. */
1224 set_noop_p (set)
1225 rtx set;
1227 rtx src = SET_SRC (set);
1228 rtx dst = SET_DEST (set);
1230 if (side_effects_p (src) || side_effects_p (dst))
1231 return 0;
1233 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1234 return rtx_equal_p (dst, src);
1236 if (dst == pc_rtx && src == pc_rtx)
1237 return 1;
1239 if (GET_CODE (dst) == SIGN_EXTRACT
1240 || GET_CODE (dst) == ZERO_EXTRACT)
1241 return rtx_equal_p (XEXP (dst, 0), src)
1242 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1244 if (GET_CODE (dst) == STRICT_LOW_PART)
1245 dst = XEXP (dst, 0);
1247 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1249 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1250 return 0;
1251 src = SUBREG_REG (src);
1252 dst = SUBREG_REG (dst);
1255 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1256 && REGNO (src) == REGNO (dst));
1259 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1260 value to itself. */
1263 noop_move_p (insn)
1264 rtx insn;
1266 rtx pat = PATTERN (insn);
1268 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1269 return 1;
1271 /* Insns carrying these notes are useful later on. */
1272 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1273 return 0;
1275 /* For now treat an insn with a REG_RETVAL note as a
1276 special insn which should not be considered a no-op. */
1277 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1278 return 0;
1280 if (GET_CODE (pat) == SET && set_noop_p (pat))
1281 return 1;
1283 if (GET_CODE (pat) == PARALLEL)
1285 int i;
1286 /* If nothing but SETs of registers to themselves,
1287 this insn can also be deleted. */
1288 for (i = 0; i < XVECLEN (pat, 0); i++)
1290 rtx tem = XVECEXP (pat, 0, i);
1292 if (GET_CODE (tem) == USE
1293 || GET_CODE (tem) == CLOBBER)
1294 continue;
1296 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1297 return 0;
1300 return 1;
1302 return 0;
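/* Illustrative sketch, not part of the original file: cleanup passes
   typically delete insns that this predicate accepts.  Calling delete_insn
   here is an assumption about the caller, not something this file does.

     if (INSN_P (insn) && noop_move_p (insn))
       delete_insn (insn);  */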
1306 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1307 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1308 If the object was modified, if we hit a partial assignment to X, or hit a
1309 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1310 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1311 be the src. */
1314 find_last_value (x, pinsn, valid_to, allow_hwreg)
1315 rtx x;
1316 rtx *pinsn;
1317 rtx valid_to;
1318 int allow_hwreg;
1320 rtx p;
1322 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1323 p = PREV_INSN (p))
1324 if (INSN_P (p))
1326 rtx set = single_set (p);
1327 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1329 if (set && rtx_equal_p (x, SET_DEST (set)))
1331 rtx src = SET_SRC (set);
1333 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1334 src = XEXP (note, 0);
1336 if ((valid_to == NULL_RTX
1337 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1338 /* Reject hard registers because we don't usually want
1339 to use them; we'd rather use a pseudo. */
1340 && (! (GET_CODE (src) == REG
1341 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1343 *pinsn = p;
1344 return src;
1348 /* If set in a non-simple way, we don't have a value. */
1349 if (reg_set_p (x, p))
1350 break;
1353 return x;
1356 /* Return nonzero if register in range [REGNO, ENDREGNO)
1357 appears either explicitly or implicitly in X
1358 other than being stored into.
1360 References contained within the substructure at LOC do not count.
1361 LOC may be zero, meaning don't ignore anything. */
1364 refers_to_regno_p (regno, endregno, x, loc)
1365 unsigned int regno, endregno;
1366 rtx x;
1367 rtx *loc;
1369 int i;
1370 unsigned int x_regno;
1371 RTX_CODE code;
1372 const char *fmt;
1374 repeat:
1375 /* The contents of a REG_NONNEG note are always zero, so we must come here
1376 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1377 if (x == 0)
1378 return 0;
1380 code = GET_CODE (x);
1382 switch (code)
1384 case REG:
1385 x_regno = REGNO (x);
1387 /* If we are modifying the stack, frame, or argument pointer, it will
1388 clobber a virtual register. In fact, we could be more precise,
1389 but it isn't worth it. */
1390 if ((x_regno == STACK_POINTER_REGNUM
1391 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1392 || x_regno == ARG_POINTER_REGNUM
1393 #endif
1394 || x_regno == FRAME_POINTER_REGNUM)
1395 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1396 return 1;
1398 return (endregno > x_regno
1399 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1400 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1401 : 1));
1403 case SUBREG:
1404 /* If this is a SUBREG of a hard reg, we can see exactly which
1405 registers are being modified. Otherwise, handle normally. */
1406 if (GET_CODE (SUBREG_REG (x)) == REG
1407 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1409 unsigned int inner_regno = subreg_regno (x);
1410 unsigned int inner_endregno
1411 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1412 ? HARD_REGNO_NREGS (inner_regno, GET_MODE (x)) : 1);
1414 return endregno > inner_regno && regno < inner_endregno;
1416 break;
1418 case CLOBBER:
1419 case SET:
1420 if (&SET_DEST (x) != loc
1421 /* Note setting a SUBREG counts as referring to the REG it is in for
1422 a pseudo but not for hard registers since we can
1423 treat each word individually. */
1424 && ((GET_CODE (SET_DEST (x)) == SUBREG
1425 && loc != &SUBREG_REG (SET_DEST (x))
1426 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1427 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1428 && refers_to_regno_p (regno, endregno,
1429 SUBREG_REG (SET_DEST (x)), loc))
1430 || (GET_CODE (SET_DEST (x)) != REG
1431 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1432 return 1;
1434 if (code == CLOBBER || loc == &SET_SRC (x))
1435 return 0;
1436 x = SET_SRC (x);
1437 goto repeat;
1439 default:
1440 break;
1443 /* X does not match, so try its subexpressions. */
1445 fmt = GET_RTX_FORMAT (code);
1446 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1448 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1450 if (i == 0)
1452 x = XEXP (x, 0);
1453 goto repeat;
1455 else
1456 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1457 return 1;
1459 else if (fmt[i] == 'E')
1461 int j;
1462 for (j = XVECLEN (x, i) - 1; j >=0; j--)
1463 if (loc != &XVECEXP (x, i, j)
1464 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1465 return 1;
1468 return 0;
1471 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1472 we check if any register number in X conflicts with the relevant register
1473 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1474 contains a MEM (we don't bother checking for memory addresses that can't
1475 conflict because we expect this to be a rare case). */
1478 reg_overlap_mentioned_p (x, in)
1479 rtx x, in;
1481 unsigned int regno, endregno;
1483 /* Overly conservative. */
1484 if (GET_CODE (x) == STRICT_LOW_PART)
1485 x = XEXP (x, 0);
1487 /* If either argument is a constant, then modifying X can not affect IN. */
1488 if (CONSTANT_P (x) || CONSTANT_P (in))
1489 return 0;
1491 switch (GET_CODE (x))
1493 case SUBREG:
1494 regno = REGNO (SUBREG_REG (x));
1495 if (regno < FIRST_PSEUDO_REGISTER)
1496 regno = subreg_regno (x);
1497 goto do_reg;
1499 case REG:
1500 regno = REGNO (x);
1501 do_reg:
1502 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1503 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1504 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1506 case MEM:
1508 const char *fmt;
1509 int i;
1511 if (GET_CODE (in) == MEM)
1512 return 1;
1514 fmt = GET_RTX_FORMAT (GET_CODE (in));
1515 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1516 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1517 return 1;
1519 return 0;
1522 case SCRATCH:
1523 case PC:
1524 case CC0:
1525 return reg_mentioned_p (x, in);
1527 case PARALLEL:
1529 int i;
1531 /* If any register in here refers to it we return true. */
1532 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1533 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1534 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1535 return 1;
1536 return 0;
1539 default:
1540 break;
1543 abort ();
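/* Illustrative sketch, not part of the original file: unlike reg_mentioned_p,
   this test also catches partial overlaps, e.g. a multi-word hard register
   against one of its component registers.  A typical guard before swapping
   two insns (names hypothetical):

     if (! reg_overlap_mentioned_p (SET_DEST (set1), PATTERN (insn2))
         && ! reg_overlap_mentioned_p (SET_DEST (set2), PATTERN (insn1)))
       ... no register dependence prevents reordering INSN1 and INSN2 ...  */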
1546 /* Return the last value to which REG was set prior to INSN. If we can't
1547 find it easily, return 0.
1549 We only return a REG, SUBREG, or constant because it is too hard to
1550 check if a MEM remains unchanged. */
1553 reg_set_last (x, insn)
1554 rtx x;
1555 rtx insn;
1557 rtx orig_insn = insn;
1559 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1560 Stop when we reach a label or X is a hard reg and we reach a
1561 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1563 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1565 /* We compare with <= here, because reg_set_last_last_regno
1566 is actually the number of the first reg *not* in X. */
1567 for (;
1568 insn && GET_CODE (insn) != CODE_LABEL
1569 && ! (GET_CODE (insn) == CALL_INSN
1570 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1571 insn = PREV_INSN (insn))
1572 if (INSN_P (insn))
1574 rtx set = set_of (x, insn);
1575 /* OK, this insn modifies our register. See if we understand it. */
1576 if (set)
1578 rtx last_value;
1579 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1580 return 0;
1581 last_value = SET_SRC (set);
1582 if (CONSTANT_P (last_value)
1583 || ((GET_CODE (last_value) == REG
1584 || GET_CODE (last_value) == SUBREG)
1585 && ! reg_set_between_p (last_value,
1586 insn, orig_insn)))
1587 return last_value;
1588 else
1589 return 0;
1593 return 0;
1596 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1597 (X would be the pattern of an insn).
1598 FUN receives two arguments:
1599 the REG, MEM, CC0 or PC being stored in or clobbered,
1600 the SET or CLOBBER rtx that does the store.
1602 If the item being stored in or clobbered is a SUBREG of a hard register,
1603 the SUBREG will be passed. */
1605 void
1606 note_stores (x, fun, data)
1607 rtx x;
1608 void (*fun) PARAMS ((rtx, rtx, void *));
1609 void *data;
1611 int i;
1613 if (GET_CODE (x) == COND_EXEC)
1614 x = COND_EXEC_CODE (x);
1616 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1618 rtx dest = SET_DEST (x);
1620 while ((GET_CODE (dest) == SUBREG
1621 && (GET_CODE (SUBREG_REG (dest)) != REG
1622 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1623 || GET_CODE (dest) == ZERO_EXTRACT
1624 || GET_CODE (dest) == SIGN_EXTRACT
1625 || GET_CODE (dest) == STRICT_LOW_PART)
1626 dest = XEXP (dest, 0);
1628 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1629 each of whose first operand is a register. */
1630 if (GET_CODE (dest) == PARALLEL)
1632 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1633 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1634 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1636 else
1637 (*fun) (dest, x, data);
1640 else if (GET_CODE (x) == PARALLEL)
1641 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1642 note_stores (XVECEXP (x, 0, i), fun, data);
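/* Illustrative sketch, not part of the original file: a typical caller hands
   note_stores a small callback, in the same shape as set_of_1 above, to
   observe every store in an insn.  The callback name and the regset passed
   through DATA are hypothetical.

     static void
     record_hard_reg_store (dest, setter, data)
          rtx dest;
          rtx setter ATTRIBUTE_UNUSED;
          void *data;
     {
       regset stored = (regset) data;
       if (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
         SET_REGNO_REG_SET (stored, REGNO (dest));
     }

     ...
     note_stores (PATTERN (insn), record_hard_reg_store, (void *) stored);  */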
1645 /* Like note_stores, but call FUN for each expression that is being
1646 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1647 FUN for each expression, not any interior subexpressions. FUN receives a
1648 pointer to the expression and the DATA passed to this function.
1650 Note that this is not quite the same test as that done in reg_referenced_p
1651 since that considers something as being referenced if it is being
1652 partially set, while we do not. */
1654 void
1655 note_uses (pbody, fun, data)
1656 rtx *pbody;
1657 void (*fun) PARAMS ((rtx *, void *));
1658 void *data;
1660 rtx body = *pbody;
1661 int i;
1663 switch (GET_CODE (body))
1665 case COND_EXEC:
1666 (*fun) (&COND_EXEC_TEST (body), data);
1667 note_uses (&COND_EXEC_CODE (body), fun, data);
1668 return;
1670 case PARALLEL:
1671 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1672 note_uses (&XVECEXP (body, 0, i), fun, data);
1673 return;
1675 case USE:
1676 (*fun) (&XEXP (body, 0), data);
1677 return;
1679 case ASM_OPERANDS:
1680 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1681 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1682 return;
1684 case TRAP_IF:
1685 (*fun) (&TRAP_CONDITION (body), data);
1686 return;
1688 case PREFETCH:
1689 (*fun) (&XEXP (body, 0), data);
1690 return;
1692 case UNSPEC:
1693 case UNSPEC_VOLATILE:
1694 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1695 (*fun) (&XVECEXP (body, 0, i), data);
1696 return;
1698 case CLOBBER:
1699 if (GET_CODE (XEXP (body, 0)) == MEM)
1700 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1701 return;
1703 case SET:
1705 rtx dest = SET_DEST (body);
1707 /* For SETs we call FUN on everything in the source, plus registers in a
1708 memory expression in the destination, and the operands of a ZERO_EXTRACT. */
1709 (*fun) (&SET_SRC (body), data);
1711 if (GET_CODE (dest) == ZERO_EXTRACT)
1713 (*fun) (&XEXP (dest, 1), data);
1714 (*fun) (&XEXP (dest, 2), data);
1717 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1718 dest = XEXP (dest, 0);
1720 if (GET_CODE (dest) == MEM)
1721 (*fun) (&XEXP (dest, 0), data);
1723 return;
1725 default:
1726 /* All the other possibilities never store. */
1727 (*fun) (pbody, data);
1728 return;
1732 /* Return nonzero if X's old contents don't survive after INSN.
1733 This will be true if X is (cc0) or if X is a register and
1734 X dies in INSN or because INSN entirely sets X.
1736 "Entirely set" means set directly and not through a SUBREG,
1737 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1738 Likewise, REG_INC does not count.
1740 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1741 but for this use that makes no difference, since regs don't overlap
1742 during their lifetimes. Therefore, this function may be used
1743 at any time after deaths have been computed (in flow.c).
1745 If REG is a hard reg that occupies multiple machine registers, this
1746 function will only return 1 if each of those registers will be replaced
1747 by INSN. */
1750 dead_or_set_p (insn, x)
1751 rtx insn;
1752 rtx x;
1754 unsigned int regno, last_regno;
1755 unsigned int i;
1757 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1758 if (GET_CODE (x) == CC0)
1759 return 1;
1761 if (GET_CODE (x) != REG)
1762 abort ();
1764 regno = REGNO (x);
1765 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1766 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1768 for (i = regno; i <= last_regno; i++)
1769 if (! dead_or_set_regno_p (insn, i))
1770 return 0;
1772 return 1;
1775 /* Utility function for dead_or_set_p to check an individual register. Also
1776 called from flow.c. */
1779 dead_or_set_regno_p (insn, test_regno)
1780 rtx insn;
1781 unsigned int test_regno;
1783 unsigned int regno, endregno;
1784 rtx pattern;
1786 /* See if there is a death note for something that includes TEST_REGNO. */
1787 if (find_regno_note (insn, REG_DEAD, test_regno))
1788 return 1;
1790 if (GET_CODE (insn) == CALL_INSN
1791 && find_regno_fusage (insn, CLOBBER, test_regno))
1792 return 1;
1794 pattern = PATTERN (insn);
1796 if (GET_CODE (pattern) == COND_EXEC)
1797 pattern = COND_EXEC_CODE (pattern);
1799 if (GET_CODE (pattern) == SET)
1801 rtx dest = SET_DEST (pattern);
1803 /* A value is totally replaced if it is the destination or the
1804 destination is a SUBREG of REGNO that does not change the number of
1805 words in it. */
1806 if (GET_CODE (dest) == SUBREG
1807 && (((GET_MODE_SIZE (GET_MODE (dest))
1808 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1809 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1810 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1811 dest = SUBREG_REG (dest);
1813 if (GET_CODE (dest) != REG)
1814 return 0;
1816 regno = REGNO (dest);
1817 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1818 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1820 return (test_regno >= regno && test_regno < endregno);
1822 else if (GET_CODE (pattern) == PARALLEL)
1824 int i;
1826 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1828 rtx body = XVECEXP (pattern, 0, i);
1830 if (GET_CODE (body) == COND_EXEC)
1831 body = COND_EXEC_CODE (body);
1833 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1835 rtx dest = SET_DEST (body);
1837 if (GET_CODE (dest) == SUBREG
1838 && (((GET_MODE_SIZE (GET_MODE (dest))
1839 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1840 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1841 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1842 dest = SUBREG_REG (dest);
1844 if (GET_CODE (dest) != REG)
1845 continue;
1847 regno = REGNO (dest);
1848 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1849 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1851 if (test_regno >= regno && test_regno < endregno)
1852 return 1;
1857 return 0;
1860 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1861 If DATUM is nonzero, look for one whose datum is DATUM. */
1864 find_reg_note (insn, kind, datum)
1865 rtx insn;
1866 enum reg_note kind;
1867 rtx datum;
1869 rtx link;
1871 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1872 if (! INSN_P (insn))
1873 return 0;
1875 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1876 if (REG_NOTE_KIND (link) == kind
1877 && (datum == 0 || datum == XEXP (link, 0)))
1878 return link;
1879 return 0;
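/* Illustrative sketch, not part of the original file: a common idiom is to
   prefer the value recorded in a REG_EQUAL note, when one exists, over the
   raw SET_SRC of the insn.  The variable names are hypothetical.

     rtx set  = single_set (insn);
     rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
     rtx src  = note ? XEXP (note, 0) : (set ? SET_SRC (set) : NULL_RTX);  */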
1882 /* Return the reg-note of kind KIND in insn INSN which applies to register
1883 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1884 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1885 it might be the case that the note overlaps REGNO. */
1888 find_regno_note (insn, kind, regno)
1889 rtx insn;
1890 enum reg_note kind;
1891 unsigned int regno;
1893 rtx link;
1895 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1896 if (! INSN_P (insn))
1897 return 0;
1899 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1900 if (REG_NOTE_KIND (link) == kind
1901 /* Verify that it is a register, so that scratch and MEM won't cause a
1902 problem here. */
1903 && GET_CODE (XEXP (link, 0)) == REG
1904 && REGNO (XEXP (link, 0)) <= regno
1905 && ((REGNO (XEXP (link, 0))
1906 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1907 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1908 GET_MODE (XEXP (link, 0)))))
1909 > regno))
1910 return link;
1911 return 0;
1914 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1915 has such a note. */
1918 find_reg_equal_equiv_note (insn)
1919 rtx insn;
1921 rtx note;
1923 if (single_set (insn) == 0)
1924 return 0;
1925 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1926 return note;
1927 else
1928 return find_reg_note (insn, REG_EQUAL, NULL_RTX);
1931 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1932 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1935 find_reg_fusage (insn, code, datum)
1936 rtx insn;
1937 enum rtx_code code;
1938 rtx datum;
1940 /* If it's not a CALL_INSN, it can't possibly have a
1941 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1942 if (GET_CODE (insn) != CALL_INSN)
1943 return 0;
1945 if (! datum)
1946 abort ();
1948 if (GET_CODE (datum) != REG)
1950 rtx link;
1952 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1953 link;
1954 link = XEXP (link, 1))
1955 if (GET_CODE (XEXP (link, 0)) == code
1956 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1957 return 1;
1959 else
1961 unsigned int regno = REGNO (datum);
1963 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1964 to pseudo registers, so don't bother checking. */
1966 if (regno < FIRST_PSEUDO_REGISTER)
1968 unsigned int end_regno
1969 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1970 unsigned int i;
1972 for (i = regno; i < end_regno; i++)
1973 if (find_regno_fusage (insn, code, i))
1974 return 1;
1978 return 0;
1981 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1982 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1985 find_regno_fusage (insn, code, regno)
1986 rtx insn;
1987 enum rtx_code code;
1988 unsigned int regno;
1990 rtx link;
1992 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1993 to pseudo registers, so don't bother checking. */
1995 if (regno >= FIRST_PSEUDO_REGISTER
1996 || GET_CODE (insn) != CALL_INSN )
1997 return 0;
1999 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2001 unsigned int regnote;
2002 rtx op, reg;
2004 if (GET_CODE (op = XEXP (link, 0)) == code
2005 && GET_CODE (reg = XEXP (op, 0)) == REG
2006 && (regnote = REGNO (reg)) <= regno
2007 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2008 return 1;
2011 return 0;
2014 /* Return true if INSN is a call to a pure function. */
2017 pure_call_p (insn)
2018 rtx insn;
2020 rtx link;
2022 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2023 return 0;
2025 /* Look for the note that differentiates const and pure functions. */
2026 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2028 rtx u, m;
2030 if (GET_CODE (u = XEXP (link, 0)) == USE
2031 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2032 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2033 return 1;
2036 return 0;
2039 /* Remove register note NOTE from the REG_NOTES of INSN. */
2041 void
2042 remove_note (insn, note)
2043 rtx insn;
2044 rtx note;
2046 rtx link;
2048 if (note == NULL_RTX)
2049 return;
2051 if (REG_NOTES (insn) == note)
2053 REG_NOTES (insn) = XEXP (note, 1);
2054 return;
2057 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2058 if (XEXP (link, 1) == note)
2060 XEXP (link, 1) = XEXP (note, 1);
2061 return;
2064 abort ();
2067 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2068 return 1 if it is found. A simple equality test is used to determine if
2069 NODE matches. */
2072 in_expr_list_p (listp, node)
2073 rtx listp;
2074 rtx node;
2076 rtx x;
2078 for (x = listp; x; x = XEXP (x, 1))
2079 if (node == XEXP (x, 0))
2080 return 1;
2082 return 0;
2085 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2086 remove that entry from the list if it is found.
2088 A simple equality test is used to determine if NODE matches. */
2090 void
2091 remove_node_from_expr_list (node, listp)
2092 rtx node;
2093 rtx *listp;
2095 rtx temp = *listp;
2096 rtx prev = NULL_RTX;
2098 while (temp)
2100 if (node == XEXP (temp, 0))
2102 /* Splice the node out of the list. */
2103 if (prev)
2104 XEXP (prev, 1) = XEXP (temp, 1);
2105 else
2106 *listp = XEXP (temp, 1);
2108 return;
2111 prev = temp;
2112 temp = XEXP (temp, 1);
2116 /* Nonzero if X contains any volatile instructions. These are instructions
2117 which may cause unpredictable machine state, and thus no
2118 instructions should be moved or combined across them. This includes
2119 only volatile asms and UNSPEC_VOLATILE instructions. */
2122 volatile_insn_p (x)
2123 rtx x;
2125 RTX_CODE code;
2127 code = GET_CODE (x);
2128 switch (code)
2130 case LABEL_REF:
2131 case SYMBOL_REF:
2132 case CONST_INT:
2133 case CONST:
2134 case CONST_DOUBLE:
2135 case CONST_VECTOR:
2136 case CC0:
2137 case PC:
2138 case REG:
2139 case SCRATCH:
2140 case CLOBBER:
2141 case ASM_INPUT:
2142 case ADDR_VEC:
2143 case ADDR_DIFF_VEC:
2144 case CALL:
2145 case MEM:
2146 return 0;
2148 case UNSPEC_VOLATILE:
2149 /* case TRAP_IF: This isn't clear yet. */
2150 return 1;
2152 case ASM_OPERANDS:
2153 if (MEM_VOLATILE_P (x))
2154 return 1;
2156 default:
2157 break;
2160 /* Recursively scan the operands of this expression. */
2163 const char *fmt = GET_RTX_FORMAT (code);
2164 int i;
2166 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2168 if (fmt[i] == 'e')
2170 if (volatile_insn_p (XEXP (x, i)))
2171 return 1;
2173 else if (fmt[i] == 'E')
2175 int j;
2176 for (j = 0; j < XVECLEN (x, i); j++)
2177 if (volatile_insn_p (XVECEXP (x, i, j)))
2178 return 1;
2182 return 0;
2185 /* Nonzero if X contains any volatile memory references,
2186 UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions. */
2189 volatile_refs_p (x)
2190 rtx x;
2192 RTX_CODE code;
2194 code = GET_CODE (x);
2195 switch (code)
2197 case LABEL_REF:
2198 case SYMBOL_REF:
2199 case CONST_INT:
2200 case CONST:
2201 case CONST_DOUBLE:
2202 case CONST_VECTOR:
2203 case CC0:
2204 case PC:
2205 case REG:
2206 case SCRATCH:
2207 case CLOBBER:
2208 case ASM_INPUT:
2209 case ADDR_VEC:
2210 case ADDR_DIFF_VEC:
2211 return 0;
2213 case UNSPEC_VOLATILE:
2214 return 1;
2216 case MEM:
2217 case ASM_OPERANDS:
2218 if (MEM_VOLATILE_P (x))
2219 return 1;
2221 default:
2222 break;
2225 /* Recursively scan the operands of this expression. */
2228 const char *fmt = GET_RTX_FORMAT (code);
2229 int i;
2231 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2233 if (fmt[i] == 'e')
2235 if (volatile_refs_p (XEXP (x, i)))
2236 return 1;
2238 else if (fmt[i] == 'E')
2240 int j;
2241 for (j = 0; j < XVECLEN (x, i); j++)
2242 if (volatile_refs_p (XVECEXP (x, i, j)))
2243 return 1;
2247 return 0;
2250 /* Similar to above, except that it also rejects register pre- and post-
2251 incrementing. */
2254 side_effects_p (x)
2255 rtx x;
2257 RTX_CODE code;
2259 code = GET_CODE (x);
2260 switch (code)
2262 case LABEL_REF:
2263 case SYMBOL_REF:
2264 case CONST_INT:
2265 case CONST:
2266 case CONST_DOUBLE:
2267 case CONST_VECTOR:
2268 case CC0:
2269 case PC:
2270 case REG:
2271 case SCRATCH:
2272 case ASM_INPUT:
2273 case ADDR_VEC:
2274 case ADDR_DIFF_VEC:
2275 return 0;
2277 case CLOBBER:
2278 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2279 when some combination can't be done. If we see one, don't think
2280 that we can simplify the expression. */
2281 return (GET_MODE (x) != VOIDmode);
2283 case PRE_INC:
2284 case PRE_DEC:
2285 case POST_INC:
2286 case POST_DEC:
2287 case PRE_MODIFY:
2288 case POST_MODIFY:
2289 case CALL:
2290 case UNSPEC_VOLATILE:
2291 /* case TRAP_IF: This isn't clear yet. */
2292 return 1;
2294 case MEM:
2295 case ASM_OPERANDS:
2296 if (MEM_VOLATILE_P (x))
2297 return 1;
2299 default:
2300 break;
2303 /* Recursively scan the operands of this expression. */
2306 const char *fmt = GET_RTX_FORMAT (code);
2307 int i;
2309 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2311 if (fmt[i] == 'e')
2313 if (side_effects_p (XEXP (x, i)))
2314 return 1;
2316 else if (fmt[i] == 'E')
2318 int j;
2319 for (j = 0; j < XVECLEN (x, i); j++)
2320 if (side_effects_p (XVECEXP (x, i, j)))
2321 return 1;
2325 return 0;
2328 /* Return nonzero if evaluating rtx X might cause a trap. */
2331 may_trap_p (x)
2332 rtx x;
2334 int i;
2335 enum rtx_code code;
2336 const char *fmt;
2338 if (x == 0)
2339 return 0;
2340 code = GET_CODE (x);
2341 switch (code)
2343 /* Handle these cases quickly. */
2344 case CONST_INT:
2345 case CONST_DOUBLE:
2346 case CONST_VECTOR:
2347 case SYMBOL_REF:
2348 case LABEL_REF:
2349 case CONST:
2350 case PC:
2351 case CC0:
2352 case REG:
2353 case SCRATCH:
2354 return 0;
2356 case ASM_INPUT:
2357 case UNSPEC_VOLATILE:
2358 case TRAP_IF:
2359 return 1;
2361 case ASM_OPERANDS:
2362 return MEM_VOLATILE_P (x);
2364 /* Memory ref can trap unless it's a static var or a stack slot. */
2365 case MEM:
2366 return rtx_addr_can_trap_p (XEXP (x, 0));
2368 /* Division by a non-constant might trap. */
2369 case DIV:
2370 case MOD:
2371 case UDIV:
2372 case UMOD:
2373 if (HONOR_SNANS (GET_MODE (x)))
2374 return 1;
2375 if (! CONSTANT_P (XEXP (x, 1))
2376 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2377 && flag_trapping_math))
2378 return 1;
2379 /* This was const0_rtx, but by not using that,
2380 we can link this file into other programs. */
2381 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2382 return 1;
2383 break;
2385 case EXPR_LIST:
2386 /* An EXPR_LIST is used to represent a function call. This
2387 certainly may trap. */
2388 return 1;
2390 case GE:
2391 case GT:
2392 case LE:
2393 case LT:
2394 case COMPARE:
2395 /* Some floating point comparisons may trap. */
2396 if (!flag_trapping_math)
2397 break;
2398 /* ??? There is no machine independent way to check for tests that trap
2399 when COMPARE is used, though many targets do make this distinction.
2400 For instance, sparc uses CCFPE for compares which generate exceptions
2401 and CCFP for compares which do not generate exceptions. */
2402 if (HONOR_NANS (GET_MODE (x)))
2403 return 1;
2404 /* But often the compare has some CC mode, so check operand
2405 modes as well. */
2406 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2407 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2408 return 1;
2409 break;
2411 case EQ:
2412 case NE:
2413 if (HONOR_SNANS (GET_MODE (x)))
2414 return 1;
2415 /* Often comparison is CC mode, so check operand modes. */
2416 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2417 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2418 return 1;
2419 break;
2421 case NEG:
2422 case ABS:
2423 /* These operations don't trap even with floating point. */
2424 break;
2426 default:
2427 /* Any floating arithmetic may trap. */
2428 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2429 && flag_trapping_math)
2430 return 1;
2433 fmt = GET_RTX_FORMAT (code);
2434 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2436 if (fmt[i] == 'e')
2438 if (may_trap_p (XEXP (x, i)))
2439 return 1;
2441 else if (fmt[i] == 'E')
2443 int j;
2444 for (j = 0; j < XVECLEN (x, i); j++)
2445 if (may_trap_p (XVECEXP (x, i, j)))
2446 return 1;
2449 return 0;
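/* Illustrative sketch, assuming REG is an SImode register rtx:

     may_trap_p (gen_rtx_DIV (SImode, reg, const1_rtx));  => 0, the
       divisor is a nonzero constant in an integer mode.
     may_trap_p (gen_rtx_DIV (SImode, reg, reg));          => 1, the
       divisor is not constant.
     may_trap_p (gen_rtx_MEM (SImode, reg));               => whatever
       rtx_addr_can_trap_p says about the address.  */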
2452 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2453 i.e., an inequality. */
2456 inequality_comparisons_p (x)
2457 rtx x;
2459 const char *fmt;
2460 int len, i;
2461 enum rtx_code code = GET_CODE (x);
2463 switch (code)
2465 case REG:
2466 case SCRATCH:
2467 case PC:
2468 case CC0:
2469 case CONST_INT:
2470 case CONST_DOUBLE:
2471 case CONST_VECTOR:
2472 case CONST:
2473 case LABEL_REF:
2474 case SYMBOL_REF:
2475 return 0;
2477 case LT:
2478 case LTU:
2479 case GT:
2480 case GTU:
2481 case LE:
2482 case LEU:
2483 case GE:
2484 case GEU:
2485 return 1;
2487 default:
2488 break;
2491 len = GET_RTX_LENGTH (code);
2492 fmt = GET_RTX_FORMAT (code);
2494 for (i = 0; i < len; i++)
2496 if (fmt[i] == 'e')
2498 if (inequality_comparisons_p (XEXP (x, i)))
2499 return 1;
2501 else if (fmt[i] == 'E')
2503 int j;
2504 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2505 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2506 return 1;
2510 return 0;
2513 /* Replace any occurrence of FROM in X with TO. The function does
2514 not descend into CONST_DOUBLEs when doing the replacement.
2516 Note that copying is not done so X must not be shared unless all copies
2517 are to be modified. */
2520 replace_rtx (x, from, to)
2521 rtx x, from, to;
2523 int i, j;
2524 const char *fmt;
2526 /* The following prevents loops from occurring when we change a MEM inside
2527 a CONST_DOUBLE into that same CONST_DOUBLE. */
2528 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2529 return x;
2531 if (x == from)
2532 return to;
2534 /* Allow this function to make replacements in EXPR_LISTs. */
2535 if (x == 0)
2536 return 0;
2538 if (GET_CODE (x) == SUBREG)
2540 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2542 if (GET_CODE (new) == CONST_INT)
2544 x = simplify_subreg (GET_MODE (x), new,
2545 GET_MODE (SUBREG_REG (x)),
2546 SUBREG_BYTE (x));
2547 if (! x)
2548 abort ();
2550 else
2551 SUBREG_REG (x) = new;
2553 return x;
2555 else if (GET_CODE (x) == ZERO_EXTEND)
2557 rtx new = replace_rtx (XEXP (x, 0), from, to);
2559 if (GET_CODE (new) == CONST_INT)
2561 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2562 new, GET_MODE (XEXP (x, 0)));
2563 if (! x)
2564 abort ();
2566 else
2567 XEXP (x, 0) = new;
2569 return x;
2572 fmt = GET_RTX_FORMAT (GET_CODE (x));
2573 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2575 if (fmt[i] == 'e')
2576 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2577 else if (fmt[i] == 'E')
2578 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2579 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2582 return x;
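/* Usage sketch with hypothetical operands REG_A and REG_B: since
   replace_rtx rewrites X in place, the rtx passed in must not be shared.

     rtx sum = gen_rtx_PLUS (SImode, reg_a, reg_b);
     sum = replace_rtx (sum, reg_b, GEN_INT (4));
     => sum is now (plus:SI reg_a (const_int 4)); the occurrence of
        reg_b was overwritten rather than copied.  */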
2585 /* Throughout the rtx X, replace many registers according to REG_MAP.
2586 Return the replacement for X (which may be X with altered contents).
2587 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2588 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2590 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2591 should not be mapped to pseudos or vice versa since validate_change
2592 is not called.
2594 If REPLACE_DEST is 1, replacements are also done in destinations;
2595 otherwise, only sources are replaced. */
2598 replace_regs (x, reg_map, nregs, replace_dest)
2599 rtx x;
2600 rtx *reg_map;
2601 unsigned int nregs;
2602 int replace_dest;
2604 enum rtx_code code;
2605 int i;
2606 const char *fmt;
2608 if (x == 0)
2609 return x;
2611 code = GET_CODE (x);
2612 switch (code)
2614 case SCRATCH:
2615 case PC:
2616 case CC0:
2617 case CONST_INT:
2618 case CONST_DOUBLE:
2619 case CONST_VECTOR:
2620 case CONST:
2621 case SYMBOL_REF:
2622 case LABEL_REF:
2623 return x;
2625 case REG:
2626 /* Verify that the register has an entry before trying to access it. */
2627 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2629 /* SUBREGs can't be shared. Always return a copy to ensure that if
2630 this replacement occurs more than once then each instance will
2631 get distinct rtx. */
2632 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2633 return copy_rtx (reg_map[REGNO (x)]);
2634 return reg_map[REGNO (x)];
2636 return x;
2638 case SUBREG:
2639 /* Prevent making nested SUBREGs. */
2640 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2641 && reg_map[REGNO (SUBREG_REG (x))] != 0
2642 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2644 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2645 return simplify_gen_subreg (GET_MODE (x), map_val,
2646 GET_MODE (SUBREG_REG (x)),
2647 SUBREG_BYTE (x));
2649 break;
2651 case SET:
2652 if (replace_dest)
2653 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2655 else if (GET_CODE (SET_DEST (x)) == MEM
2656 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2657 /* Even if we are not to replace destinations, replace register if it
2658 is CONTAINED in destination (destination is memory or
2659 STRICT_LOW_PART). */
2660 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2661 reg_map, nregs, 0);
2662 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2663 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2664 break;
2666 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2667 return x;
2669 default:
2670 break;
2673 fmt = GET_RTX_FORMAT (code);
2674 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2676 if (fmt[i] == 'e')
2677 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2678 else if (fmt[i] == 'E')
2680 int j;
2681 for (j = 0; j < XVECLEN (x, i); j++)
2682 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2683 nregs, replace_dest);
2686 return x;
2689 /* A subroutine of computed_jump_p. Return 1 if X contains a REG or MEM or
2690 constant that is not in the constant pool and not in the condition
2691 of an IF_THEN_ELSE. */
2693 static int
2694 computed_jump_p_1 (x)
2695 rtx x;
2697 enum rtx_code code = GET_CODE (x);
2698 int i, j;
2699 const char *fmt;
2701 switch (code)
2703 case LABEL_REF:
2704 case PC:
2705 return 0;
2707 case CONST:
2708 case CONST_INT:
2709 case CONST_DOUBLE:
2710 case CONST_VECTOR:
2711 case SYMBOL_REF:
2712 case REG:
2713 return 1;
2715 case MEM:
2716 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2717 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2719 case IF_THEN_ELSE:
2720 return (computed_jump_p_1 (XEXP (x, 1))
2721 || computed_jump_p_1 (XEXP (x, 2)));
2723 default:
2724 break;
2727 fmt = GET_RTX_FORMAT (code);
2728 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2730 if (fmt[i] == 'e'
2731 && computed_jump_p_1 (XEXP (x, i)))
2732 return 1;
2734 else if (fmt[i] == 'E')
2735 for (j = 0; j < XVECLEN (x, i); j++)
2736 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2737 return 1;
2740 return 0;
2743 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2745 Tablejumps and casesi insns are not considered indirect jumps;
2746 we can recognize them by a (use (label_ref)). */
2749 computed_jump_p (insn)
2750 rtx insn;
2752 int i;
2753 if (GET_CODE (insn) == JUMP_INSN)
2755 rtx pat = PATTERN (insn);
2757 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2758 return 0;
2759 else if (GET_CODE (pat) == PARALLEL)
2761 int len = XVECLEN (pat, 0);
2762 int has_use_labelref = 0;
2764 for (i = len - 1; i >= 0; i--)
2765 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2766 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2767 == LABEL_REF))
2768 has_use_labelref = 1;
2770 if (! has_use_labelref)
2771 for (i = len - 1; i >= 0; i--)
2772 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2773 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2774 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2775 return 1;
2777 else if (GET_CODE (pat) == SET
2778 && SET_DEST (pat) == pc_rtx
2779 && computed_jump_p_1 (SET_SRC (pat)))
2780 return 1;
2782 return 0;
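/* For illustration (register numbers and label are made up), an indirect
   jump such as

     (jump_insn (set (pc) (reg:SI 100)))

   makes computed_jump_p return 1, while a tablejump of the form

     (jump_insn (parallel [(set (pc) (mem:SI (plus:SI (reg:SI 100)
                                                      (label_ref 23))))
                           (use (label_ref 23))]))

   returns 0 because of the (use (label_ref)).  */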
2785 /* Traverse X via depth-first search, calling F for each
2786 sub-expression (including X itself). F is also passed the DATA.
2787 If F returns -1, do not traverse sub-expressions, but continue
2788 traversing the rest of the tree. If F ever returns any other
2789 nonzero value, stop the traversal, and return the value returned
2790 by F. Otherwise, return 0. This function does not traverse inside
2791 tree structure that contains RTX_EXPRs, or into sub-expressions
2792 whose format code is `0' since it is not known whether or not those
2793 codes are actually RTL.
2795 This routine is very general, and could (should?) be used to
2796 implement many of the other routines in this file. */
2799 for_each_rtx (x, f, data)
2800 rtx *x;
2801 rtx_function f;
2802 void *data;
2804 int result;
2805 int length;
2806 const char *format;
2807 int i;
2809 /* Call F on X. */
2810 result = (*f) (x, data);
2811 if (result == -1)
2812 /* Do not traverse sub-expressions. */
2813 return 0;
2814 else if (result != 0)
2815 /* Stop the traversal. */
2816 return result;
2818 if (*x == NULL_RTX)
2819 /* There are no sub-expressions. */
2820 return 0;
2822 length = GET_RTX_LENGTH (GET_CODE (*x));
2823 format = GET_RTX_FORMAT (GET_CODE (*x));
2825 for (i = 0; i < length; ++i)
2827 switch (format[i])
2829 case 'e':
2830 result = for_each_rtx (&XEXP (*x, i), f, data);
2831 if (result != 0)
2832 return result;
2833 break;
2835 case 'V':
2836 case 'E':
2837 if (XVEC (*x, i) != 0)
2839 int j;
2840 for (j = 0; j < XVECLEN (*x, i); ++j)
2842 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2843 if (result != 0)
2844 return result;
2847 break;
2849 default:
2850 /* Nothing to do. */
2851 break;
2856 return 0;
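/* Sketch of a hypothetical callback obeying the protocol above (return 0
   to keep walking, -1 to skip sub-expressions, anything else to stop):

     static int
     count_mems_1 (x, data)
          rtx *x;
          void *data;
     {
       if (*x != NULL_RTX && GET_CODE (*x) == MEM)
         ++*(int *) data;
       return 0;
     }

   and then, for some insn INSN:

     int n = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &n);
     => n holds the number of MEMs in the pattern.  */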
2859 /* Searches X for any reference to REGNO, returning the rtx of the
2860 reference found if any. Otherwise, returns NULL_RTX. */
2863 regno_use_in (regno, x)
2864 unsigned int regno;
2865 rtx x;
2867 const char *fmt;
2868 int i, j;
2869 rtx tem;
2871 if (GET_CODE (x) == REG && REGNO (x) == regno)
2872 return x;
2874 fmt = GET_RTX_FORMAT (GET_CODE (x));
2875 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2877 if (fmt[i] == 'e')
2879 if ((tem = regno_use_in (regno, XEXP (x, i))))
2880 return tem;
2882 else if (fmt[i] == 'E')
2883 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2884 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2885 return tem;
2888 return NULL_RTX;
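/* Usage sketch with hypothetical SOME_REG and INSN:

     rtx use = regno_use_in (REGNO (some_reg), PATTERN (insn));

   USE is the (reg) rtx found within the pattern, or NULL_RTX if the
   register number does not appear.  */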
2891 /* Return a value indicating whether OP, an operand of a commutative
2892 operation, is preferred as the first or second operand. The higher
2893 the value, the stronger the preference for being the first operand.
2894 Negative values indicate a preference for the second operand and
2895 positive values a preference for the first operand. */
2898 commutative_operand_precedence (op)
2899 rtx op;
2901 /* Constants always become the second operand. Prefer "nice" constants. */
2902 if (GET_CODE (op) == CONST_INT)
2903 return -5;
2904 if (GET_CODE (op) == CONST_DOUBLE)
2905 return -4;
2906 if (CONSTANT_P (op))
2907 return -3;
2909 /* SUBREGs of objects should come second. */
2910 if (GET_CODE (op) == SUBREG
2911 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
2912 return -2;
2914 /* If only one operand is a `neg', `not',
2915 `mult', `plus', or `minus' expression, it will be the first
2916 operand. */
2917 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
2918 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
2919 || GET_CODE (op) == MINUS)
2920 return 2;
2922 /* Complex expressions should come first, so decrease the priority
2923 of objects. */
2924 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
2925 return -1;
2926 return 0;
2929 /* Return 1 iff it is necessary to swap operands of commutative operation
2930 in order to canonicalize expression. */
2933 swap_commutative_operands_p (x, y)
2934 rtx x, y;
2936 return (commutative_operand_precedence (x)
2937 < commutative_operand_precedence (y));
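/* Worked example: with X = (const_int 4) and Y = (reg:SI 1), the
   precedences above are -5 and -1 respectively, so
   swap_commutative_operands_p returns 1 and canonicalization puts the
   constant second, as in (plus:SI (reg:SI 1) (const_int 4)).  */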
2940 /* Return 1 if X is an autoincrement side effect and the register is
2941 not the stack pointer. */
2943 auto_inc_p (x)
2944 rtx x;
2946 switch (GET_CODE (x))
2948 case PRE_INC:
2949 case POST_INC:
2950 case PRE_DEC:
2951 case POST_DEC:
2952 case PRE_MODIFY:
2953 case POST_MODIFY:
2954 /* There are no REG_INC notes for SP. */
2955 if (XEXP (x, 0) != stack_pointer_rtx)
2956 return 1;
2957 default:
2958 break;
2960 return 0;
2963 /* Return 1 if the sequence of instructions beginning with FROM and up
2964 to and including TO is safe to move. If NEW_TO is non-NULL, and
2965 the sequence is not already safe to move, but can be easily
2966 extended to a sequence which is safe, then NEW_TO will point to the
2967 end of the extended sequence.
2969 For now, this function only checks that the region contains whole
2970 exception regions, but it could be extended to check additional
2971 conditions as well. */
2974 insns_safe_to_move_p (from, to, new_to)
2975 rtx from;
2976 rtx to;
2977 rtx *new_to;
2979 int eh_region_count = 0;
2980 int past_to_p = 0;
2981 rtx r = from;
2983 /* By default, assume the end of the region will be what was
2984 suggested. */
2985 if (new_to)
2986 *new_to = to;
2988 while (r)
2990 if (GET_CODE (r) == NOTE)
2992 switch (NOTE_LINE_NUMBER (r))
2994 case NOTE_INSN_EH_REGION_BEG:
2995 ++eh_region_count;
2996 break;
2998 case NOTE_INSN_EH_REGION_END:
2999 if (eh_region_count == 0)
3000 /* This sequence of instructions contains the end of
3001 an exception region, but not the beginning. Moving
3002 it will cause chaos. */
3003 return 0;
3005 --eh_region_count;
3006 break;
3008 default:
3009 break;
3012 else if (past_to_p)
3013 /* If we've passed TO, and we see a non-note instruction, we
3014 can't extend the sequence to a movable sequence. */
3015 return 0;
3017 if (r == to)
3019 if (!new_to)
3020 /* It's OK to move the sequence if there were matched sets of
3021 exception region notes. */
3022 return eh_region_count == 0;
3024 past_to_p = 1;
3027 /* It's OK to move the sequence if there were matched sets of
3028 exception region notes. */
3029 if (past_to_p && eh_region_count == 0)
3031 *new_to = r;
3032 return 1;
3035 /* Go to the next instruction. */
3036 r = NEXT_INSN (r);
3039 return 0;
3042 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3044 loc_mentioned_in_p (loc, in)
3045 rtx *loc, in;
3047 enum rtx_code code = GET_CODE (in);
3048 const char *fmt = GET_RTX_FORMAT (code);
3049 int i, j;
3051 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3053 if (loc == &in->fld[i].rtx)
3054 return 1;
3055 if (fmt[i] == 'e')
3057 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3058 return 1;
3060 else if (fmt[i] == 'E')
3061 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3062 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3063 return 1;
3065 return 0;
3068 /* Given a subreg X, return the bit offset where the subreg begins
3069 (counting from the least significant bit of the reg). */
3071 unsigned int
3072 subreg_lsb (x)
3073 rtx x;
3075 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3076 enum machine_mode mode = GET_MODE (x);
3077 unsigned int bitpos;
3078 unsigned int byte;
3079 unsigned int word;
3081 /* A paradoxical subreg begins at bit position 0. */
3082 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3083 return 0;
3085 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3086 /* If the subreg crosses a word boundary ensure that
3087 it also begins and ends on a word boundary. */
3088 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3089 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3090 && (SUBREG_BYTE (x) % UNITS_PER_WORD
3091 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3092 abort ();
3094 if (WORDS_BIG_ENDIAN)
3095 word = (GET_MODE_SIZE (inner_mode)
3096 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3097 else
3098 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3099 bitpos = word * BITS_PER_WORD;
3101 if (BYTES_BIG_ENDIAN)
3102 byte = (GET_MODE_SIZE (inner_mode)
3103 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3104 else
3105 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3106 bitpos += byte * BITS_PER_UNIT;
3108 return bitpos;
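/* Worked example, assuming UNITS_PER_WORD == 4: for
   (subreg:QI (reg:SI 100) 3) on a little-endian target, word = 0 and
   byte = 3, so subreg_lsb returns 24; on a target that is big-endian
   for both words and bytes, the same subreg names the low-order byte
   and subreg_lsb returns 0.  */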
3111 /* This function returns the regno offset of a subreg expression.
3112 xregno - A regno of an inner hard subreg_reg (or what will become one).
3113 xmode - The mode of xregno.
3114 offset - The byte offset.
3115 ymode - The mode of a top level SUBREG (or what may become one).
3116 RETURN - The regno offset which would be used. */
3117 unsigned int
3118 subreg_regno_offset (xregno, xmode, offset, ymode)
3119 unsigned int xregno;
3120 enum machine_mode xmode;
3121 unsigned int offset;
3122 enum machine_mode ymode;
3124 int nregs_xmode, nregs_ymode;
3125 int mode_multiple, nregs_multiple;
3126 int y_offset;
3128 if (xregno >= FIRST_PSEUDO_REGISTER)
3129 abort ();
3131 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3132 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3134 /* If this is a big endian paradoxical subreg, which uses more actual
3135 hard registers than the original register, we must return a negative
3136 offset so that we find the proper highpart of the register. */
3137 if (offset == 0
3138 && nregs_ymode > nregs_xmode
3139 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3140 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3141 return nregs_xmode - nregs_ymode;
3143 if (offset == 0 || nregs_xmode == nregs_ymode)
3144 return 0;
3146 /* The size of ymode must not be greater than the size of xmode. */
3147 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3148 if (mode_multiple == 0)
3149 abort ();
3151 y_offset = offset / GET_MODE_SIZE (ymode);
3152 nregs_multiple = nregs_xmode / nregs_ymode;
3153 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
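/* Worked example, assuming 32-bit hard registers: for a DImode value
   (xmode = DImode, nregs_xmode = 2) accessed as (subreg:SI ... 4)
   (ymode = SImode, offset = 4), mode_multiple = 2, y_offset = 1 and
   nregs_multiple = 2, so the regno offset returned is 1 -- the SImode
   piece lives in the second of the two hard registers.  */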
3156 /* Return the final regno that a subreg expression refers to. */
3157 unsigned int
3158 subreg_regno (x)
3159 rtx x;
3161 unsigned int ret;
3162 rtx subreg = SUBREG_REG (x);
3163 int regno = REGNO (subreg);
3165 ret = regno + subreg_regno_offset (regno,
3166 GET_MODE (subreg),
3167 SUBREG_BYTE (x),
3168 GET_MODE (x));
3169 return ret;
3172 struct parms_set_data
3174 int nregs;
3175 HARD_REG_SET regs;
3178 /* Helper function for noticing stores to parameter registers. */
3179 static void
3180 parms_set (x, pat, data)
3181 rtx x, pat ATTRIBUTE_UNUSED;
3182 void *data;
3184 struct parms_set_data *d = data;
3185 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3186 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3188 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3189 d->nregs--;
3193 /* Look backward for first parameter to be loaded.
3194 Do not skip BOUNDARY. */
3196 find_first_parameter_load (call_insn, boundary)
3197 rtx call_insn, boundary;
3199 struct parms_set_data parm;
3200 rtx p, before;
3202 /* Since different machines initialize their parameter registers
3203 in different orders, assume nothing. Collect the set of all
3204 parameter registers. */
3205 CLEAR_HARD_REG_SET (parm.regs);
3206 parm.nregs = 0;
3207 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3208 if (GET_CODE (XEXP (p, 0)) == USE
3209 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3211 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3212 abort ();
3214 /* We only care about registers which can hold function
3215 arguments. */
3216 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3217 continue;
3219 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3220 parm.nregs++;
3222 before = call_insn;
3224 /* Search backward for the first set of a register in this set. */
3225 while (parm.nregs && before != boundary)
3227 before = PREV_INSN (before);
3229 /* It is possible that some loads got CSEed from one call to
3230 another. Stop in that case. */
3231 if (GET_CODE (before) == CALL_INSN)
3232 break;
3234 /* Our caller must either ensure that we will find all sets
3235 (in case the code has not been optimized yet), or guard
3236 against possible labels by setting BOUNDARY to the preceding
3237 CODE_LABEL. */
3238 if (GET_CODE (before) == CODE_LABEL)
3240 if (before != boundary)
3241 abort ();
3242 break;
3245 if (INSN_P (before))
3246 note_stores (PATTERN (before), parms_set, &parm);
3248 return before;
3251 /* Return true if we should avoid inserting code between INSN and the preceding
3252 call instruction. */
3254 bool
3255 keep_with_call_p (insn)
3256 rtx insn;
3258 rtx set;
3260 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3262 if (GET_CODE (SET_DEST (set)) == REG
3263 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3264 && fixed_regs[REGNO (SET_DEST (set))]
3265 && general_operand (SET_SRC (set), VOIDmode))
3266 return true;
3267 if (GET_CODE (SET_SRC (set)) == REG
3268 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3269 && GET_CODE (SET_DEST (set)) == REG
3270 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3271 return true;
3272 /* There may be a stack pop just after the call and before the store
3273 of the return register. Search for the actual store when deciding
3274 if we can break or not. */
3275 if (SET_DEST (set) == stack_pointer_rtx)
3277 rtx i2 = next_nonnote_insn (insn);
3278 if (i2 && keep_with_call_p (i2))
3279 return true;
3282 return false;
3285 /* Return true when a store to register X can be hoisted to the place
3286 with LIVE registers (LIVE can be NULL). VAL is the destination
3287 whose value will be used. */
3289 static bool
3290 hoist_test_store (x, val, live)
3291 rtx x, val;
3292 regset live;
3294 if (GET_CODE (x) == SCRATCH)
3295 return true;
3297 if (rtx_equal_p (x, val))
3298 return true;
3300 /* Allow a SUBREG of X as long as it does not write just part of a multireg
3301 pseudo; otherwise we would need to update all users to account for the
3302 hoisted store too. The caller may express that by passing the whole SUBREG as VAL. */
3304 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3306 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3307 && GET_MODE_BITSIZE (GET_MODE (x)) <
3308 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3309 return false;
3310 return true;
3312 if (GET_CODE (x) == SUBREG)
3313 x = SUBREG_REG (x);
3315 /* Anything other than a register store is not hoistable. This includes
3316 partial stores to registers. */
3318 if (!REG_P (x))
3319 return false;
3321 /* Pseudo registers can always be replaced by another pseudo to avoid
3322 the side effect; for hard registers we must ensure that they are dead.
3323 Eventually we may want to add code that tries to turn pseudos into hard
3324 registers, but that is unlikely to be useful. */
3326 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3328 int regno = REGNO (x);
3329 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3331 if (!live)
3332 return false;
3333 if (REGNO_REG_SET_P (live, regno))
3334 return false;
3335 while (--n > 0)
3336 if (REGNO_REG_SET_P (live, regno + n))
3337 return false;
3339 return true;
3343 /* Return true if INSN can be hoisted to a place with LIVE hard registers
3344 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3345 and used by the hoisting pass. */
3347 bool
3348 can_hoist_insn_p (insn, val, live)
3349 rtx insn, val;
3350 regset live;
3352 rtx pat = PATTERN (insn);
3353 int i;
3355 /* It is probably not worth the complexity to handle
3356 multiple-set stores. */
3357 if (!single_set (insn))
3358 return false;
3359 /* We could move a CALL_INSN, but we would need to check that all
3360 caller-clobbered regs are dead, so give up for now. */
3361 if (GET_CODE (insn) == CALL_INSN)
3362 return false;
3363 /* In the future we will handle hoisting of libcall sequences, but
3364 we give up for now. */
3365 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3366 return false;
3367 switch (GET_CODE (pat))
3369 case SET:
3370 if (!hoist_test_store (SET_DEST (pat), val, live))
3371 return false;
3372 break;
3373 case USE:
3374 /* USEs have tricky semantics, so do not move them. */
3375 return false;
3376 break;
3377 case CLOBBER:
3378 if (!hoist_test_store (XEXP (pat, 0), val, live))
3379 return false;
3380 break;
3381 case PARALLEL:
3382 for (i = 0; i < XVECLEN (pat, 0); i++)
3384 rtx x = XVECEXP (pat, 0, i);
3385 switch (GET_CODE (x))
3387 case SET:
3388 if (!hoist_test_store (SET_DEST (x), val, live))
3389 return false;
3390 break;
3391 case USE:
3392 /* We would need to fix callers to really ensure availability
3393 of all values the insn uses, but for now it is safe to prohibit
3394 hoisting of any insn having such hidden uses. */
3395 return false;
3396 break;
3397 case CLOBBER:
3398 if (!hoist_test_store (SET_DEST (x), val, live))
3399 return false;
3400 break;
3401 default:
3402 break;
3405 break;
3406 default:
3407 abort ();
3409 return true;
3412 /* Update a store after hoisting - replace all stores to pseudo registers
3413 by new ones to avoid clobbering their values, except for the store to VAL,
3414 which will be updated to NEW. */
3416 static void
3417 hoist_update_store (insn, xp, val, new)
3418 rtx insn, *xp, val, new;
3420 rtx x = *xp;
3422 if (GET_CODE (x) == SCRATCH)
3423 return;
3425 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3426 validate_change (insn, xp,
3427 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3428 SUBREG_BYTE (x)), 1);
3429 if (rtx_equal_p (x, val))
3431 validate_change (insn, xp, new, 1);
3432 return;
3434 if (GET_CODE (x) == SUBREG)
3436 xp = &SUBREG_REG (x);
3437 x = *xp;
3440 if (!REG_P (x))
3441 abort ();
3443 /* We've verified that hard registers are dead, so we may keep the side
3444 effect. Otherwise replace it by a new pseudo. */
3445 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3446 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3447 REG_NOTES (insn)
3448 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3451 /* Create a copy of INSN after AFTER, replacing the store to VAL with a store to NEW
3452 and each other side-effect store to a pseudo register with a store to a new pseudo register. */
3455 hoist_insn_after (insn, after, val, new)
3456 rtx insn, after, val, new;
3458 rtx pat;
3459 int i;
3460 rtx note;
3462 insn = emit_copy_of_insn_after (insn, after);
3463 pat = PATTERN (insn);
3465 /* Remove REG_UNUSED notes as we will re-emit them. */
3466 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3467 remove_note (insn, note);
3469 /* To get this working, callers would also have to move everything referenced
3470 by REG_EQUAL/REG_EQUIV notes. Let's just remove them; it is probably
3471 easier. */
3472 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3473 remove_note (insn, note);
3474 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3475 remove_note (insn, note);
3477 /* Remove REG_DEAD notes as they might not be valid anymore in case
3478 we create redundancy. */
3479 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3480 remove_note (insn, note);
3481 switch (GET_CODE (pat))
3483 case SET:
3484 hoist_update_store (insn, &SET_DEST (pat), val, new);
3485 break;
3486 case USE:
3487 break;
3488 case CLOBBER:
3489 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3490 break;
3491 case PARALLEL:
3492 for (i = 0; i < XVECLEN (pat, 0); i++)
3494 rtx x = XVECEXP (pat, 0, i);
3495 switch (GET_CODE (x))
3497 case SET:
3498 hoist_update_store (insn, &SET_DEST (x), val, new);
3499 break;
3500 case USE:
3501 break;
3502 case CLOBBER:
3503 hoist_update_store (insn, &SET_DEST (x), val, new);
3504 break;
3505 default:
3506 break;
3509 break;
3510 default:
3511 abort ();
3513 if (!apply_change_group ())
3514 abort ();
3516 return insn;
3520 hoist_insn_to_edge (insn, e, val, new)
3521 rtx insn, val, new;
3522 edge e;
3524 rtx new_insn;
3526 /* We cannot insert instructions on an abnormal critical edge.
3527 It will be easier to find the culprit if we die now. */
3528 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3529 abort ();
3531 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3532 stuff. We also emit CALL_INSNs and friends. */
3533 if (e->insns == NULL_RTX)
3535 start_sequence ();
3536 emit_note (NULL, NOTE_INSN_DELETED);
3538 else
3539 push_to_sequence (e->insns);
3541 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3543 e->insns = get_insns ();
3544 end_sequence ();
3545 return new_insn;
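/* A sketch of the expected pairing of the hoisting entry points, with
   hypothetical INSN, VAL and LIVE_AT_EDGE; the sequence queued on E is
   presumably committed later by the caller (e.g. via
   commit_edge_insertions):

     if (can_hoist_insn_p (insn, val, live_at_edge))
       hoist_insn_to_edge (insn, e, val, gen_reg_rtx (GET_MODE (val)));  */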