Merge from mainline
[official-gcc.git] / gcc / rtlanal.c
blobde7efae2d5fdf8c9f43dc0caf9d054f069d2be8f
1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "toplev.h"
26 #include "rtl.h"
27 #include "hard-reg-set.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "tm_p.h"
31 #include "flags.h"
32 #include "basic-block.h"
34 /* Forward declarations */
35 static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
36 static void set_of_1 PARAMS ((rtx, rtx, void *));
37 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
38 static int computed_jump_p_1 PARAMS ((rtx));
39 static void parms_set PARAMS ((rtx, rtx, void *));
40 static bool hoist_test_store PARAMS ((rtx, rtx, regset));
41 static void hoist_update_store PARAMS ((rtx, rtx *, rtx, rtx));
43 /* Bit flags that specify the machine subtype we are compiling for.
44 Bits are tested using macros TARGET_... defined in the tm.h file
45 and set by `-m...' switches. Must be defined in rtlanal.c. */
47 int target_flags;
49 /* Return 1 if the value of X is unstable
50 (would be different at a different point in the program).
51 The frame pointer, arg pointer, etc. are considered stable
52 (within one function) and so is anything marked `unchanging'. */
54 int
55 rtx_unstable_p (x)
56 rtx x;
58 RTX_CODE code = GET_CODE (x);
59 int i;
60 const char *fmt;
62 switch (code)
64 case MEM:
65 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
67 case QUEUED:
68 return 1;
70 case ADDRESSOF:
71 case CONST:
72 case CONST_INT:
73 case CONST_DOUBLE:
74 case CONST_VECTOR:
75 case SYMBOL_REF:
76 case LABEL_REF:
77 return 0;
79 case REG:
80 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
81 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
82 /* The arg pointer varies if it is not a fixed register. */
83 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
84 || RTX_UNCHANGING_P (x))
85 return 0;
86 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
87 /* ??? When call-clobbered, the value is stable modulo the restore
88 that must happen after a call. This currently screws up local-alloc
89 into believing that the restore is not needed. */
90 if (x == pic_offset_table_rtx)
91 return 0;
92 #endif
93 return 1;
95 case ASM_OPERANDS:
96 if (MEM_VOLATILE_P (x))
97 return 1;
99 /* FALLTHROUGH */
101 default:
102 break;
105 fmt = GET_RTX_FORMAT (code);
106 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
107 if (fmt[i] == 'e')
109 if (rtx_unstable_p (XEXP (x, i)))
110 return 1;
112 else if (fmt[i] == 'E')
114 int j;
115 for (j = 0; j < XVECLEN (x, i); j++)
116 if (rtx_unstable_p (XVECEXP (x, i, j)))
117 return 1;
120 return 0;
123 /* Return 1 if X has a value that can vary even between two
124 executions of the program. 0 means X can be compared reliably
125 against certain constants or near-constants.
126 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
127 zero, we are slightly more conservative.
128 The frame pointer and the arg pointer are considered constant. */
131 rtx_varies_p (x, for_alias)
132 rtx x;
133 int for_alias;
135 RTX_CODE code = GET_CODE (x);
136 int i;
137 const char *fmt;
139 switch (code)
141 case MEM:
142 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
144 case QUEUED:
145 return 1;
147 case CONST:
148 case CONST_INT:
149 case CONST_DOUBLE:
150 case CONST_VECTOR:
151 case SYMBOL_REF:
152 case LABEL_REF:
153 return 0;
155 case REG:
156 /* Note that we have to test for the actual rtx used for the frame
157 and arg pointers and not just the register number in case we have
158 eliminated the frame and/or arg pointer and are using it
159 for pseudos. */
160 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
161 /* The arg pointer varies if it is not a fixed register. */
162 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
163 return 0;
164 if (x == pic_offset_table_rtx
165 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
166 /* ??? When call-clobbered, the value is stable modulo the restore
167 that must happen after a call. This currently screws up
168 local-alloc into believing that the restore is not needed, so we
169 must return 0 only if we are called from alias analysis. */
170 && for_alias
171 #endif
173 return 0;
174 return 1;
176 case LO_SUM:
177 /* The operand 0 of a LO_SUM is considered constant
178 (in fact it is related specifically to operand 1)
179 during alias analysis. */
180 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
181 || rtx_varies_p (XEXP (x, 1), for_alias);
183 case ASM_OPERANDS:
184 if (MEM_VOLATILE_P (x))
185 return 1;
187 /* FALLTHROUGH */
189 default:
190 break;
193 fmt = GET_RTX_FORMAT (code);
194 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
195 if (fmt[i] == 'e')
197 if (rtx_varies_p (XEXP (x, i), for_alias))
198 return 1;
200 else if (fmt[i] == 'E')
202 int j;
203 for (j = 0; j < XVECLEN (x, i); j++)
204 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
205 return 1;
208 return 0;
211 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
214 rtx_addr_can_trap_p (x)
215 rtx x;
217 enum rtx_code code = GET_CODE (x);
219 switch (code)
221 case SYMBOL_REF:
222 return SYMBOL_REF_WEAK (x);
224 case LABEL_REF:
225 return 0;
227 case REG:
228 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
229 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
230 || x == stack_pointer_rtx
231 /* The arg pointer varies if it is not a fixed register. */
232 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
233 return 0;
234 /* All of the virtual frame registers are stack references. */
235 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
236 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
237 return 0;
238 return 1;
240 case CONST:
241 return rtx_addr_can_trap_p (XEXP (x, 0));
243 case PLUS:
244 /* An address is assumed not to trap if it is an address that can't
245 trap plus a constant integer or it is the pic register plus a
246 constant. */
247 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
248 && GET_CODE (XEXP (x, 1)) == CONST_INT)
249 || (XEXP (x, 0) == pic_offset_table_rtx
250 && CONSTANT_P (XEXP (x, 1))));
252 case LO_SUM:
253 case PRE_MODIFY:
254 return rtx_addr_can_trap_p (XEXP (x, 1));
256 case PRE_DEC:
257 case PRE_INC:
258 case POST_DEC:
259 case POST_INC:
260 case POST_MODIFY:
261 return rtx_addr_can_trap_p (XEXP (x, 0));
263 default:
264 break;
267 /* If it isn't one of the case above, it can cause a trap. */
268 return 1;
271 /* Return 1 if X refers to a memory location whose address
272 cannot be compared reliably with constant addresses,
273 or if X refers to a BLKmode memory object.
274 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
275 zero, we are slightly more conservative. */
278 rtx_addr_varies_p (x, for_alias)
279 rtx x;
280 int for_alias;
282 enum rtx_code code;
283 int i;
284 const char *fmt;
286 if (x == 0)
287 return 0;
289 code = GET_CODE (x);
290 if (code == MEM)
291 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
293 fmt = GET_RTX_FORMAT (code);
294 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
295 if (fmt[i] == 'e')
297 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
298 return 1;
300 else if (fmt[i] == 'E')
302 int j;
303 for (j = 0; j < XVECLEN (x, i); j++)
304 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
305 return 1;
307 return 0;
310 /* Return the value of the integer term in X, if one is apparent;
311 otherwise return 0.
312 Only obvious integer terms are detected.
313 This is used in cse.c with the `related_value' field. */
315 HOST_WIDE_INT
316 get_integer_term (x)
317 rtx x;
319 if (GET_CODE (x) == CONST)
320 x = XEXP (x, 0);
322 if (GET_CODE (x) == MINUS
323 && GET_CODE (XEXP (x, 1)) == CONST_INT)
324 return - INTVAL (XEXP (x, 1));
325 if (GET_CODE (x) == PLUS
326 && GET_CODE (XEXP (x, 1)) == CONST_INT)
327 return INTVAL (XEXP (x, 1));
328 return 0;
331 /* If X is a constant, return the value sans apparent integer term;
332 otherwise return 0.
333 Only obvious integer terms are detected. */
336 get_related_value (x)
337 rtx x;
339 if (GET_CODE (x) != CONST)
340 return 0;
341 x = XEXP (x, 0);
342 if (GET_CODE (x) == PLUS
343 && GET_CODE (XEXP (x, 1)) == CONST_INT)
344 return XEXP (x, 0);
345 else if (GET_CODE (x) == MINUS
346 && GET_CODE (XEXP (x, 1)) == CONST_INT)
347 return XEXP (x, 0);
348 return 0;
351 /* Given a tablejump insn INSN, return the RTL expression for the offset
352 into the jump table. If the offset cannot be determined, then return
353 NULL_RTX.
355 If EARLIEST is non-zero, it is a pointer to a place where the earliest
356 insn used in locating the offset was found. */
359 get_jump_table_offset (insn, earliest)
360 rtx insn;
361 rtx *earliest;
363 rtx label;
364 rtx table;
365 rtx set;
366 rtx old_insn;
367 rtx x;
368 rtx old_x;
369 rtx y;
370 rtx old_y;
371 int i;
373 if (GET_CODE (insn) != JUMP_INSN
374 || ! (label = JUMP_LABEL (insn))
375 || ! (table = NEXT_INSN (label))
376 || GET_CODE (table) != JUMP_INSN
377 || (GET_CODE (PATTERN (table)) != ADDR_VEC
378 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
379 || ! (set = single_set (insn)))
380 return NULL_RTX;
382 x = SET_SRC (set);
384 /* Some targets (eg, ARM) emit a tablejump that also
385 contains the out-of-range target. */
386 if (GET_CODE (x) == IF_THEN_ELSE
387 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
388 x = XEXP (x, 1);
390 /* Search backwards and locate the expression stored in X. */
391 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
392 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
395 /* If X is an expression using a relative address then strip
396 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
397 or the jump table label. */
398 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
399 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
401 for (i = 0; i < 2; i++)
403 old_insn = insn;
404 y = XEXP (x, i);
406 if (y == pc_rtx || y == pic_offset_table_rtx)
407 break;
409 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
410 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
413 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
414 break;
417 if (i >= 2)
418 return NULL_RTX;
420 x = XEXP (x, 1 - i);
422 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
423 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
427 /* Strip off any sign or zero extension. */
428 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
430 x = XEXP (x, 0);
432 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
433 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
437 /* If X isn't a MEM then this isn't a tablejump we understand. */
438 if (GET_CODE (x) != MEM)
439 return NULL_RTX;
441 /* Strip off the MEM. */
442 x = XEXP (x, 0);
444 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
445 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
448 /* If X isn't a PLUS than this isn't a tablejump we understand. */
449 if (GET_CODE (x) != PLUS)
450 return NULL_RTX;
452 /* At this point we should have an expression representing the jump table
453 plus an offset. Examine each operand in order to determine which one
454 represents the jump table. Knowing that tells us that the other operand
455 must represent the offset. */
456 for (i = 0; i < 2; i++)
458 old_insn = insn;
459 y = XEXP (x, i);
461 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
462 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
465 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
466 && reg_mentioned_p (label, y))
467 break;
470 if (i >= 2)
471 return NULL_RTX;
473 x = XEXP (x, 1 - i);
475 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
476 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
477 for (i = 0; i < 2; i++)
478 if (XEXP (x, i) == pic_offset_table_rtx)
480 x = XEXP (x, 1 - i);
481 break;
484 if (earliest)
485 *earliest = insn;
487 /* Return the RTL expression representing the offset. */
488 return x;
491 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
492 a global register. */
494 static int
495 global_reg_mentioned_p_1 (loc, data)
496 rtx *loc;
497 void *data ATTRIBUTE_UNUSED;
499 int regno;
500 rtx x = *loc;
502 if (! x)
503 return 0;
505 switch (GET_CODE (x))
507 case SUBREG:
508 if (GET_CODE (SUBREG_REG (x)) == REG)
510 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
511 && global_regs[subreg_regno (x)])
512 return 1;
513 return 0;
515 break;
517 case REG:
518 regno = REGNO (x);
519 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
520 return 1;
521 return 0;
523 case SCRATCH:
524 case PC:
525 case CC0:
526 case CONST_INT:
527 case CONST_DOUBLE:
528 case CONST:
529 case LABEL_REF:
530 return 0;
532 case CALL:
533 /* A non-constant call might use a global register. */
534 return 1;
536 default:
537 break;
540 return 0;
543 /* Returns non-zero if X mentions a global register. */
546 global_reg_mentioned_p (x)
547 rtx x;
550 if (INSN_P (x))
552 if (GET_CODE (x) == CALL_INSN)
554 if (! CONST_OR_PURE_CALL_P (x))
555 return 1;
556 x = CALL_INSN_FUNCTION_USAGE (x);
557 if (x == 0)
558 return 0;
560 else
561 x = PATTERN (x);
564 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
567 /* Return the number of places FIND appears within X. If COUNT_DEST is
568 zero, we do not count occurrences inside the destination of a SET. */
571 count_occurrences (x, find, count_dest)
572 rtx x, find;
573 int count_dest;
575 int i, j;
576 enum rtx_code code;
577 const char *format_ptr;
578 int count;
580 if (x == find)
581 return 1;
583 code = GET_CODE (x);
585 switch (code)
587 case REG:
588 case CONST_INT:
589 case CONST_DOUBLE:
590 case CONST_VECTOR:
591 case SYMBOL_REF:
592 case CODE_LABEL:
593 case PC:
594 case CC0:
595 return 0;
597 case MEM:
598 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
599 return 1;
600 break;
602 case SET:
603 if (SET_DEST (x) == find && ! count_dest)
604 return count_occurrences (SET_SRC (x), find, count_dest);
605 break;
607 default:
608 break;
611 format_ptr = GET_RTX_FORMAT (code);
612 count = 0;
614 for (i = 0; i < GET_RTX_LENGTH (code); i++)
616 switch (*format_ptr++)
618 case 'e':
619 count += count_occurrences (XEXP (x, i), find, count_dest);
620 break;
622 case 'E':
623 for (j = 0; j < XVECLEN (x, i); j++)
624 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
625 break;
628 return count;
631 /* Nonzero if register REG appears somewhere within IN.
632 Also works if REG is not a register; in this case it checks
633 for a subexpression of IN that is Lisp "equal" to REG. */
636 reg_mentioned_p (reg, in)
637 rtx reg, in;
639 const char *fmt;
640 int i;
641 enum rtx_code code;
643 if (in == 0)
644 return 0;
646 if (reg == in)
647 return 1;
649 if (GET_CODE (in) == LABEL_REF)
650 return reg == XEXP (in, 0);
652 code = GET_CODE (in);
654 switch (code)
656 /* Compare registers by number. */
657 case REG:
658 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
660 /* These codes have no constituent expressions
661 and are unique. */
662 case SCRATCH:
663 case CC0:
664 case PC:
665 return 0;
667 case CONST_INT:
668 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
670 case CONST_VECTOR:
671 case CONST_DOUBLE:
672 /* These are kept unique for a given value. */
673 return 0;
675 default:
676 break;
679 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
680 return 1;
682 fmt = GET_RTX_FORMAT (code);
684 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
686 if (fmt[i] == 'E')
688 int j;
689 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
690 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
691 return 1;
693 else if (fmt[i] == 'e'
694 && reg_mentioned_p (reg, XEXP (in, i)))
695 return 1;
697 return 0;
700 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
701 no CODE_LABEL insn. */
704 no_labels_between_p (beg, end)
705 rtx beg, end;
707 rtx p;
708 if (beg == end)
709 return 0;
710 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
711 if (GET_CODE (p) == CODE_LABEL)
712 return 0;
713 return 1;
716 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
717 no JUMP_INSN insn. */
720 no_jumps_between_p (beg, end)
721 rtx beg, end;
723 rtx p;
724 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
725 if (GET_CODE (p) == JUMP_INSN)
726 return 0;
727 return 1;
730 /* Nonzero if register REG is used in an insn between
731 FROM_INSN and TO_INSN (exclusive of those two). */
734 reg_used_between_p (reg, from_insn, to_insn)
735 rtx reg, from_insn, to_insn;
737 rtx insn;
739 if (from_insn == to_insn)
740 return 0;
742 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
743 if (INSN_P (insn)
744 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
745 || (GET_CODE (insn) == CALL_INSN
746 && (find_reg_fusage (insn, USE, reg)
747 || find_reg_fusage (insn, CLOBBER, reg)))))
748 return 1;
749 return 0;
752 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
753 is entirely replaced by a new value and the only use is as a SET_DEST,
754 we do not consider it a reference. */
757 reg_referenced_p (x, body)
758 rtx x;
759 rtx body;
761 int i;
763 switch (GET_CODE (body))
765 case SET:
766 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
767 return 1;
769 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
770 of a REG that occupies all of the REG, the insn references X if
771 it is mentioned in the destination. */
772 if (GET_CODE (SET_DEST (body)) != CC0
773 && GET_CODE (SET_DEST (body)) != PC
774 && GET_CODE (SET_DEST (body)) != REG
775 && ! (GET_CODE (SET_DEST (body)) == SUBREG
776 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
777 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
778 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
779 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
780 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
781 && reg_overlap_mentioned_p (x, SET_DEST (body)))
782 return 1;
783 return 0;
785 case ASM_OPERANDS:
786 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
787 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
788 return 1;
789 return 0;
791 case CALL:
792 case USE:
793 case IF_THEN_ELSE:
794 return reg_overlap_mentioned_p (x, body);
796 case TRAP_IF:
797 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
799 case PREFETCH:
800 return reg_overlap_mentioned_p (x, XEXP (body, 0));
802 case UNSPEC:
803 case UNSPEC_VOLATILE:
804 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
805 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
806 return 1;
807 return 0;
809 case PARALLEL:
810 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
811 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
812 return 1;
813 return 0;
815 case CLOBBER:
816 if (GET_CODE (XEXP (body, 0)) == MEM)
817 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
818 return 1;
819 return 0;
821 case COND_EXEC:
822 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
823 return 1;
824 return reg_referenced_p (x, COND_EXEC_CODE (body));
826 default:
827 return 0;
831 /* Nonzero if register REG is referenced in an insn between
832 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
833 not count. */
836 reg_referenced_between_p (reg, from_insn, to_insn)
837 rtx reg, from_insn, to_insn;
839 rtx insn;
841 if (from_insn == to_insn)
842 return 0;
844 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
845 if (INSN_P (insn)
846 && (reg_referenced_p (reg, PATTERN (insn))
847 || (GET_CODE (insn) == CALL_INSN
848 && find_reg_fusage (insn, USE, reg))))
849 return 1;
850 return 0;
853 /* Nonzero if register REG is set or clobbered in an insn between
854 FROM_INSN and TO_INSN (exclusive of those two). */
857 reg_set_between_p (reg, from_insn, to_insn)
858 rtx reg, from_insn, to_insn;
860 rtx insn;
862 if (from_insn == to_insn)
863 return 0;
865 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
866 if (INSN_P (insn) && reg_set_p (reg, insn))
867 return 1;
868 return 0;
871 /* Internals of reg_set_between_p. */
873 reg_set_p (reg, insn)
874 rtx reg, insn;
876 rtx body = insn;
878 /* We can be passed an insn or part of one. If we are passed an insn,
879 check if a side-effect of the insn clobbers REG. */
880 if (INSN_P (insn))
882 if (FIND_REG_INC_NOTE (insn, reg)
883 || (GET_CODE (insn) == CALL_INSN
884 /* We'd like to test call_used_regs here, but rtlanal.c can't
885 reference that variable due to its use in genattrtab. So
886 we'll just be more conservative.
888 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
889 information holds all clobbered registers. */
890 && ((GET_CODE (reg) == REG
891 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
892 || GET_CODE (reg) == MEM
893 || find_reg_fusage (insn, CLOBBER, reg))))
894 return 1;
896 body = PATTERN (insn);
899 return set_of (reg, insn) != NULL_RTX;
902 /* Similar to reg_set_between_p, but check all registers in X. Return 0
903 only if none of them are modified between START and END. Do not
904 consider non-registers one way or the other. */
907 regs_set_between_p (x, start, end)
908 rtx x;
909 rtx start, end;
911 enum rtx_code code = GET_CODE (x);
912 const char *fmt;
913 int i, j;
915 switch (code)
917 case CONST_INT:
918 case CONST_DOUBLE:
919 case CONST_VECTOR:
920 case CONST:
921 case SYMBOL_REF:
922 case LABEL_REF:
923 case PC:
924 case CC0:
925 return 0;
927 case REG:
928 return reg_set_between_p (x, start, end);
930 default:
931 break;
934 fmt = GET_RTX_FORMAT (code);
935 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
937 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
938 return 1;
940 else if (fmt[i] == 'E')
941 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
942 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
943 return 1;
946 return 0;
949 /* Similar to reg_set_between_p, but check all registers in X. Return 0
950 only if none of them are modified between START and END. Return 1 if
951 X contains a MEM; this routine does not perform any memory aliasing. */
954 modified_between_p (x, start, end)
955 rtx x;
956 rtx start, end;
958 enum rtx_code code = GET_CODE (x);
959 const char *fmt;
960 int i, j;
962 switch (code)
964 case CONST_INT:
965 case CONST_DOUBLE:
966 case CONST_VECTOR:
967 case CONST:
968 case SYMBOL_REF:
969 case LABEL_REF:
970 return 0;
972 case PC:
973 case CC0:
974 return 1;
976 case MEM:
977 /* If the memory is not constant, assume it is modified. If it is
978 constant, we still have to check the address. */
979 if (! RTX_UNCHANGING_P (x))
980 return 1;
981 break;
983 case REG:
984 return reg_set_between_p (x, start, end);
986 default:
987 break;
990 fmt = GET_RTX_FORMAT (code);
991 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
993 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
994 return 1;
996 else if (fmt[i] == 'E')
997 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
998 if (modified_between_p (XVECEXP (x, i, j), start, end))
999 return 1;
1002 return 0;
1005 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1006 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1007 does not perform any memory aliasing. */
1010 modified_in_p (x, insn)
1011 rtx x;
1012 rtx insn;
1014 enum rtx_code code = GET_CODE (x);
1015 const char *fmt;
1016 int i, j;
1018 switch (code)
1020 case CONST_INT:
1021 case CONST_DOUBLE:
1022 case CONST_VECTOR:
1023 case CONST:
1024 case SYMBOL_REF:
1025 case LABEL_REF:
1026 return 0;
1028 case PC:
1029 case CC0:
1030 return 1;
1032 case MEM:
1033 /* If the memory is not constant, assume it is modified. If it is
1034 constant, we still have to check the address. */
1035 if (! RTX_UNCHANGING_P (x))
1036 return 1;
1037 break;
1039 case REG:
1040 return reg_set_p (x, insn);
1042 default:
1043 break;
1046 fmt = GET_RTX_FORMAT (code);
1047 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1049 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1050 return 1;
1052 else if (fmt[i] == 'E')
1053 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1054 if (modified_in_p (XVECEXP (x, i, j), insn))
1055 return 1;
1058 return 0;
1061 /* Return true if anything in insn X is (anti,output,true) dependent on
1062 anything in insn Y. */
1065 insn_dependent_p (x, y)
1066 rtx x, y;
1068 rtx tmp;
1070 if (! INSN_P (x) || ! INSN_P (y))
1071 abort ();
1073 tmp = PATTERN (y);
1074 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1075 if (tmp == NULL_RTX)
1076 return 1;
1078 tmp = PATTERN (x);
1079 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1080 if (tmp == NULL_RTX)
1081 return 1;
1083 return 0;
1086 /* A helper routine for insn_dependent_p called through note_stores. */
1088 static void
1089 insn_dependent_p_1 (x, pat, data)
1090 rtx x;
1091 rtx pat ATTRIBUTE_UNUSED;
1092 void *data;
1094 rtx * pinsn = (rtx *) data;
1096 if (*pinsn && reg_mentioned_p (x, *pinsn))
1097 *pinsn = NULL_RTX;
1100 /* Helper function for set_of. */
1101 struct set_of_data
1103 rtx found;
1104 rtx pat;
1107 static void
1108 set_of_1 (x, pat, data1)
1109 rtx x;
1110 rtx pat;
1111 void *data1;
1113 struct set_of_data *data = (struct set_of_data *) (data1);
1114 if (rtx_equal_p (x, data->pat)
1115 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1116 data->found = pat;
1119 /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
1120 (either directly or via STRICT_LOW_PART and similar modifiers). */
1122 set_of (pat, insn)
1123 rtx pat, insn;
1125 struct set_of_data data;
1126 data.found = NULL_RTX;
1127 data.pat = pat;
1128 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1129 return data.found;
1132 /* Given an INSN, return a SET expression if this insn has only a single SET.
1133 It may also have CLOBBERs, USEs, or SET whose output
1134 will not be used, which we ignore. */
1137 single_set_2 (insn, pat)
1138 rtx insn, pat;
1140 rtx set = NULL;
1141 int set_verified = 1;
1142 int i;
1144 if (GET_CODE (pat) == PARALLEL)
1146 for (i = 0; i < XVECLEN (pat, 0); i++)
1148 rtx sub = XVECEXP (pat, 0, i);
1149 switch (GET_CODE (sub))
1151 case USE:
1152 case CLOBBER:
1153 break;
1155 case SET:
1156 /* We can consider insns having multiple sets, where all
1157 but one are dead as single set insns. In common case
1158 only single set is present in the pattern so we want
1159 to avoid checking for REG_UNUSED notes unless necessary.
1161 When we reach set first time, we just expect this is
1162 the single set we are looking for and only when more
1163 sets are found in the insn, we check them. */
1164 if (!set_verified)
1166 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1167 && !side_effects_p (set))
1168 set = NULL;
1169 else
1170 set_verified = 1;
1172 if (!set)
1173 set = sub, set_verified = 0;
1174 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1175 || side_effects_p (sub))
1176 return NULL_RTX;
1177 break;
1179 default:
1180 return NULL_RTX;
1184 return set;
1187 /* Given an INSN, return nonzero if it has more than one SET, else return
1188 zero. */
1191 multiple_sets (insn)
1192 rtx insn;
1194 int found;
1195 int i;
1197 /* INSN must be an insn. */
1198 if (! INSN_P (insn))
1199 return 0;
1201 /* Only a PARALLEL can have multiple SETs. */
1202 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1204 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1205 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1207 /* If we have already found a SET, then return now. */
1208 if (found)
1209 return 1;
1210 else
1211 found = 1;
1215 /* Either zero or one SET. */
1216 return 0;
1219 /* Return nonzero if the destination of SET equals the source
1220 and there are no side effects. */
1223 set_noop_p (set)
1224 rtx set;
1226 rtx src = SET_SRC (set);
1227 rtx dst = SET_DEST (set);
1229 if (side_effects_p (src) || side_effects_p (dst))
1230 return 0;
1232 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1233 return rtx_equal_p (dst, src);
1235 if (dst == pc_rtx && src == pc_rtx)
1236 return 1;
1238 if (GET_CODE (dst) == SIGN_EXTRACT
1239 || GET_CODE (dst) == ZERO_EXTRACT)
1240 return rtx_equal_p (XEXP (dst, 0), src)
1241 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1243 if (GET_CODE (dst) == STRICT_LOW_PART)
1244 dst = XEXP (dst, 0);
1246 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1248 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1249 return 0;
1250 src = SUBREG_REG (src);
1251 dst = SUBREG_REG (dst);
1254 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1255 && REGNO (src) == REGNO (dst));
1258 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1259 value to itself. */
1262 noop_move_p (insn)
1263 rtx insn;
1265 rtx pat = PATTERN (insn);
1267 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1268 return 1;
1270 /* Insns carrying these notes are useful later on. */
1271 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1272 return 0;
1274 /* For now treat an insn with a REG_RETVAL note as a
1275 a special insn which should not be considered a no-op. */
1276 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1277 return 0;
1279 if (GET_CODE (pat) == SET && set_noop_p (pat))
1280 return 1;
1282 if (GET_CODE (pat) == PARALLEL)
1284 int i;
1285 /* If nothing but SETs of registers to themselves,
1286 this insn can also be deleted. */
1287 for (i = 0; i < XVECLEN (pat, 0); i++)
1289 rtx tem = XVECEXP (pat, 0, i);
1291 if (GET_CODE (tem) == USE
1292 || GET_CODE (tem) == CLOBBER)
1293 continue;
1295 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1296 return 0;
1299 return 1;
1301 return 0;
1305 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1306 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1307 If the object was modified, if we hit a partial assignment to X, or hit a
1308 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1309 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1310 be the src. */
1313 find_last_value (x, pinsn, valid_to, allow_hwreg)
1314 rtx x;
1315 rtx *pinsn;
1316 rtx valid_to;
1317 int allow_hwreg;
1319 rtx p;
1321 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1322 p = PREV_INSN (p))
1323 if (INSN_P (p))
1325 rtx set = single_set (p);
1326 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1328 if (set && rtx_equal_p (x, SET_DEST (set)))
1330 rtx src = SET_SRC (set);
1332 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1333 src = XEXP (note, 0);
1335 if ((valid_to == NULL_RTX
1336 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1337 /* Reject hard registers because we don't usually want
1338 to use them; we'd rather use a pseudo. */
1339 && (! (GET_CODE (src) == REG
1340 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1342 *pinsn = p;
1343 return src;
1347 /* If set in non-simple way, we don't have a value. */
1348 if (reg_set_p (x, p))
1349 break;
1352 return x;
1355 /* Return nonzero if register in range [REGNO, ENDREGNO)
1356 appears either explicitly or implicitly in X
1357 other than being stored into.
1359 References contained within the substructure at LOC do not count.
1360 LOC may be zero, meaning don't ignore anything. */
1363 refers_to_regno_p (regno, endregno, x, loc)
1364 unsigned int regno, endregno;
1365 rtx x;
1366 rtx *loc;
1368 int i;
1369 unsigned int x_regno;
1370 RTX_CODE code;
1371 const char *fmt;
1373 repeat:
1374 /* The contents of a REG_NONNEG note is always zero, so we must come here
1375 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1376 if (x == 0)
1377 return 0;
1379 code = GET_CODE (x);
1381 switch (code)
1383 case REG:
1384 x_regno = REGNO (x);
1386 /* If we modifying the stack, frame, or argument pointer, it will
1387 clobber a virtual register. In fact, we could be more precise,
1388 but it isn't worth it. */
1389 if ((x_regno == STACK_POINTER_REGNUM
1390 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1391 || x_regno == ARG_POINTER_REGNUM
1392 #endif
1393 || x_regno == FRAME_POINTER_REGNUM)
1394 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1395 return 1;
1397 return (endregno > x_regno
1398 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1399 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1400 : 1));
1402 case SUBREG:
1403 /* If this is a SUBREG of a hard reg, we can see exactly which
1404 registers are being modified. Otherwise, handle normally. */
1405 if (GET_CODE (SUBREG_REG (x)) == REG
1406 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1408 unsigned int inner_regno = subreg_regno (x);
1409 unsigned int inner_endregno
1410 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1411 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1413 return endregno > inner_regno && regno < inner_endregno;
1415 break;
1417 case CLOBBER:
1418 case SET:
1419 if (&SET_DEST (x) != loc
1420 /* Note setting a SUBREG counts as referring to the REG it is in for
1421 a pseudo but not for hard registers since we can
1422 treat each word individually. */
1423 && ((GET_CODE (SET_DEST (x)) == SUBREG
1424 && loc != &SUBREG_REG (SET_DEST (x))
1425 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1426 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1427 && refers_to_regno_p (regno, endregno,
1428 SUBREG_REG (SET_DEST (x)), loc))
1429 || (GET_CODE (SET_DEST (x)) != REG
1430 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1431 return 1;
1433 if (code == CLOBBER || loc == &SET_SRC (x))
1434 return 0;
1435 x = SET_SRC (x);
1436 goto repeat;
1438 default:
1439 break;
1442 /* X does not match, so try its subexpressions. */
1444 fmt = GET_RTX_FORMAT (code);
1445 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1447 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1449 if (i == 0)
1451 x = XEXP (x, 0);
1452 goto repeat;
1454 else
1455 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1456 return 1;
1458 else if (fmt[i] == 'E')
1460 int j;
1461 for (j = XVECLEN (x, i) - 1; j >=0; j--)
1462 if (loc != &XVECEXP (x, i, j)
1463 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1464 return 1;
1467 return 0;
1470 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1471 we check if any register number in X conflicts with the relevant register
1472 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1473 contains a MEM (we don't bother checking for memory addresses that can't
1474 conflict because we expect this to be a rare case. */
1477 reg_overlap_mentioned_p (x, in)
1478 rtx x, in;
1480 unsigned int regno, endregno;
1482 /* Overly conservative. */
1483 if (GET_CODE (x) == STRICT_LOW_PART)
1484 x = XEXP (x, 0);
1486 /* If either argument is a constant, then modifying X can not affect IN. */
1487 if (CONSTANT_P (x) || CONSTANT_P (in))
1488 return 0;
1490 switch (GET_CODE (x))
1492 case SUBREG:
1493 regno = REGNO (SUBREG_REG (x));
1494 if (regno < FIRST_PSEUDO_REGISTER)
1495 regno = subreg_regno (x);
1496 goto do_reg;
1498 case REG:
1499 regno = REGNO (x);
1500 do_reg:
1501 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1502 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1503 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1505 case MEM:
1507 const char *fmt;
1508 int i;
1510 if (GET_CODE (in) == MEM)
1511 return 1;
1513 fmt = GET_RTX_FORMAT (GET_CODE (in));
1514 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1515 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1516 return 1;
1518 return 0;
1521 case SCRATCH:
1522 case PC:
1523 case CC0:
1524 return reg_mentioned_p (x, in);
1526 case PARALLEL:
1528 int i;
1530 /* If any register in here refers to it we return true. */
1531 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1532 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1533 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1534 return 1;
1535 return 0;
1538 default:
1539 break;
1542 abort ();
1545 /* Return the last value to which REG was set prior to INSN. If we can't
1546 find it easily, return 0.
1548 We only return a REG, SUBREG, or constant because it is too hard to
1549 check if a MEM remains unchanged. */
1552 reg_set_last (x, insn)
1553 rtx x;
1554 rtx insn;
1556 rtx orig_insn = insn;
1558 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1559 Stop when we reach a label or X is a hard reg and we reach a
1560 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1562 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1564 /* We compare with <= here, because reg_set_last_last_regno
1565 is actually the number of the first reg *not* in X. */
1566 for (;
1567 insn && GET_CODE (insn) != CODE_LABEL
1568 && ! (GET_CODE (insn) == CALL_INSN
1569 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1570 insn = PREV_INSN (insn))
1571 if (INSN_P (insn))
1573 rtx set = set_of (x, insn);
1574 /* OK, this function modify our register. See if we understand it. */
1575 if (set)
1577 rtx last_value;
1578 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1579 return 0;
1580 last_value = SET_SRC (x);
1581 if (CONSTANT_P (last_value)
1582 || ((GET_CODE (last_value) == REG
1583 || GET_CODE (last_value) == SUBREG)
1584 && ! reg_set_between_p (last_value,
1585 insn, orig_insn)))
1586 return last_value;
1587 else
1588 return 0;
1592 return 0;
1595 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1596 (X would be the pattern of an insn).
1597 FUN receives two arguments:
1598 the REG, MEM, CC0 or PC being stored in or clobbered,
1599 the SET or CLOBBER rtx that does the store.
1601 If the item being stored in or clobbered is a SUBREG of a hard register,
1602 the SUBREG will be passed. */
1604 void
1605 note_stores (x, fun, data)
1606 rtx x;
1607 void (*fun) PARAMS ((rtx, rtx, void *));
1608 void *data;
1610 int i;
1612 if (GET_CODE (x) == COND_EXEC)
1613 x = COND_EXEC_CODE (x);
1615 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1617 rtx dest = SET_DEST (x);
1619 while ((GET_CODE (dest) == SUBREG
1620 && (GET_CODE (SUBREG_REG (dest)) != REG
1621 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1622 || GET_CODE (dest) == ZERO_EXTRACT
1623 || GET_CODE (dest) == SIGN_EXTRACT
1624 || GET_CODE (dest) == STRICT_LOW_PART)
1625 dest = XEXP (dest, 0);
1627 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1628 each of whose first operand is a register. */
1629 if (GET_CODE (dest) == PARALLEL)
1631 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1632 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1633 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1635 else
1636 (*fun) (dest, x, data);
1639 else if (GET_CODE (x) == PARALLEL)
1640 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1641 note_stores (XVECEXP (x, 0, i), fun, data);
1644 /* Like notes_stores, but call FUN for each expression that is being
1645 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1646 FUN for each expression, not any interior subexpressions. FUN receives a
1647 pointer to the expression and the DATA passed to this function.
1649 Note that this is not quite the same test as that done in reg_referenced_p
1650 since that considers something as being referenced if it is being
1651 partially set, while we do not. */
1653 void
1654 note_uses (pbody, fun, data)
1655 rtx *pbody;
1656 void (*fun) PARAMS ((rtx *, void *));
1657 void *data;
1659 rtx body = *pbody;
1660 int i;
1662 switch (GET_CODE (body))
1664 case COND_EXEC:
1665 (*fun) (&COND_EXEC_TEST (body), data);
1666 note_uses (&COND_EXEC_CODE (body), fun, data);
1667 return;
1669 case PARALLEL:
1670 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1671 note_uses (&XVECEXP (body, 0, i), fun, data);
1672 return;
1674 case USE:
1675 (*fun) (&XEXP (body, 0), data);
1676 return;
1678 case ASM_OPERANDS:
1679 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1680 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1681 return;
1683 case TRAP_IF:
1684 (*fun) (&TRAP_CONDITION (body), data);
1685 return;
1687 case PREFETCH:
1688 (*fun) (&XEXP (body, 0), data);
1689 return;
1691 case UNSPEC:
1692 case UNSPEC_VOLATILE:
1693 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1694 (*fun) (&XVECEXP (body, 0, i), data);
1695 return;
1697 case CLOBBER:
1698 if (GET_CODE (XEXP (body, 0)) == MEM)
1699 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1700 return;
1702 case SET:
1704 rtx dest = SET_DEST (body);
1706 /* For sets we replace everything in source plus registers in memory
1707 expression in store and operands of a ZERO_EXTRACT. */
1708 (*fun) (&SET_SRC (body), data);
1710 if (GET_CODE (dest) == ZERO_EXTRACT)
1712 (*fun) (&XEXP (dest, 1), data);
1713 (*fun) (&XEXP (dest, 2), data);
1716 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1717 dest = XEXP (dest, 0);
1719 if (GET_CODE (dest) == MEM)
1720 (*fun) (&XEXP (dest, 0), data);
1722 return;
1724 default:
1725 /* All the other possibilities never store. */
1726 (*fun) (pbody, data);
1727 return;
1731 /* Return nonzero if X's old contents don't survive after INSN.
1732 This will be true if X is (cc0) or if X is a register and
1733 X dies in INSN or because INSN entirely sets X.
1735 "Entirely set" means set directly and not through a SUBREG,
1736 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1737 Likewise, REG_INC does not count.
1739 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1740 but for this use that makes no difference, since regs don't overlap
1741 during their lifetimes. Therefore, this function may be used
1742 at any time after deaths have been computed (in flow.c).
1744 If REG is a hard reg that occupies multiple machine registers, this
1745 function will only return 1 if each of those registers will be replaced
1746 by INSN. */
1749 dead_or_set_p (insn, x)
1750 rtx insn;
1751 rtx x;
1753 unsigned int regno, last_regno;
1754 unsigned int i;
1756 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1757 if (GET_CODE (x) == CC0)
1758 return 1;
1760 if (GET_CODE (x) != REG)
1761 abort ();
1763 regno = REGNO (x);
1764 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1765 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1767 for (i = regno; i <= last_regno; i++)
1768 if (! dead_or_set_regno_p (insn, i))
1769 return 0;
1771 return 1;
1774 /* Utility function for dead_or_set_p to check an individual register. Also
1775 called from flow.c. */
1778 dead_or_set_regno_p (insn, test_regno)
1779 rtx insn;
1780 unsigned int test_regno;
1782 unsigned int regno, endregno;
1783 rtx pattern;
1785 /* See if there is a death note for something that includes TEST_REGNO. */
1786 if (find_regno_note (insn, REG_DEAD, test_regno))
1787 return 1;
1789 if (GET_CODE (insn) == CALL_INSN
1790 && find_regno_fusage (insn, CLOBBER, test_regno))
1791 return 1;
1793 pattern = PATTERN (insn);
1795 if (GET_CODE (pattern) == COND_EXEC)
1796 pattern = COND_EXEC_CODE (pattern);
1798 if (GET_CODE (pattern) == SET)
1800 rtx dest = SET_DEST (PATTERN (insn));
1802 /* A value is totally replaced if it is the destination or the
1803 destination is a SUBREG of REGNO that does not change the number of
1804 words in it. */
1805 if (GET_CODE (dest) == SUBREG
1806 && (((GET_MODE_SIZE (GET_MODE (dest))
1807 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1808 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1809 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1810 dest = SUBREG_REG (dest);
1812 if (GET_CODE (dest) != REG)
1813 return 0;
1815 regno = REGNO (dest);
1816 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1817 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1819 return (test_regno >= regno && test_regno < endregno);
1821 else if (GET_CODE (pattern) == PARALLEL)
1823 int i;
1825 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1827 rtx body = XVECEXP (pattern, 0, i);
1829 if (GET_CODE (body) == COND_EXEC)
1830 body = COND_EXEC_CODE (body);
1832 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1834 rtx dest = SET_DEST (body);
1836 if (GET_CODE (dest) == SUBREG
1837 && (((GET_MODE_SIZE (GET_MODE (dest))
1838 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1839 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1840 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1841 dest = SUBREG_REG (dest);
1843 if (GET_CODE (dest) != REG)
1844 continue;
1846 regno = REGNO (dest);
1847 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1848 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1850 if (test_regno >= regno && test_regno < endregno)
1851 return 1;
1856 return 0;
1859 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1860 If DATUM is nonzero, look for one whose datum is DATUM. */
1863 find_reg_note (insn, kind, datum)
1864 rtx insn;
1865 enum reg_note kind;
1866 rtx datum;
1868 rtx link;
1870 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1871 if (! INSN_P (insn))
1872 return 0;
1874 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1875 if (REG_NOTE_KIND (link) == kind
1876 && (datum == 0 || datum == XEXP (link, 0)))
1877 return link;
1878 return 0;
1881 /* Return the reg-note of kind KIND in insn INSN which applies to register
1882 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1883 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1884 it might be the case that the note overlaps REGNO. */
1887 find_regno_note (insn, kind, regno)
1888 rtx insn;
1889 enum reg_note kind;
1890 unsigned int regno;
1892 rtx link;
1894 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1895 if (! INSN_P (insn))
1896 return 0;
1898 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1899 if (REG_NOTE_KIND (link) == kind
1900 /* Verify that it is a register, so that scratch and MEM won't cause a
1901 problem here. */
1902 && GET_CODE (XEXP (link, 0)) == REG
1903 && REGNO (XEXP (link, 0)) <= regno
1904 && ((REGNO (XEXP (link, 0))
1905 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1906 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1907 GET_MODE (XEXP (link, 0)))))
1908 > regno))
1909 return link;
1910 return 0;
1913 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1914 has such a note. */
1917 find_reg_equal_equiv_note (insn)
1918 rtx insn;
1920 rtx note;
1922 if (single_set (insn) == 0)
1923 return 0;
1924 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
1925 return note;
1926 else
1927 return find_reg_note (insn, REG_EQUAL, NULL_RTX);
1930 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1931 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1934 find_reg_fusage (insn, code, datum)
1935 rtx insn;
1936 enum rtx_code code;
1937 rtx datum;
1939 /* If it's not a CALL_INSN, it can't possibly have a
1940 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1941 if (GET_CODE (insn) != CALL_INSN)
1942 return 0;
1944 if (! datum)
1945 abort ();
1947 if (GET_CODE (datum) != REG)
1949 rtx link;
1951 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1952 link;
1953 link = XEXP (link, 1))
1954 if (GET_CODE (XEXP (link, 0)) == code
1955 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1956 return 1;
1958 else
1960 unsigned int regno = REGNO (datum);
1962 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1963 to pseudo registers, so don't bother checking. */
1965 if (regno < FIRST_PSEUDO_REGISTER)
1967 unsigned int end_regno
1968 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1969 unsigned int i;
1971 for (i = regno; i < end_regno; i++)
1972 if (find_regno_fusage (insn, code, i))
1973 return 1;
1977 return 0;
1980 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1981 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1984 find_regno_fusage (insn, code, regno)
1985 rtx insn;
1986 enum rtx_code code;
1987 unsigned int regno;
1989 rtx link;
1991 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1992 to pseudo registers, so don't bother checking. */
1994 if (regno >= FIRST_PSEUDO_REGISTER
1995 || GET_CODE (insn) != CALL_INSN )
1996 return 0;
1998 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2000 unsigned int regnote;
2001 rtx op, reg;
2003 if (GET_CODE (op = XEXP (link, 0)) == code
2004 && GET_CODE (reg = XEXP (op, 0)) == REG
2005 && (regnote = REGNO (reg)) <= regno
2006 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2007 return 1;
2010 return 0;
2013 /* Return true if INSN is a call to a pure function. */
2016 pure_call_p (insn)
2017 rtx insn;
2019 rtx link;
2021 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2022 return 0;
2024 /* Look for the note that differentiates const and pure functions. */
2025 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2027 rtx u, m;
2029 if (GET_CODE (u = XEXP (link, 0)) == USE
2030 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2031 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2032 return 1;
2035 return 0;
2038 /* Remove register note NOTE from the REG_NOTES of INSN. */
2040 void
2041 remove_note (insn, note)
2042 rtx insn;
2043 rtx note;
2045 rtx link;
2047 if (note == NULL_RTX)
2048 return;
2050 if (REG_NOTES (insn) == note)
2052 REG_NOTES (insn) = XEXP (note, 1);
2053 return;
2056 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2057 if (XEXP (link, 1) == note)
2059 XEXP (link, 1) = XEXP (note, 1);
2060 return;
2063 abort ();
2066 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2067 return 1 if it is found. A simple equality test is used to determine if
2068 NODE matches. */
2071 in_expr_list_p (listp, node)
2072 rtx listp;
2073 rtx node;
2075 rtx x;
2077 for (x = listp; x; x = XEXP (x, 1))
2078 if (node == XEXP (x, 0))
2079 return 1;
2081 return 0;
2084 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2085 remove that entry from the list if it is found.
2087 A simple equality test is used to determine if NODE matches. */
2089 void
2090 remove_node_from_expr_list (node, listp)
2091 rtx node;
2092 rtx *listp;
2094 rtx temp = *listp;
2095 rtx prev = NULL_RTX;
2097 while (temp)
2099 if (node == XEXP (temp, 0))
2101 /* Splice the node out of the list. */
2102 if (prev)
2103 XEXP (prev, 1) = XEXP (temp, 1);
2104 else
2105 *listp = XEXP (temp, 1);
2107 return;
2110 prev = temp;
2111 temp = XEXP (temp, 1);
2115 /* Nonzero if X contains any volatile instructions. These are instructions
2116 which may cause unpredictable machine state instructions, and thus no
2117 instructions should be moved or combined across them. This includes
2118 only volatile asms and UNSPEC_VOLATILE instructions. */
2121 volatile_insn_p (x)
2122 rtx x;
2124 RTX_CODE code;
2126 code = GET_CODE (x);
2127 switch (code)
2129 case LABEL_REF:
2130 case SYMBOL_REF:
2131 case CONST_INT:
2132 case CONST:
2133 case CONST_DOUBLE:
2134 case CONST_VECTOR:
2135 case CC0:
2136 case PC:
2137 case REG:
2138 case SCRATCH:
2139 case CLOBBER:
2140 case ASM_INPUT:
2141 case ADDR_VEC:
2142 case ADDR_DIFF_VEC:
2143 case CALL:
2144 case MEM:
2145 return 0;
2147 case UNSPEC_VOLATILE:
2148 /* case TRAP_IF: This isn't clear yet. */
2149 return 1;
2151 case ASM_OPERANDS:
2152 if (MEM_VOLATILE_P (x))
2153 return 1;
2155 default:
2156 break;
2159 /* Recursively scan the operands of this expression. */
2162 const char *fmt = GET_RTX_FORMAT (code);
2163 int i;
2165 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2167 if (fmt[i] == 'e')
2169 if (volatile_insn_p (XEXP (x, i)))
2170 return 1;
2172 else if (fmt[i] == 'E')
2174 int j;
2175 for (j = 0; j < XVECLEN (x, i); j++)
2176 if (volatile_insn_p (XVECEXP (x, i, j)))
2177 return 1;
2181 return 0;
2184 /* Nonzero if X contains any volatile memory references
2185 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2188 volatile_refs_p (x)
2189 rtx x;
2191 RTX_CODE code;
2193 code = GET_CODE (x);
2194 switch (code)
2196 case LABEL_REF:
2197 case SYMBOL_REF:
2198 case CONST_INT:
2199 case CONST:
2200 case CONST_DOUBLE:
2201 case CONST_VECTOR:
2202 case CC0:
2203 case PC:
2204 case REG:
2205 case SCRATCH:
2206 case CLOBBER:
2207 case ASM_INPUT:
2208 case ADDR_VEC:
2209 case ADDR_DIFF_VEC:
2210 return 0;
2212 case UNSPEC_VOLATILE:
2213 return 1;
2215 case MEM:
2216 case ASM_OPERANDS:
2217 if (MEM_VOLATILE_P (x))
2218 return 1;
2220 default:
2221 break;
2224 /* Recursively scan the operands of this expression. */
2227 const char *fmt = GET_RTX_FORMAT (code);
2228 int i;
2230 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2232 if (fmt[i] == 'e')
2234 if (volatile_refs_p (XEXP (x, i)))
2235 return 1;
2237 else if (fmt[i] == 'E')
2239 int j;
2240 for (j = 0; j < XVECLEN (x, i); j++)
2241 if (volatile_refs_p (XVECEXP (x, i, j)))
2242 return 1;
2246 return 0;
2249 /* Similar to above, except that it also rejects register pre- and post-
2250 incrementing. */
2253 side_effects_p (x)
2254 rtx x;
2256 RTX_CODE code;
2258 code = GET_CODE (x);
2259 switch (code)
2261 case LABEL_REF:
2262 case SYMBOL_REF:
2263 case CONST_INT:
2264 case CONST:
2265 case CONST_DOUBLE:
2266 case CONST_VECTOR:
2267 case CC0:
2268 case PC:
2269 case REG:
2270 case SCRATCH:
2271 case ASM_INPUT:
2272 case ADDR_VEC:
2273 case ADDR_DIFF_VEC:
2274 return 0;
2276 case CLOBBER:
2277 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2278 when some combination can't be done. If we see one, don't think
2279 that we can simplify the expression. */
2280 return (GET_MODE (x) != VOIDmode);
2282 case PRE_INC:
2283 case PRE_DEC:
2284 case POST_INC:
2285 case POST_DEC:
2286 case PRE_MODIFY:
2287 case POST_MODIFY:
2288 case CALL:
2289 case UNSPEC_VOLATILE:
2290 /* case TRAP_IF: This isn't clear yet. */
2291 return 1;
2293 case MEM:
2294 case ASM_OPERANDS:
2295 if (MEM_VOLATILE_P (x))
2296 return 1;
2298 default:
2299 break;
2302 /* Recursively scan the operands of this expression. */
2305 const char *fmt = GET_RTX_FORMAT (code);
2306 int i;
2308 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2310 if (fmt[i] == 'e')
2312 if (side_effects_p (XEXP (x, i)))
2313 return 1;
2315 else if (fmt[i] == 'E')
2317 int j;
2318 for (j = 0; j < XVECLEN (x, i); j++)
2319 if (side_effects_p (XVECEXP (x, i, j)))
2320 return 1;
2324 return 0;
2327 /* Return nonzero if evaluating rtx X might cause a trap. */
2330 may_trap_p (x)
2331 rtx x;
2333 int i;
2334 enum rtx_code code;
2335 const char *fmt;
2337 if (x == 0)
2338 return 0;
2339 code = GET_CODE (x);
2340 switch (code)
2342 /* Handle these cases quickly. */
2343 case CONST_INT:
2344 case CONST_DOUBLE:
2345 case CONST_VECTOR:
2346 case SYMBOL_REF:
2347 case LABEL_REF:
2348 case CONST:
2349 case PC:
2350 case CC0:
2351 case REG:
2352 case SCRATCH:
2353 return 0;
2355 case ASM_INPUT:
2356 case UNSPEC_VOLATILE:
2357 case TRAP_IF:
2358 return 1;
2360 case ASM_OPERANDS:
2361 return MEM_VOLATILE_P (x);
2363 /* Memory ref can trap unless it's a static var or a stack slot. */
2364 case MEM:
2365 return rtx_addr_can_trap_p (XEXP (x, 0));
2367 /* Division by a non-constant might trap. */
2368 case DIV:
2369 case MOD:
2370 case UDIV:
2371 case UMOD:
2372 if (! CONSTANT_P (XEXP (x, 1))
2373 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2374 && flag_trapping_math))
2375 return 1;
2376 /* This was const0_rtx, but by not using that,
2377 we can link this file into other programs. */
2378 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2379 return 1;
2380 break;
2382 case EXPR_LIST:
2383 /* An EXPR_LIST is used to represent a function call. This
2384 certainly may trap. */
2385 return 1;
2387 case GE:
2388 case GT:
2389 case LE:
2390 case LT:
2391 case COMPARE:
2392 /* Some floating point comparisons may trap. */
2393 if (!flag_trapping_math)
2394 break;
2395 /* ??? There is no machine independent way to check for tests that trap
2396 when COMPARE is used, though many targets do make this distinction.
2397 For instance, sparc uses CCFPE for compares which generate exceptions
2398 and CCFP for compares which do not generate exceptions. */
2399 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT)
2400 return 1;
2401 /* But often the compare has some CC mode, so check operand
2402 modes as well. */
2403 if (GET_MODE_CLASS (GET_MODE (XEXP (x, 0))) == MODE_FLOAT
2404 || GET_MODE_CLASS (GET_MODE (XEXP (x, 1))) == MODE_FLOAT)
2405 return 1;
2406 break;
2408 case NEG:
2409 case ABS:
2410 /* These operations don't trap even with floating point. */
2411 break;
2413 default:
2414 /* Any floating arithmetic may trap. */
2415 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2416 && flag_trapping_math)
2417 return 1;
2420 fmt = GET_RTX_FORMAT (code);
2421 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2423 if (fmt[i] == 'e')
2425 if (may_trap_p (XEXP (x, i)))
2426 return 1;
2428 else if (fmt[i] == 'E')
2430 int j;
2431 for (j = 0; j < XVECLEN (x, i); j++)
2432 if (may_trap_p (XVECEXP (x, i, j)))
2433 return 1;
2436 return 0;
2439 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2440 i.e., an inequality. */
2443 inequality_comparisons_p (x)
2444 rtx x;
2446 const char *fmt;
2447 int len, i;
2448 enum rtx_code code = GET_CODE (x);
2450 switch (code)
2452 case REG:
2453 case SCRATCH:
2454 case PC:
2455 case CC0:
2456 case CONST_INT:
2457 case CONST_DOUBLE:
2458 case CONST_VECTOR:
2459 case CONST:
2460 case LABEL_REF:
2461 case SYMBOL_REF:
2462 return 0;
2464 case LT:
2465 case LTU:
2466 case GT:
2467 case GTU:
2468 case LE:
2469 case LEU:
2470 case GE:
2471 case GEU:
2472 return 1;
2474 default:
2475 break;
2478 len = GET_RTX_LENGTH (code);
2479 fmt = GET_RTX_FORMAT (code);
2481 for (i = 0; i < len; i++)
2483 if (fmt[i] == 'e')
2485 if (inequality_comparisons_p (XEXP (x, i)))
2486 return 1;
2488 else if (fmt[i] == 'E')
2490 int j;
2491 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2492 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2493 return 1;
2497 return 0;
2500 /* Replace any occurrence of FROM in X with TO. The function does
2501 not enter into CONST_DOUBLE for the replace.
2503 Note that copying is not done so X must not be shared unless all copies
2504 are to be modified. */
2507 replace_rtx (x, from, to)
2508 rtx x, from, to;
2510 int i, j;
2511 const char *fmt;
2513 /* The following prevents loops occurrence when we change MEM in
2514 CONST_DOUBLE onto the same CONST_DOUBLE. */
2515 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2516 return x;
2518 if (x == from)
2519 return to;
2521 /* Allow this function to make replacements in EXPR_LISTs. */
2522 if (x == 0)
2523 return 0;
2525 if (GET_CODE (x) == SUBREG)
2527 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2529 if (GET_CODE (new) == CONST_INT)
2531 x = simplify_subreg (GET_MODE (x), new,
2532 GET_MODE (SUBREG_REG (x)),
2533 SUBREG_BYTE (x));
2534 if (! x)
2535 abort ();
2537 else
2538 SUBREG_REG (x) = new;
2540 return x;
2542 else if (GET_CODE (x) == ZERO_EXTEND)
2544 rtx new = replace_rtx (XEXP (x, 0), from, to);
2546 if (GET_CODE (new) == CONST_INT)
2548 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2549 new, GET_MODE (XEXP (x, 0)));
2550 if (! x)
2551 abort ();
2553 else
2554 XEXP (x, 0) = new;
2556 return x;
2559 fmt = GET_RTX_FORMAT (GET_CODE (x));
2560 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2562 if (fmt[i] == 'e')
2563 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2564 else if (fmt[i] == 'E')
2565 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2566 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2569 return x;
2572 /* Throughout the rtx X, replace many registers according to REG_MAP.
2573 Return the replacement for X (which may be X with altered contents).
2574 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2575 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2577 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2578 should not be mapped to pseudos or vice versa since validate_change
2579 is not called.
2581 If REPLACE_DEST is 1, replacements are also done in destinations;
2582 otherwise, only sources are replaced. */
2585 replace_regs (x, reg_map, nregs, replace_dest)
2586 rtx x;
2587 rtx *reg_map;
2588 unsigned int nregs;
2589 int replace_dest;
2591 enum rtx_code code;
2592 int i;
2593 const char *fmt;
2595 if (x == 0)
2596 return x;
2598 code = GET_CODE (x);
2599 switch (code)
2601 case SCRATCH:
2602 case PC:
2603 case CC0:
2604 case CONST_INT:
2605 case CONST_DOUBLE:
2606 case CONST_VECTOR:
2607 case CONST:
2608 case SYMBOL_REF:
2609 case LABEL_REF:
2610 return x;
2612 case REG:
2613 /* Verify that the register has an entry before trying to access it. */
2614 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2616 /* SUBREGs can't be shared. Always return a copy to ensure that if
2617 this replacement occurs more than once then each instance will
2618 get distinct rtx. */
2619 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2620 return copy_rtx (reg_map[REGNO (x)]);
2621 return reg_map[REGNO (x)];
2623 return x;
2625 case SUBREG:
2626 /* Prevent making nested SUBREGs. */
2627 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2628 && reg_map[REGNO (SUBREG_REG (x))] != 0
2629 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2631 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2632 return simplify_gen_subreg (GET_MODE (x), map_val,
2633 GET_MODE (SUBREG_REG (x)),
2634 SUBREG_BYTE (x));
2636 break;
2638 case SET:
2639 if (replace_dest)
2640 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2642 else if (GET_CODE (SET_DEST (x)) == MEM
2643 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2644 /* Even if we are not to replace destinations, replace register if it
2645 is CONTAINED in destination (destination is memory or
2646 STRICT_LOW_PART). */
2647 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2648 reg_map, nregs, 0);
2649 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2650 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2651 break;
2653 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2654 return x;
2656 default:
2657 break;
2660 fmt = GET_RTX_FORMAT (code);
2661 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2663 if (fmt[i] == 'e')
2664 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2665 else if (fmt[i] == 'E')
2667 int j;
2668 for (j = 0; j < XVECLEN (x, i); j++)
2669 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2670 nregs, replace_dest);
2673 return x;
2676 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2677 constant that is not in the constant pool and not in the condition
2678 of an IF_THEN_ELSE. */
2680 static int
2681 computed_jump_p_1 (x)
2682 rtx x;
2684 enum rtx_code code = GET_CODE (x);
2685 int i, j;
2686 const char *fmt;
2688 switch (code)
2690 case LABEL_REF:
2691 case PC:
2692 return 0;
2694 case CONST:
2695 case CONST_INT:
2696 case CONST_DOUBLE:
2697 case CONST_VECTOR:
2698 case SYMBOL_REF:
2699 case REG:
2700 return 1;
2702 case MEM:
2703 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2704 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2706 case IF_THEN_ELSE:
2707 return (computed_jump_p_1 (XEXP (x, 1))
2708 || computed_jump_p_1 (XEXP (x, 2)));
2710 default:
2711 break;
2714 fmt = GET_RTX_FORMAT (code);
2715 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2717 if (fmt[i] == 'e'
2718 && computed_jump_p_1 (XEXP (x, i)))
2719 return 1;
2721 else if (fmt[i] == 'E')
2722 for (j = 0; j < XVECLEN (x, i); j++)
2723 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2724 return 1;
2727 return 0;
2730 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2732 Tablejumps and casesi insns are not considered indirect jumps;
2733 we can recognize them by a (use (label_ref)). */
2736 computed_jump_p (insn)
2737 rtx insn;
2739 int i;
2740 if (GET_CODE (insn) == JUMP_INSN)
2742 rtx pat = PATTERN (insn);
2744 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2745 return 0;
2746 else if (GET_CODE (pat) == PARALLEL)
2748 int len = XVECLEN (pat, 0);
2749 int has_use_labelref = 0;
2751 for (i = len - 1; i >= 0; i--)
2752 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2753 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2754 == LABEL_REF))
2755 has_use_labelref = 1;
2757 if (! has_use_labelref)
2758 for (i = len - 1; i >= 0; i--)
2759 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2760 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2761 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2762 return 1;
2764 else if (GET_CODE (pat) == SET
2765 && SET_DEST (pat) == pc_rtx
2766 && computed_jump_p_1 (SET_SRC (pat)))
2767 return 1;
2769 return 0;
2772 /* Traverse X via depth-first search, calling F for each
2773 sub-expression (including X itself). F is also passed the DATA.
2774 If F returns -1, do not traverse sub-expressions, but continue
2775 traversing the rest of the tree. If F ever returns any other
2776 non-zero value, stop the traversal, and return the value returned
2777 by F. Otherwise, return 0. This function does not traverse inside
2778 tree structure that contains RTX_EXPRs, or into sub-expressions
2779 whose format code is `0' since it is not known whether or not those
2780 codes are actually RTL.
2782 This routine is very general, and could (should?) be used to
2783 implement many of the other routines in this file. */
2786 for_each_rtx (x, f, data)
2787 rtx *x;
2788 rtx_function f;
2789 void *data;
2791 int result;
2792 int length;
2793 const char *format;
2794 int i;
2796 /* Call F on X. */
2797 result = (*f) (x, data);
2798 if (result == -1)
2799 /* Do not traverse sub-expressions. */
2800 return 0;
2801 else if (result != 0)
2802 /* Stop the traversal. */
2803 return result;
2805 if (*x == NULL_RTX)
2806 /* There are no sub-expressions. */
2807 return 0;
2809 length = GET_RTX_LENGTH (GET_CODE (*x));
2810 format = GET_RTX_FORMAT (GET_CODE (*x));
2812 for (i = 0; i < length; ++i)
2814 switch (format[i])
2816 case 'e':
2817 result = for_each_rtx (&XEXP (*x, i), f, data);
2818 if (result != 0)
2819 return result;
2820 break;
2822 case 'V':
2823 case 'E':
2824 if (XVEC (*x, i) != 0)
2826 int j;
2827 for (j = 0; j < XVECLEN (*x, i); ++j)
2829 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2830 if (result != 0)
2831 return result;
2834 break;
2836 default:
2837 /* Nothing to do. */
2838 break;
2843 return 0;
2846 /* Searches X for any reference to REGNO, returning the rtx of the
2847 reference found if any. Otherwise, returns NULL_RTX. */
2850 regno_use_in (regno, x)
2851 unsigned int regno;
2852 rtx x;
2854 const char *fmt;
2855 int i, j;
2856 rtx tem;
2858 if (GET_CODE (x) == REG && REGNO (x) == regno)
2859 return x;
2861 fmt = GET_RTX_FORMAT (GET_CODE (x));
2862 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2864 if (fmt[i] == 'e')
2866 if ((tem = regno_use_in (regno, XEXP (x, i))))
2867 return tem;
2869 else if (fmt[i] == 'E')
2870 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2871 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2872 return tem;
2875 return NULL_RTX;
2878 /* Return a value indicating whether OP, an operand of a commutative
2879 operation, is preferred as the first or second operand. The higher
2880 the value, the stronger the preference for being the first operand.
2881 We use negative values to indicate a preference for the first operand
2882 and positive values for the second operand. */
2885 commutative_operand_precedence (op)
2886 rtx op;
2888 /* Constants always come the second operand. Prefer "nice" constants. */
2889 if (GET_CODE (op) == CONST_INT)
2890 return -5;
2891 if (GET_CODE (op) == CONST_DOUBLE)
2892 return -4;
2893 if (CONSTANT_P (op))
2894 return -3;
2896 /* SUBREGs of objects should come second. */
2897 if (GET_CODE (op) == SUBREG
2898 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
2899 return -2;
2901 /* If only one operand is a `neg', `not',
2902 `mult', `plus', or `minus' expression, it will be the first
2903 operand. */
2904 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
2905 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
2906 || GET_CODE (op) == MINUS)
2907 return 2;
2909 /* Complex expressions should be the first, so decrease priority
2910 of objects. */
2911 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
2912 return -1;
2913 return 0;
2916 /* Return 1 iff it is necessary to swap operands of commutative operation
2917 in order to canonicalize expression. */
2920 swap_commutative_operands_p (x, y)
2921 rtx x, y;
2923 return (commutative_operand_precedence (x)
2924 < commutative_operand_precedence (y));
2927 /* Return 1 if X is an autoincrement side effect and the register is
2928 not the stack pointer. */
2930 auto_inc_p (x)
2931 rtx x;
2933 switch (GET_CODE (x))
2935 case PRE_INC:
2936 case POST_INC:
2937 case PRE_DEC:
2938 case POST_DEC:
2939 case PRE_MODIFY:
2940 case POST_MODIFY:
2941 /* There are no REG_INC notes for SP. */
2942 if (XEXP (x, 0) != stack_pointer_rtx)
2943 return 1;
2944 default:
2945 break;
2947 return 0;
2950 /* Return 1 if the sequence of instructions beginning with FROM and up
2951 to and including TO is safe to move. If NEW_TO is non-NULL, and
2952 the sequence is not already safe to move, but can be easily
2953 extended to a sequence which is safe, then NEW_TO will point to the
2954 end of the extended sequence.
2956 For now, this function only checks that the region contains whole
2957 exception regions, but it could be extended to check additional
2958 conditions as well. */
2961 insns_safe_to_move_p (from, to, new_to)
2962 rtx from;
2963 rtx to;
2964 rtx *new_to;
2966 int eh_region_count = 0;
2967 int past_to_p = 0;
2968 rtx r = from;
2970 /* By default, assume the end of the region will be what was
2971 suggested. */
2972 if (new_to)
2973 *new_to = to;
2975 while (r)
2977 if (GET_CODE (r) == NOTE)
2979 switch (NOTE_LINE_NUMBER (r))
2981 case NOTE_INSN_EH_REGION_BEG:
2982 ++eh_region_count;
2983 break;
2985 case NOTE_INSN_EH_REGION_END:
2986 if (eh_region_count == 0)
2987 /* This sequence of instructions contains the end of
2988 an exception region, but not he beginning. Moving
2989 it will cause chaos. */
2990 return 0;
2992 --eh_region_count;
2993 break;
2995 default:
2996 break;
2999 else if (past_to_p)
3000 /* If we've passed TO, and we see a non-note instruction, we
3001 can't extend the sequence to a movable sequence. */
3002 return 0;
3004 if (r == to)
3006 if (!new_to)
3007 /* It's OK to move the sequence if there were matched sets of
3008 exception region notes. */
3009 return eh_region_count == 0;
3011 past_to_p = 1;
3014 /* It's OK to move the sequence if there were matched sets of
3015 exception region notes. */
3016 if (past_to_p && eh_region_count == 0)
3018 *new_to = r;
3019 return 1;
3022 /* Go to the next instruction. */
3023 r = NEXT_INSN (r);
3026 return 0;
3029 /* Return non-zero if IN contains a piece of rtl that has the address LOC */
3031 loc_mentioned_in_p (loc, in)
3032 rtx *loc, in;
3034 enum rtx_code code = GET_CODE (in);
3035 const char *fmt = GET_RTX_FORMAT (code);
3036 int i, j;
3038 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3040 if (loc == &in->fld[i].rtx)
3041 return 1;
3042 if (fmt[i] == 'e')
3044 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3045 return 1;
3047 else if (fmt[i] == 'E')
3048 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3049 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3050 return 1;
3052 return 0;
3055 /* Given a subreg X, return the bit offset where the subreg begins
3056 (counting from the least significant bit of the reg). */
3058 unsigned int
3059 subreg_lsb (x)
3060 rtx x;
3062 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3063 enum machine_mode mode = GET_MODE (x);
3064 unsigned int bitpos;
3065 unsigned int byte;
3066 unsigned int word;
3068 /* A paradoxical subreg begins at bit position 0. */
3069 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3070 return 0;
3072 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3073 /* If the subreg crosses a word boundary ensure that
3074 it also begins and ends on a word boundary. */
3075 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3076 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3077 && (SUBREG_BYTE (x) % UNITS_PER_WORD
3078 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3079 abort ();
3081 if (WORDS_BIG_ENDIAN)
3082 word = (GET_MODE_SIZE (inner_mode)
3083 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3084 else
3085 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3086 bitpos = word * BITS_PER_WORD;
3088 if (BYTES_BIG_ENDIAN)
3089 byte = (GET_MODE_SIZE (inner_mode)
3090 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3091 else
3092 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3093 bitpos += byte * BITS_PER_UNIT;
3095 return bitpos;
3098 /* This function returns the regno offset of a subreg expression.
3099 xregno - A regno of an inner hard subreg_reg (or what will become one).
3100 xmode - The mode of xregno.
3101 offset - The byte offset.
3102 ymode - The mode of a top level SUBREG (or what may become one).
3103 RETURN - The regno offset which would be used. */
3104 unsigned int
3105 subreg_regno_offset (xregno, xmode, offset, ymode)
3106 unsigned int xregno;
3107 enum machine_mode xmode;
3108 unsigned int offset;
3109 enum machine_mode ymode;
3111 int nregs_xmode, nregs_ymode;
3112 int mode_multiple, nregs_multiple;
3113 int y_offset;
3115 if (xregno >= FIRST_PSEUDO_REGISTER)
3116 abort ();
3118 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3119 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3120 if (offset == 0 || nregs_xmode == nregs_ymode)
3121 return 0;
3123 /* size of ymode must not be greater than the size of xmode. */
3124 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3125 if (mode_multiple == 0)
3126 abort ();
3128 y_offset = offset / GET_MODE_SIZE (ymode);
3129 nregs_multiple = nregs_xmode / nregs_ymode;
3130 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3133 /* Return the final regno that a subreg expression refers to. */
3134 unsigned int
3135 subreg_regno (x)
3136 rtx x;
3138 unsigned int ret;
3139 rtx subreg = SUBREG_REG (x);
3140 int regno = REGNO (subreg);
3142 ret = regno + subreg_regno_offset (regno,
3143 GET_MODE (subreg),
3144 SUBREG_BYTE (x),
3145 GET_MODE (x));
3146 return ret;
3149 struct parms_set_data
3151 int nregs;
3152 HARD_REG_SET regs;
3155 /* Helper function for noticing stores to parameter registers. */
3156 static void
3157 parms_set (x, pat, data)
3158 rtx x, pat ATTRIBUTE_UNUSED;
3159 void *data;
3161 struct parms_set_data *d = data;
3162 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3163 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3165 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3166 d->nregs--;
3170 /* Look backward for first parameter to be loaded.
3171 Do not skip BOUNDARY. */
3173 find_first_parameter_load (call_insn, boundary)
3174 rtx call_insn, boundary;
3176 struct parms_set_data parm;
3177 rtx p, before;
3179 /* Since different machines initialize their parameter registers
3180 in different orders, assume nothing. Collect the set of all
3181 parameter registers. */
3182 CLEAR_HARD_REG_SET (parm.regs);
3183 parm.nregs = 0;
3184 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3185 if (GET_CODE (XEXP (p, 0)) == USE
3186 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3188 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3189 abort ();
3191 /* We only care about registers which can hold function
3192 arguments. */
3193 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3194 continue;
3196 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3197 parm.nregs++;
3199 before = call_insn;
3201 /* Search backward for the first set of a register in this set. */
3202 while (parm.nregs && before != boundary)
3204 before = PREV_INSN (before);
3206 /* It is possible that some loads got CSEed from one call to
3207 another. Stop in that case. */
3208 if (GET_CODE (before) == CALL_INSN)
3209 break;
3211 /* Our caller needs either ensure that we will find all sets
3212 (in case code has not been optimized yet), or take care
3213 for possible labels in a way by setting boundary to preceding
3214 CODE_LABEL. */
3215 if (GET_CODE (before) == CODE_LABEL)
3217 if (before != boundary)
3218 abort ();
3219 break;
3222 if (INSN_P (before))
3223 note_stores (PATTERN (before), parms_set, &parm);
3225 return before;
3228 /* Return true if we should avoid inserting code between INSN and preceeding
3229 call instruction. */
3231 bool
3232 keep_with_call_p (insn)
3233 rtx insn;
3235 rtx set;
3237 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3239 if (GET_CODE (SET_DEST (set)) == REG
3240 && fixed_regs[REGNO (SET_DEST (set))]
3241 && general_operand (SET_SRC (set), VOIDmode))
3242 return true;
3243 if (GET_CODE (SET_SRC (set)) == REG
3244 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3245 && GET_CODE (SET_DEST (set)) == REG
3246 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3247 return true;
3248 /* There may be a stack pop just after the call and before the store
3249 of the return register. Search for the actual store when deciding
3250 if we can break or not. */
3251 if (SET_DEST (set) == stack_pointer_rtx)
3253 rtx i2 = next_nonnote_insn (insn);
3254 if (i2 && keep_with_call_p (i2))
3255 return true;
3258 return false;
3261 /* Return true when store to register X can be hoisted to the place
3262 with LIVE registers (can be NULL). Value VAL contains destination
3263 whose value will be used. */
3265 static bool
3266 hoist_test_store (x, val, live)
3267 rtx x, val;
3268 regset live;
3270 if (GET_CODE (x) == SCRATCH)
3271 return true;
3273 if (rtx_equal_p (x, val))
3274 return true;
3276 /* Allow subreg of X in case it is not writting just part of multireg pseudo.
3277 Then we would need to update all users to care hoisting the store too.
3278 Caller may represent that by specifying whole subreg as val. */
3280 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3282 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3283 && GET_MODE_BITSIZE (GET_MODE (x)) <
3284 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3285 return false;
3286 return true;
3288 if (GET_CODE (x) == SUBREG)
3289 x = SUBREG_REG (x);
3291 /* Anything except register store is not hoistable. This includes the
3292 partial stores to registers. */
3294 if (!REG_P (x))
3295 return false;
3297 /* Pseudo registers can be allways replaced by another pseudo to avoid
3298 the side effect, for hard register we must ensure that they are dead.
3299 Eventually we may want to add code to try turn pseudos to hards, but it
3300 is unlikely usefull. */
3302 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3304 int regno = REGNO (x);
3305 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3307 if (!live)
3308 return false;
3309 if (REGNO_REG_SET_P (live, regno))
3310 return false;
3311 while (--n > 0)
3312 if (REGNO_REG_SET_P (live, regno + n))
3313 return false;
3315 return true;
3319 /* Return true if INSN can be hoisted to place with LIVE hard registers
3320 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3321 and used by the hoisting pass. */
3323 bool
3324 can_hoist_insn_p (insn, val, live)
3325 rtx insn, val;
3326 regset live;
3328 rtx pat = PATTERN (insn);
3329 int i;
3331 /* It probably does not worth the complexity to handle multiple
3332 set stores. */
3333 if (!single_set (insn))
3334 return false;
3335 /* We can move CALL_INSN, but we need to check that all caller clobbered
3336 regs are dead. */
3337 if (GET_CODE (insn) == CALL_INSN)
3338 return false;
3339 /* In future we will handle hoisting of libcall sequences, but
3340 give up for now. */
3341 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3342 return false;
3343 switch (GET_CODE (pat))
3345 case SET:
3346 if (!hoist_test_store (SET_DEST (pat), val, live))
3347 return false;
3348 break;
3349 case USE:
3350 /* USES do have sick semantics, so do not move them. */
3351 return false;
3352 break;
3353 case CLOBBER:
3354 if (!hoist_test_store (XEXP (pat, 0), val, live))
3355 return false;
3356 break;
3357 case PARALLEL:
3358 for (i = 0; i < XVECLEN (pat, 0); i++)
3360 rtx x = XVECEXP (pat, 0, i);
3361 switch (GET_CODE (x))
3363 case SET:
3364 if (!hoist_test_store (SET_DEST (x), val, live))
3365 return false;
3366 break;
3367 case USE:
3368 /* We need to fix callers to really ensure availability
3369 of all values inisn uses, but for now it is safe to prohibit
3370 hoisting of any insn having such a hiden uses. */
3371 return false;
3372 break;
3373 case CLOBBER:
3374 if (!hoist_test_store (SET_DEST (x), val, live))
3375 return false;
3376 break;
3377 default:
3378 break;
3381 break;
3382 default:
3383 abort ();
3385 return true;
3388 /* Update store after hoisting - replace all stores to pseudo registers
3389 by new ones to avoid clobbering of values except for store to VAL that will
3390 be updated to NEW. */
3392 static void
3393 hoist_update_store (insn, xp, val, new)
3394 rtx insn, *xp, val, new;
3396 rtx x = *xp;
3398 if (GET_CODE (x) == SCRATCH)
3399 return;
3401 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3402 validate_change (insn, xp,
3403 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3404 SUBREG_BYTE (x)), 1);
3405 if (rtx_equal_p (x, val))
3407 validate_change (insn, xp, new, 1);
3408 return;
3410 if (GET_CODE (x) == SUBREG)
3412 xp = &SUBREG_REG (x);
3413 x = *xp;
3416 if (!REG_P (x))
3417 abort ();
3419 /* We've verified that hard registers are dead, so we may keep the side
3420 effect. Otherwise replace it by new pseudo. */
3421 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3422 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3423 REG_NOTES (insn)
3424 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3427 /* Create a copy of INSN after AFTER replacing store of VAL to NEW
3428 and each other side effect to pseudo register by new pseudo register. */
3431 hoist_insn_after (insn, after, val, new)
3432 rtx insn, after, val, new;
3434 rtx pat;
3435 int i;
3436 rtx note;
3438 insn = emit_copy_of_insn_after (insn, after);
3439 pat = PATTERN (insn);
3441 /* Remove REG_UNUSED notes as we will re-emit them. */
3442 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3443 remove_note (insn, note);
3445 /* To get this working callers must ensure to move everything referenced
3446 by REG_EQUAL/REG_EQUIV notes too. Lets remove them, it is probably
3447 easier. */
3448 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3449 remove_note (insn, note);
3450 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3451 remove_note (insn, note);
3453 /* Remove REG_DEAD notes as they might not be valid anymore in case
3454 we create redundancy. */
3455 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3456 remove_note (insn, note);
3457 switch (GET_CODE (pat))
3459 case SET:
3460 hoist_update_store (insn, &SET_DEST (pat), val, new);
3461 break;
3462 case USE:
3463 break;
3464 case CLOBBER:
3465 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3466 break;
3467 case PARALLEL:
3468 for (i = 0; i < XVECLEN (pat, 0); i++)
3470 rtx x = XVECEXP (pat, 0, i);
3471 switch (GET_CODE (x))
3473 case SET:
3474 hoist_update_store (insn, &SET_DEST (x), val, new);
3475 break;
3476 case USE:
3477 break;
3478 case CLOBBER:
3479 hoist_update_store (insn, &SET_DEST (x), val, new);
3480 break;
3481 default:
3482 break;
3485 break;
3486 default:
3487 abort ();
3489 if (!apply_change_group ())
3490 abort ();
3492 return insn;
3496 hoist_insn_to_edge (insn, e, val, new)
3497 rtx insn, val, new;
3498 edge e;
3500 rtx new_insn;
3502 /* We cannot insert instructions on an abnormal critical edge.
3503 It will be easier to find the culprit if we die now. */
3504 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3505 abort ();
3507 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3508 stuff. We also emit CALL_INSNS and firends. */
3509 if (e->insns == NULL_RTX)
3511 start_sequence ();
3512 emit_note (NULL, NOTE_INSN_DELETED);
3514 else
3515 push_to_sequence (e->insns);
3517 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3519 e->insns = get_insns ();
3520 end_sequence ();
3521 return new_insn;