/* Analyze RTL for C-Compiler
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 2, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to the Free
Software Foundation, 59 Temple Place - Suite 330, Boston, MA
02111-1307, USA.  */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "tm_p.h"
33 #include "flags.h"
34 #include "basic-block.h"
35 #include "real.h"
36 #include "regs.h"
38 /* Forward declarations */
39 static int global_reg_mentioned_p_1 (rtx *, void *);
40 static void set_of_1 (rtx, rtx, void *);
41 static void insn_dependent_p_1 (rtx, rtx, void *);
42 static int rtx_referenced_p_1 (rtx *, void *);
43 static int computed_jump_p_1 (rtx);
44 static void parms_set (rtx, rtx, void *);
45 static bool hoist_test_store (rtx, rtx, regset);
46 static void hoist_update_store (rtx, rtx *, rtx, rtx);
48 /* Bit flags that specify the machine subtype we are compiling for.
49 Bits are tested using macros TARGET_... defined in the tm.h file
50 and set by `-m...' switches. Must be defined in rtlanal.c. */
52 int target_flags;
54 /* Return 1 if the value of X is unstable
55 (would be different at a different point in the program).
56 The frame pointer, arg pointer, etc. are considered stable
57 (within one function) and so is anything marked `unchanging'. */
59 int
60 rtx_unstable_p (rtx x)
62 RTX_CODE code = GET_CODE (x);
63 int i;
64 const char *fmt;
66 switch (code)
68 case MEM:
69 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
71 case QUEUED:
72 return 1;
74 case ADDRESSOF:
75 case CONST:
76 case CONST_INT:
77 case CONST_DOUBLE:
78 case CONST_VECTOR:
79 case SYMBOL_REF:
80 case LABEL_REF:
81 return 0;
83 case REG:
84 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
85 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
86 /* The arg pointer varies if it is not a fixed register. */
87 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
88 || RTX_UNCHANGING_P (x))
89 return 0;
90 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
91 /* ??? When call-clobbered, the value is stable modulo the restore
92 that must happen after a call. This currently screws up local-alloc
93 into believing that the restore is not needed. */
94 if (x == pic_offset_table_rtx)
95 return 0;
96 #endif
97 return 1;
99 case ASM_OPERANDS:
100 if (MEM_VOLATILE_P (x))
101 return 1;
103 /* Fall through. */
105 default:
106 break;
109 fmt = GET_RTX_FORMAT (code);
110 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
111 if (fmt[i] == 'e')
113 if (rtx_unstable_p (XEXP (x, i)))
114 return 1;
116 else if (fmt[i] == 'E')
118 int j;
119 for (j = 0; j < XVECLEN (x, i); j++)
120 if (rtx_unstable_p (XVECEXP (x, i, j)))
121 return 1;
124 return 0;
127 /* Return 1 if X has a value that can vary even between two
128 executions of the program. 0 means X can be compared reliably
129 against certain constants or near-constants.
130 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
131 zero, we are slightly more conservative.
132 The frame pointer and the arg pointer are considered constant. */
135 rtx_varies_p (rtx x, int for_alias)
137 RTX_CODE code = GET_CODE (x);
138 int i;
139 const char *fmt;
141 switch (code)
143 case MEM:
144 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
146 case QUEUED:
147 return 1;
149 case CONST:
150 case CONST_INT:
151 case CONST_DOUBLE:
152 case CONST_VECTOR:
153 case SYMBOL_REF:
154 case LABEL_REF:
155 return 0;
157 case ADDRESSOF:
158 /* This will resolve to some offset from the frame pointer. */
159 return 0;
161 case REG:
162 /* Note that we have to test for the actual rtx used for the frame
163 and arg pointers and not just the register number in case we have
164 eliminated the frame and/or arg pointer and are using it
165 for pseudos. */
166 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
167 /* The arg pointer varies if it is not a fixed register. */
168 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
169 return 0;
170 if (x == pic_offset_table_rtx
171 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
172 /* ??? When call-clobbered, the value is stable modulo the restore
173 that must happen after a call. This currently screws up
174 local-alloc into believing that the restore is not needed, so we
175 must return 0 only if we are called from alias analysis. */
176 && for_alias
177 #endif
179 return 0;
180 return 1;
182 case LO_SUM:
183 /* The operand 0 of a LO_SUM is considered constant
184 (in fact it is related specifically to operand 1)
185 during alias analysis. */
186 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
187 || rtx_varies_p (XEXP (x, 1), for_alias);
189 case ASM_OPERANDS:
190 if (MEM_VOLATILE_P (x))
191 return 1;
193 /* Fall through. */
195 default:
196 break;
199 fmt = GET_RTX_FORMAT (code);
200 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
201 if (fmt[i] == 'e')
203 if (rtx_varies_p (XEXP (x, i), for_alias))
204 return 1;
206 else if (fmt[i] == 'E')
208 int j;
209 for (j = 0; j < XVECLEN (x, i); j++)
210 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
211 return 1;
214 return 0;
217 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
220 rtx_addr_can_trap_p (rtx x)
222 enum rtx_code code = GET_CODE (x);
224 switch (code)
226 case SYMBOL_REF:
227 return SYMBOL_REF_WEAK (x);
229 case LABEL_REF:
230 return 0;
232 case ADDRESSOF:
233 /* This will resolve to some offset from the frame pointer. */
234 return 0;
236 case REG:
237 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
238 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
239 || x == stack_pointer_rtx
240 /* The arg pointer varies if it is not a fixed register. */
241 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
242 return 0;
243 /* All of the virtual frame registers are stack references. */
244 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
245 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
246 return 0;
247 return 1;
249 case CONST:
250 return rtx_addr_can_trap_p (XEXP (x, 0));
252 case PLUS:
253 /* An address is assumed not to trap if it is an address that can't
254 trap plus a constant integer or it is the pic register plus a
255 constant. */
256 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
257 && GET_CODE (XEXP (x, 1)) == CONST_INT)
258 || (XEXP (x, 0) == pic_offset_table_rtx
259 && CONSTANT_P (XEXP (x, 1))));
261 case LO_SUM:
262 case PRE_MODIFY:
263 return rtx_addr_can_trap_p (XEXP (x, 1));
265 case PRE_DEC:
266 case PRE_INC:
267 case POST_DEC:
268 case POST_INC:
269 case POST_MODIFY:
270 return rtx_addr_can_trap_p (XEXP (x, 0));
272 default:
273 break;
276 /* If it isn't one of the case above, it can cause a trap. */
277 return 1;
280 /* Return true if X is an address that is known to not be zero. */
282 bool
283 nonzero_address_p (rtx x)
285 enum rtx_code code = GET_CODE (x);
287 switch (code)
289 case SYMBOL_REF:
290 return !SYMBOL_REF_WEAK (x);
292 case LABEL_REF:
293 return true;
295 case ADDRESSOF:
296 /* This will resolve to some offset from the frame pointer. */
297 return true;
299 case REG:
300 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
301 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
302 || x == stack_pointer_rtx
303 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
304 return true;
305 /* All of the virtual frame registers are stack references. */
306 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
307 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
308 return true;
309 return false;
311 case CONST:
312 return nonzero_address_p (XEXP (x, 0));
314 case PLUS:
315 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
317 /* Pointers aren't allowed to wrap. If we've got a register
318 that is known to be a pointer, and a positive offset, then
319 the composite can't be zero. */
320 if (INTVAL (XEXP (x, 1)) > 0
321 && REG_P (XEXP (x, 0))
322 && REG_POINTER (XEXP (x, 0)))
323 return true;
325 return nonzero_address_p (XEXP (x, 0));
327 /* Handle PIC references. */
328 else if (XEXP (x, 0) == pic_offset_table_rtx
329 && CONSTANT_P (XEXP (x, 1)))
330 return true;
331 return false;
333 case PRE_MODIFY:
334 /* Similar to the above; allow positive offsets. Further, since
335 auto-inc is only allowed in memories, the register must be a
336 pointer. */
337 if (GET_CODE (XEXP (x, 1)) == CONST_INT
338 && INTVAL (XEXP (x, 1)) > 0)
339 return true;
340 return nonzero_address_p (XEXP (x, 0));
342 case PRE_INC:
343 /* Similarly. Further, the offset is always positive. */
344 return true;
346 case PRE_DEC:
347 case POST_DEC:
348 case POST_INC:
349 case POST_MODIFY:
350 return nonzero_address_p (XEXP (x, 0));
352 case LO_SUM:
353 return nonzero_address_p (XEXP (x, 1));
355 default:
356 break;
359 /* If it isn't one of the case above, might be zero. */
360 return false;
363 /* Return 1 if X refers to a memory location whose address
364 cannot be compared reliably with constant addresses,
365 or if X refers to a BLKmode memory object.
366 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
367 zero, we are slightly more conservative. */
370 rtx_addr_varies_p (rtx x, int for_alias)
372 enum rtx_code code;
373 int i;
374 const char *fmt;
376 if (x == 0)
377 return 0;
379 code = GET_CODE (x);
380 if (code == MEM)
381 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
383 fmt = GET_RTX_FORMAT (code);
384 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
385 if (fmt[i] == 'e')
387 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
388 return 1;
390 else if (fmt[i] == 'E')
392 int j;
393 for (j = 0; j < XVECLEN (x, i); j++)
394 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
395 return 1;
397 return 0;
400 /* Return the value of the integer term in X, if one is apparent;
401 otherwise return 0.
402 Only obvious integer terms are detected.
403 This is used in cse.c with the `related_value' field. */
405 HOST_WIDE_INT
406 get_integer_term (rtx x)
408 if (GET_CODE (x) == CONST)
409 x = XEXP (x, 0);
411 if (GET_CODE (x) == MINUS
412 && GET_CODE (XEXP (x, 1)) == CONST_INT)
413 return - INTVAL (XEXP (x, 1));
414 if (GET_CODE (x) == PLUS
415 && GET_CODE (XEXP (x, 1)) == CONST_INT)
416 return INTVAL (XEXP (x, 1));
417 return 0;
420 /* If X is a constant, return the value sans apparent integer term;
421 otherwise return 0.
422 Only obvious integer terms are detected. */
425 get_related_value (rtx x)
427 if (GET_CODE (x) != CONST)
428 return 0;
429 x = XEXP (x, 0);
430 if (GET_CODE (x) == PLUS
431 && GET_CODE (XEXP (x, 1)) == CONST_INT)
432 return XEXP (x, 0);
433 else if (GET_CODE (x) == MINUS
434 && GET_CODE (XEXP (x, 1)) == CONST_INT)
435 return XEXP (x, 0);
436 return 0;
439 /* Given a tablejump insn INSN, return the RTL expression for the offset
440 into the jump table. If the offset cannot be determined, then return
441 NULL_RTX.
443 If EARLIEST is nonzero, it is a pointer to a place where the earliest
444 insn used in locating the offset was found. */
447 get_jump_table_offset (rtx insn, rtx *earliest)
449 rtx label;
450 rtx table;
451 rtx set;
452 rtx old_insn;
453 rtx x;
454 rtx old_x;
455 rtx y;
456 rtx old_y;
457 int i;
459 if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
460 return NULL_RTX;
462 x = SET_SRC (set);
464 /* Some targets (eg, ARM) emit a tablejump that also
465 contains the out-of-range target. */
466 if (GET_CODE (x) == IF_THEN_ELSE
467 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
468 x = XEXP (x, 1);
470 /* Search backwards and locate the expression stored in X. */
471 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
472 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
475 /* If X is an expression using a relative address then strip
476 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
477 or the jump table label. */
478 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
479 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
481 for (i = 0; i < 2; i++)
483 old_insn = insn;
484 y = XEXP (x, i);
486 if (y == pc_rtx || y == pic_offset_table_rtx)
487 break;
489 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
490 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
493 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
494 break;
497 if (i >= 2)
498 return NULL_RTX;
500 x = XEXP (x, 1 - i);
502 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
503 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
507 /* Strip off any sign or zero extension. */
508 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
510 x = XEXP (x, 0);
512 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
513 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
517 /* If X isn't a MEM then this isn't a tablejump we understand. */
518 if (GET_CODE (x) != MEM)
519 return NULL_RTX;
521 /* Strip off the MEM. */
522 x = XEXP (x, 0);
524 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
525 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
528 /* If X isn't a PLUS than this isn't a tablejump we understand. */
529 if (GET_CODE (x) != PLUS)
530 return NULL_RTX;
532 /* At this point we should have an expression representing the jump table
533 plus an offset. Examine each operand in order to determine which one
534 represents the jump table. Knowing that tells us that the other operand
535 must represent the offset. */
536 for (i = 0; i < 2; i++)
538 old_insn = insn;
539 y = XEXP (x, i);
541 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
542 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
545 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
546 && reg_mentioned_p (label, y))
547 break;
550 if (i >= 2)
551 return NULL_RTX;
553 x = XEXP (x, 1 - i);
555 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
556 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
557 for (i = 0; i < 2; i++)
558 if (XEXP (x, i) == pic_offset_table_rtx)
560 x = XEXP (x, 1 - i);
561 break;
564 if (earliest)
565 *earliest = insn;
567 /* Return the RTL expression representing the offset. */
568 return x;
571 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
572 a global register. */
574 static int
575 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
577 int regno;
578 rtx x = *loc;
580 if (! x)
581 return 0;
583 switch (GET_CODE (x))
585 case SUBREG:
586 if (GET_CODE (SUBREG_REG (x)) == REG)
588 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
589 && global_regs[subreg_regno (x)])
590 return 1;
591 return 0;
593 break;
595 case REG:
596 regno = REGNO (x);
597 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
598 return 1;
599 return 0;
601 case SCRATCH:
602 case PC:
603 case CC0:
604 case CONST_INT:
605 case CONST_DOUBLE:
606 case CONST:
607 case LABEL_REF:
608 return 0;
610 case CALL:
611 /* A non-constant call might use a global register. */
612 return 1;
614 default:
615 break;
618 return 0;
621 /* Returns nonzero if X mentions a global register. */
624 global_reg_mentioned_p (rtx x)
626 if (INSN_P (x))
628 if (GET_CODE (x) == CALL_INSN)
630 if (! CONST_OR_PURE_CALL_P (x))
631 return 1;
632 x = CALL_INSN_FUNCTION_USAGE (x);
633 if (x == 0)
634 return 0;
636 else
637 x = PATTERN (x);
640 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
643 /* Return the number of places FIND appears within X. If COUNT_DEST is
644 zero, we do not count occurrences inside the destination of a SET. */
647 count_occurrences (rtx x, rtx find, int count_dest)
649 int i, j;
650 enum rtx_code code;
651 const char *format_ptr;
652 int count;
654 if (x == find)
655 return 1;
657 code = GET_CODE (x);
659 switch (code)
661 case REG:
662 case CONST_INT:
663 case CONST_DOUBLE:
664 case CONST_VECTOR:
665 case SYMBOL_REF:
666 case CODE_LABEL:
667 case PC:
668 case CC0:
669 return 0;
671 case MEM:
672 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
673 return 1;
674 break;
676 case SET:
677 if (SET_DEST (x) == find && ! count_dest)
678 return count_occurrences (SET_SRC (x), find, count_dest);
679 break;
681 default:
682 break;
685 format_ptr = GET_RTX_FORMAT (code);
686 count = 0;
688 for (i = 0; i < GET_RTX_LENGTH (code); i++)
690 switch (*format_ptr++)
692 case 'e':
693 count += count_occurrences (XEXP (x, i), find, count_dest);
694 break;
696 case 'E':
697 for (j = 0; j < XVECLEN (x, i); j++)
698 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
699 break;
702 return count;
705 /* Nonzero if register REG appears somewhere within IN.
706 Also works if REG is not a register; in this case it checks
707 for a subexpression of IN that is Lisp "equal" to REG. */
710 reg_mentioned_p (rtx reg, rtx in)
712 const char *fmt;
713 int i;
714 enum rtx_code code;
716 if (in == 0)
717 return 0;
719 if (reg == in)
720 return 1;
722 if (GET_CODE (in) == LABEL_REF)
723 return reg == XEXP (in, 0);
725 code = GET_CODE (in);
727 switch (code)
729 /* Compare registers by number. */
730 case REG:
731 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
733 /* These codes have no constituent expressions
734 and are unique. */
735 case SCRATCH:
736 case CC0:
737 case PC:
738 return 0;
740 case CONST_INT:
741 case CONST_VECTOR:
742 case CONST_DOUBLE:
743 /* These are kept unique for a given value. */
744 return 0;
746 default:
747 break;
750 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
751 return 1;
753 fmt = GET_RTX_FORMAT (code);
755 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
757 if (fmt[i] == 'E')
759 int j;
760 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
761 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
762 return 1;
764 else if (fmt[i] == 'e'
765 && reg_mentioned_p (reg, XEXP (in, i)))
766 return 1;
768 return 0;
771 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
772 no CODE_LABEL insn. */
775 no_labels_between_p (rtx beg, rtx end)
777 rtx p;
778 if (beg == end)
779 return 0;
780 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
781 if (GET_CODE (p) == CODE_LABEL)
782 return 0;
783 return 1;
786 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
787 no JUMP_INSN insn. */
790 no_jumps_between_p (rtx beg, rtx end)
792 rtx p;
793 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
794 if (GET_CODE (p) == JUMP_INSN)
795 return 0;
796 return 1;
799 /* Nonzero if register REG is used in an insn between
800 FROM_INSN and TO_INSN (exclusive of those two). */
803 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
805 rtx insn;
807 if (from_insn == to_insn)
808 return 0;
810 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
811 if (INSN_P (insn)
812 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
813 || (GET_CODE (insn) == CALL_INSN
814 && (find_reg_fusage (insn, USE, reg)
815 || find_reg_fusage (insn, CLOBBER, reg)))))
816 return 1;
817 return 0;
820 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
821 is entirely replaced by a new value and the only use is as a SET_DEST,
822 we do not consider it a reference. */
825 reg_referenced_p (rtx x, rtx body)
827 int i;
829 switch (GET_CODE (body))
831 case SET:
832 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
833 return 1;
835 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
836 of a REG that occupies all of the REG, the insn references X if
837 it is mentioned in the destination. */
838 if (GET_CODE (SET_DEST (body)) != CC0
839 && GET_CODE (SET_DEST (body)) != PC
840 && GET_CODE (SET_DEST (body)) != REG
841 && ! (GET_CODE (SET_DEST (body)) == SUBREG
842 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
843 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
844 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
845 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
846 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
847 && reg_overlap_mentioned_p (x, SET_DEST (body)))
848 return 1;
849 return 0;
851 case ASM_OPERANDS:
852 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
853 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
854 return 1;
855 return 0;
857 case CALL:
858 case USE:
859 case IF_THEN_ELSE:
860 return reg_overlap_mentioned_p (x, body);
862 case TRAP_IF:
863 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
865 case PREFETCH:
866 return reg_overlap_mentioned_p (x, XEXP (body, 0));
868 case UNSPEC:
869 case UNSPEC_VOLATILE:
870 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
871 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
872 return 1;
873 return 0;
875 case PARALLEL:
876 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
877 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
878 return 1;
879 return 0;
881 case CLOBBER:
882 if (GET_CODE (XEXP (body, 0)) == MEM)
883 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
884 return 1;
885 return 0;
887 case COND_EXEC:
888 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
889 return 1;
890 return reg_referenced_p (x, COND_EXEC_CODE (body));
892 default:
893 return 0;
897 /* Nonzero if register REG is referenced in an insn between
898 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
899 not count. */
902 reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
904 rtx insn;
906 if (from_insn == to_insn)
907 return 0;
909 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
910 if (INSN_P (insn)
911 && (reg_referenced_p (reg, PATTERN (insn))
912 || (GET_CODE (insn) == CALL_INSN
913 && find_reg_fusage (insn, USE, reg))))
914 return 1;
915 return 0;
918 /* Nonzero if register REG is set or clobbered in an insn between
919 FROM_INSN and TO_INSN (exclusive of those two). */
922 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
924 rtx insn;
926 if (from_insn == to_insn)
927 return 0;
929 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
930 if (INSN_P (insn) && reg_set_p (reg, insn))
931 return 1;
932 return 0;
935 /* Internals of reg_set_between_p. */
937 reg_set_p (rtx reg, rtx insn)
939 /* We can be passed an insn or part of one. If we are passed an insn,
940 check if a side-effect of the insn clobbers REG. */
941 if (INSN_P (insn)
942 && (FIND_REG_INC_NOTE (insn, reg)
943 || (GET_CODE (insn) == CALL_INSN
944 /* We'd like to test call_used_regs here, but rtlanal.c can't
945 reference that variable due to its use in genattrtab. So
946 we'll just be more conservative.
948 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
949 information holds all clobbered registers. */
950 && ((GET_CODE (reg) == REG
951 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
952 || GET_CODE (reg) == MEM
953 || find_reg_fusage (insn, CLOBBER, reg)))))
954 return 1;
956 return set_of (reg, insn) != NULL_RTX;
959 /* Similar to reg_set_between_p, but check all registers in X. Return 0
960 only if none of them are modified between START and END. Do not
961 consider non-registers one way or the other. */
964 regs_set_between_p (rtx x, rtx start, rtx end)
966 enum rtx_code code = GET_CODE (x);
967 const char *fmt;
968 int i, j;
970 switch (code)
972 case CONST_INT:
973 case CONST_DOUBLE:
974 case CONST_VECTOR:
975 case CONST:
976 case SYMBOL_REF:
977 case LABEL_REF:
978 case PC:
979 case CC0:
980 return 0;
982 case REG:
983 return reg_set_between_p (x, start, end);
985 default:
986 break;
989 fmt = GET_RTX_FORMAT (code);
990 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
992 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
993 return 1;
995 else if (fmt[i] == 'E')
996 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
997 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
998 return 1;
1001 return 0;
1004 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1005 only if none of them are modified between START and END. Return 1 if
1006 X contains a MEM; this routine does usememory aliasing. */
1009 modified_between_p (rtx x, rtx start, rtx end)
1011 enum rtx_code code = GET_CODE (x);
1012 const char *fmt;
1013 int i, j;
1014 rtx insn;
1016 if (start == end)
1017 return 0;
1019 switch (code)
1021 case CONST_INT:
1022 case CONST_DOUBLE:
1023 case CONST_VECTOR:
1024 case CONST:
1025 case SYMBOL_REF:
1026 case LABEL_REF:
1027 return 0;
1029 case PC:
1030 case CC0:
1031 return 1;
1033 case MEM:
1034 if (RTX_UNCHANGING_P (x))
1035 return 0;
1036 if (modified_between_p (XEXP (x, 0), start, end))
1037 return 1;
1038 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1039 if (memory_modified_in_insn_p (x, insn))
1040 return 1;
1041 return 0;
1042 break;
1044 case REG:
1045 return reg_set_between_p (x, start, end);
1047 default:
1048 break;
1051 fmt = GET_RTX_FORMAT (code);
1052 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1054 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1055 return 1;
1057 else if (fmt[i] == 'E')
1058 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1059 if (modified_between_p (XVECEXP (x, i, j), start, end))
1060 return 1;
1063 return 0;
1066 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1067 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1068 does use memory aliasing. */
1071 modified_in_p (rtx x, rtx insn)
1073 enum rtx_code code = GET_CODE (x);
1074 const char *fmt;
1075 int i, j;
1077 switch (code)
1079 case CONST_INT:
1080 case CONST_DOUBLE:
1081 case CONST_VECTOR:
1082 case CONST:
1083 case SYMBOL_REF:
1084 case LABEL_REF:
1085 return 0;
1087 case PC:
1088 case CC0:
1089 return 1;
1091 case MEM:
1092 if (RTX_UNCHANGING_P (x))
1093 return 0;
1094 if (modified_in_p (XEXP (x, 0), insn))
1095 return 1;
1096 if (memory_modified_in_insn_p (x, insn))
1097 return 1;
1098 return 0;
1099 break;
1101 case REG:
1102 return reg_set_p (x, insn);
1104 default:
1105 break;
1108 fmt = GET_RTX_FORMAT (code);
1109 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1111 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1112 return 1;
1114 else if (fmt[i] == 'E')
1115 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1116 if (modified_in_p (XVECEXP (x, i, j), insn))
1117 return 1;
1120 return 0;
1123 /* Return true if anything in insn X is (anti,output,true) dependent on
1124 anything in insn Y. */
1127 insn_dependent_p (rtx x, rtx y)
1129 rtx tmp;
1131 if (! INSN_P (x) || ! INSN_P (y))
1132 abort ();
1134 tmp = PATTERN (y);
1135 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1136 if (tmp == NULL_RTX)
1137 return 1;
1139 tmp = PATTERN (x);
1140 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1141 if (tmp == NULL_RTX)
1142 return 1;
1144 return 0;
1147 /* A helper routine for insn_dependent_p called through note_stores. */
1149 static void
1150 insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
1152 rtx * pinsn = (rtx *) data;
1154 if (*pinsn && reg_mentioned_p (x, *pinsn))
1155 *pinsn = NULL_RTX;
1158 /* Helper function for set_of. */
1159 struct set_of_data
1161 rtx found;
1162 rtx pat;
1165 static void
1166 set_of_1 (rtx x, rtx pat, void *data1)
1168 struct set_of_data *data = (struct set_of_data *) (data1);
1169 if (rtx_equal_p (x, data->pat)
1170 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1171 data->found = pat;
1174 /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
1175 (either directly or via STRICT_LOW_PART and similar modifiers). */
1177 set_of (rtx pat, rtx insn)
1179 struct set_of_data data;
1180 data.found = NULL_RTX;
1181 data.pat = pat;
1182 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1183 return data.found;
1186 /* Given an INSN, return a SET expression if this insn has only a single SET.
1187 It may also have CLOBBERs, USEs, or SET whose output
1188 will not be used, which we ignore. */
1191 single_set_2 (rtx insn, rtx pat)
1193 rtx set = NULL;
1194 int set_verified = 1;
1195 int i;
1197 if (GET_CODE (pat) == PARALLEL)
1199 for (i = 0; i < XVECLEN (pat, 0); i++)
1201 rtx sub = XVECEXP (pat, 0, i);
1202 switch (GET_CODE (sub))
1204 case USE:
1205 case CLOBBER:
1206 break;
1208 case SET:
1209 /* We can consider insns having multiple sets, where all
1210 but one are dead as single set insns. In common case
1211 only single set is present in the pattern so we want
1212 to avoid checking for REG_UNUSED notes unless necessary.
1214 When we reach set first time, we just expect this is
1215 the single set we are looking for and only when more
1216 sets are found in the insn, we check them. */
1217 if (!set_verified)
1219 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1220 && !side_effects_p (set))
1221 set = NULL;
1222 else
1223 set_verified = 1;
1225 if (!set)
1226 set = sub, set_verified = 0;
1227 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1228 || side_effects_p (sub))
1229 return NULL_RTX;
1230 break;
1232 default:
1233 return NULL_RTX;
1237 return set;
1240 /* Given an INSN, return nonzero if it has more than one SET, else return
1241 zero. */
1244 multiple_sets (rtx insn)
1246 int found;
1247 int i;
1249 /* INSN must be an insn. */
1250 if (! INSN_P (insn))
1251 return 0;
1253 /* Only a PARALLEL can have multiple SETs. */
1254 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1256 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1257 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1259 /* If we have already found a SET, then return now. */
1260 if (found)
1261 return 1;
1262 else
1263 found = 1;
1267 /* Either zero or one SET. */
1268 return 0;
1271 /* Return nonzero if the destination of SET equals the source
1272 and there are no side effects. */
1275 set_noop_p (rtx set)
1277 rtx src = SET_SRC (set);
1278 rtx dst = SET_DEST (set);
1280 if (dst == pc_rtx && src == pc_rtx)
1281 return 1;
1283 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1284 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1286 if (GET_CODE (dst) == SIGN_EXTRACT
1287 || GET_CODE (dst) == ZERO_EXTRACT)
1288 return rtx_equal_p (XEXP (dst, 0), src)
1289 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1290 && !side_effects_p (src);
1292 if (GET_CODE (dst) == STRICT_LOW_PART)
1293 dst = XEXP (dst, 0);
1295 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1297 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1298 return 0;
1299 src = SUBREG_REG (src);
1300 dst = SUBREG_REG (dst);
1303 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1304 && REGNO (src) == REGNO (dst));
1307 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1308 value to itself. */
1311 noop_move_p (rtx insn)
1313 rtx pat = PATTERN (insn);
1315 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1316 return 1;
1318 /* Insns carrying these notes are useful later on. */
1319 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1320 return 0;
1322 /* For now treat an insn with a REG_RETVAL note as a
1323 a special insn which should not be considered a no-op. */
1324 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1325 return 0;
1327 if (GET_CODE (pat) == SET && set_noop_p (pat))
1328 return 1;
1330 if (GET_CODE (pat) == PARALLEL)
1332 int i;
1333 /* If nothing but SETs of registers to themselves,
1334 this insn can also be deleted. */
1335 for (i = 0; i < XVECLEN (pat, 0); i++)
1337 rtx tem = XVECEXP (pat, 0, i);
1339 if (GET_CODE (tem) == USE
1340 || GET_CODE (tem) == CLOBBER)
1341 continue;
1343 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1344 return 0;
1347 return 1;
1349 return 0;
1353 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1354 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1355 If the object was modified, if we hit a partial assignment to X, or hit a
1356 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1357 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1358 be the src. */
1361 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1363 rtx p;
1365 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1366 p = PREV_INSN (p))
1367 if (INSN_P (p))
1369 rtx set = single_set (p);
1370 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1372 if (set && rtx_equal_p (x, SET_DEST (set)))
1374 rtx src = SET_SRC (set);
1376 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1377 src = XEXP (note, 0);
1379 if ((valid_to == NULL_RTX
1380 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1381 /* Reject hard registers because we don't usually want
1382 to use them; we'd rather use a pseudo. */
1383 && (! (GET_CODE (src) == REG
1384 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1386 *pinsn = p;
1387 return src;
1391 /* If set in non-simple way, we don't have a value. */
1392 if (reg_set_p (x, p))
1393 break;
1396 return x;
1399 /* Return nonzero if register in range [REGNO, ENDREGNO)
1400 appears either explicitly or implicitly in X
1401 other than being stored into.
1403 References contained within the substructure at LOC do not count.
1404 LOC may be zero, meaning don't ignore anything. */
1407 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1408 rtx *loc)
1410 int i;
1411 unsigned int x_regno;
1412 RTX_CODE code;
1413 const char *fmt;
1415 repeat:
1416 /* The contents of a REG_NONNEG note is always zero, so we must come here
1417 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1418 if (x == 0)
1419 return 0;
1421 code = GET_CODE (x);
1423 switch (code)
1425 case REG:
1426 x_regno = REGNO (x);
1428 /* If we modifying the stack, frame, or argument pointer, it will
1429 clobber a virtual register. In fact, we could be more precise,
1430 but it isn't worth it. */
1431 if ((x_regno == STACK_POINTER_REGNUM
1432 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1433 || x_regno == ARG_POINTER_REGNUM
1434 #endif
1435 || x_regno == FRAME_POINTER_REGNUM)
1436 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1437 return 1;
1439 return (endregno > x_regno
1440 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1441 ? hard_regno_nregs[x_regno][GET_MODE (x)]
1442 : 1));
1444 case SUBREG:
1445 /* If this is a SUBREG of a hard reg, we can see exactly which
1446 registers are being modified. Otherwise, handle normally. */
1447 if (GET_CODE (SUBREG_REG (x)) == REG
1448 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1450 unsigned int inner_regno = subreg_regno (x);
1451 unsigned int inner_endregno
1452 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1453 ? hard_regno_nregs[inner_regno][GET_MODE (x)] : 1);
1455 return endregno > inner_regno && regno < inner_endregno;
1457 break;
1459 case CLOBBER:
1460 case SET:
1461 if (&SET_DEST (x) != loc
1462 /* Note setting a SUBREG counts as referring to the REG it is in for
1463 a pseudo but not for hard registers since we can
1464 treat each word individually. */
1465 && ((GET_CODE (SET_DEST (x)) == SUBREG
1466 && loc != &SUBREG_REG (SET_DEST (x))
1467 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1468 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1469 && refers_to_regno_p (regno, endregno,
1470 SUBREG_REG (SET_DEST (x)), loc))
1471 || (GET_CODE (SET_DEST (x)) != REG
1472 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1473 return 1;
1475 if (code == CLOBBER || loc == &SET_SRC (x))
1476 return 0;
1477 x = SET_SRC (x);
1478 goto repeat;
1480 default:
1481 break;
1484 /* X does not match, so try its subexpressions. */
1486 fmt = GET_RTX_FORMAT (code);
1487 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1489 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1491 if (i == 0)
1493 x = XEXP (x, 0);
1494 goto repeat;
1496 else
1497 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1498 return 1;
1500 else if (fmt[i] == 'E')
1502 int j;
1503 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1504 if (loc != &XVECEXP (x, i, j)
1505 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1506 return 1;
1509 return 0;
1512 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1513 we check if any register number in X conflicts with the relevant register
1514 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1515 contains a MEM (we don't bother checking for memory addresses that can't
1516 conflict because we expect this to be a rare case. */
1519 reg_overlap_mentioned_p (rtx x, rtx in)
1521 unsigned int regno, endregno;
1523 /* If either argument is a constant, then modifying X can not
1524 affect IN. Here we look at IN, we can profitably combine
1525 CONSTANT_P (x) with the switch statement below. */
1526 if (CONSTANT_P (in))
1527 return 0;
1529 recurse:
1530 switch (GET_CODE (x))
1532 case STRICT_LOW_PART:
1533 case ZERO_EXTRACT:
1534 case SIGN_EXTRACT:
1535 /* Overly conservative. */
1536 x = XEXP (x, 0);
1537 goto recurse;
1539 case SUBREG:
1540 regno = REGNO (SUBREG_REG (x));
1541 if (regno < FIRST_PSEUDO_REGISTER)
1542 regno = subreg_regno (x);
1543 goto do_reg;
1545 case REG:
1546 regno = REGNO (x);
1547 do_reg:
1548 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1549 ? hard_regno_nregs[regno][GET_MODE (x)] : 1);
1550 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1552 case MEM:
1554 const char *fmt;
1555 int i;
1557 if (GET_CODE (in) == MEM)
1558 return 1;
1560 fmt = GET_RTX_FORMAT (GET_CODE (in));
1561 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1562 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1563 return 1;
1565 return 0;
1568 case SCRATCH:
1569 case PC:
1570 case CC0:
1571 return reg_mentioned_p (x, in);
1573 case PARALLEL:
1575 int i;
1577 /* If any register in here refers to it we return true. */
1578 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1579 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1580 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1581 return 1;
1582 return 0;
1585 default:
1586 #ifdef ENABLE_CHECKING
1587 if (!CONSTANT_P (x))
1588 abort ();
1589 #endif
1591 return 0;
1595 /* Return the last value to which REG was set prior to INSN. If we can't
1596 find it easily, return 0.
1598 We only return a REG, SUBREG, or constant because it is too hard to
1599 check if a MEM remains unchanged. */
1602 reg_set_last (rtx x, rtx insn)
1604 rtx orig_insn = insn;
1606 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1607 Stop when we reach a label or X is a hard reg and we reach a
1608 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1610 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1612 /* We compare with <= here, because reg_set_last_last_regno
1613 is actually the number of the first reg *not* in X. */
1614 for (;
1615 insn && GET_CODE (insn) != CODE_LABEL
1616 && ! (GET_CODE (insn) == CALL_INSN
1617 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1618 insn = PREV_INSN (insn))
1619 if (INSN_P (insn))
1621 rtx set = set_of (x, insn);
1622 /* OK, this function modify our register. See if we understand it. */
1623 if (set)
1625 rtx last_value;
1626 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1627 return 0;
1628 last_value = SET_SRC (x);
1629 if (CONSTANT_P (last_value)
1630 || ((GET_CODE (last_value) == REG
1631 || GET_CODE (last_value) == SUBREG)
1632 && ! reg_set_between_p (last_value,
1633 insn, orig_insn)))
1634 return last_value;
1635 else
1636 return 0;
1640 return 0;
1643 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1644 (X would be the pattern of an insn).
1645 FUN receives two arguments:
1646 the REG, MEM, CC0 or PC being stored in or clobbered,
1647 the SET or CLOBBER rtx that does the store.
1649 If the item being stored in or clobbered is a SUBREG of a hard register,
1650 the SUBREG will be passed. */
1652 void
1653 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1655 int i;
1657 if (GET_CODE (x) == COND_EXEC)
1658 x = COND_EXEC_CODE (x);
1660 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1662 rtx dest = SET_DEST (x);
1664 while ((GET_CODE (dest) == SUBREG
1665 && (GET_CODE (SUBREG_REG (dest)) != REG
1666 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1667 || GET_CODE (dest) == ZERO_EXTRACT
1668 || GET_CODE (dest) == SIGN_EXTRACT
1669 || GET_CODE (dest) == STRICT_LOW_PART)
1670 dest = XEXP (dest, 0);
1672 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1673 each of whose first operand is a register. */
1674 if (GET_CODE (dest) == PARALLEL)
1676 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1677 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1678 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1680 else
1681 (*fun) (dest, x, data);
1684 else if (GET_CODE (x) == PARALLEL)
1685 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1686 note_stores (XVECEXP (x, 0, i), fun, data);
1689 /* Like notes_stores, but call FUN for each expression that is being
1690 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1691 FUN for each expression, not any interior subexpressions. FUN receives a
1692 pointer to the expression and the DATA passed to this function.
1694 Note that this is not quite the same test as that done in reg_referenced_p
1695 since that considers something as being referenced if it is being
1696 partially set, while we do not. */
1698 void
1699 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1701 rtx body = *pbody;
1702 int i;
1704 switch (GET_CODE (body))
1706 case COND_EXEC:
1707 (*fun) (&COND_EXEC_TEST (body), data);
1708 note_uses (&COND_EXEC_CODE (body), fun, data);
1709 return;
1711 case PARALLEL:
1712 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1713 note_uses (&XVECEXP (body, 0, i), fun, data);
1714 return;
1716 case USE:
1717 (*fun) (&XEXP (body, 0), data);
1718 return;
1720 case ASM_OPERANDS:
1721 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1722 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1723 return;
1725 case TRAP_IF:
1726 (*fun) (&TRAP_CONDITION (body), data);
1727 return;
1729 case PREFETCH:
1730 (*fun) (&XEXP (body, 0), data);
1731 return;
1733 case UNSPEC:
1734 case UNSPEC_VOLATILE:
1735 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1736 (*fun) (&XVECEXP (body, 0, i), data);
1737 return;
1739 case CLOBBER:
1740 if (GET_CODE (XEXP (body, 0)) == MEM)
1741 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1742 return;
1744 case SET:
1746 rtx dest = SET_DEST (body);
1748 /* For sets we replace everything in source plus registers in memory
1749 expression in store and operands of a ZERO_EXTRACT. */
1750 (*fun) (&SET_SRC (body), data);
1752 if (GET_CODE (dest) == ZERO_EXTRACT)
1754 (*fun) (&XEXP (dest, 1), data);
1755 (*fun) (&XEXP (dest, 2), data);
1758 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1759 dest = XEXP (dest, 0);
1761 if (GET_CODE (dest) == MEM)
1762 (*fun) (&XEXP (dest, 0), data);
1764 return;
1766 default:
1767 /* All the other possibilities never store. */
1768 (*fun) (pbody, data);
1769 return;
1773 /* Return nonzero if X's old contents don't survive after INSN.
1774 This will be true if X is (cc0) or if X is a register and
1775 X dies in INSN or because INSN entirely sets X.
1777 "Entirely set" means set directly and not through a SUBREG,
1778 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1779 Likewise, REG_INC does not count.
1781 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1782 but for this use that makes no difference, since regs don't overlap
1783 during their lifetimes. Therefore, this function may be used
1784 at any time after deaths have been computed (in flow.c).
1786 If REG is a hard reg that occupies multiple machine registers, this
1787 function will only return 1 if each of those registers will be replaced
1788 by INSN. */
1791 dead_or_set_p (rtx insn, rtx x)
1793 unsigned int regno, last_regno;
1794 unsigned int i;
1796 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1797 if (GET_CODE (x) == CC0)
1798 return 1;
1800 if (GET_CODE (x) != REG)
1801 abort ();
1803 regno = REGNO (x);
1804 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1805 : regno + hard_regno_nregs[regno][GET_MODE (x)] - 1);
1807 for (i = regno; i <= last_regno; i++)
1808 if (! dead_or_set_regno_p (insn, i))
1809 return 0;
1811 return 1;
1814 /* Utility function for dead_or_set_p to check an individual register. Also
1815 called from flow.c. */
1818 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1820 unsigned int regno, endregno;
1821 rtx pattern;
1823 /* See if there is a death note for something that includes TEST_REGNO. */
1824 if (find_regno_note (insn, REG_DEAD, test_regno))
1825 return 1;
1827 if (GET_CODE (insn) == CALL_INSN
1828 && find_regno_fusage (insn, CLOBBER, test_regno))
1829 return 1;
1831 pattern = PATTERN (insn);
1833 if (GET_CODE (pattern) == COND_EXEC)
1834 pattern = COND_EXEC_CODE (pattern);
1836 if (GET_CODE (pattern) == SET)
1838 rtx dest = SET_DEST (pattern);
1840 /* A value is totally replaced if it is the destination or the
1841 destination is a SUBREG of REGNO that does not change the number of
1842 words in it. */
1843 if (GET_CODE (dest) == SUBREG
1844 && (((GET_MODE_SIZE (GET_MODE (dest))
1845 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1846 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1847 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1848 dest = SUBREG_REG (dest);
1850 if (GET_CODE (dest) != REG)
1851 return 0;
1853 regno = REGNO (dest);
1854 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1855 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1857 return (test_regno >= regno && test_regno < endregno);
1859 else if (GET_CODE (pattern) == PARALLEL)
1861 int i;
1863 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1865 rtx body = XVECEXP (pattern, 0, i);
1867 if (GET_CODE (body) == COND_EXEC)
1868 body = COND_EXEC_CODE (body);
1870 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1872 rtx dest = SET_DEST (body);
1874 if (GET_CODE (dest) == SUBREG
1875 && (((GET_MODE_SIZE (GET_MODE (dest))
1876 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1877 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1878 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1879 dest = SUBREG_REG (dest);
1881 if (GET_CODE (dest) != REG)
1882 continue;
1884 regno = REGNO (dest);
1885 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1886 : regno + hard_regno_nregs[regno][GET_MODE (dest)]);
1888 if (test_regno >= regno && test_regno < endregno)
1889 return 1;
1894 return 0;
1897 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1898 If DATUM is nonzero, look for one whose datum is DATUM. */
1901 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1903 rtx link;
1905 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1906 if (! INSN_P (insn))
1907 return 0;
1909 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1910 if (REG_NOTE_KIND (link) == kind
1911 && (datum == 0 || datum == XEXP (link, 0)))
1912 return link;
1913 return 0;
1916 /* Return the reg-note of kind KIND in insn INSN which applies to register
1917 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1918 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1919 it might be the case that the note overlaps REGNO. */
1922 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1924 rtx link;
1926 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1927 if (! INSN_P (insn))
1928 return 0;
1930 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1931 if (REG_NOTE_KIND (link) == kind
1932 /* Verify that it is a register, so that scratch and MEM won't cause a
1933 problem here. */
1934 && GET_CODE (XEXP (link, 0)) == REG
1935 && REGNO (XEXP (link, 0)) <= regno
1936 && ((REGNO (XEXP (link, 0))
1937 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1938 : hard_regno_nregs[REGNO (XEXP (link, 0))]
1939 [GET_MODE (XEXP (link, 0))]))
1940 > regno))
1941 return link;
1942 return 0;
1945 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1946 has such a note. */
1949 find_reg_equal_equiv_note (rtx insn)
1951 rtx link;
1953 if (!INSN_P (insn))
1954 return 0;
1955 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1956 if (REG_NOTE_KIND (link) == REG_EQUAL
1957 || REG_NOTE_KIND (link) == REG_EQUIV)
1959 if (single_set (insn) == 0)
1960 return 0;
1961 return link;
1963 return NULL;
1966 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1967 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1970 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1972 /* If it's not a CALL_INSN, it can't possibly have a
1973 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1974 if (GET_CODE (insn) != CALL_INSN)
1975 return 0;
1977 if (! datum)
1978 abort ();
1980 if (GET_CODE (datum) != REG)
1982 rtx link;
1984 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1985 link;
1986 link = XEXP (link, 1))
1987 if (GET_CODE (XEXP (link, 0)) == code
1988 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1989 return 1;
1991 else
1993 unsigned int regno = REGNO (datum);
1995 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1996 to pseudo registers, so don't bother checking. */
1998 if (regno < FIRST_PSEUDO_REGISTER)
2000 unsigned int end_regno
2001 = regno + hard_regno_nregs[regno][GET_MODE (datum)];
2002 unsigned int i;
2004 for (i = regno; i < end_regno; i++)
2005 if (find_regno_fusage (insn, code, i))
2006 return 1;
2010 return 0;
2013 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2014 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2017 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
2019 rtx link;
2021 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2022 to pseudo registers, so don't bother checking. */
2024 if (regno >= FIRST_PSEUDO_REGISTER
2025 || GET_CODE (insn) != CALL_INSN )
2026 return 0;
2028 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2030 unsigned int regnote;
2031 rtx op, reg;
2033 if (GET_CODE (op = XEXP (link, 0)) == code
2034 && GET_CODE (reg = XEXP (op, 0)) == REG
2035 && (regnote = REGNO (reg)) <= regno
2036 && regnote + hard_regno_nregs[regnote][GET_MODE (reg)] > regno)
2037 return 1;
2040 return 0;
2043 /* Return true if INSN is a call to a pure function. */
2046 pure_call_p (rtx insn)
2048 rtx link;
2050 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2051 return 0;
2053 /* Look for the note that differentiates const and pure functions. */
2054 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2056 rtx u, m;
2058 if (GET_CODE (u = XEXP (link, 0)) == USE
2059 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2060 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2061 return 1;
2064 return 0;
2067 /* Remove register note NOTE from the REG_NOTES of INSN. */
2069 void
2070 remove_note (rtx insn, rtx note)
2072 rtx link;
2074 if (note == NULL_RTX)
2075 return;
2077 if (REG_NOTES (insn) == note)
2079 REG_NOTES (insn) = XEXP (note, 1);
2080 return;
2083 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2084 if (XEXP (link, 1) == note)
2086 XEXP (link, 1) = XEXP (note, 1);
2087 return;
2090 abort ();
2093 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2094 return 1 if it is found. A simple equality test is used to determine if
2095 NODE matches. */
2098 in_expr_list_p (rtx listp, rtx node)
2100 rtx x;
2102 for (x = listp; x; x = XEXP (x, 1))
2103 if (node == XEXP (x, 0))
2104 return 1;
2106 return 0;
2109 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2110 remove that entry from the list if it is found.
2112 A simple equality test is used to determine if NODE matches. */
2114 void
2115 remove_node_from_expr_list (rtx node, rtx *listp)
2117 rtx temp = *listp;
2118 rtx prev = NULL_RTX;
2120 while (temp)
2122 if (node == XEXP (temp, 0))
2124 /* Splice the node out of the list. */
2125 if (prev)
2126 XEXP (prev, 1) = XEXP (temp, 1);
2127 else
2128 *listp = XEXP (temp, 1);
2130 return;
2133 prev = temp;
2134 temp = XEXP (temp, 1);
2138 /* Nonzero if X contains any volatile instructions. These are instructions
2139 which may cause unpredictable machine state instructions, and thus no
2140 instructions should be moved or combined across them. This includes
2141 only volatile asms and UNSPEC_VOLATILE instructions. */
2144 volatile_insn_p (rtx x)
2146 RTX_CODE code;
2148 code = GET_CODE (x);
2149 switch (code)
2151 case LABEL_REF:
2152 case SYMBOL_REF:
2153 case CONST_INT:
2154 case CONST:
2155 case CONST_DOUBLE:
2156 case CONST_VECTOR:
2157 case CC0:
2158 case PC:
2159 case REG:
2160 case SCRATCH:
2161 case CLOBBER:
2162 case ADDR_VEC:
2163 case ADDR_DIFF_VEC:
2164 case CALL:
2165 case MEM:
2166 return 0;
2168 case UNSPEC_VOLATILE:
2169 /* case TRAP_IF: This isn't clear yet. */
2170 return 1;
2172 case ASM_INPUT:
2173 case ASM_OPERANDS:
2174 if (MEM_VOLATILE_P (x))
2175 return 1;
2177 default:
2178 break;
2181 /* Recursively scan the operands of this expression. */
2184 const char *fmt = GET_RTX_FORMAT (code);
2185 int i;
2187 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2189 if (fmt[i] == 'e')
2191 if (volatile_insn_p (XEXP (x, i)))
2192 return 1;
2194 else if (fmt[i] == 'E')
2196 int j;
2197 for (j = 0; j < XVECLEN (x, i); j++)
2198 if (volatile_insn_p (XVECEXP (x, i, j)))
2199 return 1;
2203 return 0;
2206 /* Nonzero if X contains any volatile memory references
2207 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2210 volatile_refs_p (rtx x)
2212 RTX_CODE code;
2214 code = GET_CODE (x);
2215 switch (code)
2217 case LABEL_REF:
2218 case SYMBOL_REF:
2219 case CONST_INT:
2220 case CONST:
2221 case CONST_DOUBLE:
2222 case CONST_VECTOR:
2223 case CC0:
2224 case PC:
2225 case REG:
2226 case SCRATCH:
2227 case CLOBBER:
2228 case ADDR_VEC:
2229 case ADDR_DIFF_VEC:
2230 return 0;
2232 case UNSPEC_VOLATILE:
2233 return 1;
2235 case MEM:
2236 case ASM_INPUT:
2237 case ASM_OPERANDS:
2238 if (MEM_VOLATILE_P (x))
2239 return 1;
2241 default:
2242 break;
2245 /* Recursively scan the operands of this expression. */
2248 const char *fmt = GET_RTX_FORMAT (code);
2249 int i;
2251 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2253 if (fmt[i] == 'e')
2255 if (volatile_refs_p (XEXP (x, i)))
2256 return 1;
2258 else if (fmt[i] == 'E')
2260 int j;
2261 for (j = 0; j < XVECLEN (x, i); j++)
2262 if (volatile_refs_p (XVECEXP (x, i, j)))
2263 return 1;
2267 return 0;
2270 /* Similar to above, except that it also rejects register pre- and post-
2271 incrementing. */
2274 side_effects_p (rtx x)
2276 RTX_CODE code;
2278 code = GET_CODE (x);
2279 switch (code)
2281 case LABEL_REF:
2282 case SYMBOL_REF:
2283 case CONST_INT:
2284 case CONST:
2285 case CONST_DOUBLE:
2286 case CONST_VECTOR:
2287 case CC0:
2288 case PC:
2289 case REG:
2290 case SCRATCH:
2291 case ADDR_VEC:
2292 case ADDR_DIFF_VEC:
2293 return 0;
2295 case CLOBBER:
2296 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2297 when some combination can't be done. If we see one, don't think
2298 that we can simplify the expression. */
2299 return (GET_MODE (x) != VOIDmode);
2301 case PRE_INC:
2302 case PRE_DEC:
2303 case POST_INC:
2304 case POST_DEC:
2305 case PRE_MODIFY:
2306 case POST_MODIFY:
2307 case CALL:
2308 case UNSPEC_VOLATILE:
2309 /* case TRAP_IF: This isn't clear yet. */
2310 return 1;
2312 case MEM:
2313 case ASM_INPUT:
2314 case ASM_OPERANDS:
2315 if (MEM_VOLATILE_P (x))
2316 return 1;
2318 default:
2319 break;
2322 /* Recursively scan the operands of this expression. */
2325 const char *fmt = GET_RTX_FORMAT (code);
2326 int i;
2328 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2330 if (fmt[i] == 'e')
2332 if (side_effects_p (XEXP (x, i)))
2333 return 1;
2335 else if (fmt[i] == 'E')
2337 int j;
2338 for (j = 0; j < XVECLEN (x, i); j++)
2339 if (side_effects_p (XVECEXP (x, i, j)))
2340 return 1;
2344 return 0;
2347 /* Return nonzero if evaluating rtx X might cause a trap. */
2350 may_trap_p (rtx x)
2352 int i;
2353 enum rtx_code code;
2354 const char *fmt;
2356 if (x == 0)
2357 return 0;
2358 code = GET_CODE (x);
2359 switch (code)
2361 /* Handle these cases quickly. */
2362 case CONST_INT:
2363 case CONST_DOUBLE:
2364 case CONST_VECTOR:
2365 case SYMBOL_REF:
2366 case LABEL_REF:
2367 case CONST:
2368 case PC:
2369 case CC0:
2370 case REG:
2371 case SCRATCH:
2372 return 0;
2374 case ASM_INPUT:
2375 case UNSPEC_VOLATILE:
2376 case TRAP_IF:
2377 return 1;
2379 case ASM_OPERANDS:
2380 return MEM_VOLATILE_P (x);
2382 /* Memory ref can trap unless it's a static var or a stack slot. */
2383 case MEM:
2384 if (MEM_NOTRAP_P (x))
2385 return 0;
2386 return rtx_addr_can_trap_p (XEXP (x, 0));
2388 /* Division by a non-constant might trap. */
2389 case DIV:
2390 case MOD:
2391 case UDIV:
2392 case UMOD:
2393 if (HONOR_SNANS (GET_MODE (x)))
2394 return 1;
2395 if (! CONSTANT_P (XEXP (x, 1))
2396 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2397 && flag_trapping_math))
2398 return 1;
2399 if (XEXP (x, 1) == const0_rtx)
2400 return 1;
2401 break;
2403 case EXPR_LIST:
2404 /* An EXPR_LIST is used to represent a function call. This
2405 certainly may trap. */
2406 return 1;
2408 case GE:
2409 case GT:
2410 case LE:
2411 case LT:
2412 case COMPARE:
2413 /* Some floating point comparisons may trap. */
2414 if (!flag_trapping_math)
2415 break;
2416 /* ??? There is no machine independent way to check for tests that trap
2417 when COMPARE is used, though many targets do make this distinction.
2418 For instance, sparc uses CCFPE for compares which generate exceptions
2419 and CCFP for compares which do not generate exceptions. */
2420 if (HONOR_NANS (GET_MODE (x)))
2421 return 1;
2422 /* But often the compare has some CC mode, so check operand
2423 modes as well. */
2424 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2425 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2426 return 1;
2427 break;
2429 case EQ:
2430 case NE:
2431 if (HONOR_SNANS (GET_MODE (x)))
2432 return 1;
2433 /* Often comparison is CC mode, so check operand modes. */
2434 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2435 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2436 return 1;
2437 break;
2439 case FIX:
2440 /* Conversion of floating point might trap. */
2441 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2442 return 1;
2443 break;
2445 case NEG:
2446 case ABS:
2447 /* These operations don't trap even with floating point. */
2448 break;
2450 default:
2451 /* Any floating arithmetic may trap. */
2452 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2453 && flag_trapping_math)
2454 return 1;
2457 fmt = GET_RTX_FORMAT (code);
2458 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2460 if (fmt[i] == 'e')
2462 if (may_trap_p (XEXP (x, i)))
2463 return 1;
2465 else if (fmt[i] == 'E')
2467 int j;
2468 for (j = 0; j < XVECLEN (x, i); j++)
2469 if (may_trap_p (XVECEXP (x, i, j)))
2470 return 1;
2473 return 0;
2476 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2477 i.e., an inequality. */
2480 inequality_comparisons_p (rtx x)
2482 const char *fmt;
2483 int len, i;
2484 enum rtx_code code = GET_CODE (x);
2486 switch (code)
2488 case REG:
2489 case SCRATCH:
2490 case PC:
2491 case CC0:
2492 case CONST_INT:
2493 case CONST_DOUBLE:
2494 case CONST_VECTOR:
2495 case CONST:
2496 case LABEL_REF:
2497 case SYMBOL_REF:
2498 return 0;
2500 case LT:
2501 case LTU:
2502 case GT:
2503 case GTU:
2504 case LE:
2505 case LEU:
2506 case GE:
2507 case GEU:
2508 return 1;
2510 default:
2511 break;
2514 len = GET_RTX_LENGTH (code);
2515 fmt = GET_RTX_FORMAT (code);
2517 for (i = 0; i < len; i++)
2519 if (fmt[i] == 'e')
2521 if (inequality_comparisons_p (XEXP (x, i)))
2522 return 1;
2524 else if (fmt[i] == 'E')
2526 int j;
2527 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2528 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2529 return 1;
2533 return 0;
2536 /* Replace any occurrence of FROM in X with TO. The function does
2537 not enter into CONST_DOUBLE for the replace.
2539 Note that copying is not done so X must not be shared unless all copies
2540 are to be modified. */
2543 replace_rtx (rtx x, rtx from, rtx to)
2545 int i, j;
2546 const char *fmt;
2548 /* The following prevents loops occurrence when we change MEM in
2549 CONST_DOUBLE onto the same CONST_DOUBLE. */
2550 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2551 return x;
2553 if (x == from)
2554 return to;
2556 /* Allow this function to make replacements in EXPR_LISTs. */
2557 if (x == 0)
2558 return 0;
2560 if (GET_CODE (x) == SUBREG)
2562 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2564 if (GET_CODE (new) == CONST_INT)
2566 x = simplify_subreg (GET_MODE (x), new,
2567 GET_MODE (SUBREG_REG (x)),
2568 SUBREG_BYTE (x));
2569 if (! x)
2570 abort ();
2572 else
2573 SUBREG_REG (x) = new;
2575 return x;
2577 else if (GET_CODE (x) == ZERO_EXTEND)
2579 rtx new = replace_rtx (XEXP (x, 0), from, to);
2581 if (GET_CODE (new) == CONST_INT)
2583 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2584 new, GET_MODE (XEXP (x, 0)));
2585 if (! x)
2586 abort ();
2588 else
2589 XEXP (x, 0) = new;
2591 return x;
2594 fmt = GET_RTX_FORMAT (GET_CODE (x));
2595 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2597 if (fmt[i] == 'e')
2598 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2599 else if (fmt[i] == 'E')
2600 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2601 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2604 return x;
2607 /* Throughout the rtx X, replace many registers according to REG_MAP.
2608 Return the replacement for X (which may be X with altered contents).
2609 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2610 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2612 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2613 should not be mapped to pseudos or vice versa since validate_change
2614 is not called.
2616 If REPLACE_DEST is 1, replacements are also done in destinations;
2617 otherwise, only sources are replaced.
NOTE: X is modified in place except for the shareable leaf codes below,
so callers must not assume the original rtx survives unchanged. */
2620 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2622 enum rtx_code code;
2623 int i;
2624 const char *fmt;
2626 if (x == 0)
2627 return x;
2629 code = GET_CODE (x);
2630 switch (code)
/* Leaf rtx codes with no register operands are returned unchanged. */
2632 case SCRATCH:
2633 case PC:
2634 case CC0:
2635 case CONST_INT:
2636 case CONST_DOUBLE:
2637 case CONST_VECTOR:
2638 case CONST:
2639 case SYMBOL_REF:
2640 case LABEL_REF:
2641 return x;
2643 case REG:
2644 /* Verify that the register has an entry before trying to access it. */
2645 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2647 /* SUBREGs can't be shared. Always return a copy to ensure that if
2648 this replacement occurs more than once then each instance will
2649 get distinct rtx. */
2650 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2651 return copy_rtx (reg_map[REGNO (x)]);
2652 return reg_map[REGNO (x)];
2654 return x;
2656 case SUBREG:
2657 /* Prevent making nested SUBREGs. */
2658 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2659 && reg_map[REGNO (SUBREG_REG (x))] != 0
2660 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2662 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
/* Fold (subreg (subreg ...)) into a single simplified subreg. */
2663 return simplify_gen_subreg (GET_MODE (x), map_val,
2664 GET_MODE (SUBREG_REG (x)),
2665 SUBREG_BYTE (x));
2667 break;
2669 case SET:
2670 if (replace_dest)
2671 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2673 else if (GET_CODE (SET_DEST (x)) == MEM
2674 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2675 /* Even if we are not to replace destinations, replace register if it
2676 is CONTAINED in destination (destination is memory or
2677 STRICT_LOW_PART). */
2678 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2679 reg_map, nregs, 0);
2680 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2681 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2682 break;
/* The source of a SET is always rewritten (the ZERO_EXTRACT case above
breaks out to the generic operand loop below instead). */
2684 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2685 return x;
2687 default:
2688 break;
/* Generic case: recurse into every rtx operand and vector element,
propagating REPLACE_DEST. */
2691 fmt = GET_RTX_FORMAT (code);
2692 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2694 if (fmt[i] == 'e')
2695 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2696 else if (fmt[i] == 'E')
2698 int j;
2699 for (j = 0; j < XVECLEN (x, i); j++)
2700 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2701 nregs, replace_dest);
2704 return x;
2707 /* Replace occurrences of the old label in *X with the new one.
2708 DATA is a REPLACE_LABEL_DATA containing the old and new labels.
This is a for_each_rtx callback; it always returns 0 so the traversal
continues over the whole body. */
2711 replace_label (rtx *x, void *data)
2713 rtx l = *x;
2714 rtx tmp;
2715 rtx old_label = ((replace_label_data *) data)->r1;
2716 rtx new_label = ((replace_label_data *) data)->r2;
2717 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2719 if (l == NULL_RTX)
2720 return 0;
/* A MEM whose address is a constant-pool SYMBOL_REF may hide the old
label inside the pooled constant; rebuild that constant if so. */
2722 if (GET_CODE (l) == MEM
2723 && (tmp = XEXP (l, 0)) != NULL_RTX
2724 && GET_CODE (tmp) == SYMBOL_REF
2725 && CONSTANT_POOL_ADDRESS_P (tmp))
2727 rtx c = get_pool_constant (tmp);
2728 if (rtx_referenced_p (old_label, c))
2730 rtx new_c, new_l;
2731 replace_label_data *d = (replace_label_data *) data;
2733 /* Create a copy of constant C; replace the label inside
2734 but do not update LABEL_NUSES because uses in constant pool
2735 are not counted. */
2736 new_c = copy_rtx (c);
/* Temporarily clear the flag around the recursive pass, then
restore the caller's setting. */
2737 d->update_label_nuses = false;
2738 for_each_rtx (&new_c, replace_label, data);
2739 d->update_label_nuses = update_label_nuses;
2741 /* Add the new constant NEW_C to constant pool and replace
2742 the old reference to constant by new reference. */
2743 new_l = force_const_mem (get_pool_mode (tmp), new_c);
2744 *x = replace_rtx (l, l, new_l);
2746 return 0;
2749 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2750 field. This is not handled by for_each_rtx because it doesn't
2751 handle unprinted ('0') fields. */
2752 if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
2753 JUMP_LABEL (l) = new_label;
/* Direct references: LABEL_REF operands and INSN_LIST entries. Only
these adjust LABEL_NUSES, and only when the caller asked for it. */
2755 if ((GET_CODE (l) == LABEL_REF
2756 || GET_CODE (l) == INSN_LIST)
2757 && XEXP (l, 0) == old_label)
2759 XEXP (l, 0) = new_label;
2760 if (update_label_nuses)
2762 ++LABEL_NUSES (new_label);
2763 --LABEL_NUSES (old_label);
2765 return 0;
2768 return 0;
2771 /* When *BODY is equal to X or X is directly referenced by *BODY
2772 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2773 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2775 static int
2776 rtx_referenced_p_1 (rtx *body, void *x)
2778 rtx y = (rtx) x;
2780 if (*body == NULL_RTX)
2781 return y == NULL_RTX;
2783 /* Return true if a label_ref *BODY refers to label Y. */
2784 if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
2785 return XEXP (*body, 0) == y;
2787 /* If *BODY is a reference to pool constant traverse the constant. */
2788 if (GET_CODE (*body) == SYMBOL_REF
2789 && CONSTANT_POOL_ADDRESS_P (*body))
2790 return rtx_referenced_p (y, get_pool_constant (*body));
2792 /* By default, compare the RTL expressions. */
2793 return rtx_equal_p (*body, y);
2796 /* Return true if X is referenced in BODY. */
2799 rtx_referenced_p (rtx x, rtx body)
2801 return for_each_rtx (&body, rtx_referenced_p_1, x);
2804 /* If INSN is a tablejump return true and store the label (before jump table) to
2805 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2807 bool
2808 tablejump_p (rtx insn, rtx *labelp, rtx *tablep)
2810 rtx label, table;
2812 if (GET_CODE (insn) == JUMP_INSN
2813 && (label = JUMP_LABEL (insn)) != NULL_RTX
2814 && (table = next_active_insn (label)) != NULL_RTX
2815 && GET_CODE (table) == JUMP_INSN
2816 && (GET_CODE (PATTERN (table)) == ADDR_VEC
2817 || GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC))
2819 if (labelp)
2820 *labelp = label;
2821 if (tablep)
2822 *tablep = table;
2823 return true;
2825 return false;
2828 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2829 constant that is not in the constant pool and not in the condition
2830 of an IF_THEN_ELSE. */
2832 static int
2833 computed_jump_p_1 (rtx x)
2835 enum rtx_code code = GET_CODE (x);
2836 int i, j;
2837 const char *fmt;
2839 switch (code)
2841 case LABEL_REF:
2842 case PC:
2843 return 0;
2845 case CONST:
2846 case CONST_INT:
2847 case CONST_DOUBLE:
2848 case CONST_VECTOR:
2849 case SYMBOL_REF:
2850 case REG:
2851 return 1;
2853 case MEM:
2854 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2855 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2857 case IF_THEN_ELSE:
2858 return (computed_jump_p_1 (XEXP (x, 1))
2859 || computed_jump_p_1 (XEXP (x, 2)));
2861 default:
2862 break;
2865 fmt = GET_RTX_FORMAT (code);
2866 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2868 if (fmt[i] == 'e'
2869 && computed_jump_p_1 (XEXP (x, i)))
2870 return 1;
2872 else if (fmt[i] == 'E')
2873 for (j = 0; j < XVECLEN (x, i); j++)
2874 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2875 return 1;
2878 return 0;
2881 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2883 Tablejumps and casesi insns are not considered indirect jumps;
2884 we can recognize them by a (use (label_ref)). */
2887 computed_jump_p (rtx insn)
2889 int i;
2890 if (GET_CODE (insn) == JUMP_INSN)
2892 rtx pat = PATTERN (insn);
/* A REG_LABEL note means the jump target is a known label, so this
cannot be a computed jump. */
2894 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2895 return 0;
2896 else if (GET_CODE (pat) == PARALLEL)
2898 int len = XVECLEN (pat, 0);
2899 int has_use_labelref = 0;
/* First scan for a (use (label_ref ...)); its presence marks a
tablejump or casesi rather than a computed jump. */
2901 for (i = len - 1; i >= 0; i--)
2902 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2903 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2904 == LABEL_REF))
2905 has_use_labelref = 1;
/* Otherwise, any set of pc from a register/memory/non-pool source
makes this a computed jump. */
2907 if (! has_use_labelref)
2908 for (i = len - 1; i >= 0; i--)
2909 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2910 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2911 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2912 return 1;
/* Simple (set (pc) ...) pattern. */
2914 else if (GET_CODE (pat) == SET
2915 && SET_DEST (pat) == pc_rtx
2916 && computed_jump_p_1 (SET_SRC (pat)))
2917 return 1;
2919 return 0;
2922 /* Traverse X via depth-first search, calling F for each
2923 sub-expression (including X itself). F is also passed the DATA.
2924 If F returns -1, do not traverse sub-expressions, but continue
2925 traversing the rest of the tree. If F ever returns any other
2926 nonzero value, stop the traversal, and return the value returned
2927 by F. Otherwise, return 0. This function does not traverse inside
2928 tree structure that contains RTX_EXPRs, or into sub-expressions
2929 whose format code is `0' since it is not known whether or not those
2930 codes are actually RTL.
2932 This routine is very general, and could (should?) be used to
2933 implement many of the other routines in this file. */
2936 for_each_rtx (rtx *x, rtx_function f, void *data)
2938 int result;
2939 int length;
2940 const char *format;
2941 int i;
2943 /* Call F on X. */
2944 result = (*f) (x, data);
2945 if (result == -1)
2946 /* Do not traverse sub-expressions. */
2947 return 0;
2948 else if (result != 0)
2949 /* Stop the traversal. */
2950 return result;
2952 if (*x == NULL_RTX)
2953 /* There are no sub-expressions. */
2954 return 0;
/* Requery the code AFTER calling F: F receives the address of the slot
and may have replaced *X with a different rtx. */
2956 length = GET_RTX_LENGTH (GET_CODE (*x));
2957 format = GET_RTX_FORMAT (GET_CODE (*x));
2959 for (i = 0; i < length; ++i)
2961 switch (format[i])
/* Single rtx operand: recurse and propagate a stop request. */
2963 case 'e':
2964 result = for_each_rtx (&XEXP (*x, i), f, data);
2965 if (result != 0)
2966 return result;
2967 break;
/* Vector of rtxes ('V' may be null, hence the check). */
2969 case 'V':
2970 case 'E':
2971 if (XVEC (*x, i) != 0)
2973 int j;
2974 for (j = 0; j < XVECLEN (*x, i); ++j)
2976 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2977 if (result != 0)
2978 return result;
2981 break;
2983 default:
2984 /* Nothing to do. */
2985 break;
2990 return 0;
2993 /* Searches X for any reference to REGNO, returning the rtx of the
2994 reference found if any. Otherwise, returns NULL_RTX. */
2997 regno_use_in (unsigned int regno, rtx x)
2999 const char *fmt;
3000 int i, j;
3001 rtx tem;
3003 if (GET_CODE (x) == REG && REGNO (x) == regno)
3004 return x;
3006 fmt = GET_RTX_FORMAT (GET_CODE (x));
3007 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3009 if (fmt[i] == 'e')
3011 if ((tem = regno_use_in (regno, XEXP (x, i))))
3012 return tem;
3014 else if (fmt[i] == 'E')
3015 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3016 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3017 return tem;
3020 return NULL_RTX;
3023 /* Return a value indicating whether OP, an operand of a commutative
3024 operation, is preferred as the first or second operand. The higher
3025 the value, the stronger the preference for being the first operand.
3026 We use negative values to indicate a preference for the first operand
3027 and positive values for the second operand. */
3030 commutative_operand_precedence (rtx op)
3032 /* Constants always come the second operand. Prefer "nice" constants. */
3033 if (GET_CODE (op) == CONST_INT)
3034 return -7;
3035 if (GET_CODE (op) == CONST_DOUBLE)
3036 return -6;
3037 op = avoid_constant_pool_reference (op);
3038 if (GET_CODE (op) == CONST_INT)
3039 return -5;
3040 if (GET_CODE (op) == CONST_DOUBLE)
3041 return -4;
3042 if (CONSTANT_P (op))
3043 return -3;
3045 /* SUBREGs of objects should come second. */
3046 if (GET_CODE (op) == SUBREG
3047 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
3048 return -2;
3050 /* If only one operand is a `neg', `not',
3051 `mult', `plus', or `minus' expression, it will be the first
3052 operand. */
3053 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
3054 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
3055 || GET_CODE (op) == MINUS)
3056 return 2;
3058 /* Complex expressions should be the first, so decrease priority
3059 of objects. */
3060 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
3061 return -1;
3062 return 0;
3065 /* Return 1 iff it is necessary to swap operands of commutative operation
3066 in order to canonicalize expression. */
3069 swap_commutative_operands_p (rtx x, rtx y)
3071 return (commutative_operand_precedence (x)
3072 < commutative_operand_precedence (y));
3075 /* Return 1 if X is an autoincrement side effect and the register is
3076 not the stack pointer. */
3078 auto_inc_p (rtx x)
3080 switch (GET_CODE (x))
3082 case PRE_INC:
3083 case POST_INC:
3084 case PRE_DEC:
3085 case POST_DEC:
3086 case PRE_MODIFY:
3087 case POST_MODIFY:
3088 /* There are no REG_INC notes for SP. */
3089 if (XEXP (x, 0) != stack_pointer_rtx)
3090 return 1;
3091 default:
3092 break;
3094 return 0;
3097 /* Return 1 if the sequence of instructions beginning with FROM and up
3098 to and including TO is safe to move. If NEW_TO is non-NULL, and
3099 the sequence is not already safe to move, but can be easily
3100 extended to a sequence which is safe, then NEW_TO will point to the
3101 end of the extended sequence.
3103 For now, this function only checks that the region contains whole
3104 exception regions, but it could be extended to check additional
3105 conditions as well. */
3108 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
/* Depth of EH-region nesting opened but not yet closed inside [FROM, r]. */
3110 int eh_region_count = 0;
3111 int past_to_p = 0;
3112 rtx r = from;
3114 /* By default, assume the end of the region will be what was
3115 suggested. */
3116 if (new_to)
3117 *new_to = to;
3119 while (r)
3121 if (GET_CODE (r) == NOTE)
3123 switch (NOTE_LINE_NUMBER (r))
3125 case NOTE_INSN_EH_REGION_BEG:
3126 ++eh_region_count;
3127 break;
3129 case NOTE_INSN_EH_REGION_END:
3130 if (eh_region_count == 0)
3131 /* This sequence of instructions contains the end of
3132 an exception region, but not the beginning. Moving
3133 it will cause chaos. */
3134 return 0;
3136 --eh_region_count;
3137 break;
3139 default:
3140 break;
3143 else if (past_to_p)
3144 /* If we've passed TO, and we see a non-note instruction, we
3145 can't extend the sequence to a movable sequence. */
3146 return 0;
3148 if (r == to)
3150 if (!new_to)
3151 /* It's OK to move the sequence if there were matched sets of
3152 exception region notes. */
3153 return eh_region_count == 0;
/* Keep scanning past TO for the closing EH notes. */
3155 past_to_p = 1;
3158 /* It's OK to move the sequence if there were matched sets of
3159 exception region notes. */
3160 if (past_to_p && eh_region_count == 0)
3162 *new_to = r;
3163 return 1;
3166 /* Go to the next instruction. */
3167 r = NEXT_INSN (r);
3170 return 0;
3173 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3175 loc_mentioned_in_p (rtx *loc, rtx in)
3177 enum rtx_code code = GET_CODE (in);
3178 const char *fmt = GET_RTX_FORMAT (code);
3179 int i, j;
3181 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
/* Compare LOC against the address of operand slot I itself; this is
the storage location that XEXP (in, i) would read, checked for every
operand regardless of its format code. */
3183 if (loc == &in->u.fld[i].rtx)
3184 return 1;
/* Then recurse into rtx operands and rtx vectors. */
3185 if (fmt[i] == 'e')
3187 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3188 return 1;
3190 else if (fmt[i] == 'E')
3191 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3192 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3193 return 1;
3195 return 0;
3198 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3199 and SUBREG_BYTE, return the bit offset where the subreg begins
3200 (counting from the least significant bit of the operand). */
3202 unsigned int
3203 subreg_lsb_1 (enum machine_mode outer_mode,
3204 enum machine_mode inner_mode,
3205 unsigned int subreg_byte)
3207 unsigned int bitpos;
3208 unsigned int byte;
3209 unsigned int word;
3211 /* A paradoxical subreg begins at bit position 0. */
3212 if (GET_MODE_BITSIZE (outer_mode) > GET_MODE_BITSIZE (inner_mode))
3213 return 0;
3215 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3216 /* If the subreg crosses a word boundary ensure that
3217 it also begins and ends on a word boundary. */
3218 if ((subreg_byte % UNITS_PER_WORD
3219 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3220 && (subreg_byte % UNITS_PER_WORD
3221 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD))
3222 abort ();
/* Find the word index of the low-order word, counting from the LSB;
big-endian word numbering runs the opposite way. */
3224 if (WORDS_BIG_ENDIAN)
3225 word = (GET_MODE_SIZE (inner_mode)
3226 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3227 else
3228 word = subreg_byte / UNITS_PER_WORD;
3229 bitpos = word * BITS_PER_WORD;
/* Likewise for the byte offset within that word. */
3231 if (BYTES_BIG_ENDIAN)
3232 byte = (GET_MODE_SIZE (inner_mode)
3233 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3234 else
3235 byte = subreg_byte % UNITS_PER_WORD;
3236 bitpos += byte * BITS_PER_UNIT;
3238 return bitpos;
3241 /* Given a subreg X, return the bit offset where the subreg begins
3242 (counting from the least significant bit of the reg). */
3244 unsigned int
3245 subreg_lsb (rtx x)
3247 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3248 SUBREG_BYTE (x));
3251 /* This function returns the regno offset of a subreg expression.
3252 xregno - A regno of an inner hard subreg_reg (or what will become one).
3253 xmode - The mode of xregno.
3254 offset - The byte offset.
3255 ymode - The mode of a top level SUBREG (or what may become one).
3256 RETURN - The regno offset which would be used. */
3257 unsigned int
3258 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3259 unsigned int offset, enum machine_mode ymode)
3261 int nregs_xmode, nregs_ymode;
3262 int mode_multiple, nregs_multiple;
3263 int y_offset;
/* Only hard registers have a well-defined register count per mode. */
3265 if (xregno >= FIRST_PSEUDO_REGISTER)
3266 abort ();
3268 nregs_xmode = hard_regno_nregs[xregno][xmode];
3269 nregs_ymode = hard_regno_nregs[xregno][ymode];
3271 /* If this is a big endian paradoxical subreg, which uses more actual
3272 hard registers than the original register, we must return a negative
3273 offset so that we find the proper highpart of the register. */
3274 if (offset == 0
3275 && nregs_ymode > nregs_xmode
3276 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3277 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3278 return nregs_xmode - nregs_ymode;
3280 if (offset == 0 || nregs_xmode == nregs_ymode)
3281 return 0;
3283 /* size of ymode must not be greater than the size of xmode. */
3284 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3285 if (mode_multiple == 0)
3286 abort ();
/* Scale the ymode-sized field index down to a hard-register offset. */
3288 y_offset = offset / GET_MODE_SIZE (ymode);
3289 nregs_multiple = nregs_xmode / nregs_ymode;
3290 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3293 /* This function returns true when the offset is representable via
3294 subreg_offset in the given regno.
3295 xregno - A regno of an inner hard subreg_reg (or what will become one).
3296 xmode - The mode of xregno.
3297 offset - The byte offset.
3298 ymode - The mode of a top level SUBREG (or what may become one).
3299 RETURN - Whether the offset is representable. */
3300 bool
3301 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3302 unsigned int offset, enum machine_mode ymode)
3304 int nregs_xmode, nregs_ymode;
3305 int mode_multiple, nregs_multiple;
3306 int y_offset;
/* Only hard registers have a well-defined register count per mode. */
3308 if (xregno >= FIRST_PSEUDO_REGISTER)
3309 abort ();
3311 nregs_xmode = hard_regno_nregs[xregno][xmode];
3312 nregs_ymode = hard_regno_nregs[xregno][ymode];
3314 /* paradoxical subregs are always valid. */
3315 if (offset == 0
3316 && nregs_ymode > nregs_xmode
3317 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3318 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3319 return true;
3321 /* Lowpart subregs are always valid. */
3322 if (offset == subreg_lowpart_offset (ymode, xmode))
3323 return true;
3325 #ifdef ENABLE_CHECKING
3326 /* This should always pass, otherwise we don't know how to verify the
3327 constraint. These conditions may be relaxed but subreg_offset would
3328 need to be redesigned. */
3329 if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3330 || GET_MODE_SIZE (ymode) % nregs_ymode
3331 || nregs_xmode % nregs_ymode)
3332 abort ();
3333 #endif
3335 /* The XMODE value can be seen as a vector of NREGS_XMODE
3336 values. The subreg must represent a lowpart of given field.
3337 Compute what field it is. */
3338 offset -= subreg_lowpart_offset (ymode,
3339 mode_for_size (GET_MODE_BITSIZE (xmode)
3340 / nregs_xmode,
3341 MODE_INT, 0));
3343 /* size of ymode must not be greater than the size of xmode. */
3344 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3345 if (mode_multiple == 0)
3346 abort ();
3348 y_offset = offset / GET_MODE_SIZE (ymode);
3349 nregs_multiple = nregs_xmode / nregs_ymode;
3350 #ifdef ENABLE_CHECKING
3351 if (offset % GET_MODE_SIZE (ymode)
3352 || mode_multiple % nregs_multiple)
3353 abort ();
3354 #endif
/* Representable iff the field index lands exactly on a register
boundary. */
3355 return (!(y_offset % (mode_multiple / nregs_multiple)));
3358 /* Return the final regno that a subreg expression refers to. */
3359 unsigned int
3360 subreg_regno (rtx x)
3362 unsigned int ret;
3363 rtx subreg = SUBREG_REG (x);
3364 int regno = REGNO (subreg);
3366 ret = regno + subreg_regno_offset (regno,
3367 GET_MODE (subreg),
3368 SUBREG_BYTE (x),
3369 GET_MODE (x));
3370 return ret;
/* Bookkeeping shared between find_first_parameter_load and its
note_stores callback parms_set. */
3373 struct parms_set_data
/* Number of parameter registers in REGS not yet seen stored. */
3375 int nregs;
/* The set of hard parameter registers still awaiting a store. */
3376 HARD_REG_SET regs;
3379 /* Helper function for noticing stores to parameter registers. */
3380 static void
3381 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3383 struct parms_set_data *d = data;
3384 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3385 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3387 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3388 d->nregs--;
3392 /* Look backward for first parameter to be loaded.
3393 Do not skip BOUNDARY.
Returns the insn from which it is safe to insert code before the call;
callers must not move code above the returned insn. */
3395 find_first_parameter_load (rtx call_insn, rtx boundary)
3397 struct parms_set_data parm;
3398 rtx p, before;
3400 /* Since different machines initialize their parameter registers
3401 in different orders, assume nothing. Collect the set of all
3402 parameter registers. */
3403 CLEAR_HARD_REG_SET (parm.regs);
3404 parm.nregs = 0;
/* Each (use (reg ...)) in CALL_INSN_FUNCTION_USAGE names a register
the call reads; gather the argument registers among them. */
3405 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3406 if (GET_CODE (XEXP (p, 0)) == USE
3407 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3409 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3410 abort ();
3412 /* We only care about registers which can hold function
3413 arguments. */
3414 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3415 continue;
3417 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3418 parm.nregs++;
3420 before = call_insn;
3422 /* Search backward for the first set of a register in this set. */
3423 while (parm.nregs && before != boundary)
3425 before = PREV_INSN (before);
3427 /* It is possible that some loads got CSEed from one call to
3428 another. Stop in that case. */
3429 if (GET_CODE (before) == CALL_INSN)
3430 break;
3432 /* Our caller needs to either ensure that we will find all sets
3433 (in case code has not been optimized yet), or take care
3434 for possible labels in a way by setting boundary to preceding
3435 CODE_LABEL. */
3436 if (GET_CODE (before) == CODE_LABEL)
3438 if (before != boundary)
3439 abort ();
3440 break;
/* parms_set removes from PARM any parameter register stored here. */
3443 if (INSN_P (before))
3444 note_stores (PATTERN (before), parms_set, &parm);
3446 return before;
3449 /* Return true if we should avoid inserting code between INSN and preceding
3450 call instruction. */
3452 bool
3453 keep_with_call_p (rtx insn)
3455 rtx set;
3457 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
/* A store into a fixed hard register (e.g. setting up a global
pointer) must stay glued to the call. */
3459 if (GET_CODE (SET_DEST (set)) == REG
3460 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3461 && fixed_regs[REGNO (SET_DEST (set))]
3462 && general_operand (SET_SRC (set), VOIDmode))
3463 return true;
/* Copying the function value register into a pseudo also belongs
with the call that produced the value. */
3464 if (GET_CODE (SET_SRC (set)) == REG
3465 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3466 && GET_CODE (SET_DEST (set)) == REG
3467 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3468 return true;
3469 /* There may be a stack pop just after the call and before the store
3470 of the return register. Search for the actual store when deciding
3471 if we can break or not. */
3472 if (SET_DEST (set) == stack_pointer_rtx)
3474 rtx i2 = next_nonnote_insn (insn);
3475 if (i2 && keep_with_call_p (i2))
3476 return true;
3479 return false;
3482 /* Return true when store to register X can be hoisted to the place
3483 with LIVE registers (can be NULL). Value VAL contains destination
3484 whose value will be used. */
3486 static bool
3487 hoist_test_store (rtx x, rtx val, regset live)
/* Stores into a scratch have no observable effect. */
3489 if (GET_CODE (x) == SCRATCH)
3490 return true;
/* Storing into VAL itself is exactly what the hoisting wants. */
3492 if (rtx_equal_p (x, val))
3493 return true;
3495 /* Allow subreg of X in case it is not writing just part of multireg pseudo.
3496 Then we would need to update all users to care hoisting the store too.
3497 Caller may represent that by specifying whole subreg as val. */
3499 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
/* Reject a partial write into a multi-word register. */
3501 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3502 && GET_MODE_BITSIZE (GET_MODE (x)) <
3503 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3504 return false;
3505 return true;
/* For any other subreg, judge the underlying register. */
3507 if (GET_CODE (x) == SUBREG)
3508 x = SUBREG_REG (x);
3510 /* Anything except register store is not hoistable. This includes the
3511 partial stores to registers. */
3513 if (!REG_P (x))
3514 return false;
3516 /* Pseudo registers can be always replaced by another pseudo to avoid
3517 the side effect, for hard register we must ensure that they are dead.
3518 Eventually we may want to add code to try turn pseudos to hards, but it
3519 is unlikely useful. */
3521 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3523 int regno = REGNO (x);
3524 int n = hard_regno_nregs[regno][GET_MODE (x)];
/* Without liveness information we cannot prove the hard register
is dead, so be conservative. */
3526 if (!live)
3527 return false;
3528 if (REGNO_REG_SET_P (live, regno))
3529 return false;
/* Check every hard register the mode occupies. */
3530 while (--n > 0)
3531 if (REGNO_REG_SET_P (live, regno + n))
3532 return false;
3534 return true;
3538 /* Return true if INSN can be hoisted to place with LIVE hard registers
3539 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3540 and used by the hoisting pass. */
3542 bool
3543 can_hoist_insn_p (rtx insn, rtx val, regset live)
3545 rtx pat = PATTERN (insn);
3546 int i;
3548 /* It is probably not worth the complexity to handle multiple
3549 set stores. */
3550 if (!single_set (insn))
3551 return false;
3552 /* We can move CALL_INSN, but we need to check that all caller clobbered
3553 regs are dead. */
3554 if (GET_CODE (insn) == CALL_INSN)
3555 return false;
3556 /* In future we will handle hoisting of libcall sequences, but
3557 give up for now. */
3558 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3559 return false;
3560 switch (GET_CODE (pat))
3562 case SET:
3563 if (!hoist_test_store (SET_DEST (pat), val, live))
3564 return false;
3565 break;
3566 case USE:
3567 /* USES do have sick semantics, so do not move them. */
3568 return false;
3569 break;
3570 case CLOBBER:
3571 if (!hoist_test_store (XEXP (pat, 0), val, live))
3572 return false;
3573 break;
3574 case PARALLEL:
/* Every side effect in the PARALLEL must be hoistable on its own. */
3575 for (i = 0; i < XVECLEN (pat, 0); i++)
3577 rtx x = XVECEXP (pat, 0, i);
3578 switch (GET_CODE (x))
3580 case SET:
3581 if (!hoist_test_store (SET_DEST (x), val, live))
3582 return false;
3583 break;
3584 case USE:
3585 /* We need to fix callers to really ensure availability
3586 of all values insn uses, but for now it is safe to prohibit
3587 hoisting of any insn having such a hidden uses. */
3588 return false;
3589 break;
3590 case CLOBBER:
3591 if (!hoist_test_store (SET_DEST (x), val, live))
3592 return false;
3593 break;
3594 default:
3595 break;
3598 break;
3599 default:
3600 abort ();
3602 return true;
3605 /* Update store after hoisting - replace all stores to pseudo registers
3606 by new ones to avoid clobbering of values except for store to VAL that will
3607 be updated to NEW.
All replacements are queued with validate_change (in_group = 1); the
caller is responsible for running apply_change_group. */
3609 static void
3610 hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
3612 rtx x = *xp;
/* Scratch destinations need no renaming. */
3614 if (GET_CODE (x) == SCRATCH)
3615 return;
/* A subreg of VAL becomes the matching subreg of NEW. */
3617 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3618 validate_change (insn, xp,
3619 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3620 SUBREG_BYTE (x)), 1);
/* The store of VAL itself is redirected to NEW. */
3621 if (rtx_equal_p (x, val))
3623 validate_change (insn, xp, new, 1);
3624 return;
/* For any other subreg, rename the register underneath it. */
3626 if (GET_CODE (x) == SUBREG)
3628 xp = &SUBREG_REG (x);
3629 x = *xp;
/* can_hoist_insn_p guarantees only register stores remain here. */
3632 if (!REG_P (x))
3633 abort ();
3635 /* We've verified that hard registers are dead, so we may keep the side
3636 effect. Otherwise replace it by new pseudo. */
3637 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3638 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
/* Mark the (possibly renamed) destination as unused at this point. */
3639 REG_NOTES (insn)
3640 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3643 /* Create a copy of INSN after AFTER replacing store of VAL to NEW
3644 and each other side effect to pseudo register by new pseudo register.
Returns the newly emitted insn; aborts if any queued change fails. */
3647 hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
3649 rtx pat;
3650 int i;
3651 rtx note;
3653 insn = emit_copy_of_insn_after (insn, after);
3654 pat = PATTERN (insn);
3656 /* Remove REG_UNUSED notes as we will re-emit them. */
3657 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3658 remove_note (insn, note);
3660 /* To get this working callers must ensure to move everything referenced
3661 by REG_EQUAL/REG_EQUIV notes too. Let's remove them, it is probably
3662 easier. */
3663 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3664 remove_note (insn, note);
3665 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3666 remove_note (insn, note);
3668 /* Remove REG_DEAD notes as they might not be valid anymore in case
3669 we create redundancy. */
3670 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3671 remove_note (insn, note);
/* Queue the destination rewrites; mirrors the shape checks done by
can_hoist_insn_p. */
3672 switch (GET_CODE (pat))
3674 case SET:
3675 hoist_update_store (insn, &SET_DEST (pat), val, new);
3676 break;
3677 case USE:
3678 break;
3679 case CLOBBER:
3680 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3681 break;
3682 case PARALLEL:
3683 for (i = 0; i < XVECLEN (pat, 0); i++)
3685 rtx x = XVECEXP (pat, 0, i);
3686 switch (GET_CODE (x))
3688 case SET:
3689 hoist_update_store (insn, &SET_DEST (x), val, new);
3690 break;
3691 case USE:
3692 break;
3693 case CLOBBER:
3694 hoist_update_store (insn, &SET_DEST (x), val, new);
3695 break;
3696 default:
3697 break;
3700 break;
3701 default:
3702 abort ();
/* Commit all the changes hoist_update_store queued. */
3704 if (!apply_change_group ())
3705 abort ();
3707 return insn;
/* Hoist a copy of INSN onto edge E, replacing the stored value VAL
with NEW; the copy is queued on the edge's pending insn sequence and
the new insn is returned. */
3711 hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
3713 rtx new_insn;
3715 /* We cannot insert instructions on an abnormal critical edge.
3716 It will be easier to find the culprit if we die now. */
3717 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3718 abort ();
3720 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3721 stuff. We also emit CALL_INSNS and friends. */
3722 if (e->insns == NULL_RTX)
3724 start_sequence ();
/* Seed an otherwise-empty sequence with a placeholder note. */
3725 emit_note (NOTE_INSN_DELETED);
3727 else
3728 push_to_sequence (e->insns);
3730 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
/* Stash the extended sequence back on the edge. */
3732 e->insns = get_insns ();
3733 end_sequence ();
3734 return new_insn;
3737 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3738 to non-complex jumps. That is, direct unconditional, conditional,
3739 and tablejumps, but not computed jumps or returns. It also does
3740 not apply to the fallthru case of a conditional jump. */
3742 bool
3743 label_is_jump_target_p (rtx label, rtx jump_insn)
3745 rtx tmp = JUMP_LABEL (jump_insn);
3747 if (label == tmp)
3748 return true;
3750 if (tablejump_p (jump_insn, NULL, &tmp))
3752 rtvec vec = XVEC (PATTERN (tmp),
3753 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3754 int i, veclen = GET_NUM_ELEM (vec);
3756 for (i = 0; i < veclen; ++i)
3757 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3758 return true;
3761 return false;