* configure.in: Add ${libgcj} to noconfigdirs for xtensa-*-* targets.
[official-gcc.git] / gcc / rtlanal.c
bloba7d410267d6a4dd981c24c2c10dd27d63dbf0811
1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "tm_p.h"
33 #include "flags.h"
34 #include "basic-block.h"
35 #include "real.h"
37 /* Forward declarations */
38 static int global_reg_mentioned_p_1 (rtx *, void *);
39 static void set_of_1 (rtx, rtx, void *);
40 static void insn_dependent_p_1 (rtx, rtx, void *);
41 static int rtx_referenced_p_1 (rtx *, void *);
42 static int computed_jump_p_1 (rtx);
43 static void parms_set (rtx, rtx, void *);
44 static bool hoist_test_store (rtx, rtx, regset);
45 static void hoist_update_store (rtx, rtx *, rtx, rtx);
/* Machine-subtype flags for the target we are compiling for.
   Individual bits are tested with the TARGET_... macros defined in tm.h
   and are set by `-m...' command-line switches.  Must be defined in
   rtlanal.c so that generator programs linking this file get a copy.  */

int target_flags;
53 /* Return 1 if the value of X is unstable
54 (would be different at a different point in the program).
55 The frame pointer, arg pointer, etc. are considered stable
56 (within one function) and so is anything marked `unchanging'. */
58 int
59 rtx_unstable_p (rtx x)
61 RTX_CODE code = GET_CODE (x);
62 int i;
63 const char *fmt;
65 switch (code)
67 case MEM:
68 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
70 case QUEUED:
71 return 1;
73 case ADDRESSOF:
74 case CONST:
75 case CONST_INT:
76 case CONST_DOUBLE:
77 case CONST_VECTOR:
78 case SYMBOL_REF:
79 case LABEL_REF:
80 return 0;
82 case REG:
83 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
84 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
85 /* The arg pointer varies if it is not a fixed register. */
86 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
87 || RTX_UNCHANGING_P (x))
88 return 0;
89 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
90 /* ??? When call-clobbered, the value is stable modulo the restore
91 that must happen after a call. This currently screws up local-alloc
92 into believing that the restore is not needed. */
93 if (x == pic_offset_table_rtx)
94 return 0;
95 #endif
96 return 1;
98 case ASM_OPERANDS:
99 if (MEM_VOLATILE_P (x))
100 return 1;
102 /* FALLTHROUGH */
104 default:
105 break;
108 fmt = GET_RTX_FORMAT (code);
109 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
110 if (fmt[i] == 'e')
112 if (rtx_unstable_p (XEXP (x, i)))
113 return 1;
115 else if (fmt[i] == 'E')
117 int j;
118 for (j = 0; j < XVECLEN (x, i); j++)
119 if (rtx_unstable_p (XVECEXP (x, i, j)))
120 return 1;
123 return 0;
126 /* Return 1 if X has a value that can vary even between two
127 executions of the program. 0 means X can be compared reliably
128 against certain constants or near-constants.
129 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
130 zero, we are slightly more conservative.
131 The frame pointer and the arg pointer are considered constant. */
134 rtx_varies_p (rtx x, int for_alias)
136 RTX_CODE code = GET_CODE (x);
137 int i;
138 const char *fmt;
140 switch (code)
142 case MEM:
143 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
145 case QUEUED:
146 return 1;
148 case CONST:
149 case CONST_INT:
150 case CONST_DOUBLE:
151 case CONST_VECTOR:
152 case SYMBOL_REF:
153 case LABEL_REF:
154 return 0;
156 case ADDRESSOF:
157 /* This will resolve to some offset from the frame pointer. */
158 return 0;
160 case REG:
161 /* Note that we have to test for the actual rtx used for the frame
162 and arg pointers and not just the register number in case we have
163 eliminated the frame and/or arg pointer and are using it
164 for pseudos. */
165 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
166 /* The arg pointer varies if it is not a fixed register. */
167 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
168 return 0;
169 if (x == pic_offset_table_rtx
170 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
171 /* ??? When call-clobbered, the value is stable modulo the restore
172 that must happen after a call. This currently screws up
173 local-alloc into believing that the restore is not needed, so we
174 must return 0 only if we are called from alias analysis. */
175 && for_alias
176 #endif
178 return 0;
179 return 1;
181 case LO_SUM:
182 /* The operand 0 of a LO_SUM is considered constant
183 (in fact it is related specifically to operand 1)
184 during alias analysis. */
185 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
186 || rtx_varies_p (XEXP (x, 1), for_alias);
188 case ASM_OPERANDS:
189 if (MEM_VOLATILE_P (x))
190 return 1;
192 /* FALLTHROUGH */
194 default:
195 break;
198 fmt = GET_RTX_FORMAT (code);
199 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
200 if (fmt[i] == 'e')
202 if (rtx_varies_p (XEXP (x, i), for_alias))
203 return 1;
205 else if (fmt[i] == 'E')
207 int j;
208 for (j = 0; j < XVECLEN (x, i); j++)
209 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
210 return 1;
213 return 0;
216 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
219 rtx_addr_can_trap_p (rtx x)
221 enum rtx_code code = GET_CODE (x);
223 switch (code)
225 case SYMBOL_REF:
226 return SYMBOL_REF_WEAK (x);
228 case LABEL_REF:
229 return 0;
231 case ADDRESSOF:
232 /* This will resolve to some offset from the frame pointer. */
233 return 0;
235 case REG:
236 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
237 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
238 || x == stack_pointer_rtx
239 /* The arg pointer varies if it is not a fixed register. */
240 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
241 return 0;
242 /* All of the virtual frame registers are stack references. */
243 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
244 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
245 return 0;
246 return 1;
248 case CONST:
249 return rtx_addr_can_trap_p (XEXP (x, 0));
251 case PLUS:
252 /* An address is assumed not to trap if it is an address that can't
253 trap plus a constant integer or it is the pic register plus a
254 constant. */
255 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
256 && GET_CODE (XEXP (x, 1)) == CONST_INT)
257 || (XEXP (x, 0) == pic_offset_table_rtx
258 && CONSTANT_P (XEXP (x, 1))));
260 case LO_SUM:
261 case PRE_MODIFY:
262 return rtx_addr_can_trap_p (XEXP (x, 1));
264 case PRE_DEC:
265 case PRE_INC:
266 case POST_DEC:
267 case POST_INC:
268 case POST_MODIFY:
269 return rtx_addr_can_trap_p (XEXP (x, 0));
271 default:
272 break;
275 /* If it isn't one of the case above, it can cause a trap. */
276 return 1;
279 /* Return true if X is an address that is known to not be zero. */
281 bool
282 nonzero_address_p (rtx x)
284 enum rtx_code code = GET_CODE (x);
286 switch (code)
288 case SYMBOL_REF:
289 return !SYMBOL_REF_WEAK (x);
291 case LABEL_REF:
292 return true;
294 case ADDRESSOF:
295 /* This will resolve to some offset from the frame pointer. */
296 return true;
298 case REG:
299 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
300 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
301 || x == stack_pointer_rtx
302 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
303 return true;
304 /* All of the virtual frame registers are stack references. */
305 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
306 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
307 return true;
308 return false;
310 case CONST:
311 return nonzero_address_p (XEXP (x, 0));
313 case PLUS:
314 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
316 /* Pointers aren't allowed to wrap. If we've got a register
317 that is known to be a pointer, and a positive offset, then
318 the composite can't be zero. */
319 if (INTVAL (XEXP (x, 1)) > 0
320 && REG_P (XEXP (x, 0))
321 && REG_POINTER (XEXP (x, 0)))
322 return true;
324 return nonzero_address_p (XEXP (x, 0));
326 /* Handle PIC references. */
327 else if (XEXP (x, 0) == pic_offset_table_rtx
328 && CONSTANT_P (XEXP (x, 1)))
329 return true;
330 return false;
332 case PRE_MODIFY:
333 /* Similar to the above; allow positive offsets. Further, since
334 auto-inc is only allowed in memories, the register must be a
335 pointer. */
336 if (GET_CODE (XEXP (x, 1)) == CONST_INT
337 && INTVAL (XEXP (x, 1)) > 0)
338 return true;
339 return nonzero_address_p (XEXP (x, 0));
341 case PRE_INC:
342 /* Similarly. Further, the offset is always positive. */
343 return true;
345 case PRE_DEC:
346 case POST_DEC:
347 case POST_INC:
348 case POST_MODIFY:
349 return nonzero_address_p (XEXP (x, 0));
351 case LO_SUM:
352 return nonzero_address_p (XEXP (x, 1));
354 default:
355 break;
358 /* If it isn't one of the case above, might be zero. */
359 return false;
362 /* Return 1 if X refers to a memory location whose address
363 cannot be compared reliably with constant addresses,
364 or if X refers to a BLKmode memory object.
365 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
366 zero, we are slightly more conservative. */
369 rtx_addr_varies_p (rtx x, int for_alias)
371 enum rtx_code code;
372 int i;
373 const char *fmt;
375 if (x == 0)
376 return 0;
378 code = GET_CODE (x);
379 if (code == MEM)
380 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
382 fmt = GET_RTX_FORMAT (code);
383 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
384 if (fmt[i] == 'e')
386 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
387 return 1;
389 else if (fmt[i] == 'E')
391 int j;
392 for (j = 0; j < XVECLEN (x, i); j++)
393 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
394 return 1;
396 return 0;
399 /* Return the value of the integer term in X, if one is apparent;
400 otherwise return 0.
401 Only obvious integer terms are detected.
402 This is used in cse.c with the `related_value' field. */
404 HOST_WIDE_INT
405 get_integer_term (rtx x)
407 if (GET_CODE (x) == CONST)
408 x = XEXP (x, 0);
410 if (GET_CODE (x) == MINUS
411 && GET_CODE (XEXP (x, 1)) == CONST_INT)
412 return - INTVAL (XEXP (x, 1));
413 if (GET_CODE (x) == PLUS
414 && GET_CODE (XEXP (x, 1)) == CONST_INT)
415 return INTVAL (XEXP (x, 1));
416 return 0;
419 /* If X is a constant, return the value sans apparent integer term;
420 otherwise return 0.
421 Only obvious integer terms are detected. */
424 get_related_value (rtx x)
426 if (GET_CODE (x) != CONST)
427 return 0;
428 x = XEXP (x, 0);
429 if (GET_CODE (x) == PLUS
430 && GET_CODE (XEXP (x, 1)) == CONST_INT)
431 return XEXP (x, 0);
432 else if (GET_CODE (x) == MINUS
433 && GET_CODE (XEXP (x, 1)) == CONST_INT)
434 return XEXP (x, 0);
435 return 0;
438 /* Given a tablejump insn INSN, return the RTL expression for the offset
439 into the jump table. If the offset cannot be determined, then return
440 NULL_RTX.
442 If EARLIEST is nonzero, it is a pointer to a place where the earliest
443 insn used in locating the offset was found. */
446 get_jump_table_offset (rtx insn, rtx *earliest)
448 rtx label;
449 rtx table;
450 rtx set;
451 rtx old_insn;
452 rtx x;
453 rtx old_x;
454 rtx y;
455 rtx old_y;
456 int i;
458 if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
459 return NULL_RTX;
461 x = SET_SRC (set);
463 /* Some targets (eg, ARM) emit a tablejump that also
464 contains the out-of-range target. */
465 if (GET_CODE (x) == IF_THEN_ELSE
466 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
467 x = XEXP (x, 1);
469 /* Search backwards and locate the expression stored in X. */
470 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
471 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
474 /* If X is an expression using a relative address then strip
475 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
476 or the jump table label. */
477 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
478 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
480 for (i = 0; i < 2; i++)
482 old_insn = insn;
483 y = XEXP (x, i);
485 if (y == pc_rtx || y == pic_offset_table_rtx)
486 break;
488 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
489 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
492 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
493 break;
496 if (i >= 2)
497 return NULL_RTX;
499 x = XEXP (x, 1 - i);
501 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
502 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
506 /* Strip off any sign or zero extension. */
507 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
509 x = XEXP (x, 0);
511 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
512 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
516 /* If X isn't a MEM then this isn't a tablejump we understand. */
517 if (GET_CODE (x) != MEM)
518 return NULL_RTX;
520 /* Strip off the MEM. */
521 x = XEXP (x, 0);
523 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
524 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
527 /* If X isn't a PLUS than this isn't a tablejump we understand. */
528 if (GET_CODE (x) != PLUS)
529 return NULL_RTX;
531 /* At this point we should have an expression representing the jump table
532 plus an offset. Examine each operand in order to determine which one
533 represents the jump table. Knowing that tells us that the other operand
534 must represent the offset. */
535 for (i = 0; i < 2; i++)
537 old_insn = insn;
538 y = XEXP (x, i);
540 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
541 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
544 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
545 && reg_mentioned_p (label, y))
546 break;
549 if (i >= 2)
550 return NULL_RTX;
552 x = XEXP (x, 1 - i);
554 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
555 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
556 for (i = 0; i < 2; i++)
557 if (XEXP (x, i) == pic_offset_table_rtx)
559 x = XEXP (x, 1 - i);
560 break;
563 if (earliest)
564 *earliest = insn;
566 /* Return the RTL expression representing the offset. */
567 return x;
570 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
571 a global register. */
573 static int
574 global_reg_mentioned_p_1 (rtx *loc, void *data ATTRIBUTE_UNUSED)
576 int regno;
577 rtx x = *loc;
579 if (! x)
580 return 0;
582 switch (GET_CODE (x))
584 case SUBREG:
585 if (GET_CODE (SUBREG_REG (x)) == REG)
587 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
588 && global_regs[subreg_regno (x)])
589 return 1;
590 return 0;
592 break;
594 case REG:
595 regno = REGNO (x);
596 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
597 return 1;
598 return 0;
600 case SCRATCH:
601 case PC:
602 case CC0:
603 case CONST_INT:
604 case CONST_DOUBLE:
605 case CONST:
606 case LABEL_REF:
607 return 0;
609 case CALL:
610 /* A non-constant call might use a global register. */
611 return 1;
613 default:
614 break;
617 return 0;
620 /* Returns nonzero if X mentions a global register. */
623 global_reg_mentioned_p (rtx x)
625 if (INSN_P (x))
627 if (GET_CODE (x) == CALL_INSN)
629 if (! CONST_OR_PURE_CALL_P (x))
630 return 1;
631 x = CALL_INSN_FUNCTION_USAGE (x);
632 if (x == 0)
633 return 0;
635 else
636 x = PATTERN (x);
639 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
642 /* Return the number of places FIND appears within X. If COUNT_DEST is
643 zero, we do not count occurrences inside the destination of a SET. */
646 count_occurrences (rtx x, rtx find, int count_dest)
648 int i, j;
649 enum rtx_code code;
650 const char *format_ptr;
651 int count;
653 if (x == find)
654 return 1;
656 code = GET_CODE (x);
658 switch (code)
660 case REG:
661 case CONST_INT:
662 case CONST_DOUBLE:
663 case CONST_VECTOR:
664 case SYMBOL_REF:
665 case CODE_LABEL:
666 case PC:
667 case CC0:
668 return 0;
670 case MEM:
671 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
672 return 1;
673 break;
675 case SET:
676 if (SET_DEST (x) == find && ! count_dest)
677 return count_occurrences (SET_SRC (x), find, count_dest);
678 break;
680 default:
681 break;
684 format_ptr = GET_RTX_FORMAT (code);
685 count = 0;
687 for (i = 0; i < GET_RTX_LENGTH (code); i++)
689 switch (*format_ptr++)
691 case 'e':
692 count += count_occurrences (XEXP (x, i), find, count_dest);
693 break;
695 case 'E':
696 for (j = 0; j < XVECLEN (x, i); j++)
697 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
698 break;
701 return count;
704 /* Nonzero if register REG appears somewhere within IN.
705 Also works if REG is not a register; in this case it checks
706 for a subexpression of IN that is Lisp "equal" to REG. */
709 reg_mentioned_p (rtx reg, rtx in)
711 const char *fmt;
712 int i;
713 enum rtx_code code;
715 if (in == 0)
716 return 0;
718 if (reg == in)
719 return 1;
721 if (GET_CODE (in) == LABEL_REF)
722 return reg == XEXP (in, 0);
724 code = GET_CODE (in);
726 switch (code)
728 /* Compare registers by number. */
729 case REG:
730 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
732 /* These codes have no constituent expressions
733 and are unique. */
734 case SCRATCH:
735 case CC0:
736 case PC:
737 return 0;
739 case CONST_INT:
740 case CONST_VECTOR:
741 case CONST_DOUBLE:
742 /* These are kept unique for a given value. */
743 return 0;
745 default:
746 break;
749 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
750 return 1;
752 fmt = GET_RTX_FORMAT (code);
754 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
756 if (fmt[i] == 'E')
758 int j;
759 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
760 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
761 return 1;
763 else if (fmt[i] == 'e'
764 && reg_mentioned_p (reg, XEXP (in, i)))
765 return 1;
767 return 0;
770 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
771 no CODE_LABEL insn. */
774 no_labels_between_p (rtx beg, rtx end)
776 rtx p;
777 if (beg == end)
778 return 0;
779 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
780 if (GET_CODE (p) == CODE_LABEL)
781 return 0;
782 return 1;
785 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
786 no JUMP_INSN insn. */
789 no_jumps_between_p (rtx beg, rtx end)
791 rtx p;
792 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
793 if (GET_CODE (p) == JUMP_INSN)
794 return 0;
795 return 1;
798 /* Nonzero if register REG is used in an insn between
799 FROM_INSN and TO_INSN (exclusive of those two). */
802 reg_used_between_p (rtx reg, rtx from_insn, rtx to_insn)
804 rtx insn;
806 if (from_insn == to_insn)
807 return 0;
809 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
810 if (INSN_P (insn)
811 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
812 || (GET_CODE (insn) == CALL_INSN
813 && (find_reg_fusage (insn, USE, reg)
814 || find_reg_fusage (insn, CLOBBER, reg)))))
815 return 1;
816 return 0;
819 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
820 is entirely replaced by a new value and the only use is as a SET_DEST,
821 we do not consider it a reference. */
824 reg_referenced_p (rtx x, rtx body)
826 int i;
828 switch (GET_CODE (body))
830 case SET:
831 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
832 return 1;
834 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
835 of a REG that occupies all of the REG, the insn references X if
836 it is mentioned in the destination. */
837 if (GET_CODE (SET_DEST (body)) != CC0
838 && GET_CODE (SET_DEST (body)) != PC
839 && GET_CODE (SET_DEST (body)) != REG
840 && ! (GET_CODE (SET_DEST (body)) == SUBREG
841 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
842 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
843 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
844 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
845 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
846 && reg_overlap_mentioned_p (x, SET_DEST (body)))
847 return 1;
848 return 0;
850 case ASM_OPERANDS:
851 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
852 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
853 return 1;
854 return 0;
856 case CALL:
857 case USE:
858 case IF_THEN_ELSE:
859 return reg_overlap_mentioned_p (x, body);
861 case TRAP_IF:
862 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
864 case PREFETCH:
865 return reg_overlap_mentioned_p (x, XEXP (body, 0));
867 case UNSPEC:
868 case UNSPEC_VOLATILE:
869 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
870 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
871 return 1;
872 return 0;
874 case PARALLEL:
875 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
876 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
877 return 1;
878 return 0;
880 case CLOBBER:
881 if (GET_CODE (XEXP (body, 0)) == MEM)
882 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
883 return 1;
884 return 0;
886 case COND_EXEC:
887 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
888 return 1;
889 return reg_referenced_p (x, COND_EXEC_CODE (body));
891 default:
892 return 0;
896 /* Nonzero if register REG is referenced in an insn between
897 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
898 not count. */
901 reg_referenced_between_p (rtx reg, rtx from_insn, rtx to_insn)
903 rtx insn;
905 if (from_insn == to_insn)
906 return 0;
908 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
909 if (INSN_P (insn)
910 && (reg_referenced_p (reg, PATTERN (insn))
911 || (GET_CODE (insn) == CALL_INSN
912 && find_reg_fusage (insn, USE, reg))))
913 return 1;
914 return 0;
917 /* Nonzero if register REG is set or clobbered in an insn between
918 FROM_INSN and TO_INSN (exclusive of those two). */
921 reg_set_between_p (rtx reg, rtx from_insn, rtx to_insn)
923 rtx insn;
925 if (from_insn == to_insn)
926 return 0;
928 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
929 if (INSN_P (insn) && reg_set_p (reg, insn))
930 return 1;
931 return 0;
934 /* Internals of reg_set_between_p. */
936 reg_set_p (rtx reg, rtx insn)
938 /* We can be passed an insn or part of one. If we are passed an insn,
939 check if a side-effect of the insn clobbers REG. */
940 if (INSN_P (insn)
941 && (FIND_REG_INC_NOTE (insn, reg)
942 || (GET_CODE (insn) == CALL_INSN
943 /* We'd like to test call_used_regs here, but rtlanal.c can't
944 reference that variable due to its use in genattrtab. So
945 we'll just be more conservative.
947 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
948 information holds all clobbered registers. */
949 && ((GET_CODE (reg) == REG
950 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
951 || GET_CODE (reg) == MEM
952 || find_reg_fusage (insn, CLOBBER, reg)))))
953 return 1;
955 return set_of (reg, insn) != NULL_RTX;
958 /* Similar to reg_set_between_p, but check all registers in X. Return 0
959 only if none of them are modified between START and END. Do not
960 consider non-registers one way or the other. */
963 regs_set_between_p (rtx x, rtx start, rtx end)
965 enum rtx_code code = GET_CODE (x);
966 const char *fmt;
967 int i, j;
969 switch (code)
971 case CONST_INT:
972 case CONST_DOUBLE:
973 case CONST_VECTOR:
974 case CONST:
975 case SYMBOL_REF:
976 case LABEL_REF:
977 case PC:
978 case CC0:
979 return 0;
981 case REG:
982 return reg_set_between_p (x, start, end);
984 default:
985 break;
988 fmt = GET_RTX_FORMAT (code);
989 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
991 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
992 return 1;
994 else if (fmt[i] == 'E')
995 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
996 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
997 return 1;
1000 return 0;
1003 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1004 only if none of them are modified between START and END. Return 1 if
1005 X contains a MEM; this routine does usememory aliasing. */
1008 modified_between_p (rtx x, rtx start, rtx end)
1010 enum rtx_code code = GET_CODE (x);
1011 const char *fmt;
1012 int i, j;
1013 rtx insn;
1015 if (start == end)
1016 return 0;
1018 switch (code)
1020 case CONST_INT:
1021 case CONST_DOUBLE:
1022 case CONST_VECTOR:
1023 case CONST:
1024 case SYMBOL_REF:
1025 case LABEL_REF:
1026 return 0;
1028 case PC:
1029 case CC0:
1030 return 1;
1032 case MEM:
1033 if (RTX_UNCHANGING_P (x))
1034 return 0;
1035 if (modified_between_p (XEXP (x, 0), start, end))
1036 return 1;
1037 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1038 if (memory_modified_in_insn_p (x, insn))
1039 return 1;
1040 return 0;
1041 break;
1043 case REG:
1044 return reg_set_between_p (x, start, end);
1046 default:
1047 break;
1050 fmt = GET_RTX_FORMAT (code);
1051 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1053 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1054 return 1;
1056 else if (fmt[i] == 'E')
1057 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1058 if (modified_between_p (XVECEXP (x, i, j), start, end))
1059 return 1;
1062 return 0;
1065 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1066 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1067 does use memory aliasing. */
1070 modified_in_p (rtx x, rtx insn)
1072 enum rtx_code code = GET_CODE (x);
1073 const char *fmt;
1074 int i, j;
1076 switch (code)
1078 case CONST_INT:
1079 case CONST_DOUBLE:
1080 case CONST_VECTOR:
1081 case CONST:
1082 case SYMBOL_REF:
1083 case LABEL_REF:
1084 return 0;
1086 case PC:
1087 case CC0:
1088 return 1;
1090 case MEM:
1091 if (RTX_UNCHANGING_P (x))
1092 return 0;
1093 if (modified_in_p (XEXP (x, 0), insn))
1094 return 1;
1095 if (memory_modified_in_insn_p (x, insn))
1096 return 1;
1097 return 0;
1098 break;
1100 case REG:
1101 return reg_set_p (x, insn);
1103 default:
1104 break;
1107 fmt = GET_RTX_FORMAT (code);
1108 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1110 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1111 return 1;
1113 else if (fmt[i] == 'E')
1114 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1115 if (modified_in_p (XVECEXP (x, i, j), insn))
1116 return 1;
1119 return 0;
1122 /* Return true if anything in insn X is (anti,output,true) dependent on
1123 anything in insn Y. */
1126 insn_dependent_p (rtx x, rtx y)
1128 rtx tmp;
1130 if (! INSN_P (x) || ! INSN_P (y))
1131 abort ();
1133 tmp = PATTERN (y);
1134 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1135 if (tmp == NULL_RTX)
1136 return 1;
1138 tmp = PATTERN (x);
1139 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1140 if (tmp == NULL_RTX)
1141 return 1;
1143 return 0;
1146 /* A helper routine for insn_dependent_p called through note_stores. */
1148 static void
1149 insn_dependent_p_1 (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
1151 rtx * pinsn = (rtx *) data;
1153 if (*pinsn && reg_mentioned_p (x, *pinsn))
1154 *pinsn = NULL_RTX;
1157 /* Helper function for set_of. */
1158 struct set_of_data
1160 rtx found;
1161 rtx pat;
1164 static void
1165 set_of_1 (rtx x, rtx pat, void *data1)
1167 struct set_of_data *data = (struct set_of_data *) (data1);
1168 if (rtx_equal_p (x, data->pat)
1169 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1170 data->found = pat;
1173 /* Give an INSN, return a SET or CLOBBER expression that does modify PAT
1174 (either directly or via STRICT_LOW_PART and similar modifiers). */
1176 set_of (rtx pat, rtx insn)
1178 struct set_of_data data;
1179 data.found = NULL_RTX;
1180 data.pat = pat;
1181 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1182 return data.found;
1185 /* Given an INSN, return a SET expression if this insn has only a single SET.
1186 It may also have CLOBBERs, USEs, or SET whose output
1187 will not be used, which we ignore. */
1190 single_set_2 (rtx insn, rtx pat)
1192 rtx set = NULL;
1193 int set_verified = 1;
1194 int i;
1196 if (GET_CODE (pat) == PARALLEL)
1198 for (i = 0; i < XVECLEN (pat, 0); i++)
1200 rtx sub = XVECEXP (pat, 0, i);
1201 switch (GET_CODE (sub))
1203 case USE:
1204 case CLOBBER:
1205 break;
1207 case SET:
1208 /* We can consider insns having multiple sets, where all
1209 but one are dead as single set insns. In common case
1210 only single set is present in the pattern so we want
1211 to avoid checking for REG_UNUSED notes unless necessary.
1213 When we reach set first time, we just expect this is
1214 the single set we are looking for and only when more
1215 sets are found in the insn, we check them. */
1216 if (!set_verified)
1218 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1219 && !side_effects_p (set))
1220 set = NULL;
1221 else
1222 set_verified = 1;
1224 if (!set)
1225 set = sub, set_verified = 0;
1226 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1227 || side_effects_p (sub))
1228 return NULL_RTX;
1229 break;
1231 default:
1232 return NULL_RTX;
1236 return set;
1239 /* Given an INSN, return nonzero if it has more than one SET, else return
1240 zero. */
1243 multiple_sets (rtx insn)
1245 int found;
1246 int i;
1248 /* INSN must be an insn. */
1249 if (! INSN_P (insn))
1250 return 0;
1252 /* Only a PARALLEL can have multiple SETs. */
1253 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1255 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1256 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1258 /* If we have already found a SET, then return now. */
1259 if (found)
1260 return 1;
1261 else
1262 found = 1;
1266 /* Either zero or one SET. */
1267 return 0;
1270 /* Return nonzero if the destination of SET equals the source
1271 and there are no side effects. */
1274 set_noop_p (rtx set)
1276 rtx src = SET_SRC (set);
1277 rtx dst = SET_DEST (set);
1279 if (dst == pc_rtx && src == pc_rtx)
1280 return 1;
1282 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1283 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1285 if (GET_CODE (dst) == SIGN_EXTRACT
1286 || GET_CODE (dst) == ZERO_EXTRACT)
1287 return rtx_equal_p (XEXP (dst, 0), src)
1288 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1289 && !side_effects_p (src);
1291 if (GET_CODE (dst) == STRICT_LOW_PART)
1292 dst = XEXP (dst, 0);
1294 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1296 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1297 return 0;
1298 src = SUBREG_REG (src);
1299 dst = SUBREG_REG (dst);
1302 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1303 && REGNO (src) == REGNO (dst));
1306 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1307 value to itself. */
1310 noop_move_p (rtx insn)
1312 rtx pat = PATTERN (insn);
1314 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1315 return 1;
1317 /* Insns carrying these notes are useful later on. */
1318 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1319 return 0;
1321 /* For now treat an insn with a REG_RETVAL note as a
1322 a special insn which should not be considered a no-op. */
1323 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1324 return 0;
1326 if (GET_CODE (pat) == SET && set_noop_p (pat))
1327 return 1;
1329 if (GET_CODE (pat) == PARALLEL)
1331 int i;
1332 /* If nothing but SETs of registers to themselves,
1333 this insn can also be deleted. */
1334 for (i = 0; i < XVECLEN (pat, 0); i++)
1336 rtx tem = XVECEXP (pat, 0, i);
1338 if (GET_CODE (tem) == USE
1339 || GET_CODE (tem) == CLOBBER)
1340 continue;
1342 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1343 return 0;
1346 return 1;
1348 return 0;
1352 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1353 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1354 If the object was modified, if we hit a partial assignment to X, or hit a
1355 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1356 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1357 be the src. */
1360 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1362 rtx p;
1364 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1365 p = PREV_INSN (p))
1366 if (INSN_P (p))
1368 rtx set = single_set (p);
1369 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1371 if (set && rtx_equal_p (x, SET_DEST (set)))
1373 rtx src = SET_SRC (set);
1375 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1376 src = XEXP (note, 0);
1378 if ((valid_to == NULL_RTX
1379 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1380 /* Reject hard registers because we don't usually want
1381 to use them; we'd rather use a pseudo. */
1382 && (! (GET_CODE (src) == REG
1383 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1385 *pinsn = p;
1386 return src;
1390 /* If set in non-simple way, we don't have a value. */
1391 if (reg_set_p (x, p))
1392 break;
1395 return x;
1398 /* Return nonzero if register in range [REGNO, ENDREGNO)
1399 appears either explicitly or implicitly in X
1400 other than being stored into.
1402 References contained within the substructure at LOC do not count.
1403 LOC may be zero, meaning don't ignore anything. */
1406 refers_to_regno_p (unsigned int regno, unsigned int endregno, rtx x,
1407 rtx *loc)
1409 int i;
1410 unsigned int x_regno;
1411 RTX_CODE code;
1412 const char *fmt;
1414 repeat:
1415 /* The contents of a REG_NONNEG note is always zero, so we must come here
1416 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1417 if (x == 0)
1418 return 0;
1420 code = GET_CODE (x);
1422 switch (code)
1424 case REG:
1425 x_regno = REGNO (x);
1427 /* If we modifying the stack, frame, or argument pointer, it will
1428 clobber a virtual register. In fact, we could be more precise,
1429 but it isn't worth it. */
1430 if ((x_regno == STACK_POINTER_REGNUM
1431 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1432 || x_regno == ARG_POINTER_REGNUM
1433 #endif
1434 || x_regno == FRAME_POINTER_REGNUM)
1435 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1436 return 1;
1438 return (endregno > x_regno
1439 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1440 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1441 : 1));
1443 case SUBREG:
1444 /* If this is a SUBREG of a hard reg, we can see exactly which
1445 registers are being modified. Otherwise, handle normally. */
1446 if (GET_CODE (SUBREG_REG (x)) == REG
1447 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1449 unsigned int inner_regno = subreg_regno (x);
1450 unsigned int inner_endregno
1451 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1452 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1454 return endregno > inner_regno && regno < inner_endregno;
1456 break;
1458 case CLOBBER:
1459 case SET:
1460 if (&SET_DEST (x) != loc
1461 /* Note setting a SUBREG counts as referring to the REG it is in for
1462 a pseudo but not for hard registers since we can
1463 treat each word individually. */
1464 && ((GET_CODE (SET_DEST (x)) == SUBREG
1465 && loc != &SUBREG_REG (SET_DEST (x))
1466 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1467 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1468 && refers_to_regno_p (regno, endregno,
1469 SUBREG_REG (SET_DEST (x)), loc))
1470 || (GET_CODE (SET_DEST (x)) != REG
1471 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1472 return 1;
1474 if (code == CLOBBER || loc == &SET_SRC (x))
1475 return 0;
1476 x = SET_SRC (x);
1477 goto repeat;
1479 default:
1480 break;
1483 /* X does not match, so try its subexpressions. */
1485 fmt = GET_RTX_FORMAT (code);
1486 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1488 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1490 if (i == 0)
1492 x = XEXP (x, 0);
1493 goto repeat;
1495 else
1496 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1497 return 1;
1499 else if (fmt[i] == 'E')
1501 int j;
1502 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1503 if (loc != &XVECEXP (x, i, j)
1504 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1505 return 1;
1508 return 0;
1511 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1512 we check if any register number in X conflicts with the relevant register
1513 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1514 contains a MEM (we don't bother checking for memory addresses that can't
1515 conflict because we expect this to be a rare case. */
1518 reg_overlap_mentioned_p (rtx x, rtx in)
1520 unsigned int regno, endregno;
1522 /* Overly conservative. */
1523 if (GET_CODE (x) == STRICT_LOW_PART
1524 || GET_CODE (x) == ZERO_EXTRACT
1525 || GET_CODE (x) == SIGN_EXTRACT)
1526 x = XEXP (x, 0);
1528 /* If either argument is a constant, then modifying X can not affect IN. */
1529 if (CONSTANT_P (x) || CONSTANT_P (in))
1530 return 0;
1532 switch (GET_CODE (x))
1534 case SUBREG:
1535 regno = REGNO (SUBREG_REG (x));
1536 if (regno < FIRST_PSEUDO_REGISTER)
1537 regno = subreg_regno (x);
1538 goto do_reg;
1540 case REG:
1541 regno = REGNO (x);
1542 do_reg:
1543 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1544 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1545 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1547 case MEM:
1549 const char *fmt;
1550 int i;
1552 if (GET_CODE (in) == MEM)
1553 return 1;
1555 fmt = GET_RTX_FORMAT (GET_CODE (in));
1556 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1557 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1558 return 1;
1560 return 0;
1563 case SCRATCH:
1564 case PC:
1565 case CC0:
1566 return reg_mentioned_p (x, in);
1568 case PARALLEL:
1570 int i;
1572 /* If any register in here refers to it we return true. */
1573 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1574 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1575 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1576 return 1;
1577 return 0;
1580 default:
1581 break;
1584 abort ();
1587 /* Return the last value to which REG was set prior to INSN. If we can't
1588 find it easily, return 0.
1590 We only return a REG, SUBREG, or constant because it is too hard to
1591 check if a MEM remains unchanged. */
1594 reg_set_last (rtx x, rtx insn)
1596 rtx orig_insn = insn;
1598 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1599 Stop when we reach a label or X is a hard reg and we reach a
1600 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1602 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1604 /* We compare with <= here, because reg_set_last_last_regno
1605 is actually the number of the first reg *not* in X. */
1606 for (;
1607 insn && GET_CODE (insn) != CODE_LABEL
1608 && ! (GET_CODE (insn) == CALL_INSN
1609 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1610 insn = PREV_INSN (insn))
1611 if (INSN_P (insn))
1613 rtx set = set_of (x, insn);
1614 /* OK, this function modify our register. See if we understand it. */
1615 if (set)
1617 rtx last_value;
1618 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1619 return 0;
1620 last_value = SET_SRC (x);
1621 if (CONSTANT_P (last_value)
1622 || ((GET_CODE (last_value) == REG
1623 || GET_CODE (last_value) == SUBREG)
1624 && ! reg_set_between_p (last_value,
1625 insn, orig_insn)))
1626 return last_value;
1627 else
1628 return 0;
1632 return 0;
1635 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1636 (X would be the pattern of an insn).
1637 FUN receives two arguments:
1638 the REG, MEM, CC0 or PC being stored in or clobbered,
1639 the SET or CLOBBER rtx that does the store.
1641 If the item being stored in or clobbered is a SUBREG of a hard register,
1642 the SUBREG will be passed. */
1644 void
1645 note_stores (rtx x, void (*fun) (rtx, rtx, void *), void *data)
1647 int i;
1649 if (GET_CODE (x) == COND_EXEC)
1650 x = COND_EXEC_CODE (x);
1652 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1654 rtx dest = SET_DEST (x);
1656 while ((GET_CODE (dest) == SUBREG
1657 && (GET_CODE (SUBREG_REG (dest)) != REG
1658 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1659 || GET_CODE (dest) == ZERO_EXTRACT
1660 || GET_CODE (dest) == SIGN_EXTRACT
1661 || GET_CODE (dest) == STRICT_LOW_PART)
1662 dest = XEXP (dest, 0);
1664 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1665 each of whose first operand is a register. */
1666 if (GET_CODE (dest) == PARALLEL)
1668 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1669 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1670 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1672 else
1673 (*fun) (dest, x, data);
1676 else if (GET_CODE (x) == PARALLEL)
1677 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1678 note_stores (XVECEXP (x, 0, i), fun, data);
1681 /* Like notes_stores, but call FUN for each expression that is being
1682 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1683 FUN for each expression, not any interior subexpressions. FUN receives a
1684 pointer to the expression and the DATA passed to this function.
1686 Note that this is not quite the same test as that done in reg_referenced_p
1687 since that considers something as being referenced if it is being
1688 partially set, while we do not. */
1690 void
1691 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1693 rtx body = *pbody;
1694 int i;
1696 switch (GET_CODE (body))
1698 case COND_EXEC:
1699 (*fun) (&COND_EXEC_TEST (body), data);
1700 note_uses (&COND_EXEC_CODE (body), fun, data);
1701 return;
1703 case PARALLEL:
1704 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1705 note_uses (&XVECEXP (body, 0, i), fun, data);
1706 return;
1708 case USE:
1709 (*fun) (&XEXP (body, 0), data);
1710 return;
1712 case ASM_OPERANDS:
1713 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1714 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1715 return;
1717 case TRAP_IF:
1718 (*fun) (&TRAP_CONDITION (body), data);
1719 return;
1721 case PREFETCH:
1722 (*fun) (&XEXP (body, 0), data);
1723 return;
1725 case UNSPEC:
1726 case UNSPEC_VOLATILE:
1727 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1728 (*fun) (&XVECEXP (body, 0, i), data);
1729 return;
1731 case CLOBBER:
1732 if (GET_CODE (XEXP (body, 0)) == MEM)
1733 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1734 return;
1736 case SET:
1738 rtx dest = SET_DEST (body);
1740 /* For sets we replace everything in source plus registers in memory
1741 expression in store and operands of a ZERO_EXTRACT. */
1742 (*fun) (&SET_SRC (body), data);
1744 if (GET_CODE (dest) == ZERO_EXTRACT)
1746 (*fun) (&XEXP (dest, 1), data);
1747 (*fun) (&XEXP (dest, 2), data);
1750 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1751 dest = XEXP (dest, 0);
1753 if (GET_CODE (dest) == MEM)
1754 (*fun) (&XEXP (dest, 0), data);
1756 return;
1758 default:
1759 /* All the other possibilities never store. */
1760 (*fun) (pbody, data);
1761 return;
1765 /* Return nonzero if X's old contents don't survive after INSN.
1766 This will be true if X is (cc0) or if X is a register and
1767 X dies in INSN or because INSN entirely sets X.
1769 "Entirely set" means set directly and not through a SUBREG,
1770 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1771 Likewise, REG_INC does not count.
1773 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1774 but for this use that makes no difference, since regs don't overlap
1775 during their lifetimes. Therefore, this function may be used
1776 at any time after deaths have been computed (in flow.c).
1778 If REG is a hard reg that occupies multiple machine registers, this
1779 function will only return 1 if each of those registers will be replaced
1780 by INSN. */
1783 dead_or_set_p (rtx insn, rtx x)
1785 unsigned int regno, last_regno;
1786 unsigned int i;
1788 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1789 if (GET_CODE (x) == CC0)
1790 return 1;
1792 if (GET_CODE (x) != REG)
1793 abort ();
1795 regno = REGNO (x);
1796 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1797 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1799 for (i = regno; i <= last_regno; i++)
1800 if (! dead_or_set_regno_p (insn, i))
1801 return 0;
1803 return 1;
1806 /* Utility function for dead_or_set_p to check an individual register. Also
1807 called from flow.c. */
1810 dead_or_set_regno_p (rtx insn, unsigned int test_regno)
1812 unsigned int regno, endregno;
1813 rtx pattern;
1815 /* See if there is a death note for something that includes TEST_REGNO. */
1816 if (find_regno_note (insn, REG_DEAD, test_regno))
1817 return 1;
1819 if (GET_CODE (insn) == CALL_INSN
1820 && find_regno_fusage (insn, CLOBBER, test_regno))
1821 return 1;
1823 pattern = PATTERN (insn);
1825 if (GET_CODE (pattern) == COND_EXEC)
1826 pattern = COND_EXEC_CODE (pattern);
1828 if (GET_CODE (pattern) == SET)
1830 rtx dest = SET_DEST (pattern);
1832 /* A value is totally replaced if it is the destination or the
1833 destination is a SUBREG of REGNO that does not change the number of
1834 words in it. */
1835 if (GET_CODE (dest) == SUBREG
1836 && (((GET_MODE_SIZE (GET_MODE (dest))
1837 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1838 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1839 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1840 dest = SUBREG_REG (dest);
1842 if (GET_CODE (dest) != REG)
1843 return 0;
1845 regno = REGNO (dest);
1846 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1847 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1849 return (test_regno >= regno && test_regno < endregno);
1851 else if (GET_CODE (pattern) == PARALLEL)
1853 int i;
1855 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1857 rtx body = XVECEXP (pattern, 0, i);
1859 if (GET_CODE (body) == COND_EXEC)
1860 body = COND_EXEC_CODE (body);
1862 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1864 rtx dest = SET_DEST (body);
1866 if (GET_CODE (dest) == SUBREG
1867 && (((GET_MODE_SIZE (GET_MODE (dest))
1868 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1869 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1870 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1871 dest = SUBREG_REG (dest);
1873 if (GET_CODE (dest) != REG)
1874 continue;
1876 regno = REGNO (dest);
1877 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1878 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1880 if (test_regno >= regno && test_regno < endregno)
1881 return 1;
1886 return 0;
1889 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1890 If DATUM is nonzero, look for one whose datum is DATUM. */
1893 find_reg_note (rtx insn, enum reg_note kind, rtx datum)
1895 rtx link;
1897 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1898 if (! INSN_P (insn))
1899 return 0;
1901 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1902 if (REG_NOTE_KIND (link) == kind
1903 && (datum == 0 || datum == XEXP (link, 0)))
1904 return link;
1905 return 0;
1908 /* Return the reg-note of kind KIND in insn INSN which applies to register
1909 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1910 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1911 it might be the case that the note overlaps REGNO. */
1914 find_regno_note (rtx insn, enum reg_note kind, unsigned int regno)
1916 rtx link;
1918 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1919 if (! INSN_P (insn))
1920 return 0;
1922 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1923 if (REG_NOTE_KIND (link) == kind
1924 /* Verify that it is a register, so that scratch and MEM won't cause a
1925 problem here. */
1926 && GET_CODE (XEXP (link, 0)) == REG
1927 && REGNO (XEXP (link, 0)) <= regno
1928 && ((REGNO (XEXP (link, 0))
1929 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1930 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1931 GET_MODE (XEXP (link, 0)))))
1932 > regno))
1933 return link;
1934 return 0;
1937 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1938 has such a note. */
1941 find_reg_equal_equiv_note (rtx insn)
1943 rtx link;
1945 if (!INSN_P (insn))
1946 return 0;
1947 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1948 if (REG_NOTE_KIND (link) == REG_EQUAL
1949 || REG_NOTE_KIND (link) == REG_EQUIV)
1951 if (single_set (insn) == 0)
1952 return 0;
1953 return link;
1955 return NULL;
1958 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1959 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1962 find_reg_fusage (rtx insn, enum rtx_code code, rtx datum)
1964 /* If it's not a CALL_INSN, it can't possibly have a
1965 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1966 if (GET_CODE (insn) != CALL_INSN)
1967 return 0;
1969 if (! datum)
1970 abort ();
1972 if (GET_CODE (datum) != REG)
1974 rtx link;
1976 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1977 link;
1978 link = XEXP (link, 1))
1979 if (GET_CODE (XEXP (link, 0)) == code
1980 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1981 return 1;
1983 else
1985 unsigned int regno = REGNO (datum);
1987 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1988 to pseudo registers, so don't bother checking. */
1990 if (regno < FIRST_PSEUDO_REGISTER)
1992 unsigned int end_regno
1993 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
1994 unsigned int i;
1996 for (i = regno; i < end_regno; i++)
1997 if (find_regno_fusage (insn, code, i))
1998 return 1;
2002 return 0;
2005 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2006 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2009 find_regno_fusage (rtx insn, enum rtx_code code, unsigned int regno)
2011 rtx link;
2013 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2014 to pseudo registers, so don't bother checking. */
2016 if (regno >= FIRST_PSEUDO_REGISTER
2017 || GET_CODE (insn) != CALL_INSN )
2018 return 0;
2020 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2022 unsigned int regnote;
2023 rtx op, reg;
2025 if (GET_CODE (op = XEXP (link, 0)) == code
2026 && GET_CODE (reg = XEXP (op, 0)) == REG
2027 && (regnote = REGNO (reg)) <= regno
2028 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2029 return 1;
2032 return 0;
2035 /* Return true if INSN is a call to a pure function. */
2038 pure_call_p (rtx insn)
2040 rtx link;
2042 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2043 return 0;
2045 /* Look for the note that differentiates const and pure functions. */
2046 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2048 rtx u, m;
2050 if (GET_CODE (u = XEXP (link, 0)) == USE
2051 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2052 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2053 return 1;
2056 return 0;
2059 /* Remove register note NOTE from the REG_NOTES of INSN. */
2061 void
2062 remove_note (rtx insn, rtx note)
2064 rtx link;
2066 if (note == NULL_RTX)
2067 return;
2069 if (REG_NOTES (insn) == note)
2071 REG_NOTES (insn) = XEXP (note, 1);
2072 return;
2075 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2076 if (XEXP (link, 1) == note)
2078 XEXP (link, 1) = XEXP (note, 1);
2079 return;
2082 abort ();
2085 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2086 return 1 if it is found. A simple equality test is used to determine if
2087 NODE matches. */
2090 in_expr_list_p (rtx listp, rtx node)
2092 rtx x;
2094 for (x = listp; x; x = XEXP (x, 1))
2095 if (node == XEXP (x, 0))
2096 return 1;
2098 return 0;
2101 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2102 remove that entry from the list if it is found.
2104 A simple equality test is used to determine if NODE matches. */
2106 void
2107 remove_node_from_expr_list (rtx node, rtx *listp)
2109 rtx temp = *listp;
2110 rtx prev = NULL_RTX;
2112 while (temp)
2114 if (node == XEXP (temp, 0))
2116 /* Splice the node out of the list. */
2117 if (prev)
2118 XEXP (prev, 1) = XEXP (temp, 1);
2119 else
2120 *listp = XEXP (temp, 1);
2122 return;
2125 prev = temp;
2126 temp = XEXP (temp, 1);
2130 /* Nonzero if X contains any volatile instructions. These are instructions
2131 which may cause unpredictable machine state instructions, and thus no
2132 instructions should be moved or combined across them. This includes
2133 only volatile asms and UNSPEC_VOLATILE instructions. */
2136 volatile_insn_p (rtx x)
2138 RTX_CODE code;
2140 code = GET_CODE (x);
2141 switch (code)
2143 case LABEL_REF:
2144 case SYMBOL_REF:
2145 case CONST_INT:
2146 case CONST:
2147 case CONST_DOUBLE:
2148 case CONST_VECTOR:
2149 case CC0:
2150 case PC:
2151 case REG:
2152 case SCRATCH:
2153 case CLOBBER:
2154 case ADDR_VEC:
2155 case ADDR_DIFF_VEC:
2156 case CALL:
2157 case MEM:
2158 return 0;
2160 case UNSPEC_VOLATILE:
2161 /* case TRAP_IF: This isn't clear yet. */
2162 return 1;
2164 case ASM_INPUT:
2165 case ASM_OPERANDS:
2166 if (MEM_VOLATILE_P (x))
2167 return 1;
2169 default:
2170 break;
2173 /* Recursively scan the operands of this expression. */
2176 const char *fmt = GET_RTX_FORMAT (code);
2177 int i;
2179 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2181 if (fmt[i] == 'e')
2183 if (volatile_insn_p (XEXP (x, i)))
2184 return 1;
2186 else if (fmt[i] == 'E')
2188 int j;
2189 for (j = 0; j < XVECLEN (x, i); j++)
2190 if (volatile_insn_p (XVECEXP (x, i, j)))
2191 return 1;
2195 return 0;
2198 /* Nonzero if X contains any volatile memory references
2199 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2202 volatile_refs_p (rtx x)
2204 RTX_CODE code;
2206 code = GET_CODE (x);
2207 switch (code)
2209 case LABEL_REF:
2210 case SYMBOL_REF:
2211 case CONST_INT:
2212 case CONST:
2213 case CONST_DOUBLE:
2214 case CONST_VECTOR:
2215 case CC0:
2216 case PC:
2217 case REG:
2218 case SCRATCH:
2219 case CLOBBER:
2220 case ADDR_VEC:
2221 case ADDR_DIFF_VEC:
2222 return 0;
2224 case UNSPEC_VOLATILE:
2225 return 1;
2227 case MEM:
2228 case ASM_INPUT:
2229 case ASM_OPERANDS:
2230 if (MEM_VOLATILE_P (x))
2231 return 1;
2233 default:
2234 break;
2237 /* Recursively scan the operands of this expression. */
2240 const char *fmt = GET_RTX_FORMAT (code);
2241 int i;
2243 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2245 if (fmt[i] == 'e')
2247 if (volatile_refs_p (XEXP (x, i)))
2248 return 1;
2250 else if (fmt[i] == 'E')
2252 int j;
2253 for (j = 0; j < XVECLEN (x, i); j++)
2254 if (volatile_refs_p (XVECEXP (x, i, j)))
2255 return 1;
2259 return 0;
2262 /* Similar to above, except that it also rejects register pre- and post-
2263 incrementing. */
2266 side_effects_p (rtx x)
2268 RTX_CODE code;
2270 code = GET_CODE (x);
2271 switch (code)
2273 case LABEL_REF:
2274 case SYMBOL_REF:
2275 case CONST_INT:
2276 case CONST:
2277 case CONST_DOUBLE:
2278 case CONST_VECTOR:
2279 case CC0:
2280 case PC:
2281 case REG:
2282 case SCRATCH:
2283 case ADDR_VEC:
2284 case ADDR_DIFF_VEC:
2285 return 0;
2287 case CLOBBER:
2288 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2289 when some combination can't be done. If we see one, don't think
2290 that we can simplify the expression. */
2291 return (GET_MODE (x) != VOIDmode);
2293 case PRE_INC:
2294 case PRE_DEC:
2295 case POST_INC:
2296 case POST_DEC:
2297 case PRE_MODIFY:
2298 case POST_MODIFY:
2299 case CALL:
2300 case UNSPEC_VOLATILE:
2301 /* case TRAP_IF: This isn't clear yet. */
2302 return 1;
2304 case MEM:
2305 case ASM_INPUT:
2306 case ASM_OPERANDS:
2307 if (MEM_VOLATILE_P (x))
2308 return 1;
2310 default:
2311 break;
2314 /* Recursively scan the operands of this expression. */
2317 const char *fmt = GET_RTX_FORMAT (code);
2318 int i;
2320 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2322 if (fmt[i] == 'e')
2324 if (side_effects_p (XEXP (x, i)))
2325 return 1;
2327 else if (fmt[i] == 'E')
2329 int j;
2330 for (j = 0; j < XVECLEN (x, i); j++)
2331 if (side_effects_p (XVECEXP (x, i, j)))
2332 return 1;
2336 return 0;
2339 /* Return nonzero if evaluating rtx X might cause a trap. */
2342 may_trap_p (rtx x)
2344 int i;
2345 enum rtx_code code;
2346 const char *fmt;
2348 if (x == 0)
2349 return 0;
2350 code = GET_CODE (x);
2351 switch (code)
2353 /* Handle these cases quickly. */
2354 case CONST_INT:
2355 case CONST_DOUBLE:
2356 case CONST_VECTOR:
2357 case SYMBOL_REF:
2358 case LABEL_REF:
2359 case CONST:
2360 case PC:
2361 case CC0:
2362 case REG:
2363 case SCRATCH:
2364 return 0;
2366 case ASM_INPUT:
2367 case UNSPEC_VOLATILE:
2368 case TRAP_IF:
2369 return 1;
2371 case ASM_OPERANDS:
2372 return MEM_VOLATILE_P (x);
2374 /* Memory ref can trap unless it's a static var or a stack slot. */
2375 case MEM:
2376 if (MEM_NOTRAP_P (x))
2377 return 0;
2378 return rtx_addr_can_trap_p (XEXP (x, 0));
2380 /* Division by a non-constant might trap. */
2381 case DIV:
2382 case MOD:
2383 case UDIV:
2384 case UMOD:
2385 if (HONOR_SNANS (GET_MODE (x)))
2386 return 1;
2387 if (! CONSTANT_P (XEXP (x, 1))
2388 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2389 && flag_trapping_math))
2390 return 1;
2391 /* This was const0_rtx, but by not using that,
2392 we can link this file into other programs. */
2393 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2394 return 1;
2395 break;
2397 case EXPR_LIST:
2398 /* An EXPR_LIST is used to represent a function call. This
2399 certainly may trap. */
2400 return 1;
2402 case GE:
2403 case GT:
2404 case LE:
2405 case LT:
2406 case COMPARE:
2407 /* Some floating point comparisons may trap. */
2408 if (!flag_trapping_math)
2409 break;
2410 /* ??? There is no machine independent way to check for tests that trap
2411 when COMPARE is used, though many targets do make this distinction.
2412 For instance, sparc uses CCFPE for compares which generate exceptions
2413 and CCFP for compares which do not generate exceptions. */
2414 if (HONOR_NANS (GET_MODE (x)))
2415 return 1;
2416 /* But often the compare has some CC mode, so check operand
2417 modes as well. */
2418 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2419 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2420 return 1;
2421 break;
2423 case EQ:
2424 case NE:
2425 if (HONOR_SNANS (GET_MODE (x)))
2426 return 1;
2427 /* Often comparison is CC mode, so check operand modes. */
2428 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2429 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2430 return 1;
2431 break;
2433 case FIX:
2434 /* Conversion of floating point might trap. */
2435 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2436 return 1;
2437 break;
2439 case NEG:
2440 case ABS:
2441 /* These operations don't trap even with floating point. */
2442 break;
2444 default:
2445 /* Any floating arithmetic may trap. */
2446 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2447 && flag_trapping_math)
2448 return 1;
2451 fmt = GET_RTX_FORMAT (code);
2452 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2454 if (fmt[i] == 'e')
2456 if (may_trap_p (XEXP (x, i)))
2457 return 1;
2459 else if (fmt[i] == 'E')
2461 int j;
2462 for (j = 0; j < XVECLEN (x, i); j++)
2463 if (may_trap_p (XVECEXP (x, i, j)))
2464 return 1;
2467 return 0;
2470 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2471 i.e., an inequality. */
2474 inequality_comparisons_p (rtx x)
2476 const char *fmt;
2477 int len, i;
2478 enum rtx_code code = GET_CODE (x);
2480 switch (code)
2482 case REG:
2483 case SCRATCH:
2484 case PC:
2485 case CC0:
2486 case CONST_INT:
2487 case CONST_DOUBLE:
2488 case CONST_VECTOR:
2489 case CONST:
2490 case LABEL_REF:
2491 case SYMBOL_REF:
2492 return 0;
2494 case LT:
2495 case LTU:
2496 case GT:
2497 case GTU:
2498 case LE:
2499 case LEU:
2500 case GE:
2501 case GEU:
2502 return 1;
2504 default:
2505 break;
2508 len = GET_RTX_LENGTH (code);
2509 fmt = GET_RTX_FORMAT (code);
2511 for (i = 0; i < len; i++)
2513 if (fmt[i] == 'e')
2515 if (inequality_comparisons_p (XEXP (x, i)))
2516 return 1;
2518 else if (fmt[i] == 'E')
2520 int j;
2521 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2522 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2523 return 1;
2527 return 0;
2530 /* Replace any occurrence of FROM in X with TO. The function does
2531 not enter into CONST_DOUBLE for the replace.
2533 Note that copying is not done so X must not be shared unless all copies
2534 are to be modified. */
2537 replace_rtx (rtx x, rtx from, rtx to)
2539 int i, j;
2540 const char *fmt;
2542 /* The following prevents loops occurrence when we change MEM in
2543 CONST_DOUBLE onto the same CONST_DOUBLE. */
2544 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2545 return x;
2547 if (x == from)
2548 return to;
2550 /* Allow this function to make replacements in EXPR_LISTs. */
2551 if (x == 0)
2552 return 0;
2554 if (GET_CODE (x) == SUBREG)
2556 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2558 if (GET_CODE (new) == CONST_INT)
2560 x = simplify_subreg (GET_MODE (x), new,
2561 GET_MODE (SUBREG_REG (x)),
2562 SUBREG_BYTE (x));
2563 if (! x)
2564 abort ();
2566 else
2567 SUBREG_REG (x) = new;
2569 return x;
2571 else if (GET_CODE (x) == ZERO_EXTEND)
2573 rtx new = replace_rtx (XEXP (x, 0), from, to);
2575 if (GET_CODE (new) == CONST_INT)
2577 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2578 new, GET_MODE (XEXP (x, 0)));
2579 if (! x)
2580 abort ();
2582 else
2583 XEXP (x, 0) = new;
2585 return x;
2588 fmt = GET_RTX_FORMAT (GET_CODE (x));
2589 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2591 if (fmt[i] == 'e')
2592 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2593 else if (fmt[i] == 'E')
2594 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2595 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2598 return x;
2601 /* Throughout the rtx X, replace many registers according to REG_MAP.
2602 Return the replacement for X (which may be X with altered contents).
2603 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2604 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2606 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2607 should not be mapped to pseudos or vice versa since validate_change
2608 is not called.
2610 If REPLACE_DEST is 1, replacements are also done in destinations;
2611 otherwise, only sources are replaced. */
2614 replace_regs (rtx x, rtx *reg_map, unsigned int nregs, int replace_dest)
2616 enum rtx_code code;
2617 int i;
2618 const char *fmt;
2620 if (x == 0)
2621 return x;
2623 code = GET_CODE (x);
2624 switch (code)
2626 case SCRATCH:
2627 case PC:
2628 case CC0:
2629 case CONST_INT:
2630 case CONST_DOUBLE:
2631 case CONST_VECTOR:
2632 case CONST:
2633 case SYMBOL_REF:
2634 case LABEL_REF:
2635 return x;
2637 case REG:
2638 /* Verify that the register has an entry before trying to access it. */
2639 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2641 /* SUBREGs can't be shared. Always return a copy to ensure that if
2642 this replacement occurs more than once then each instance will
2643 get distinct rtx. */
2644 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2645 return copy_rtx (reg_map[REGNO (x)]);
2646 return reg_map[REGNO (x)];
2648 return x;
2650 case SUBREG:
2651 /* Prevent making nested SUBREGs. */
2652 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2653 && reg_map[REGNO (SUBREG_REG (x))] != 0
2654 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2656 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2657 return simplify_gen_subreg (GET_MODE (x), map_val,
2658 GET_MODE (SUBREG_REG (x)),
2659 SUBREG_BYTE (x));
2661 break;
2663 case SET:
2664 if (replace_dest)
2665 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2667 else if (GET_CODE (SET_DEST (x)) == MEM
2668 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2669 /* Even if we are not to replace destinations, replace register if it
2670 is CONTAINED in destination (destination is memory or
2671 STRICT_LOW_PART). */
2672 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2673 reg_map, nregs, 0);
2674 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2675 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2676 break;
2678 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2679 return x;
2681 default:
2682 break;
2685 fmt = GET_RTX_FORMAT (code);
2686 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2688 if (fmt[i] == 'e')
2689 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2690 else if (fmt[i] == 'E')
2692 int j;
2693 for (j = 0; j < XVECLEN (x, i); j++)
2694 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2695 nregs, replace_dest);
2698 return x;
2701 /* Replace occurrences of the old label in *X with the new one.
2702 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2705 replace_label (rtx *x, void *data)
2707 rtx l = *x;
2708 rtx tmp;
2709 rtx old_label = ((replace_label_data *) data)->r1;
2710 rtx new_label = ((replace_label_data *) data)->r2;
2711 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2713 if (l == NULL_RTX)
2714 return 0;
2716 if (GET_CODE (l) == MEM
2717 && (tmp = XEXP (l, 0)) != NULL_RTX
2718 && GET_CODE (tmp) == SYMBOL_REF
2719 && CONSTANT_POOL_ADDRESS_P (tmp))
2721 rtx c = get_pool_constant (tmp);
2722 if (rtx_referenced_p (old_label, c))
2724 rtx new_c, new_l;
2725 replace_label_data *d = (replace_label_data *) data;
2727 /* Create a copy of constant C; replace the label inside
2728 but do not update LABEL_NUSES because uses in constant pool
2729 are not counted. */
2730 new_c = copy_rtx (c);
2731 d->update_label_nuses = false;
2732 for_each_rtx (&new_c, replace_label, data);
2733 d->update_label_nuses = update_label_nuses;
2735 /* Add the new constant NEW_C to constant pool and replace
2736 the old reference to constant by new reference. */
2737 new_l = force_const_mem (get_pool_mode (tmp), new_c);
2738 *x = replace_rtx (l, l, new_l);
2740 return 0;
2743 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2744 field. This is not handled by for_each_rtx because it doesn't
2745 handle unprinted ('0') fields. */
2746 if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
2747 JUMP_LABEL (l) = new_label;
2749 if ((GET_CODE (l) == LABEL_REF
2750 || GET_CODE (l) == INSN_LIST)
2751 && XEXP (l, 0) == old_label)
2753 XEXP (l, 0) = new_label;
2754 if (update_label_nuses)
2756 ++LABEL_NUSES (new_label);
2757 --LABEL_NUSES (old_label);
2759 return 0;
2762 return 0;
2765 /* When *BODY is equal to X or X is directly referenced by *BODY
2766 return nonzero, thus FOR_EACH_RTX stops traversing and returns nonzero
2767 too, otherwise FOR_EACH_RTX continues traversing *BODY. */
2769 static int
2770 rtx_referenced_p_1 (rtx *body, void *x)
2772 rtx y = (rtx) x;
2774 if (*body == NULL_RTX)
2775 return y == NULL_RTX;
2777 /* Return true if a label_ref *BODY refers to label Y. */
2778 if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
2779 return XEXP (*body, 0) == y;
2781 /* If *BODY is a reference to pool constant traverse the constant. */
2782 if (GET_CODE (*body) == SYMBOL_REF
2783 && CONSTANT_POOL_ADDRESS_P (*body))
2784 return rtx_referenced_p (y, get_pool_constant (*body));
2786 /* By default, compare the RTL expressions. */
2787 return rtx_equal_p (*body, y);
2790 /* Return true if X is referenced in BODY. */
2793 rtx_referenced_p (rtx x, rtx body)
2795 return for_each_rtx (&body, rtx_referenced_p_1, x);
2798 /* If INSN is a jump to jumptable insn rturn true and store the label (which
2799 INSN jumps to) to *LABEL and the tablejump insn to *TABLE.
2800 LABEL and TABLE may be NULL. */
2802 bool
2803 tablejump_p (rtx insn, rtx *label, rtx *table)
2805 rtx l, t;
2807 if (onlyjump_p (insn)
2808 && (l = JUMP_LABEL (insn)) != NULL_RTX
2809 && (t = NEXT_INSN (l)) != NULL_RTX
2810 && GET_CODE (t) == JUMP_INSN
2811 && (GET_CODE (PATTERN (t)) == ADDR_VEC
2812 || GET_CODE (PATTERN (t)) == ADDR_DIFF_VEC))
2814 if (label)
2815 *label = l;
2816 if (table)
2817 *table = t;
2818 return true;
2820 return false;
2823 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2824 constant that is not in the constant pool and not in the condition
2825 of an IF_THEN_ELSE. */
2827 static int
2828 computed_jump_p_1 (rtx x)
2830 enum rtx_code code = GET_CODE (x);
2831 int i, j;
2832 const char *fmt;
2834 switch (code)
2836 case LABEL_REF:
2837 case PC:
2838 return 0;
2840 case CONST:
2841 case CONST_INT:
2842 case CONST_DOUBLE:
2843 case CONST_VECTOR:
2844 case SYMBOL_REF:
2845 case REG:
2846 return 1;
2848 case MEM:
2849 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2850 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2852 case IF_THEN_ELSE:
2853 return (computed_jump_p_1 (XEXP (x, 1))
2854 || computed_jump_p_1 (XEXP (x, 2)));
2856 default:
2857 break;
2860 fmt = GET_RTX_FORMAT (code);
2861 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2863 if (fmt[i] == 'e'
2864 && computed_jump_p_1 (XEXP (x, i)))
2865 return 1;
2867 else if (fmt[i] == 'E')
2868 for (j = 0; j < XVECLEN (x, i); j++)
2869 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2870 return 1;
2873 return 0;
2876 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2878 Tablejumps and casesi insns are not considered indirect jumps;
2879 we can recognize them by a (use (label_ref)). */
2882 computed_jump_p (rtx insn)
2884 int i;
2885 if (GET_CODE (insn) == JUMP_INSN)
2887 rtx pat = PATTERN (insn);
2889 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2890 return 0;
2891 else if (GET_CODE (pat) == PARALLEL)
2893 int len = XVECLEN (pat, 0);
2894 int has_use_labelref = 0;
2896 for (i = len - 1; i >= 0; i--)
2897 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2898 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2899 == LABEL_REF))
2900 has_use_labelref = 1;
2902 if (! has_use_labelref)
2903 for (i = len - 1; i >= 0; i--)
2904 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2905 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2906 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2907 return 1;
2909 else if (GET_CODE (pat) == SET
2910 && SET_DEST (pat) == pc_rtx
2911 && computed_jump_p_1 (SET_SRC (pat)))
2912 return 1;
2914 return 0;
2917 /* Traverse X via depth-first search, calling F for each
2918 sub-expression (including X itself). F is also passed the DATA.
2919 If F returns -1, do not traverse sub-expressions, but continue
2920 traversing the rest of the tree. If F ever returns any other
2921 nonzero value, stop the traversal, and return the value returned
2922 by F. Otherwise, return 0. This function does not traverse inside
2923 tree structure that contains RTX_EXPRs, or into sub-expressions
2924 whose format code is `0' since it is not known whether or not those
2925 codes are actually RTL.
2927 This routine is very general, and could (should?) be used to
2928 implement many of the other routines in this file. */
2931 for_each_rtx (rtx *x, rtx_function f, void *data)
2933 int result;
2934 int length;
2935 const char *format;
2936 int i;
2938 /* Call F on X. */
2939 result = (*f) (x, data);
2940 if (result == -1)
2941 /* Do not traverse sub-expressions. */
2942 return 0;
2943 else if (result != 0)
2944 /* Stop the traversal. */
2945 return result;
2947 if (*x == NULL_RTX)
2948 /* There are no sub-expressions. */
2949 return 0;
2951 length = GET_RTX_LENGTH (GET_CODE (*x));
2952 format = GET_RTX_FORMAT (GET_CODE (*x));
2954 for (i = 0; i < length; ++i)
2956 switch (format[i])
2958 case 'e':
2959 result = for_each_rtx (&XEXP (*x, i), f, data);
2960 if (result != 0)
2961 return result;
2962 break;
2964 case 'V':
2965 case 'E':
2966 if (XVEC (*x, i) != 0)
2968 int j;
2969 for (j = 0; j < XVECLEN (*x, i); ++j)
2971 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2972 if (result != 0)
2973 return result;
2976 break;
2978 default:
2979 /* Nothing to do. */
2980 break;
2985 return 0;
2988 /* Searches X for any reference to REGNO, returning the rtx of the
2989 reference found if any. Otherwise, returns NULL_RTX. */
2992 regno_use_in (unsigned int regno, rtx x)
2994 const char *fmt;
2995 int i, j;
2996 rtx tem;
2998 if (GET_CODE (x) == REG && REGNO (x) == regno)
2999 return x;
3001 fmt = GET_RTX_FORMAT (GET_CODE (x));
3002 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3004 if (fmt[i] == 'e')
3006 if ((tem = regno_use_in (regno, XEXP (x, i))))
3007 return tem;
3009 else if (fmt[i] == 'E')
3010 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3011 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3012 return tem;
3015 return NULL_RTX;
3018 /* Return a value indicating whether OP, an operand of a commutative
3019 operation, is preferred as the first or second operand. The higher
3020 the value, the stronger the preference for being the first operand.
3021 We use negative values to indicate a preference for the first operand
3022 and positive values for the second operand. */
3025 commutative_operand_precedence (rtx op)
3027 /* Constants always come the second operand. Prefer "nice" constants. */
3028 if (GET_CODE (op) == CONST_INT)
3029 return -5;
3030 if (GET_CODE (op) == CONST_DOUBLE)
3031 return -4;
3032 if (CONSTANT_P (op))
3033 return -3;
3035 /* SUBREGs of objects should come second. */
3036 if (GET_CODE (op) == SUBREG
3037 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
3038 return -2;
3040 /* If only one operand is a `neg', `not',
3041 `mult', `plus', or `minus' expression, it will be the first
3042 operand. */
3043 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
3044 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
3045 || GET_CODE (op) == MINUS)
3046 return 2;
3048 /* Complex expressions should be the first, so decrease priority
3049 of objects. */
3050 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
3051 return -1;
3052 return 0;
3055 /* Return 1 iff it is necessary to swap operands of commutative operation
3056 in order to canonicalize expression. */
3059 swap_commutative_operands_p (rtx x, rtx y)
3061 return (commutative_operand_precedence (x)
3062 < commutative_operand_precedence (y));
3065 /* Return 1 if X is an autoincrement side effect and the register is
3066 not the stack pointer. */
3068 auto_inc_p (rtx x)
3070 switch (GET_CODE (x))
3072 case PRE_INC:
3073 case POST_INC:
3074 case PRE_DEC:
3075 case POST_DEC:
3076 case PRE_MODIFY:
3077 case POST_MODIFY:
3078 /* There are no REG_INC notes for SP. */
3079 if (XEXP (x, 0) != stack_pointer_rtx)
3080 return 1;
3081 default:
3082 break;
3084 return 0;
3087 /* Return 1 if the sequence of instructions beginning with FROM and up
3088 to and including TO is safe to move. If NEW_TO is non-NULL, and
3089 the sequence is not already safe to move, but can be easily
3090 extended to a sequence which is safe, then NEW_TO will point to the
3091 end of the extended sequence.
3093 For now, this function only checks that the region contains whole
3094 exception regions, but it could be extended to check additional
3095 conditions as well. */
3098 insns_safe_to_move_p (rtx from, rtx to, rtx *new_to)
3100 int eh_region_count = 0;
3101 int past_to_p = 0;
3102 rtx r = from;
3104 /* By default, assume the end of the region will be what was
3105 suggested. */
3106 if (new_to)
3107 *new_to = to;
3109 while (r)
3111 if (GET_CODE (r) == NOTE)
3113 switch (NOTE_LINE_NUMBER (r))
3115 case NOTE_INSN_EH_REGION_BEG:
3116 ++eh_region_count;
3117 break;
3119 case NOTE_INSN_EH_REGION_END:
3120 if (eh_region_count == 0)
3121 /* This sequence of instructions contains the end of
3122 an exception region, but not he beginning. Moving
3123 it will cause chaos. */
3124 return 0;
3126 --eh_region_count;
3127 break;
3129 default:
3130 break;
3133 else if (past_to_p)
3134 /* If we've passed TO, and we see a non-note instruction, we
3135 can't extend the sequence to a movable sequence. */
3136 return 0;
3138 if (r == to)
3140 if (!new_to)
3141 /* It's OK to move the sequence if there were matched sets of
3142 exception region notes. */
3143 return eh_region_count == 0;
3145 past_to_p = 1;
3148 /* It's OK to move the sequence if there were matched sets of
3149 exception region notes. */
3150 if (past_to_p && eh_region_count == 0)
3152 *new_to = r;
3153 return 1;
3156 /* Go to the next instruction. */
3157 r = NEXT_INSN (r);
3160 return 0;
3163 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3165 loc_mentioned_in_p (rtx *loc, rtx in)
3167 enum rtx_code code = GET_CODE (in);
3168 const char *fmt = GET_RTX_FORMAT (code);
3169 int i, j;
3171 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3173 if (loc == &in->fld[i].rtx)
3174 return 1;
3175 if (fmt[i] == 'e')
3177 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3178 return 1;
3180 else if (fmt[i] == 'E')
3181 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3182 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3183 return 1;
3185 return 0;
3188 /* Given a subreg X, return the bit offset where the subreg begins
3189 (counting from the least significant bit of the reg). */
3191 unsigned int
3192 subreg_lsb (rtx x)
3194 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3195 enum machine_mode mode = GET_MODE (x);
3196 unsigned int bitpos;
3197 unsigned int byte;
3198 unsigned int word;
3200 /* A paradoxical subreg begins at bit position 0. */
3201 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3202 return 0;
3204 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3205 /* If the subreg crosses a word boundary ensure that
3206 it also begins and ends on a word boundary. */
3207 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3208 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3209 && (SUBREG_BYTE (x) % UNITS_PER_WORD
3210 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3211 abort ();
3213 if (WORDS_BIG_ENDIAN)
3214 word = (GET_MODE_SIZE (inner_mode)
3215 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3216 else
3217 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3218 bitpos = word * BITS_PER_WORD;
3220 if (BYTES_BIG_ENDIAN)
3221 byte = (GET_MODE_SIZE (inner_mode)
3222 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3223 else
3224 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3225 bitpos += byte * BITS_PER_UNIT;
3227 return bitpos;
3230 /* This function returns the regno offset of a subreg expression.
3231 xregno - A regno of an inner hard subreg_reg (or what will become one).
3232 xmode - The mode of xregno.
3233 offset - The byte offset.
3234 ymode - The mode of a top level SUBREG (or what may become one).
3235 RETURN - The regno offset which would be used. */
3236 unsigned int
3237 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3238 unsigned int offset, enum machine_mode ymode)
3240 int nregs_xmode, nregs_ymode;
3241 int mode_multiple, nregs_multiple;
3242 int y_offset;
3244 if (xregno >= FIRST_PSEUDO_REGISTER)
3245 abort ();
3247 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3248 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3250 /* If this is a big endian paradoxical subreg, which uses more actual
3251 hard registers than the original register, we must return a negative
3252 offset so that we find the proper highpart of the register. */
3253 if (offset == 0
3254 && nregs_ymode > nregs_xmode
3255 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3256 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3257 return nregs_xmode - nregs_ymode;
3259 if (offset == 0 || nregs_xmode == nregs_ymode)
3260 return 0;
3262 /* size of ymode must not be greater than the size of xmode. */
3263 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3264 if (mode_multiple == 0)
3265 abort ();
3267 y_offset = offset / GET_MODE_SIZE (ymode);
3268 nregs_multiple = nregs_xmode / nregs_ymode;
3269 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3272 /* This function returns true when the offset is representable via
3273 subreg_offset in the given regno.
3274 xregno - A regno of an inner hard subreg_reg (or what will become one).
3275 xmode - The mode of xregno.
3276 offset - The byte offset.
3277 ymode - The mode of a top level SUBREG (or what may become one).
3278 RETURN - The regno offset which would be used. */
3279 bool
3280 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3281 unsigned int offset, enum machine_mode ymode)
3283 int nregs_xmode, nregs_ymode;
3284 int mode_multiple, nregs_multiple;
3285 int y_offset;
3287 if (xregno >= FIRST_PSEUDO_REGISTER)
3288 abort ();
3290 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3291 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3293 /* paradoxical subregs are always valid. */
3294 if (offset == 0
3295 && nregs_ymode > nregs_xmode
3296 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3297 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3298 return true;
3300 /* Lowpart subregs are always valid. */
3301 if (offset == subreg_lowpart_offset (ymode, xmode))
3302 return true;
3304 #ifdef ENABLE_CHECKING
3305 /* This should always pass, otherwise we don't know how to verify the
3306 constraint. These conditions may be relaxed but subreg_offset would
3307 need to be redesigned. */
3308 if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3309 || GET_MODE_SIZE (ymode) % nregs_ymode
3310 || nregs_xmode % nregs_ymode)
3311 abort ();
3312 #endif
3314 /* The XMODE value can be seen as a vector of NREGS_XMODE
3315 values. The subreg must represent an lowpart of given field.
3316 Compute what field it is. */
3317 offset -= subreg_lowpart_offset (ymode,
3318 mode_for_size (GET_MODE_BITSIZE (xmode)
3319 / nregs_xmode,
3320 MODE_INT, 0));
3322 /* size of ymode must not be greater than the size of xmode. */
3323 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3324 if (mode_multiple == 0)
3325 abort ();
3327 y_offset = offset / GET_MODE_SIZE (ymode);
3328 nregs_multiple = nregs_xmode / nregs_ymode;
3329 #ifdef ENABLE_CHECKING
3330 if (offset % GET_MODE_SIZE (ymode)
3331 || mode_multiple % nregs_multiple)
3332 abort ();
3333 #endif
3334 return (!(y_offset % (mode_multiple / nregs_multiple)));
3337 /* Return the final regno that a subreg expression refers to. */
3338 unsigned int
3339 subreg_regno (rtx x)
3341 unsigned int ret;
3342 rtx subreg = SUBREG_REG (x);
3343 int regno = REGNO (subreg);
3345 ret = regno + subreg_regno_offset (regno,
3346 GET_MODE (subreg),
3347 SUBREG_BYTE (x),
3348 GET_MODE (x));
3349 return ret;
3352 struct parms_set_data
3354 int nregs;
3355 HARD_REG_SET regs;
3358 /* Helper function for noticing stores to parameter registers. */
3359 static void
3360 parms_set (rtx x, rtx pat ATTRIBUTE_UNUSED, void *data)
3362 struct parms_set_data *d = data;
3363 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3364 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3366 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3367 d->nregs--;
3371 /* Look backward for first parameter to be loaded.
3372 Do not skip BOUNDARY. */
3374 find_first_parameter_load (rtx call_insn, rtx boundary)
3376 struct parms_set_data parm;
3377 rtx p, before;
3379 /* Since different machines initialize their parameter registers
3380 in different orders, assume nothing. Collect the set of all
3381 parameter registers. */
3382 CLEAR_HARD_REG_SET (parm.regs);
3383 parm.nregs = 0;
3384 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3385 if (GET_CODE (XEXP (p, 0)) == USE
3386 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3388 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3389 abort ();
3391 /* We only care about registers which can hold function
3392 arguments. */
3393 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3394 continue;
3396 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3397 parm.nregs++;
3399 before = call_insn;
3401 /* Search backward for the first set of a register in this set. */
3402 while (parm.nregs && before != boundary)
3404 before = PREV_INSN (before);
3406 /* It is possible that some loads got CSEed from one call to
3407 another. Stop in that case. */
3408 if (GET_CODE (before) == CALL_INSN)
3409 break;
3411 /* Our caller needs either ensure that we will find all sets
3412 (in case code has not been optimized yet), or take care
3413 for possible labels in a way by setting boundary to preceding
3414 CODE_LABEL. */
3415 if (GET_CODE (before) == CODE_LABEL)
3417 if (before != boundary)
3418 abort ();
3419 break;
3422 if (INSN_P (before))
3423 note_stores (PATTERN (before), parms_set, &parm);
3425 return before;
3428 /* Return true if we should avoid inserting code between INSN and preceding
3429 call instruction. */
3431 bool
3432 keep_with_call_p (rtx insn)
3434 rtx set;
3436 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3438 if (GET_CODE (SET_DEST (set)) == REG
3439 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3440 && fixed_regs[REGNO (SET_DEST (set))]
3441 && general_operand (SET_SRC (set), VOIDmode))
3442 return true;
3443 if (GET_CODE (SET_SRC (set)) == REG
3444 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3445 && GET_CODE (SET_DEST (set)) == REG
3446 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3447 return true;
3448 /* There may be a stack pop just after the call and before the store
3449 of the return register. Search for the actual store when deciding
3450 if we can break or not. */
3451 if (SET_DEST (set) == stack_pointer_rtx)
3453 rtx i2 = next_nonnote_insn (insn);
3454 if (i2 && keep_with_call_p (i2))
3455 return true;
3458 return false;
3461 /* Return true when store to register X can be hoisted to the place
3462 with LIVE registers (can be NULL). Value VAL contains destination
3463 whose value will be used. */
3465 static bool
3466 hoist_test_store (rtx x, rtx val, regset live)
3468 if (GET_CODE (x) == SCRATCH)
3469 return true;
3471 if (rtx_equal_p (x, val))
3472 return true;
3474 /* Allow subreg of X in case it is not writing just part of multireg pseudo.
3475 Then we would need to update all users to care hoisting the store too.
3476 Caller may represent that by specifying whole subreg as val. */
3478 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3480 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3481 && GET_MODE_BITSIZE (GET_MODE (x)) <
3482 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3483 return false;
3484 return true;
3486 if (GET_CODE (x) == SUBREG)
3487 x = SUBREG_REG (x);
3489 /* Anything except register store is not hoistable. This includes the
3490 partial stores to registers. */
3492 if (!REG_P (x))
3493 return false;
3495 /* Pseudo registers can be always replaced by another pseudo to avoid
3496 the side effect, for hard register we must ensure that they are dead.
3497 Eventually we may want to add code to try turn pseudos to hards, but it
3498 is unlikely useful. */
3500 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3502 int regno = REGNO (x);
3503 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3505 if (!live)
3506 return false;
3507 if (REGNO_REG_SET_P (live, regno))
3508 return false;
3509 while (--n > 0)
3510 if (REGNO_REG_SET_P (live, regno + n))
3511 return false;
3513 return true;
3517 /* Return true if INSN can be hoisted to place with LIVE hard registers
3518 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3519 and used by the hoisting pass. */
3521 bool
3522 can_hoist_insn_p (rtx insn, rtx val, regset live)
3524 rtx pat = PATTERN (insn);
3525 int i;
3527 /* It probably does not worth the complexity to handle multiple
3528 set stores. */
3529 if (!single_set (insn))
3530 return false;
3531 /* We can move CALL_INSN, but we need to check that all caller clobbered
3532 regs are dead. */
3533 if (GET_CODE (insn) == CALL_INSN)
3534 return false;
3535 /* In future we will handle hoisting of libcall sequences, but
3536 give up for now. */
3537 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3538 return false;
3539 switch (GET_CODE (pat))
3541 case SET:
3542 if (!hoist_test_store (SET_DEST (pat), val, live))
3543 return false;
3544 break;
3545 case USE:
3546 /* USES do have sick semantics, so do not move them. */
3547 return false;
3548 break;
3549 case CLOBBER:
3550 if (!hoist_test_store (XEXP (pat, 0), val, live))
3551 return false;
3552 break;
3553 case PARALLEL:
3554 for (i = 0; i < XVECLEN (pat, 0); i++)
3556 rtx x = XVECEXP (pat, 0, i);
3557 switch (GET_CODE (x))
3559 case SET:
3560 if (!hoist_test_store (SET_DEST (x), val, live))
3561 return false;
3562 break;
3563 case USE:
3564 /* We need to fix callers to really ensure availability
3565 of all values insn uses, but for now it is safe to prohibit
3566 hoisting of any insn having such a hidden uses. */
3567 return false;
3568 break;
3569 case CLOBBER:
3570 if (!hoist_test_store (SET_DEST (x), val, live))
3571 return false;
3572 break;
3573 default:
3574 break;
3577 break;
3578 default:
3579 abort ();
3581 return true;
3584 /* Update store after hoisting - replace all stores to pseudo registers
3585 by new ones to avoid clobbering of values except for store to VAL that will
3586 be updated to NEW. */
3588 static void
3589 hoist_update_store (rtx insn, rtx *xp, rtx val, rtx new)
3591 rtx x = *xp;
3593 if (GET_CODE (x) == SCRATCH)
3594 return;
3596 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3597 validate_change (insn, xp,
3598 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3599 SUBREG_BYTE (x)), 1);
3600 if (rtx_equal_p (x, val))
3602 validate_change (insn, xp, new, 1);
3603 return;
3605 if (GET_CODE (x) == SUBREG)
3607 xp = &SUBREG_REG (x);
3608 x = *xp;
3611 if (!REG_P (x))
3612 abort ();
3614 /* We've verified that hard registers are dead, so we may keep the side
3615 effect. Otherwise replace it by new pseudo. */
3616 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3617 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3618 REG_NOTES (insn)
3619 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3622 /* Create a copy of INSN after AFTER replacing store of VAL to NEW
3623 and each other side effect to pseudo register by new pseudo register. */
3626 hoist_insn_after (rtx insn, rtx after, rtx val, rtx new)
3628 rtx pat;
3629 int i;
3630 rtx note;
3632 insn = emit_copy_of_insn_after (insn, after);
3633 pat = PATTERN (insn);
3635 /* Remove REG_UNUSED notes as we will re-emit them. */
3636 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3637 remove_note (insn, note);
3639 /* To get this working callers must ensure to move everything referenced
3640 by REG_EQUAL/REG_EQUIV notes too. Lets remove them, it is probably
3641 easier. */
3642 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3643 remove_note (insn, note);
3644 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3645 remove_note (insn, note);
3647 /* Remove REG_DEAD notes as they might not be valid anymore in case
3648 we create redundancy. */
3649 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3650 remove_note (insn, note);
3651 switch (GET_CODE (pat))
3653 case SET:
3654 hoist_update_store (insn, &SET_DEST (pat), val, new);
3655 break;
3656 case USE:
3657 break;
3658 case CLOBBER:
3659 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3660 break;
3661 case PARALLEL:
3662 for (i = 0; i < XVECLEN (pat, 0); i++)
3664 rtx x = XVECEXP (pat, 0, i);
3665 switch (GET_CODE (x))
3667 case SET:
3668 hoist_update_store (insn, &SET_DEST (x), val, new);
3669 break;
3670 case USE:
3671 break;
3672 case CLOBBER:
3673 hoist_update_store (insn, &SET_DEST (x), val, new);
3674 break;
3675 default:
3676 break;
3679 break;
3680 default:
3681 abort ();
3683 if (!apply_change_group ())
3684 abort ();
3686 return insn;
3690 hoist_insn_to_edge (rtx insn, edge e, rtx val, rtx new)
3692 rtx new_insn;
3694 /* We cannot insert instructions on an abnormal critical edge.
3695 It will be easier to find the culprit if we die now. */
3696 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3697 abort ();
3699 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3700 stuff. We also emit CALL_INSNS and firends. */
3701 if (e->insns == NULL_RTX)
3703 start_sequence ();
3704 emit_note (NOTE_INSN_DELETED);
3706 else
3707 push_to_sequence (e->insns);
3709 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3711 e->insns = get_insns ();
3712 end_sequence ();
3713 return new_insn;