1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "tm_p.h"
33 #include "flags.h"
34 #include "basic-block.h"
35 #include "real.h"
37 /* Forward declarations */
38 static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
39 static void set_of_1 PARAMS ((rtx, rtx, void *));
40 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
41 static int rtx_referenced_p_1 PARAMS ((rtx *, void *));
42 static int computed_jump_p_1 PARAMS ((rtx));
43 static void parms_set PARAMS ((rtx, rtx, void *));
44 static bool hoist_test_store PARAMS ((rtx, rtx, regset));
45 static void hoist_update_store PARAMS ((rtx, rtx *, rtx, rtx));
47 /* Bit flags that specify the machine subtype we are compiling for.
48 Bits are tested using macros TARGET_... defined in the tm.h file
49 and set by `-m...' switches. Must be defined in rtlanal.c. */
51 int target_flags;
53 /* Return 1 if the value of X is unstable
54 (would be different at a different point in the program).
55 The frame pointer, arg pointer, etc. are considered stable
56 (within one function) and so is anything marked `unchanging'. */
58 int
59 rtx_unstable_p (x)
60 rtx x;
62 RTX_CODE code = GET_CODE (x);
63 int i;
64 const char *fmt;
66 switch (code)
68 case MEM:
69 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
71 case QUEUED:
72 return 1;
74 case ADDRESSOF:
75 case CONST:
76 case CONST_INT:
77 case CONST_DOUBLE:
78 case CONST_VECTOR:
79 case SYMBOL_REF:
80 case LABEL_REF:
81 return 0;
83 case REG:
84 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
85 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
86 /* The arg pointer varies if it is not a fixed register. */
87 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
88 || RTX_UNCHANGING_P (x))
89 return 0;
90 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
91 /* ??? When call-clobbered, the value is stable modulo the restore
92 that must happen after a call. This currently screws up local-alloc
93 into believing that the restore is not needed. */
94 if (x == pic_offset_table_rtx)
95 return 0;
96 #endif
97 return 1;
99 case ASM_OPERANDS:
100 if (MEM_VOLATILE_P (x))
101 return 1;
103 /* FALLTHROUGH */
105 default:
106 break;
109 fmt = GET_RTX_FORMAT (code);
110 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
111 if (fmt[i] == 'e')
113 if (rtx_unstable_p (XEXP (x, i)))
114 return 1;
116 else if (fmt[i] == 'E')
118 int j;
119 for (j = 0; j < XVECLEN (x, i); j++)
120 if (rtx_unstable_p (XVECEXP (x, i, j)))
121 return 1;
124 return 0;
127 /* Return 1 if X has a value that can vary even between two
128 executions of the program. 0 means X can be compared reliably
129 against certain constants or near-constants.
130 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
131 zero, we are slightly more conservative.
132 The frame pointer and the arg pointer are considered constant. */
135 rtx_varies_p (x, for_alias)
136 rtx x;
137 int for_alias;
139 RTX_CODE code = GET_CODE (x);
140 int i;
141 const char *fmt;
143 switch (code)
145 case MEM:
146 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
148 case QUEUED:
149 return 1;
151 case CONST:
152 case CONST_INT:
153 case CONST_DOUBLE:
154 case CONST_VECTOR:
155 case SYMBOL_REF:
156 case LABEL_REF:
157 return 0;
159 case ADDRESSOF:
160 /* This will resolve to some offset from the frame pointer. */
161 return 0;
163 case REG:
164 /* Note that we have to test for the actual rtx used for the frame
165 and arg pointers and not just the register number in case we have
166 eliminated the frame and/or arg pointer and are using it
167 for pseudos. */
168 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
169 /* The arg pointer varies if it is not a fixed register. */
170 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
171 return 0;
172 if (x == pic_offset_table_rtx
173 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
174 /* ??? When call-clobbered, the value is stable modulo the restore
175 that must happen after a call. This currently screws up
176 local-alloc into believing that the restore is not needed, so we
177 must return 0 only if we are called from alias analysis. */
178 && for_alias
179 #endif
181 return 0;
182 return 1;
184 case LO_SUM:
185 /* The operand 0 of a LO_SUM is considered constant
186 (in fact it is related specifically to operand 1)
187 during alias analysis. */
188 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
189 || rtx_varies_p (XEXP (x, 1), for_alias);
191 case ASM_OPERANDS:
192 if (MEM_VOLATILE_P (x))
193 return 1;
195 /* FALLTHROUGH */
197 default:
198 break;
201 fmt = GET_RTX_FORMAT (code);
202 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
203 if (fmt[i] == 'e')
205 if (rtx_varies_p (XEXP (x, i), for_alias))
206 return 1;
208 else if (fmt[i] == 'E')
210 int j;
211 for (j = 0; j < XVECLEN (x, i); j++)
212 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
213 return 1;
216 return 0;
219 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
222 rtx_addr_can_trap_p (x)
223 rtx x;
225 enum rtx_code code = GET_CODE (x);
227 switch (code)
229 case SYMBOL_REF:
230 return SYMBOL_REF_WEAK (x);
232 case LABEL_REF:
233 return 0;
235 case ADDRESSOF:
236 /* This will resolve to some offset from the frame pointer. */
237 return 0;
239 case REG:
240 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
241 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
242 || x == stack_pointer_rtx
243 /* The arg pointer varies if it is not a fixed register. */
244 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
245 return 0;
246 /* All of the virtual frame registers are stack references. */
247 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
248 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
249 return 0;
250 return 1;
252 case CONST:
253 return rtx_addr_can_trap_p (XEXP (x, 0));
255 case PLUS:
256 /* An address is assumed not to trap if it is an address that can't
257 trap plus a constant integer or it is the pic register plus a
258 constant. */
259 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
260 && GET_CODE (XEXP (x, 1)) == CONST_INT)
261 || (XEXP (x, 0) == pic_offset_table_rtx
262 && CONSTANT_P (XEXP (x, 1))));
264 case LO_SUM:
265 case PRE_MODIFY:
266 return rtx_addr_can_trap_p (XEXP (x, 1));
268 case PRE_DEC:
269 case PRE_INC:
270 case POST_DEC:
271 case POST_INC:
272 case POST_MODIFY:
273 return rtx_addr_can_trap_p (XEXP (x, 0));
275 default:
276 break;
279 /* If it isn't one of the cases above, it can cause a trap. */
280 return 1;
283 /* Return true if X is an address that is known to not be zero. */
285 bool
286 nonzero_address_p (x)
287 rtx x;
289 enum rtx_code code = GET_CODE (x);
291 switch (code)
293 case SYMBOL_REF:
294 return !SYMBOL_REF_WEAK (x);
296 case LABEL_REF:
297 return true;
299 case ADDRESSOF:
300 /* This will resolve to some offset from the frame pointer. */
301 return true;
303 case REG:
304 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
305 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
306 || x == stack_pointer_rtx
307 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
308 return true;
309 /* All of the virtual frame registers are stack references. */
310 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
311 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
312 return true;
313 return false;
315 case CONST:
316 return nonzero_address_p (XEXP (x, 0));
318 case PLUS:
319 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
321 /* Pointers aren't allowed to wrap. If we've got a register
322 that is known to be a pointer, and a positive offset, then
323 the composite can't be zero. */
324 if (INTVAL (XEXP (x, 1)) > 0
325 && REG_P (XEXP (x, 0))
326 && REG_POINTER (XEXP (x, 0)))
327 return true;
329 return nonzero_address_p (XEXP (x, 0));
331 /* Handle PIC references. */
332 else if (XEXP (x, 0) == pic_offset_table_rtx
333 && CONSTANT_P (XEXP (x, 1)))
334 return true;
335 return false;
337 case PRE_MODIFY:
338 /* Similar to the above; allow positive offsets. Further, since
339 auto-inc is only allowed in memories, the register must be a
340 pointer. */
341 if (GET_CODE (XEXP (x, 1)) == CONST_INT
342 && INTVAL (XEXP (x, 1)) > 0)
343 return true;
344 return nonzero_address_p (XEXP (x, 0));
346 case PRE_INC:
347 /* Similarly. Further, the offset is always positive. */
348 return true;
350 case PRE_DEC:
351 case POST_DEC:
352 case POST_INC:
353 case POST_MODIFY:
354 return nonzero_address_p (XEXP (x, 0));
356 case LO_SUM:
357 return nonzero_address_p (XEXP (x, 1));
359 default:
360 break;
363 /* If it isn't one of the cases above, it might be zero. */
364 return false;
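/* Usage sketch for nonzero_address_p above (illustrative only; ADDR is a
   hypothetical rtx variable).  A simplifier comparing an address against
   a null pointer can fold the test when the address is provably nonzero:

     if (nonzero_address_p (addr))
       ... (eq ADDR (const_int 0)) folds to const0_rtx and
           (ne ADDR (const_int 0)) folds to const_true_rtx ...  */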
367 /* Return 1 if X refers to a memory location whose address
368 cannot be compared reliably with constant addresses,
369 or if X refers to a BLKmode memory object.
370 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
371 zero, we are slightly more conservative. */
374 rtx_addr_varies_p (x, for_alias)
375 rtx x;
376 int for_alias;
378 enum rtx_code code;
379 int i;
380 const char *fmt;
382 if (x == 0)
383 return 0;
385 code = GET_CODE (x);
386 if (code == MEM)
387 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
389 fmt = GET_RTX_FORMAT (code);
390 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
391 if (fmt[i] == 'e')
393 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
394 return 1;
396 else if (fmt[i] == 'E')
398 int j;
399 for (j = 0; j < XVECLEN (x, i); j++)
400 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
401 return 1;
403 return 0;
406 /* Return the value of the integer term in X, if one is apparent;
407 otherwise return 0.
408 Only obvious integer terms are detected.
409 This is used in cse.c with the `related_value' field. */
411 HOST_WIDE_INT
412 get_integer_term (x)
413 rtx x;
415 if (GET_CODE (x) == CONST)
416 x = XEXP (x, 0);
418 if (GET_CODE (x) == MINUS
419 && GET_CODE (XEXP (x, 1)) == CONST_INT)
420 return - INTVAL (XEXP (x, 1));
421 if (GET_CODE (x) == PLUS
422 && GET_CODE (XEXP (x, 1)) == CONST_INT)
423 return INTVAL (XEXP (x, 1));
424 return 0;
427 /* If X is a constant, return the value sans apparent integer term;
428 otherwise return 0.
429 Only obvious integer terms are detected. */
432 get_related_value (x)
433 rtx x;
435 if (GET_CODE (x) != CONST)
436 return 0;
437 x = XEXP (x, 0);
438 if (GET_CODE (x) == PLUS
439 && GET_CODE (XEXP (x, 1)) == CONST_INT)
440 return XEXP (x, 0);
441 else if (GET_CODE (x) == MINUS
442 && GET_CODE (XEXP (x, 1)) == CONST_INT)
443 return XEXP (x, 0);
444 return 0;
447 /* Given a tablejump insn INSN, return the RTL expression for the offset
448 into the jump table. If the offset cannot be determined, then return
449 NULL_RTX.
451 If EARLIEST is nonzero, it is a pointer to a place where the earliest
452 insn used in locating the offset was found. */
455 get_jump_table_offset (insn, earliest)
456 rtx insn;
457 rtx *earliest;
459 rtx label;
460 rtx table;
461 rtx set;
462 rtx old_insn;
463 rtx x;
464 rtx old_x;
465 rtx y;
466 rtx old_y;
467 int i;
469 if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
470 return NULL_RTX;
472 x = SET_SRC (set);
474 /* Some targets (e.g., ARM) emit a tablejump that also
475 contains the out-of-range target. */
476 if (GET_CODE (x) == IF_THEN_ELSE
477 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
478 x = XEXP (x, 1);
480 /* Search backwards and locate the expression stored in X. */
481 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
482 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
485 /* If X is an expression using a relative address then strip
486 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
487 or the jump table label. */
488 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
489 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
491 for (i = 0; i < 2; i++)
493 old_insn = insn;
494 y = XEXP (x, i);
496 if (y == pc_rtx || y == pic_offset_table_rtx)
497 break;
499 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
500 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
503 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
504 break;
507 if (i >= 2)
508 return NULL_RTX;
510 x = XEXP (x, 1 - i);
512 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
513 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
517 /* Strip off any sign or zero extension. */
518 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
520 x = XEXP (x, 0);
522 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
523 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
527 /* If X isn't a MEM then this isn't a tablejump we understand. */
528 if (GET_CODE (x) != MEM)
529 return NULL_RTX;
531 /* Strip off the MEM. */
532 x = XEXP (x, 0);
534 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
535 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
538 /* If X isn't a PLUS then this isn't a tablejump we understand. */
539 if (GET_CODE (x) != PLUS)
540 return NULL_RTX;
542 /* At this point we should have an expression representing the jump table
543 plus an offset. Examine each operand in order to determine which one
544 represents the jump table. Knowing that tells us that the other operand
545 must represent the offset. */
546 for (i = 0; i < 2; i++)
548 old_insn = insn;
549 y = XEXP (x, i);
551 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
552 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
555 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
556 && reg_mentioned_p (label, y))
557 break;
560 if (i >= 2)
561 return NULL_RTX;
563 x = XEXP (x, 1 - i);
565 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
566 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
567 for (i = 0; i < 2; i++)
568 if (XEXP (x, i) == pic_offset_table_rtx)
570 x = XEXP (x, 1 - i);
571 break;
574 if (earliest)
575 *earliest = insn;
577 /* Return the RTL expression representing the offset. */
578 return x;
581 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
582 a global register. */
584 static int
585 global_reg_mentioned_p_1 (loc, data)
586 rtx *loc;
587 void *data ATTRIBUTE_UNUSED;
589 int regno;
590 rtx x = *loc;
592 if (! x)
593 return 0;
595 switch (GET_CODE (x))
597 case SUBREG:
598 if (GET_CODE (SUBREG_REG (x)) == REG)
600 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
601 && global_regs[subreg_regno (x)])
602 return 1;
603 return 0;
605 break;
607 case REG:
608 regno = REGNO (x);
609 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
610 return 1;
611 return 0;
613 case SCRATCH:
614 case PC:
615 case CC0:
616 case CONST_INT:
617 case CONST_DOUBLE:
618 case CONST:
619 case LABEL_REF:
620 return 0;
622 case CALL:
623 /* A non-constant call might use a global register. */
624 return 1;
626 default:
627 break;
630 return 0;
633 /* Returns nonzero if X mentions a global register. */
636 global_reg_mentioned_p (x)
637 rtx x;
639 if (INSN_P (x))
641 if (GET_CODE (x) == CALL_INSN)
643 if (! CONST_OR_PURE_CALL_P (x))
644 return 1;
645 x = CALL_INSN_FUNCTION_USAGE (x);
646 if (x == 0)
647 return 0;
649 else
650 x = PATTERN (x);
653 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
656 /* Return the number of places FIND appears within X. If COUNT_DEST is
657 zero, we do not count occurrences inside the destination of a SET. */
660 count_occurrences (x, find, count_dest)
661 rtx x, find;
662 int count_dest;
664 int i, j;
665 enum rtx_code code;
666 const char *format_ptr;
667 int count;
669 if (x == find)
670 return 1;
672 code = GET_CODE (x);
674 switch (code)
676 case REG:
677 case CONST_INT:
678 case CONST_DOUBLE:
679 case CONST_VECTOR:
680 case SYMBOL_REF:
681 case CODE_LABEL:
682 case PC:
683 case CC0:
684 return 0;
686 case MEM:
687 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
688 return 1;
689 break;
691 case SET:
692 if (SET_DEST (x) == find && ! count_dest)
693 return count_occurrences (SET_SRC (x), find, count_dest);
694 break;
696 default:
697 break;
700 format_ptr = GET_RTX_FORMAT (code);
701 count = 0;
703 for (i = 0; i < GET_RTX_LENGTH (code); i++)
705 switch (*format_ptr++)
707 case 'e':
708 count += count_occurrences (XEXP (x, i), find, count_dest);
709 break;
711 case 'E':
712 for (j = 0; j < XVECLEN (x, i); j++)
713 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
714 break;
717 return count;
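/* Usage sketch for count_occurrences above (illustrative only; REG and
   INSN are hypothetical variables):

     int uses = count_occurrences (PATTERN (insn), reg, 0);

   With COUNT_DEST zero, REG appearing as the whole destination of a SET
   is not counted.  */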
720 /* Nonzero if register REG appears somewhere within IN.
721 Also works if REG is not a register; in this case it checks
722 for a subexpression of IN that is Lisp "equal" to REG. */
725 reg_mentioned_p (reg, in)
726 rtx reg, in;
728 const char *fmt;
729 int i;
730 enum rtx_code code;
732 if (in == 0)
733 return 0;
735 if (reg == in)
736 return 1;
738 if (GET_CODE (in) == LABEL_REF)
739 return reg == XEXP (in, 0);
741 code = GET_CODE (in);
743 switch (code)
745 /* Compare registers by number. */
746 case REG:
747 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
749 /* These codes have no constituent expressions
750 and are unique. */
751 case SCRATCH:
752 case CC0:
753 case PC:
754 return 0;
756 case CONST_INT:
757 case CONST_VECTOR:
758 case CONST_DOUBLE:
759 /* These are kept unique for a given value. */
760 return 0;
762 default:
763 break;
766 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
767 return 1;
769 fmt = GET_RTX_FORMAT (code);
771 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
773 if (fmt[i] == 'E')
775 int j;
776 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
777 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
778 return 1;
780 else if (fmt[i] == 'e'
781 && reg_mentioned_p (reg, XEXP (in, i)))
782 return 1;
784 return 0;
787 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
788 no CODE_LABEL insn. */
791 no_labels_between_p (beg, end)
792 rtx beg, end;
794 rtx p;
795 if (beg == end)
796 return 0;
797 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
798 if (GET_CODE (p) == CODE_LABEL)
799 return 0;
800 return 1;
803 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
804 no JUMP_INSN insn. */
807 no_jumps_between_p (beg, end)
808 rtx beg, end;
810 rtx p;
811 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
812 if (GET_CODE (p) == JUMP_INSN)
813 return 0;
814 return 1;
817 /* Nonzero if register REG is used in an insn between
818 FROM_INSN and TO_INSN (exclusive of those two). */
821 reg_used_between_p (reg, from_insn, to_insn)
822 rtx reg, from_insn, to_insn;
824 rtx insn;
826 if (from_insn == to_insn)
827 return 0;
829 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
830 if (INSN_P (insn)
831 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
832 || (GET_CODE (insn) == CALL_INSN
833 && (find_reg_fusage (insn, USE, reg)
834 || find_reg_fusage (insn, CLOBBER, reg)))))
835 return 1;
836 return 0;
839 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
840 is entirely replaced by a new value and the only use is as a SET_DEST,
841 we do not consider it a reference. */
844 reg_referenced_p (x, body)
845 rtx x;
846 rtx body;
848 int i;
850 switch (GET_CODE (body))
852 case SET:
853 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
854 return 1;
856 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
857 of a REG that occupies all of the REG, the insn references X if
858 it is mentioned in the destination. */
859 if (GET_CODE (SET_DEST (body)) != CC0
860 && GET_CODE (SET_DEST (body)) != PC
861 && GET_CODE (SET_DEST (body)) != REG
862 && ! (GET_CODE (SET_DEST (body)) == SUBREG
863 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
864 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
865 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
866 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
867 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
868 && reg_overlap_mentioned_p (x, SET_DEST (body)))
869 return 1;
870 return 0;
872 case ASM_OPERANDS:
873 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
874 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
875 return 1;
876 return 0;
878 case CALL:
879 case USE:
880 case IF_THEN_ELSE:
881 return reg_overlap_mentioned_p (x, body);
883 case TRAP_IF:
884 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
886 case PREFETCH:
887 return reg_overlap_mentioned_p (x, XEXP (body, 0));
889 case UNSPEC:
890 case UNSPEC_VOLATILE:
891 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
892 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
893 return 1;
894 return 0;
896 case PARALLEL:
897 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
898 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
899 return 1;
900 return 0;
902 case CLOBBER:
903 if (GET_CODE (XEXP (body, 0)) == MEM)
904 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
905 return 1;
906 return 0;
908 case COND_EXEC:
909 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
910 return 1;
911 return reg_referenced_p (x, COND_EXEC_CODE (body));
913 default:
914 return 0;
918 /* Nonzero if register REG is referenced in an insn between
919 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
920 not count. */
923 reg_referenced_between_p (reg, from_insn, to_insn)
924 rtx reg, from_insn, to_insn;
926 rtx insn;
928 if (from_insn == to_insn)
929 return 0;
931 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
932 if (INSN_P (insn)
933 && (reg_referenced_p (reg, PATTERN (insn))
934 || (GET_CODE (insn) == CALL_INSN
935 && find_reg_fusage (insn, USE, reg))))
936 return 1;
937 return 0;
940 /* Nonzero if register REG is set or clobbered in an insn between
941 FROM_INSN and TO_INSN (exclusive of those two). */
944 reg_set_between_p (reg, from_insn, to_insn)
945 rtx reg, from_insn, to_insn;
947 rtx insn;
949 if (from_insn == to_insn)
950 return 0;
952 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
953 if (INSN_P (insn) && reg_set_p (reg, insn))
954 return 1;
955 return 0;
958 /* Internals of reg_set_between_p. */
960 reg_set_p (reg, insn)
961 rtx reg, insn;
963 /* We can be passed an insn or part of one. If we are passed an insn,
964 check if a side-effect of the insn clobbers REG. */
965 if (INSN_P (insn)
966 && (FIND_REG_INC_NOTE (insn, reg)
967 || (GET_CODE (insn) == CALL_INSN
968 /* We'd like to test call_used_regs here, but rtlanal.c can't
969 reference that variable due to its use in genattrtab. So
970 we'll just be more conservative.
972 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
973 information holds all clobbered registers. */
974 && ((GET_CODE (reg) == REG
975 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
976 || GET_CODE (reg) == MEM
977 || find_reg_fusage (insn, CLOBBER, reg)))))
978 return 1;
980 return set_of (reg, insn) != NULL_RTX;
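/* Usage sketch for reg_set_p / reg_set_between_p above (illustrative
   only; REG, FROM and TO are hypothetical variables):

     if (! reg_set_between_p (reg, from, to))
       ... REG has the same value on entry to TO as it had just
           after FROM, so a use of REG can be moved between them ...  */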
983 /* Similar to reg_set_between_p, but check all registers in X. Return 0
984 only if none of them are modified between START and END. Do not
985 consider non-registers one way or the other. */
988 regs_set_between_p (x, start, end)
989 rtx x;
990 rtx start, end;
992 enum rtx_code code = GET_CODE (x);
993 const char *fmt;
994 int i, j;
996 switch (code)
998 case CONST_INT:
999 case CONST_DOUBLE:
1000 case CONST_VECTOR:
1001 case CONST:
1002 case SYMBOL_REF:
1003 case LABEL_REF:
1004 case PC:
1005 case CC0:
1006 return 0;
1008 case REG:
1009 return reg_set_between_p (x, start, end);
1011 default:
1012 break;
1015 fmt = GET_RTX_FORMAT (code);
1016 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1018 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
1019 return 1;
1021 else if (fmt[i] == 'E')
1022 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1023 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
1024 return 1;
1027 return 0;
1030 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1031 only if none of them are modified between START and END. Return 1 if
1032 X contains a MEM; this routine does use memory aliasing. */
1035 modified_between_p (x, start, end)
1036 rtx x;
1037 rtx start, end;
1039 enum rtx_code code = GET_CODE (x);
1040 const char *fmt;
1041 int i, j;
1042 rtx insn;
1044 if (start == end)
1045 return 0;
1047 switch (code)
1049 case CONST_INT:
1050 case CONST_DOUBLE:
1051 case CONST_VECTOR:
1052 case CONST:
1053 case SYMBOL_REF:
1054 case LABEL_REF:
1055 return 0;
1057 case PC:
1058 case CC0:
1059 return 1;
1061 case MEM:
1062 if (RTX_UNCHANGING_P (x))
1063 return 0;
1064 if (modified_between_p (XEXP (x, 0), start, end))
1065 return 1;
1066 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1067 if (memory_modified_in_insn_p (x, insn))
1068 return 1;
1069 return 0;
1070 break;
1072 case REG:
1073 return reg_set_between_p (x, start, end);
1075 default:
1076 break;
1079 fmt = GET_RTX_FORMAT (code);
1080 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1082 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1083 return 1;
1085 else if (fmt[i] == 'E')
1086 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1087 if (modified_between_p (XVECEXP (x, i, j), start, end))
1088 return 1;
1091 return 0;
1094 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1095 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1096 does use memory aliasing. */
1099 modified_in_p (x, insn)
1100 rtx x;
1101 rtx insn;
1103 enum rtx_code code = GET_CODE (x);
1104 const char *fmt;
1105 int i, j;
1107 switch (code)
1109 case CONST_INT:
1110 case CONST_DOUBLE:
1111 case CONST_VECTOR:
1112 case CONST:
1113 case SYMBOL_REF:
1114 case LABEL_REF:
1115 return 0;
1117 case PC:
1118 case CC0:
1119 return 1;
1121 case MEM:
1122 if (RTX_UNCHANGING_P (x))
1123 return 0;
1124 if (modified_in_p (XEXP (x, 0), insn))
1125 return 1;
1126 if (memory_modified_in_insn_p (x, insn))
1127 return 1;
1128 return 0;
1129 break;
1131 case REG:
1132 return reg_set_p (x, insn);
1134 default:
1135 break;
1138 fmt = GET_RTX_FORMAT (code);
1139 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1141 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1142 return 1;
1144 else if (fmt[i] == 'E')
1145 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1146 if (modified_in_p (XVECEXP (x, i, j), insn))
1147 return 1;
1150 return 0;
1153 /* Return true if anything in insn X is (anti,output,true) dependent on
1154 anything in insn Y. */
1157 insn_dependent_p (x, y)
1158 rtx x, y;
1160 rtx tmp;
1162 if (! INSN_P (x) || ! INSN_P (y))
1163 abort ();
1165 tmp = PATTERN (y);
1166 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1167 if (tmp == NULL_RTX)
1168 return 1;
1170 tmp = PATTERN (x);
1171 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1172 if (tmp == NULL_RTX)
1173 return 1;
1175 return 0;
1178 /* A helper routine for insn_dependent_p called through note_stores. */
1180 static void
1181 insn_dependent_p_1 (x, pat, data)
1182 rtx x;
1183 rtx pat ATTRIBUTE_UNUSED;
1184 void *data;
1186 rtx * pinsn = (rtx *) data;
1188 if (*pinsn && reg_mentioned_p (x, *pinsn))
1189 *pinsn = NULL_RTX;
1192 /* Helper function for set_of. */
1193 struct set_of_data
1195 rtx found;
1196 rtx pat;
1199 static void
1200 set_of_1 (x, pat, data1)
1201 rtx x;
1202 rtx pat;
1203 void *data1;
1205 struct set_of_data *data = (struct set_of_data *) (data1);
1206 if (rtx_equal_p (x, data->pat)
1207 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1208 data->found = pat;
1211 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1212 (either directly or via STRICT_LOW_PART and similar modifiers). */
1214 set_of (pat, insn)
1215 rtx pat, insn;
1217 struct set_of_data data;
1218 data.found = NULL_RTX;
1219 data.pat = pat;
1220 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1221 return data.found;
1224 /* Given an INSN, return a SET expression if this insn has only a single SET.
1225 It may also have CLOBBERs, USEs, or SETs whose output
1226 will not be used, which we ignore. */
1229 single_set_2 (insn, pat)
1230 rtx insn, pat;
1232 rtx set = NULL;
1233 int set_verified = 1;
1234 int i;
1236 if (GET_CODE (pat) == PARALLEL)
1238 for (i = 0; i < XVECLEN (pat, 0); i++)
1240 rtx sub = XVECEXP (pat, 0, i);
1241 switch (GET_CODE (sub))
1243 case USE:
1244 case CLOBBER:
1245 break;
1247 case SET:
1248 /* We can consider insns having multiple sets, where all
1249 but one are dead, as single set insns. In the common case
1250 only a single set is present in the pattern, so we want
1251 to avoid checking for REG_UNUSED notes unless necessary.
1253 When we reach a set for the first time, we just assume it is
1254 the single set we are looking for; only when more
1255 sets are found in the insn do we check them. */
1256 if (!set_verified)
1258 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1259 && !side_effects_p (set))
1260 set = NULL;
1261 else
1262 set_verified = 1;
1264 if (!set)
1265 set = sub, set_verified = 0;
1266 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1267 || side_effects_p (sub))
1268 return NULL_RTX;
1269 break;
1271 default:
1272 return NULL_RTX;
1276 return set;
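/* Usage sketch for single_set_2 above (illustrative only; INSN is a
   hypothetical variable).  Callers normally go through the single_set
   macro in rtl.h, which handles the common plain-SET pattern and falls
   back to single_set_2 for PARALLELs:

     rtx set = single_set (insn);
     if (set)
       ... SET_DEST (set) and SET_SRC (set) describe the one SET whose
           result is actually used ...  */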
1279 /* Given an INSN, return nonzero if it has more than one SET, else return
1280 zero. */
1283 multiple_sets (insn)
1284 rtx insn;
1286 int found;
1287 int i;
1289 /* INSN must be an insn. */
1290 if (! INSN_P (insn))
1291 return 0;
1293 /* Only a PARALLEL can have multiple SETs. */
1294 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1296 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1297 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1299 /* If we have already found a SET, then return now. */
1300 if (found)
1301 return 1;
1302 else
1303 found = 1;
1307 /* Either zero or one SET. */
1308 return 0;
1311 /* Return nonzero if the destination of SET equals the source
1312 and there are no side effects. */
1315 set_noop_p (set)
1316 rtx set;
1318 rtx src = SET_SRC (set);
1319 rtx dst = SET_DEST (set);
1321 if (dst == pc_rtx && src == pc_rtx)
1322 return 1;
1324 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1325 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1327 if (GET_CODE (dst) == SIGN_EXTRACT
1328 || GET_CODE (dst) == ZERO_EXTRACT)
1329 return rtx_equal_p (XEXP (dst, 0), src)
1330 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1331 && !side_effects_p (src);
1333 if (GET_CODE (dst) == STRICT_LOW_PART)
1334 dst = XEXP (dst, 0);
1336 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1338 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1339 return 0;
1340 src = SUBREG_REG (src);
1341 dst = SUBREG_REG (dst);
1344 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1345 && REGNO (src) == REGNO (dst));
1348 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1349 value to itself. */
1352 noop_move_p (insn)
1353 rtx insn;
1355 rtx pat = PATTERN (insn);
1357 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1358 return 1;
1360 /* Insns carrying these notes are useful later on. */
1361 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1362 return 0;
1364 /* For now treat an insn with a REG_RETVAL note as a
1365 special insn which should not be considered a no-op. */
1366 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1367 return 0;
1369 if (GET_CODE (pat) == SET && set_noop_p (pat))
1370 return 1;
1372 if (GET_CODE (pat) == PARALLEL)
1374 int i;
1375 /* If nothing but SETs of registers to themselves,
1376 this insn can also be deleted. */
1377 for (i = 0; i < XVECLEN (pat, 0); i++)
1379 rtx tem = XVECEXP (pat, 0, i);
1381 if (GET_CODE (tem) == USE
1382 || GET_CODE (tem) == CLOBBER)
1383 continue;
1385 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1386 return 0;
1389 return 1;
1391 return 0;
1395 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1396 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1397 If the object was modified, if we hit a partial assignment to X, or hit a
1398 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1399 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1400 be the src. */
1403 find_last_value (x, pinsn, valid_to, allow_hwreg)
1404 rtx x;
1405 rtx *pinsn;
1406 rtx valid_to;
1407 int allow_hwreg;
1409 rtx p;
1411 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1412 p = PREV_INSN (p))
1413 if (INSN_P (p))
1415 rtx set = single_set (p);
1416 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1418 if (set && rtx_equal_p (x, SET_DEST (set)))
1420 rtx src = SET_SRC (set);
1422 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1423 src = XEXP (note, 0);
1425 if ((valid_to == NULL_RTX
1426 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1427 /* Reject hard registers because we don't usually want
1428 to use them; we'd rather use a pseudo. */
1429 && (! (GET_CODE (src) == REG
1430 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1432 *pinsn = p;
1433 return src;
1437 /* If set in non-simple way, we don't have a value. */
1438 if (reg_set_p (x, p))
1439 break;
1442 return x;
1445 /* Return nonzero if register in range [REGNO, ENDREGNO)
1446 appears either explicitly or implicitly in X
1447 other than being stored into.
1449 References contained within the substructure at LOC do not count.
1450 LOC may be zero, meaning don't ignore anything. */
1453 refers_to_regno_p (regno, endregno, x, loc)
1454 unsigned int regno, endregno;
1455 rtx x;
1456 rtx *loc;
1458 int i;
1459 unsigned int x_regno;
1460 RTX_CODE code;
1461 const char *fmt;
1463 repeat:
1464 /* The contents of a REG_NONNEG note are always zero, so we must come here
1465 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1466 if (x == 0)
1467 return 0;
1469 code = GET_CODE (x);
1471 switch (code)
1473 case REG:
1474 x_regno = REGNO (x);
1476 /* If we are modifying the stack, frame, or argument pointer, it will
1477 clobber a virtual register. In fact, we could be more precise,
1478 but it isn't worth it. */
1479 if ((x_regno == STACK_POINTER_REGNUM
1480 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1481 || x_regno == ARG_POINTER_REGNUM
1482 #endif
1483 || x_regno == FRAME_POINTER_REGNUM)
1484 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1485 return 1;
1487 return (endregno > x_regno
1488 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1489 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1490 : 1));
1492 case SUBREG:
1493 /* If this is a SUBREG of a hard reg, we can see exactly which
1494 registers are being modified. Otherwise, handle normally. */
1495 if (GET_CODE (SUBREG_REG (x)) == REG
1496 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1498 unsigned int inner_regno = subreg_regno (x);
1499 unsigned int inner_endregno
1500 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1501 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1503 return endregno > inner_regno && regno < inner_endregno;
1505 break;
1507 case CLOBBER:
1508 case SET:
1509 if (&SET_DEST (x) != loc
1510 /* Note setting a SUBREG counts as referring to the REG it is in for
1511 a pseudo but not for hard registers since we can
1512 treat each word individually. */
1513 && ((GET_CODE (SET_DEST (x)) == SUBREG
1514 && loc != &SUBREG_REG (SET_DEST (x))
1515 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1516 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1517 && refers_to_regno_p (regno, endregno,
1518 SUBREG_REG (SET_DEST (x)), loc))
1519 || (GET_CODE (SET_DEST (x)) != REG
1520 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1521 return 1;
1523 if (code == CLOBBER || loc == &SET_SRC (x))
1524 return 0;
1525 x = SET_SRC (x);
1526 goto repeat;
1528 default:
1529 break;
1532 /* X does not match, so try its subexpressions. */
1534 fmt = GET_RTX_FORMAT (code);
1535 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1537 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1539 if (i == 0)
1541 x = XEXP (x, 0);
1542 goto repeat;
1544 else
1545 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1546 return 1;
1548 else if (fmt[i] == 'E')
1550 int j;
1551 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1552 if (loc != &XVECEXP (x, i, j)
1553 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1554 return 1;
1557 return 0;
1560 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1561 we check if any register number in X conflicts with the relevant register
1562 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1563 contains a MEM (we don't bother checking for memory addresses that can't
1564 conflict because we expect this to be a rare case). */
1567 reg_overlap_mentioned_p (x, in)
1568 rtx x, in;
1570 unsigned int regno, endregno;
1572 /* Overly conservative. */
1573 if (GET_CODE (x) == STRICT_LOW_PART
1574 || GET_CODE (x) == ZERO_EXTRACT
1575 || GET_CODE (x) == SIGN_EXTRACT)
1576 x = XEXP (x, 0);
1578 /* If either argument is a constant, then modifying X can not affect IN. */
1579 if (CONSTANT_P (x) || CONSTANT_P (in))
1580 return 0;
1582 switch (GET_CODE (x))
1584 case SUBREG:
1585 regno = REGNO (SUBREG_REG (x));
1586 if (regno < FIRST_PSEUDO_REGISTER)
1587 regno = subreg_regno (x);
1588 goto do_reg;
1590 case REG:
1591 regno = REGNO (x);
1592 do_reg:
1593 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1594 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1595 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1597 case MEM:
1599 const char *fmt;
1600 int i;
1602 if (GET_CODE (in) == MEM)
1603 return 1;
1605 fmt = GET_RTX_FORMAT (GET_CODE (in));
1606 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1607 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1608 return 1;
1610 return 0;
1613 case SCRATCH:
1614 case PC:
1615 case CC0:
1616 return reg_mentioned_p (x, in);
1618 case PARALLEL:
1620 int i;
1622 /* If any register in here refers to it we return true. */
1623 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1624 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1625 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1626 return 1;
1627 return 0;
1630 default:
1631 break;
1634 abort ();
1637 /* Return the last value to which REG was set prior to INSN. If we can't
1638 find it easily, return 0.
1640 We only return a REG, SUBREG, or constant because it is too hard to
1641 check if a MEM remains unchanged. */
1644 reg_set_last (x, insn)
1645 rtx x;
1646 rtx insn;
1648 rtx orig_insn = insn;
1650 /* Scan backwards from INSN. Stop when we reach a label, or, if X
1651 may be a hard register, when we reach a CALL_INSN.
1654 If we find a set of X, ensure that its SET_SRC remains unchanged
1656 between that set and the original INSN. */
1658 for (;
1659 insn && GET_CODE (insn) != CODE_LABEL
1660 && ! (GET_CODE (insn) == CALL_INSN
1661 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1662 insn = PREV_INSN (insn))
1663 if (INSN_P (insn))
1665 rtx set = set_of (x, insn);
1666 /* OK, this insn modifies our register. See if we understand it. */
1667 if (set)
1669 rtx last_value;
1670 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1671 return 0;
1672 last_value = SET_SRC (set);
1673 if (CONSTANT_P (last_value)
1674 || ((GET_CODE (last_value) == REG
1675 || GET_CODE (last_value) == SUBREG)
1676 && ! reg_set_between_p (last_value,
1677 insn, orig_insn)))
1678 return last_value;
1679 else
1680 return 0;
1684 return 0;
1687 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1688 (X would be the pattern of an insn).
1689 FUN receives two arguments:
1690 the REG, MEM, CC0 or PC being stored in or clobbered,
1691 the SET or CLOBBER rtx that does the store.
1693 If the item being stored in or clobbered is a SUBREG of a hard register,
1694 the SUBREG will be passed. */
1696 void
1697 note_stores (x, fun, data)
1698 rtx x;
1699 void (*fun) PARAMS ((rtx, rtx, void *));
1700 void *data;
1702 int i;
1704 if (GET_CODE (x) == COND_EXEC)
1705 x = COND_EXEC_CODE (x);
1707 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1709 rtx dest = SET_DEST (x);
1711 while ((GET_CODE (dest) == SUBREG
1712 && (GET_CODE (SUBREG_REG (dest)) != REG
1713 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1714 || GET_CODE (dest) == ZERO_EXTRACT
1715 || GET_CODE (dest) == SIGN_EXTRACT
1716 || GET_CODE (dest) == STRICT_LOW_PART)
1717 dest = XEXP (dest, 0);
1719 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1720 each of whose first operand is a register. */
1721 if (GET_CODE (dest) == PARALLEL)
1723 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1724 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1725 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1727 else
1728 (*fun) (dest, x, data);
1731 else if (GET_CODE (x) == PARALLEL)
1732 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1733 note_stores (XVECEXP (x, 0, i), fun, data);
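/* Usage sketch for note_stores above (illustrative only; the callback
   name and its bookkeeping are hypothetical):

     static void
     record_store (dest, setter, data)
          rtx dest;
          rtx setter;
          void *data;
     {
       if (GET_CODE (dest) == REG)
         ... record that REGNO (DEST) is modified by SETTER ...
     }

     ...
     note_stores (PATTERN (insn), record_store, NULL);  */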
1736 /* Like note_stores, but call FUN for each expression that is being
1737 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1738 FUN for each expression, not any interior subexpressions. FUN receives a
1739 pointer to the expression and the DATA passed to this function.
1741 Note that this is not quite the same test as that done in reg_referenced_p
1742 since that considers something as being referenced if it is being
1743 partially set, while we do not. */
1745 void
1746 note_uses (pbody, fun, data)
1747 rtx *pbody;
1748 void (*fun) PARAMS ((rtx *, void *));
1749 void *data;
1751 rtx body = *pbody;
1752 int i;
1754 switch (GET_CODE (body))
1756 case COND_EXEC:
1757 (*fun) (&COND_EXEC_TEST (body), data);
1758 note_uses (&COND_EXEC_CODE (body), fun, data);
1759 return;
1761 case PARALLEL:
1762 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1763 note_uses (&XVECEXP (body, 0, i), fun, data);
1764 return;
1766 case USE:
1767 (*fun) (&XEXP (body, 0), data);
1768 return;
1770 case ASM_OPERANDS:
1771 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1772 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1773 return;
1775 case TRAP_IF:
1776 (*fun) (&TRAP_CONDITION (body), data);
1777 return;
1779 case PREFETCH:
1780 (*fun) (&XEXP (body, 0), data);
1781 return;
1783 case UNSPEC:
1784 case UNSPEC_VOLATILE:
1785 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1786 (*fun) (&XVECEXP (body, 0, i), data);
1787 return;
1789 case CLOBBER:
1790 if (GET_CODE (XEXP (body, 0)) == MEM)
1791 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1792 return;
1794 case SET:
1796 rtx dest = SET_DEST (body);
1798 /* For sets we process everything in the source, plus the address of
1799 a memory destination and the size and position operands of a ZERO_EXTRACT destination. */
1800 (*fun) (&SET_SRC (body), data);
1802 if (GET_CODE (dest) == ZERO_EXTRACT)
1804 (*fun) (&XEXP (dest, 1), data);
1805 (*fun) (&XEXP (dest, 2), data);
1808 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1809 dest = XEXP (dest, 0);
1811 if (GET_CODE (dest) == MEM)
1812 (*fun) (&XEXP (dest, 0), data);
1814 return;
1816 default:
1817 /* All the other possibilities never store. */
1818 (*fun) (pbody, data);
1819 return;
1823 /* Return nonzero if X's old contents don't survive after INSN.
1824 This will be true if X is (cc0) or if X is a register and
1825 X dies in INSN or because INSN entirely sets X.
1827 "Entirely set" means set directly and not through a SUBREG,
1828 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1829 Likewise, REG_INC does not count.
1831 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1832 but for this use that makes no difference, since regs don't overlap
1833 during their lifetimes. Therefore, this function may be used
1834 at any time after deaths have been computed (in flow.c).
1836 If REG is a hard reg that occupies multiple machine registers, this
1837 function will only return 1 if each of those registers will be replaced
1838 by INSN. */
1841 dead_or_set_p (insn, x)
1842 rtx insn;
1843 rtx x;
1845 unsigned int regno, last_regno;
1846 unsigned int i;
1848 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1849 if (GET_CODE (x) == CC0)
1850 return 1;
1852 if (GET_CODE (x) != REG)
1853 abort ();
1855 regno = REGNO (x);
1856 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1857 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1859 for (i = regno; i <= last_regno; i++)
1860 if (! dead_or_set_regno_p (insn, i))
1861 return 0;
1863 return 1;
1866 /* Utility function for dead_or_set_p to check an individual register. Also
1867 called from flow.c. */
1870 dead_or_set_regno_p (insn, test_regno)
1871 rtx insn;
1872 unsigned int test_regno;
1874 unsigned int regno, endregno;
1875 rtx pattern;
1877 /* See if there is a death note for something that includes TEST_REGNO. */
1878 if (find_regno_note (insn, REG_DEAD, test_regno))
1879 return 1;
1881 if (GET_CODE (insn) == CALL_INSN
1882 && find_regno_fusage (insn, CLOBBER, test_regno))
1883 return 1;
1885 pattern = PATTERN (insn);
1887 if (GET_CODE (pattern) == COND_EXEC)
1888 pattern = COND_EXEC_CODE (pattern);
1890 if (GET_CODE (pattern) == SET)
1892 rtx dest = SET_DEST (pattern);
1894 /* A value is totally replaced if it is the destination or the
1895 destination is a SUBREG of REGNO that does not change the number of
1896 words in it. */
1897 if (GET_CODE (dest) == SUBREG
1898 && (((GET_MODE_SIZE (GET_MODE (dest))
1899 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1900 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1901 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1902 dest = SUBREG_REG (dest);
1904 if (GET_CODE (dest) != REG)
1905 return 0;
1907 regno = REGNO (dest);
1908 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1909 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1911 return (test_regno >= regno && test_regno < endregno);
1913 else if (GET_CODE (pattern) == PARALLEL)
1915 int i;
1917 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1919 rtx body = XVECEXP (pattern, 0, i);
1921 if (GET_CODE (body) == COND_EXEC)
1922 body = COND_EXEC_CODE (body);
1924 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1926 rtx dest = SET_DEST (body);
1928 if (GET_CODE (dest) == SUBREG
1929 && (((GET_MODE_SIZE (GET_MODE (dest))
1930 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1931 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1932 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1933 dest = SUBREG_REG (dest);
1935 if (GET_CODE (dest) != REG)
1936 continue;
1938 regno = REGNO (dest);
1939 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1940 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1942 if (test_regno >= regno && test_regno < endregno)
1943 return 1;
1948 return 0;
1951 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1952 If DATUM is nonzero, look for one whose datum is DATUM. */
1955 find_reg_note (insn, kind, datum)
1956 rtx insn;
1957 enum reg_note kind;
1958 rtx datum;
1960 rtx link;
1962 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1963 if (! INSN_P (insn))
1964 return 0;
1966 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1967 if (REG_NOTE_KIND (link) == kind
1968 && (datum == 0 || datum == XEXP (link, 0)))
1969 return link;
1970 return 0;
1973 /* Return the reg-note of kind KIND in insn INSN which applies to register
1974 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1975 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1976 it might be the case that the note overlaps REGNO. */
1979 find_regno_note (insn, kind, regno)
1980 rtx insn;
1981 enum reg_note kind;
1982 unsigned int regno;
1984 rtx link;
1986 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1987 if (! INSN_P (insn))
1988 return 0;
1990 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1991 if (REG_NOTE_KIND (link) == kind
1992 /* Verify that it is a register, so that scratch and MEM won't cause a
1993 problem here. */
1994 && GET_CODE (XEXP (link, 0)) == REG
1995 && REGNO (XEXP (link, 0)) <= regno
1996 && ((REGNO (XEXP (link, 0))
1997 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
1998 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
1999 GET_MODE (XEXP (link, 0)))))
2000 > regno))
2001 return link;
2002 return 0;
2005 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
2006 has such a note. */
2009 find_reg_equal_equiv_note (insn)
2010 rtx insn;
2012 rtx link;
2014 if (!INSN_P (insn))
2015 return 0;
2016 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2017 if (REG_NOTE_KIND (link) == REG_EQUAL
2018 || REG_NOTE_KIND (link) == REG_EQUIV)
2020 if (single_set (insn) == 0)
2021 return 0;
2022 return link;
2024 return NULL;
2027 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
2028 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2031 find_reg_fusage (insn, code, datum)
2032 rtx insn;
2033 enum rtx_code code;
2034 rtx datum;
2036 /* If it's not a CALL_INSN, it can't possibly have a
2037 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
2038 if (GET_CODE (insn) != CALL_INSN)
2039 return 0;
2041 if (! datum)
2042 abort ();
2044 if (GET_CODE (datum) != REG)
2046 rtx link;
2048 for (link = CALL_INSN_FUNCTION_USAGE (insn);
2049 link;
2050 link = XEXP (link, 1))
2051 if (GET_CODE (XEXP (link, 0)) == code
2052 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
2053 return 1;
2055 else
2057 unsigned int regno = REGNO (datum);
2059 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2060 to pseudo registers, so don't bother checking. */
2062 if (regno < FIRST_PSEUDO_REGISTER)
2064 unsigned int end_regno
2065 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
2066 unsigned int i;
2068 for (i = regno; i < end_regno; i++)
2069 if (find_regno_fusage (insn, code, i))
2070 return 1;
2074 return 0;
2077 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2078 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2081 find_regno_fusage (insn, code, regno)
2082 rtx insn;
2083 enum rtx_code code;
2084 unsigned int regno;
2086 rtx link;
2088 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2089 to pseudo registers, so don't bother checking. */
2091 if (regno >= FIRST_PSEUDO_REGISTER
2092 || GET_CODE (insn) != CALL_INSN)
2093 return 0;
2095 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2097 unsigned int regnote;
2098 rtx op, reg;
2100 if (GET_CODE (op = XEXP (link, 0)) == code
2101 && GET_CODE (reg = XEXP (op, 0)) == REG
2102 && (regnote = REGNO (reg)) <= regno
2103 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2104 return 1;
2107 return 0;
2110 /* Return true if INSN is a call to a pure function. */
2113 pure_call_p (insn)
2114 rtx insn;
2116 rtx link;
2118 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2119 return 0;
2121 /* Look for the note that differentiates const and pure functions. */
2122 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2124 rtx u, m;
2126 if (GET_CODE (u = XEXP (link, 0)) == USE
2127 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2128 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2129 return 1;
2132 return 0;
2135 /* Remove register note NOTE from the REG_NOTES of INSN. */
2137 void
2138 remove_note (insn, note)
2139 rtx insn;
2140 rtx note;
2142 rtx link;
2144 if (note == NULL_RTX)
2145 return;
2147 if (REG_NOTES (insn) == note)
2149 REG_NOTES (insn) = XEXP (note, 1);
2150 return;
2153 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2154 if (XEXP (link, 1) == note)
2156 XEXP (link, 1) = XEXP (note, 1);
2157 return;
2160 abort ();
2163 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2164 return 1 if it is found. A simple equality test is used to determine if
2165 NODE matches. */
2168 in_expr_list_p (listp, node)
2169 rtx listp;
2170 rtx node;
2172 rtx x;
2174 for (x = listp; x; x = XEXP (x, 1))
2175 if (node == XEXP (x, 0))
2176 return 1;
2178 return 0;
2181 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2182 remove that entry from the list if it is found.
2184 A simple equality test is used to determine if NODE matches. */
2186 void
2187 remove_node_from_expr_list (node, listp)
2188 rtx node;
2189 rtx *listp;
2191 rtx temp = *listp;
2192 rtx prev = NULL_RTX;
2194 while (temp)
2196 if (node == XEXP (temp, 0))
2198 /* Splice the node out of the list. */
2199 if (prev)
2200 XEXP (prev, 1) = XEXP (temp, 1);
2201 else
2202 *listp = XEXP (temp, 1);
2204 return;
2207 prev = temp;
2208 temp = XEXP (temp, 1);
2212 /* Nonzero if X contains any volatile instructions. These are instructions
2213 which may cause unpredictable machine state, and thus no
2214 instructions should be moved or combined across them. This includes
2215 only volatile asms and UNSPEC_VOLATILE instructions. */
2218 volatile_insn_p (x)
2219 rtx x;
2221 RTX_CODE code;
2223 code = GET_CODE (x);
2224 switch (code)
2226 case LABEL_REF:
2227 case SYMBOL_REF:
2228 case CONST_INT:
2229 case CONST:
2230 case CONST_DOUBLE:
2231 case CONST_VECTOR:
2232 case CC0:
2233 case PC:
2234 case REG:
2235 case SCRATCH:
2236 case CLOBBER:
2237 case ADDR_VEC:
2238 case ADDR_DIFF_VEC:
2239 case CALL:
2240 case MEM:
2241 return 0;
2243 case UNSPEC_VOLATILE:
2244 /* case TRAP_IF: This isn't clear yet. */
2245 return 1;
2247 case ASM_INPUT:
2248 case ASM_OPERANDS:
2249 if (MEM_VOLATILE_P (x))
2250 return 1;
2252 default:
2253 break;
2256 /* Recursively scan the operands of this expression. */
2259 const char *fmt = GET_RTX_FORMAT (code);
2260 int i;
2262 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2264 if (fmt[i] == 'e')
2266 if (volatile_insn_p (XEXP (x, i)))
2267 return 1;
2269 else if (fmt[i] == 'E')
2271 int j;
2272 for (j = 0; j < XVECLEN (x, i); j++)
2273 if (volatile_insn_p (XVECEXP (x, i, j)))
2274 return 1;
2278 return 0;
2281 /* Nonzero if X contains any volatile memory references,
2282 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2285 volatile_refs_p (x)
2286 rtx x;
2288 RTX_CODE code;
2290 code = GET_CODE (x);
2291 switch (code)
2293 case LABEL_REF:
2294 case SYMBOL_REF:
2295 case CONST_INT:
2296 case CONST:
2297 case CONST_DOUBLE:
2298 case CONST_VECTOR:
2299 case CC0:
2300 case PC:
2301 case REG:
2302 case SCRATCH:
2303 case CLOBBER:
2304 case ADDR_VEC:
2305 case ADDR_DIFF_VEC:
2306 return 0;
2308 case UNSPEC_VOLATILE:
2309 return 1;
2311 case MEM:
2312 case ASM_INPUT:
2313 case ASM_OPERANDS:
2314 if (MEM_VOLATILE_P (x))
2315 return 1;
2317 default:
2318 break;
2321 /* Recursively scan the operands of this expression. */
2324 const char *fmt = GET_RTX_FORMAT (code);
2325 int i;
2327 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2329 if (fmt[i] == 'e')
2331 if (volatile_refs_p (XEXP (x, i)))
2332 return 1;
2334 else if (fmt[i] == 'E')
2336 int j;
2337 for (j = 0; j < XVECLEN (x, i); j++)
2338 if (volatile_refs_p (XVECEXP (x, i, j)))
2339 return 1;
2343 return 0;
2346 /* Similar to above, except that it also rejects register pre- and post-
2347 incrementing. */
2350 side_effects_p (x)
2351 rtx x;
2353 RTX_CODE code;
2355 code = GET_CODE (x);
2356 switch (code)
2358 case LABEL_REF:
2359 case SYMBOL_REF:
2360 case CONST_INT:
2361 case CONST:
2362 case CONST_DOUBLE:
2363 case CONST_VECTOR:
2364 case CC0:
2365 case PC:
2366 case REG:
2367 case SCRATCH:
2368 case ADDR_VEC:
2369 case ADDR_DIFF_VEC:
2370 return 0;
2372 case CLOBBER:
2373 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2374 when some combination can't be done. If we see one, don't think
2375 that we can simplify the expression. */
2376 return (GET_MODE (x) != VOIDmode);
2378 case PRE_INC:
2379 case PRE_DEC:
2380 case POST_INC:
2381 case POST_DEC:
2382 case PRE_MODIFY:
2383 case POST_MODIFY:
2384 case CALL:
2385 case UNSPEC_VOLATILE:
2386 /* case TRAP_IF: This isn't clear yet. */
2387 return 1;
2389 case MEM:
2390 case ASM_INPUT:
2391 case ASM_OPERANDS:
2392 if (MEM_VOLATILE_P (x))
2393 return 1;
2395 default:
2396 break;
2399 /* Recursively scan the operands of this expression. */
2402 const char *fmt = GET_RTX_FORMAT (code);
2403 int i;
2405 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2407 if (fmt[i] == 'e')
2409 if (side_effects_p (XEXP (x, i)))
2410 return 1;
2412 else if (fmt[i] == 'E')
2414 int j;
2415 for (j = 0; j < XVECLEN (x, i); j++)
2416 if (side_effects_p (XVECEXP (x, i, j)))
2417 return 1;
2421 return 0;
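/* An illustrative sketch, kept under #if 0 so it is never compiled: it shows
   how the three predicates above differ on hand-built rtl.  The register
   numbers and modes used here are arbitrary assumptions.  */
#if 0
static void
volatility_example ()
{
  rtx mem = gen_rtx_MEM (SImode, stack_pointer_rtx);
  rtx inc = gen_rtx (PRE_INC, Pmode, stack_pointer_rtx);

  MEM_VOLATILE_P (mem) = 1;

  /* volatile_insn_p (mem) == 0: only volatile asms and UNSPEC_VOLATILEs
     count as volatile instructions.  */
  /* volatile_refs_p (mem) == 1: it is a volatile memory reference.  */
  /* side_effects_p (mem) == 1: likewise.  */

  /* volatile_refs_p (inc) == 0, but side_effects_p (inc) == 1 because the
     auto-increment modifies its register.  */
}
#endif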
2424 /* Return nonzero if evaluating rtx X might cause a trap. */
2427 may_trap_p (x)
2428 rtx x;
2430 int i;
2431 enum rtx_code code;
2432 const char *fmt;
2434 if (x == 0)
2435 return 0;
2436 code = GET_CODE (x);
2437 switch (code)
2439 /* Handle these cases quickly. */
2440 case CONST_INT:
2441 case CONST_DOUBLE:
2442 case CONST_VECTOR:
2443 case SYMBOL_REF:
2444 case LABEL_REF:
2445 case CONST:
2446 case PC:
2447 case CC0:
2448 case REG:
2449 case SCRATCH:
2450 return 0;
2452 case ASM_INPUT:
2453 case UNSPEC_VOLATILE:
2454 case TRAP_IF:
2455 return 1;
2457 case ASM_OPERANDS:
2458 return MEM_VOLATILE_P (x);
2460 /* Memory ref can trap unless it's a static var or a stack slot. */
2461 case MEM:
2462 if (MEM_NOTRAP_P (x))
2463 return 0;
2464 return rtx_addr_can_trap_p (XEXP (x, 0));
2466 /* Division by a non-constant might trap. */
2467 case DIV:
2468 case MOD:
2469 case UDIV:
2470 case UMOD:
2471 if (HONOR_SNANS (GET_MODE (x)))
2472 return 1;
2473 if (! CONSTANT_P (XEXP (x, 1))
2474 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2475 && flag_trapping_math))
2476 return 1;
2477 /* This was const0_rtx, but by not using that,
2478 we can link this file into other programs. */
2479 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2480 return 1;
2481 break;
2483 case EXPR_LIST:
2484 /* An EXPR_LIST is used to represent a function call. This
2485 certainly may trap. */
2486 return 1;
2488 case GE:
2489 case GT:
2490 case LE:
2491 case LT:
2492 case COMPARE:
2493 /* Some floating point comparisons may trap. */
2494 if (!flag_trapping_math)
2495 break;
2496 /* ??? There is no machine independent way to check for tests that trap
2497 when COMPARE is used, though many targets do make this distinction.
2498 For instance, sparc uses CCFPE for compares which generate exceptions
2499 and CCFP for compares which do not generate exceptions. */
2500 if (HONOR_NANS (GET_MODE (x)))
2501 return 1;
2502 /* But often the compare has some CC mode, so check operand
2503 modes as well. */
2504 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2505 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2506 return 1;
2507 break;
2509 case EQ:
2510 case NE:
2511 if (HONOR_SNANS (GET_MODE (x)))
2512 return 1;
2513 /* Often comparison is CC mode, so check operand modes. */
2514 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2515 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2516 return 1;
2517 break;
2519 case FIX:
2520 /* Conversion of floating point might trap. */
2521 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2522 return 1;
2523 break;
2525 case NEG:
2526 case ABS:
2527 /* These operations don't trap even with floating point. */
2528 break;
2530 default:
2531 /* Any floating arithmetic may trap. */
2532 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2533 && flag_trapping_math)
2534 return 1;
2537 fmt = GET_RTX_FORMAT (code);
2538 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2540 if (fmt[i] == 'e')
2542 if (may_trap_p (XEXP (x, i)))
2543 return 1;
2545 else if (fmt[i] == 'E')
2547 int j;
2548 for (j = 0; j < XVECLEN (x, i); j++)
2549 if (may_trap_p (XVECEXP (x, i, j)))
2550 return 1;
2553 return 0;
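/* An illustrative sketch, kept under #if 0 so it is never compiled: a few
   hand-built expressions and what may_trap_p says about them.  The register
   numbers used here are arbitrary assumptions.  */
#if 0
static void
may_trap_example ()
{
  rtx addr = gen_rtx_REG (Pmode, 3);
  rtx mem = gen_rtx_MEM (SImode, addr);
  rtx div = gen_rtx (DIV, SImode,
		     gen_rtx_REG (SImode, 4), gen_rtx_REG (SImode, 5));

  /* may_trap_p (GEN_INT (42)) == 0: constants never trap.  */
  /* may_trap_p (mem) != 0: the address is an arbitrary register, so the
     memory reference may fault.  */
  /* may_trap_p (gen_rtx_MEM (SImode, stack_pointer_rtx)) == 0: stack
     slots are considered safe.  */
  /* may_trap_p (div) != 0: division by a non-constant might trap.  */
}
#endif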
2556 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2557 i.e., an inequality. */
2560 inequality_comparisons_p (x)
2561 rtx x;
2563 const char *fmt;
2564 int len, i;
2565 enum rtx_code code = GET_CODE (x);
2567 switch (code)
2569 case REG:
2570 case SCRATCH:
2571 case PC:
2572 case CC0:
2573 case CONST_INT:
2574 case CONST_DOUBLE:
2575 case CONST_VECTOR:
2576 case CONST:
2577 case LABEL_REF:
2578 case SYMBOL_REF:
2579 return 0;
2581 case LT:
2582 case LTU:
2583 case GT:
2584 case GTU:
2585 case LE:
2586 case LEU:
2587 case GE:
2588 case GEU:
2589 return 1;
2591 default:
2592 break;
2595 len = GET_RTX_LENGTH (code);
2596 fmt = GET_RTX_FORMAT (code);
2598 for (i = 0; i < len; i++)
2600 if (fmt[i] == 'e')
2602 if (inequality_comparisons_p (XEXP (x, i)))
2603 return 1;
2605 else if (fmt[i] == 'E')
2607 int j;
2608 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2609 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2610 return 1;
2614 return 0;
2617 /* Replace any occurrence of FROM in X with TO.  The function does
2618 not enter CONST_DOUBLEs when doing the replacement.
2620 Note that no copying is done, so X must not be shared unless all copies
2621 are to be modified. */
2624 replace_rtx (x, from, to)
2625 rtx x, from, to;
2627 int i, j;
2628 const char *fmt;
2630 /* The following prevents loops from occurring when we change a MEM in
2631 a CONST_DOUBLE onto the same CONST_DOUBLE. */
2632 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2633 return x;
2635 if (x == from)
2636 return to;
2638 /* Allow this function to make replacements in EXPR_LISTs. */
2639 if (x == 0)
2640 return 0;
2642 if (GET_CODE (x) == SUBREG)
2644 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2646 if (GET_CODE (new) == CONST_INT)
2648 x = simplify_subreg (GET_MODE (x), new,
2649 GET_MODE (SUBREG_REG (x)),
2650 SUBREG_BYTE (x));
2651 if (! x)
2652 abort ();
2654 else
2655 SUBREG_REG (x) = new;
2657 return x;
2659 else if (GET_CODE (x) == ZERO_EXTEND)
2661 rtx new = replace_rtx (XEXP (x, 0), from, to);
2663 if (GET_CODE (new) == CONST_INT)
2665 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2666 new, GET_MODE (XEXP (x, 0)));
2667 if (! x)
2668 abort ();
2670 else
2671 XEXP (x, 0) = new;
2673 return x;
2676 fmt = GET_RTX_FORMAT (GET_CODE (x));
2677 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2679 if (fmt[i] == 'e')
2680 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2681 else if (fmt[i] == 'E')
2682 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2683 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2686 return x;
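/* An illustrative sketch, kept under #if 0 so it is never compiled:
   substituting one register for another with replace_rtx.  Note that the
   match is on rtx identity, so FROM must be the very object contained in X.
   The register numbers used here are arbitrary assumptions.  */
#if 0
static void
replace_rtx_example ()
{
  rtx pseudo = gen_rtx_REG (SImode, 100);
  rtx hard = gen_rtx_REG (SImode, 1);
  rtx sum = gen_rtx (PLUS, SImode, pseudo, GEN_INT (4));

  sum = replace_rtx (sum, pseudo, hard);
  /* SUM is now (plus:SI (reg:SI 1) (const_int 4)).  The replacement is
     destructive, so SUM must not be shared.  */
}
#endif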
2689 /* Throughout the rtx X, replace many registers according to REG_MAP.
2690 Return the replacement for X (which may be X with altered contents).
2691 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2692 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2694 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2695 should not be mapped to pseudos or vice versa since validate_change
2696 is not called.
2698 If REPLACE_DEST is 1, replacements are also done in destinations;
2699 otherwise, only sources are replaced. */
2702 replace_regs (x, reg_map, nregs, replace_dest)
2703 rtx x;
2704 rtx *reg_map;
2705 unsigned int nregs;
2706 int replace_dest;
2708 enum rtx_code code;
2709 int i;
2710 const char *fmt;
2712 if (x == 0)
2713 return x;
2715 code = GET_CODE (x);
2716 switch (code)
2718 case SCRATCH:
2719 case PC:
2720 case CC0:
2721 case CONST_INT:
2722 case CONST_DOUBLE:
2723 case CONST_VECTOR:
2724 case CONST:
2725 case SYMBOL_REF:
2726 case LABEL_REF:
2727 return x;
2729 case REG:
2730 /* Verify that the register has an entry before trying to access it. */
2731 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2733 /* SUBREGs can't be shared.  Always return a copy to ensure that if
2734 this replacement occurs more than once then each instance will
2735 get a distinct rtx. */
2736 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2737 return copy_rtx (reg_map[REGNO (x)]);
2738 return reg_map[REGNO (x)];
2740 return x;
2742 case SUBREG:
2743 /* Prevent making nested SUBREGs. */
2744 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2745 && reg_map[REGNO (SUBREG_REG (x))] != 0
2746 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2748 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2749 return simplify_gen_subreg (GET_MODE (x), map_val,
2750 GET_MODE (SUBREG_REG (x)),
2751 SUBREG_BYTE (x));
2753 break;
2755 case SET:
2756 if (replace_dest)
2757 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2759 else if (GET_CODE (SET_DEST (x)) == MEM
2760 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2761 /* Even if we are not to replace destinations, replace the register if it
2762 is CONTAINED in the destination (i.e. the destination is memory or a
2763 STRICT_LOW_PART). */
2764 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2765 reg_map, nregs, 0);
2766 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2767 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2768 break;
2770 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2771 return x;
2773 default:
2774 break;
2777 fmt = GET_RTX_FORMAT (code);
2778 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2780 if (fmt[i] == 'e')
2781 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2782 else if (fmt[i] == 'E')
2784 int j;
2785 for (j = 0; j < XVECLEN (x, i); j++)
2786 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2787 nregs, replace_dest);
2790 return x;
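/* An illustrative sketch, kept under #if 0 so it is never compiled: building
   a REG_MAP and applying replace_regs.  The register numbers used here are
   arbitrary assumptions; only sources are replaced because REPLACE_DEST
   is 0.  */
#if 0
static void
replace_regs_example ()
{
  rtx map[3];
  rtx body;

  map[0] = map[1] = NULL_RTX;
  map[2] = gen_rtx_REG (SImode, 1);	/* Map register 2 onto register 1.  */

  body = gen_rtx (PLUS, SImode, gen_rtx_REG (SImode, 2), GEN_INT (8));
  body = replace_regs (body, map, 3, 0);
  /* BODY is now (plus:SI (reg:SI 1) (const_int 8)).  */
}
#endif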
2793 /* Replace occurrences of the old label in *X with the new one.
2794 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2797 replace_label (x, data)
2798 rtx *x;
2799 void *data;
2801 rtx l = *x;
2802 rtx tmp;
2803 rtx old_label = ((replace_label_data *) data)->r1;
2804 rtx new_label = ((replace_label_data *) data)->r2;
2805 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2807 if (l == NULL_RTX)
2808 return 0;
2810 if (GET_CODE (l) == MEM
2811 && (tmp = XEXP (l, 0)) != NULL_RTX
2812 && GET_CODE (tmp) == SYMBOL_REF
2813 && CONSTANT_POOL_ADDRESS_P (tmp))
2815 rtx c = get_pool_constant (tmp);
2816 if (rtx_referenced_p (old_label, c))
2818 rtx new_c, new_l;
2819 replace_label_data *d = (replace_label_data *) data;
2821 /* Create a copy of constant C; replace the label inside
2822 but do not update LABEL_NUSES because uses in constant pool
2823 are not counted. */
2824 new_c = copy_rtx (c);
2825 d->update_label_nuses = false;
2826 for_each_rtx (&new_c, replace_label, data);
2827 d->update_label_nuses = update_label_nuses;
2829 /* Add the new constant NEW_C to the constant pool and replace
2830 the old reference to the constant with the new reference. */
2831 new_l = force_const_mem (get_pool_mode (tmp), new_c);
2832 *x = replace_rtx (l, l, new_l);
2834 return 0;
2837 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2838 field. This is not handled by for_each_rtx because it doesn't
2839 handle unprinted ('0') fields. */
2840 if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
2841 JUMP_LABEL (l) = new_label;
2843 if ((GET_CODE (l) == LABEL_REF
2844 || GET_CODE (l) == INSN_LIST)
2845 && XEXP (l, 0) == old_label)
2847 XEXP (l, 0) = new_label;
2848 if (update_label_nuses)
2850 ++LABEL_NUSES (new_label);
2851 --LABEL_NUSES (old_label);
2853 return 0;
2856 return 0;
2859 /* When *BODY is equal to X or X is directly referenced by *BODY,
2860 return nonzero so that FOR_EACH_RTX stops traversing and returns nonzero
2861 too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2863 static int
2864 rtx_referenced_p_1 (body, x)
2865 rtx *body;
2866 void *x;
2868 rtx y = (rtx) x;
2870 if (*body == NULL_RTX)
2871 return y == NULL_RTX;
2873 /* Return true if a label_ref *BODY refers to label Y. */
2874 if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
2875 return XEXP (*body, 0) == y;
2877 /* If *BODY is a reference to a pool constant, traverse the constant. */
2878 if (GET_CODE (*body) == SYMBOL_REF
2879 && CONSTANT_POOL_ADDRESS_P (*body))
2880 return rtx_referenced_p (y, get_pool_constant (*body));
2882 /* By default, compare the RTL expressions. */
2883 return rtx_equal_p (*body, y);
2886 /* Return true if X is referenced in BODY. */
2889 rtx_referenced_p (x, body)
2890 rtx x;
2891 rtx body;
2893 return for_each_rtx (&body, rtx_referenced_p_1, x);
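/* An illustrative sketch, kept under #if 0 so it is never compiled: a trivial
   wrapper showing a typical use of rtx_referenced_p, e.g. asking whether a
   CODE_LABEL is still mentioned by an insn's pattern.  */
#if 0
static int
insn_uses_label_p (insn, label)
     rtx insn, label;
{
  return rtx_referenced_p (label, PATTERN (insn));
}
#endif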
2896 /* If INSN is a jump to a jumptable insn, return true and store the label (which
2897 INSN jumps to) in *LABEL and the tablejump insn in *TABLE.
2898 LABEL and TABLE may be NULL. */
2900 bool
2901 tablejump_p (insn, label, table)
2902 rtx insn;
2903 rtx *label;
2904 rtx *table;
2906 rtx l, t;
2908 if (onlyjump_p (insn)
2909 && (l = JUMP_LABEL (insn)) != NULL_RTX
2910 && (t = NEXT_INSN (l)) != NULL_RTX
2911 && GET_CODE (t) == JUMP_INSN
2912 && (GET_CODE (PATTERN (t)) == ADDR_VEC
2913 || GET_CODE (PATTERN (t)) == ADDR_DIFF_VEC))
2915 if (label)
2916 *label = l;
2917 if (table)
2918 *table = t;
2919 return true;
2921 return false;
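/* An illustrative sketch, kept under #if 0 so it is never compiled: a typical
   use of tablejump_p.  INSN is a hypothetical jump insn.  */
#if 0
static void
tablejump_example (insn)
     rtx insn;
{
  rtx label, table;

  if (tablejump_p (insn, &label, &table))
    {
      /* LABEL is the CODE_LABEL in front of the dispatch table and TABLE
	 is the ADDR_VEC or ADDR_DIFF_VEC jump insn itself.  */
      int ncases = XVECLEN (PATTERN (table),
			    GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
			    ? 1 : 0);
      /* ... walk the NCASES label refs in the vector here ...  */
    }
}
#endif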
2924 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2925 constant that is not in the constant pool and not in the condition
2926 of an IF_THEN_ELSE. */
2928 static int
2929 computed_jump_p_1 (x)
2930 rtx x;
2932 enum rtx_code code = GET_CODE (x);
2933 int i, j;
2934 const char *fmt;
2936 switch (code)
2938 case LABEL_REF:
2939 case PC:
2940 return 0;
2942 case CONST:
2943 case CONST_INT:
2944 case CONST_DOUBLE:
2945 case CONST_VECTOR:
2946 case SYMBOL_REF:
2947 case REG:
2948 return 1;
2950 case MEM:
2951 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2952 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2954 case IF_THEN_ELSE:
2955 return (computed_jump_p_1 (XEXP (x, 1))
2956 || computed_jump_p_1 (XEXP (x, 2)));
2958 default:
2959 break;
2962 fmt = GET_RTX_FORMAT (code);
2963 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2965 if (fmt[i] == 'e'
2966 && computed_jump_p_1 (XEXP (x, i)))
2967 return 1;
2969 else if (fmt[i] == 'E')
2970 for (j = 0; j < XVECLEN (x, i); j++)
2971 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2972 return 1;
2975 return 0;
2978 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2980 Tablejumps and casesi insns are not considered indirect jumps;
2981 we can recognize them by a (use (label_ref)). */
2984 computed_jump_p (insn)
2985 rtx insn;
2987 int i;
2988 if (GET_CODE (insn) == JUMP_INSN)
2990 rtx pat = PATTERN (insn);
2992 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2993 return 0;
2994 else if (GET_CODE (pat) == PARALLEL)
2996 int len = XVECLEN (pat, 0);
2997 int has_use_labelref = 0;
2999 for (i = len - 1; i >= 0; i--)
3000 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
3001 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
3002 == LABEL_REF))
3003 has_use_labelref = 1;
3005 if (! has_use_labelref)
3006 for (i = len - 1; i >= 0; i--)
3007 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
3008 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
3009 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
3010 return 1;
3012 else if (GET_CODE (pat) == SET
3013 && SET_DEST (pat) == pc_rtx
3014 && computed_jump_p_1 (SET_SRC (pat)))
3015 return 1;
3017 return 0;
3020 /* Traverse X via depth-first search, calling F for each
3021 sub-expression (including X itself). F is also passed the DATA.
3022 If F returns -1, do not traverse sub-expressions, but continue
3023 traversing the rest of the tree. If F ever returns any other
3024 nonzero value, stop the traversal, and return the value returned
3025 by F. Otherwise, return 0. This function does not traverse inside
3026 tree structure that contains RTX_EXPRs, or into sub-expressions
3027 whose format code is `0' since it is not known whether or not those
3028 codes are actually RTL.
3030 This routine is very general, and could (should?) be used to
3031 implement many of the other routines in this file. */
3034 for_each_rtx (x, f, data)
3035 rtx *x;
3036 rtx_function f;
3037 void *data;
3039 int result;
3040 int length;
3041 const char *format;
3042 int i;
3044 /* Call F on X. */
3045 result = (*f) (x, data);
3046 if (result == -1)
3047 /* Do not traverse sub-expressions. */
3048 return 0;
3049 else if (result != 0)
3050 /* Stop the traversal. */
3051 return result;
3053 if (*x == NULL_RTX)
3054 /* There are no sub-expressions. */
3055 return 0;
3057 length = GET_RTX_LENGTH (GET_CODE (*x));
3058 format = GET_RTX_FORMAT (GET_CODE (*x));
3060 for (i = 0; i < length; ++i)
3062 switch (format[i])
3064 case 'e':
3065 result = for_each_rtx (&XEXP (*x, i), f, data);
3066 if (result != 0)
3067 return result;
3068 break;
3070 case 'V':
3071 case 'E':
3072 if (XVEC (*x, i) != 0)
3074 int j;
3075 for (j = 0; j < XVECLEN (*x, i); ++j)
3077 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
3078 if (result != 0)
3079 return result;
3082 break;
3084 default:
3085 /* Nothing to do. */
3086 break;
3091 return 0;
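/* An illustrative sketch, kept under #if 0 so it is never compiled: a callback
   pair showing the for_each_rtx protocol described above -- return -1 to skip
   sub-expressions, nonzero to stop, 0 to keep going.  */
#if 0
static int
count_mems_1 (x, data)
     rtx *x;
     void *data;
{
  if (*x != NULL_RTX && GET_CODE (*x) == MEM)
    {
      ++*(int *) data;
      return -1;	/* Do not look inside this MEM's address.  */
    }
  return 0;		/* Continue the traversal.  */
}

static int
count_mems (x)
     rtx x;
{
  int count = 0;
  for_each_rtx (&x, count_mems_1, &count);
  return count;
}
#endif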
3094 /* Searches X for any reference to REGNO, returning the rtx of the
3095 reference found if any. Otherwise, returns NULL_RTX. */
3098 regno_use_in (regno, x)
3099 unsigned int regno;
3100 rtx x;
3102 const char *fmt;
3103 int i, j;
3104 rtx tem;
3106 if (GET_CODE (x) == REG && REGNO (x) == regno)
3107 return x;
3109 fmt = GET_RTX_FORMAT (GET_CODE (x));
3110 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3112 if (fmt[i] == 'e')
3114 if ((tem = regno_use_in (regno, XEXP (x, i))))
3115 return tem;
3117 else if (fmt[i] == 'E')
3118 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3119 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3120 return tem;
3123 return NULL_RTX;
3126 /* Return a value indicating whether OP, an operand of a commutative
3127 operation, is preferred as the first or second operand. The higher
3128 the value, the stronger the preference for being the first operand.
3129 We use negative values to indicate a preference for the first operand
3130 and positive values for the second operand. */
3133 commutative_operand_precedence (op)
3134 rtx op;
3136 /* Constants always come second.  Prefer "nice" constants. */
3137 if (GET_CODE (op) == CONST_INT)
3138 return -5;
3139 if (GET_CODE (op) == CONST_DOUBLE)
3140 return -4;
3141 if (CONSTANT_P (op))
3142 return -3;
3144 /* SUBREGs of objects should come second. */
3145 if (GET_CODE (op) == SUBREG
3146 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
3147 return -2;
3149 /* If only one operand is a `neg', `not',
3150 `mult', `plus', or `minus' expression, it will be the first
3151 operand. */
3152 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
3153 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
3154 || GET_CODE (op) == MINUS)
3155 return 2;
3157 /* Complex expressions should come first, so decrease the priority
3158 of plain objects. */
3159 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
3160 return -1;
3161 return 0;
3164 /* Return 1 iff it is necessary to swap operands of commutative operation
3165 in order to canonicalize expression. */
3168 swap_commutative_operands_p (x, y)
3169 rtx x, y;
3171 return (commutative_operand_precedence (x)
3172 < commutative_operand_precedence (y));
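/* An illustrative sketch, kept under #if 0 so it is never compiled: the usual
   canonicalization idiom built on swap_commutative_operands_p.  The operands
   chosen here are arbitrary assumptions.  */
#if 0
static void
canonicalize_example ()
{
  rtx op0 = GEN_INT (7);
  rtx op1 = gen_rtx_REG (SImode, 2);

  if (swap_commutative_operands_p (op0, op1))
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
    }
  /* Constants have the lowest precedence, so OP0 is now (reg:SI 2) and
     OP1 is (const_int 7), matching the canonical operand order.  */
}
#endif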
3175 /* Return 1 if X is an autoincrement side effect and the register is
3176 not the stack pointer. */
3178 auto_inc_p (x)
3179 rtx x;
3181 switch (GET_CODE (x))
3183 case PRE_INC:
3184 case POST_INC:
3185 case PRE_DEC:
3186 case POST_DEC:
3187 case PRE_MODIFY:
3188 case POST_MODIFY:
3189 /* There are no REG_INC notes for SP. */
3190 if (XEXP (x, 0) != stack_pointer_rtx)
3191 return 1;
3192 default:
3193 break;
3195 return 0;
3198 /* Return 1 if the sequence of instructions beginning with FROM and up
3199 to and including TO is safe to move. If NEW_TO is non-NULL, and
3200 the sequence is not already safe to move, but can be easily
3201 extended to a sequence which is safe, then NEW_TO will point to the
3202 end of the extended sequence.
3204 For now, this function only checks that the region contains whole
3205 exception regions, but it could be extended to check additional
3206 conditions as well. */
3209 insns_safe_to_move_p (from, to, new_to)
3210 rtx from;
3211 rtx to;
3212 rtx *new_to;
3214 int eh_region_count = 0;
3215 int past_to_p = 0;
3216 rtx r = from;
3218 /* By default, assume the end of the region will be what was
3219 suggested. */
3220 if (new_to)
3221 *new_to = to;
3223 while (r)
3225 if (GET_CODE (r) == NOTE)
3227 switch (NOTE_LINE_NUMBER (r))
3229 case NOTE_INSN_EH_REGION_BEG:
3230 ++eh_region_count;
3231 break;
3233 case NOTE_INSN_EH_REGION_END:
3234 if (eh_region_count == 0)
3235 /* This sequence of instructions contains the end of
3236 an exception region, but not the beginning.  Moving
3237 it will cause chaos. */
3238 return 0;
3240 --eh_region_count;
3241 break;
3243 default:
3244 break;
3247 else if (past_to_p)
3248 /* If we've passed TO, and we see a non-note instruction, we
3249 can't extend the sequence to a movable sequence. */
3250 return 0;
3252 if (r == to)
3254 if (!new_to)
3255 /* It's OK to move the sequence if there were matched sets of
3256 exception region notes. */
3257 return eh_region_count == 0;
3259 past_to_p = 1;
3262 /* It's OK to move the sequence if there were matched sets of
3263 exception region notes. */
3264 if (past_to_p && eh_region_count == 0)
3266 *new_to = r;
3267 return 1;
3270 /* Go to the next instruction. */
3271 r = NEXT_INSN (r);
3274 return 0;
3277 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3279 loc_mentioned_in_p (loc, in)
3280 rtx *loc, in;
3282 enum rtx_code code = GET_CODE (in);
3283 const char *fmt = GET_RTX_FORMAT (code);
3284 int i, j;
3286 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3288 if (loc == &in->fld[i].rtx)
3289 return 1;
3290 if (fmt[i] == 'e')
3292 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3293 return 1;
3295 else if (fmt[i] == 'E')
3296 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3297 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3298 return 1;
3300 return 0;
3303 /* Given a subreg X, return the bit offset where the subreg begins
3304 (counting from the least significant bit of the reg). */
3306 unsigned int
3307 subreg_lsb (x)
3308 rtx x;
3310 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3311 enum machine_mode mode = GET_MODE (x);
3312 unsigned int bitpos;
3313 unsigned int byte;
3314 unsigned int word;
3316 /* A paradoxical subreg begins at bit position 0. */
3317 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3318 return 0;
3320 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3321 /* If the subreg crosses a word boundary ensure that
3322 it also begins and ends on a word boundary. */
3323 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3324 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3325 && (SUBREG_BYTE (x) % UNITS_PER_WORD
3326 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3327 abort ();
3329 if (WORDS_BIG_ENDIAN)
3330 word = (GET_MODE_SIZE (inner_mode)
3331 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3332 else
3333 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3334 bitpos = word * BITS_PER_WORD;
3336 if (BYTES_BIG_ENDIAN)
3337 byte = (GET_MODE_SIZE (inner_mode)
3338 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3339 else
3340 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3341 bitpos += byte * BITS_PER_UNIT;
3343 return bitpos;
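/* Worked example (an illustrative assumption about the target): on a 32-bit
   machine with !BYTES_BIG_ENDIAN and !WORDS_BIG_ENDIAN, (subreg:QI (reg:SI N) 2)
   covers bits 16..23, so subreg_lsb returns 2 * BITS_PER_UNIT == 16.  With
   both BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN set, byte 2 is the second least
   significant byte and subreg_lsb returns 8.  */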
3346 /* This function returns the regno offset of a subreg expression.
3347 xregno - A regno of an inner hard subreg_reg (or what will become one).
3348 xmode - The mode of xregno.
3349 offset - The byte offset.
3350 ymode - The mode of a top level SUBREG (or what may become one).
3351 RETURN - The regno offset which would be used. */
3352 unsigned int
3353 subreg_regno_offset (xregno, xmode, offset, ymode)
3354 unsigned int xregno;
3355 enum machine_mode xmode;
3356 unsigned int offset;
3357 enum machine_mode ymode;
3359 int nregs_xmode, nregs_ymode;
3360 int mode_multiple, nregs_multiple;
3361 int y_offset;
3363 if (xregno >= FIRST_PSEUDO_REGISTER)
3364 abort ();
3366 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3367 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3369 /* If this is a big endian paradoxical subreg, which uses more actual
3370 hard registers than the original register, we must return a negative
3371 offset so that we find the proper highpart of the register. */
3372 if (offset == 0
3373 && nregs_ymode > nregs_xmode
3374 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3375 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3376 return nregs_xmode - nregs_ymode;
3378 if (offset == 0 || nregs_xmode == nregs_ymode)
3379 return 0;
3381 /* size of ymode must not be greater than the size of xmode. */
3382 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3383 if (mode_multiple == 0)
3384 abort ();
3386 y_offset = offset / GET_MODE_SIZE (ymode);
3387 nregs_multiple = nregs_xmode / nregs_ymode;
3388 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
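/* Worked example (an illustrative assumption about the target): with 4-byte
   hard registers where HARD_REGNO_NREGS gives 2 for DImode and 1 for SImode,
   consider (subreg:SI (reg:DI 10) 4), the word at byte offset 4.  Then
   mode_multiple == 2, y_offset == 1 and nregs_multiple == 2, so
   subreg_regno_offset (10, DImode, 4, SImode) == 1, i.e. the subreg lives in
   hard register 11.  */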
3391 /* This function returns true when the offset is representable via
3392 subreg_offset in the given regno.
3393 xregno - A regno of an inner hard subreg_reg (or what will become one).
3394 xmode - The mode of xregno.
3395 offset - The byte offset.
3396 ymode - The mode of a top level SUBREG (or what may become one).
3397 RETURN - Whether the offset is representable. */
3398 bool
3399 subreg_offset_representable_p (xregno, xmode, offset, ymode)
3400 unsigned int xregno;
3401 enum machine_mode xmode;
3402 unsigned int offset;
3403 enum machine_mode ymode;
3405 int nregs_xmode, nregs_ymode;
3406 int mode_multiple, nregs_multiple;
3407 int y_offset;
3409 if (xregno >= FIRST_PSEUDO_REGISTER)
3410 abort ();
3412 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3413 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3415 /* paradoxical subregs are always valid. */
3416 if (offset == 0
3417 && nregs_ymode > nregs_xmode
3418 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3419 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3420 return true;
3422 /* Lowpart subregs are always valid. */
3423 if (offset == subreg_lowpart_offset (ymode, xmode))
3424 return true;
3426 #ifdef ENABLE_CHECKING
3427 /* This should always pass, otherwise we don't know how to verify the
3428 constraint. These conditions may be relaxed but subreg_offset would
3429 need to be redesigned. */
3430 if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3431 || GET_MODE_SIZE (ymode) % nregs_ymode
3432 || nregs_xmode % nregs_ymode)
3433 abort ();
3434 #endif
3436 /* The XMODE value can be seen as a vector of NREGS_XMODE
3437 values.  The subreg must represent a lowpart of a given field.
3438 Compute which field it is. */
3439 offset -= subreg_lowpart_offset (ymode,
3440 mode_for_size (GET_MODE_BITSIZE (xmode)
3441 / nregs_xmode,
3442 MODE_INT, 0));
3444 /* size of ymode must not be greater than the size of xmode. */
3445 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3446 if (mode_multiple == 0)
3447 abort ();
3449 y_offset = offset / GET_MODE_SIZE (ymode);
3450 nregs_multiple = nregs_xmode / nregs_ymode;
3451 #ifdef ENABLE_CHECKING
3452 if (offset % GET_MODE_SIZE (ymode)
3453 || mode_multiple % nregs_multiple)
3454 abort ();
3455 #endif
3456 return (!(y_offset % (mode_multiple / nregs_multiple)));
3459 /* Return the final regno that a subreg expression refers to. */
3460 unsigned int
3461 subreg_regno (x)
3462 rtx x;
3464 unsigned int ret;
3465 rtx subreg = SUBREG_REG (x);
3466 int regno = REGNO (subreg);
3468 ret = regno + subreg_regno_offset (regno,
3469 GET_MODE (subreg),
3470 SUBREG_BYTE (x),
3471 GET_MODE (x));
3472 return ret;
3475 struct parms_set_data
3477 int nregs;
3478 HARD_REG_SET regs;
3481 /* Helper function for noticing stores to parameter registers. */
3482 static void
3483 parms_set (x, pat, data)
3484 rtx x, pat ATTRIBUTE_UNUSED;
3485 void *data;
3487 struct parms_set_data *d = data;
3488 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3489 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3491 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3492 d->nregs--;
3496 /* Look backward for the first parameter to be loaded.
3497 Do not skip BOUNDARY. */
3499 find_first_parameter_load (call_insn, boundary)
3500 rtx call_insn, boundary;
3502 struct parms_set_data parm;
3503 rtx p, before;
3505 /* Since different machines initialize their parameter registers
3506 in different orders, assume nothing. Collect the set of all
3507 parameter registers. */
3508 CLEAR_HARD_REG_SET (parm.regs);
3509 parm.nregs = 0;
3510 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3511 if (GET_CODE (XEXP (p, 0)) == USE
3512 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3514 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3515 abort ();
3517 /* We only care about registers which can hold function
3518 arguments. */
3519 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3520 continue;
3522 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3523 parm.nregs++;
3525 before = call_insn;
3527 /* Search backward for the first set of a register in this set. */
3528 while (parm.nregs && before != boundary)
3530 before = PREV_INSN (before);
3532 /* It is possible that some loads got CSEed from one call to
3533 another. Stop in that case. */
3534 if (GET_CODE (before) == CALL_INSN)
3535 break;
3537 /* Our caller must either ensure that we will find all sets
3538 (in case the code has not been optimized yet), or take care
3539 of possible labels by setting BOUNDARY to the preceding
3540 CODE_LABEL. */
3541 if (GET_CODE (before) == CODE_LABEL)
3543 if (before != boundary)
3544 abort ();
3545 break;
3548 if (INSN_P (before))
3549 note_stores (PATTERN (before), parms_set, &parm);
3551 return before;
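/* An illustrative sketch, kept under #if 0 so it is never compiled: emitting
   a sequence ahead of both a call and its argument loads.  CALL_INSN,
   BOUNDARY and SEQ are hypothetical parameters.  */
#if 0
static void
parameter_load_example (call_insn, boundary, seq)
     rtx call_insn, boundary, seq;
{
  rtx first = find_first_parameter_load (call_insn, boundary);
  emit_insn_before (seq, first);
}
#endif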
3554 /* Return true if we should avoid inserting code between INSN and the
3555 preceding call instruction. */
3557 bool
3558 keep_with_call_p (insn)
3559 rtx insn;
3561 rtx set;
3563 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3565 if (GET_CODE (SET_DEST (set)) == REG
3566 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3567 && fixed_regs[REGNO (SET_DEST (set))]
3568 && general_operand (SET_SRC (set), VOIDmode))
3569 return true;
3570 if (GET_CODE (SET_SRC (set)) == REG
3571 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3572 && GET_CODE (SET_DEST (set)) == REG
3573 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3574 return true;
3575 /* There may be a stack pop just after the call and before the store
3576 of the return register. Search for the actual store when deciding
3577 if we can break or not. */
3578 if (SET_DEST (set) == stack_pointer_rtx)
3580 rtx i2 = next_nonnote_insn (insn);
3581 if (i2 && keep_with_call_p (i2))
3582 return true;
3585 return false;
3588 /* Return true when a store to register X can be hoisted to the place
3589 with LIVE registers (which can be NULL).  VAL contains the destination
3590 whose value will be used. */
3592 static bool
3593 hoist_test_store (x, val, live)
3594 rtx x, val;
3595 regset live;
3597 if (GET_CODE (x) == SCRATCH)
3598 return true;
3600 if (rtx_equal_p (x, val))
3601 return true;
3603 /* Allow a subreg of X as long as it is not writing just part of a multireg
3604 pseudo; otherwise we would need to update all users to care about hoisting
3605 the store too.  The caller may represent that by specifying the whole subreg as VAL. */
3607 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3609 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3610 && GET_MODE_BITSIZE (GET_MODE (x)) <
3611 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3612 return false;
3613 return true;
3615 if (GET_CODE (x) == SUBREG)
3616 x = SUBREG_REG (x);
3618 /* Anything except a register store is not hoistable.  This includes
3619 partial stores to registers. */
3621 if (!REG_P (x))
3622 return false;
3624 /* Pseudo registers can always be replaced by another pseudo to avoid
3625 the side effect; for hard registers we must ensure that they are dead.
3626 Eventually we may want to add code to try to turn pseudos into hard
3627 registers, but it is unlikely to be useful. */
3629 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3631 int regno = REGNO (x);
3632 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3634 if (!live)
3635 return false;
3636 if (REGNO_REG_SET_P (live, regno))
3637 return false;
3638 while (--n > 0)
3639 if (REGNO_REG_SET_P (live, regno + n))
3640 return false;
3642 return true;
3646 /* Return true if INSN can be hoisted to a place with LIVE hard registers
3647 (LIVE can be NULL when unknown).  VAL is expected to be stored by the insn
3648 and used by the hoisting pass. */
3650 bool
3651 can_hoist_insn_p (insn, val, live)
3652 rtx insn, val;
3653 regset live;
3655 rtx pat = PATTERN (insn);
3656 int i;
3658 /* It is probably not worth the complexity to handle multiple
3659 set stores. */
3660 if (!single_set (insn))
3661 return false;
3662 /* We could move a CALL_INSN, but we would need to check that all
3663 caller-clobbered regs are dead, so give up for now. */
3664 if (GET_CODE (insn) == CALL_INSN)
3665 return false;
3666 /* In the future we will handle hoisting of libcall sequences, but
3667 give up for now. */
3668 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3669 return false;
3670 switch (GET_CODE (pat))
3672 case SET:
3673 if (!hoist_test_store (SET_DEST (pat), val, live))
3674 return false;
3675 break;
3676 case USE:
3677 /* USEs have tricky semantics, so do not move them. */
3678 return false;
3679 break;
3680 case CLOBBER:
3681 if (!hoist_test_store (XEXP (pat, 0), val, live))
3682 return false;
3683 break;
3684 case PARALLEL:
3685 for (i = 0; i < XVECLEN (pat, 0); i++)
3687 rtx x = XVECEXP (pat, 0, i);
3688 switch (GET_CODE (x))
3690 case SET:
3691 if (!hoist_test_store (SET_DEST (x), val, live))
3692 return false;
3693 break;
3694 case USE:
3695 /* We would need to fix callers to really ensure availability
3696 of all values the insn uses, but for now it is safe to prohibit
3697 hoisting of any insn having such hidden uses. */
3698 return false;
3699 break;
3700 case CLOBBER:
3701 if (!hoist_test_store (SET_DEST (x), val, live))
3702 return false;
3703 break;
3704 default:
3705 break;
3708 break;
3709 default:
3710 abort ();
3712 return true;
3715 /* Update a store after hoisting - replace all stores to pseudo registers
3716 by new ones to avoid clobbering of values, except for the store to VAL,
3717 which will be updated to store to NEW. */
3719 static void
3720 hoist_update_store (insn, xp, val, new)
3721 rtx insn, *xp, val, new;
3723 rtx x = *xp;
3725 if (GET_CODE (x) == SCRATCH)
3726 return;
3728 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3729 validate_change (insn, xp,
3730 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3731 SUBREG_BYTE (x)), 1);
3732 if (rtx_equal_p (x, val))
3734 validate_change (insn, xp, new, 1);
3735 return;
3737 if (GET_CODE (x) == SUBREG)
3739 xp = &SUBREG_REG (x);
3740 x = *xp;
3743 if (!REG_P (x))
3744 abort ();
3746 /* We've verified that hard registers are dead, so we may keep the side
3747 effect.  Otherwise replace it with a new pseudo. */
3748 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3749 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3750 REG_NOTES (insn)
3751 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3754 /* Create a copy of INSN after AFTER, replacing the store to VAL with a store
3755 to NEW and each other side effect to a pseudo register with a new pseudo register. */
3758 hoist_insn_after (insn, after, val, new)
3759 rtx insn, after, val, new;
3761 rtx pat;
3762 int i;
3763 rtx note;
3765 insn = emit_copy_of_insn_after (insn, after);
3766 pat = PATTERN (insn);
3768 /* Remove REG_UNUSED notes as we will re-emit them. */
3769 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3770 remove_note (insn, note);
3772 /* To get this working, callers would have to ensure that everything referenced
3773 by REG_EQUAL/REG_EQUIV notes is moved too.  Let's just remove them; it is
3774 probably easier. */
3775 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3776 remove_note (insn, note);
3777 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3778 remove_note (insn, note);
3780 /* Remove REG_DEAD notes as they might not be valid anymore in case
3781 we create redundancy. */
3782 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3783 remove_note (insn, note);
3784 switch (GET_CODE (pat))
3786 case SET:
3787 hoist_update_store (insn, &SET_DEST (pat), val, new);
3788 break;
3789 case USE:
3790 break;
3791 case CLOBBER:
3792 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3793 break;
3794 case PARALLEL:
3795 for (i = 0; i < XVECLEN (pat, 0); i++)
3797 rtx x = XVECEXP (pat, 0, i);
3798 switch (GET_CODE (x))
3800 case SET:
3801 hoist_update_store (insn, &SET_DEST (x), val, new);
3802 break;
3803 case USE:
3804 break;
3805 case CLOBBER:
3806 hoist_update_store (insn, &SET_DEST (x), val, new);
3807 break;
3808 default:
3809 break;
3812 break;
3813 default:
3814 abort ();
3816 if (!apply_change_group ())
3817 abort ();
3819 return insn;
3823 hoist_insn_to_edge (insn, e, val, new)
3824 rtx insn, val, new;
3825 edge e;
3827 rtx new_insn;
3829 /* We cannot insert instructions on an abnormal critical edge.
3830 It will be easier to find the culprit if we die now. */
3831 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3832 abort ();
3834 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3835 stuff.  We also emit CALL_INSNs and friends. */
3836 if (e->insns == NULL_RTX)
3838 start_sequence ();
3839 emit_note (NOTE_INSN_DELETED);
3841 else
3842 push_to_sequence (e->insns);
3844 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3846 e->insns = get_insns ();
3847 end_sequence ();
3848 return new_insn;
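/* An illustrative sketch, kept under #if 0 so it is never compiled: how a
   hoisting pass might combine can_hoist_insn_p and hoist_insn_to_edge.
   INSN, E and VAL are hypothetical parameters.  */
#if 0
static rtx
hoist_example (insn, e, val)
     rtx insn, val;
     edge e;
{
  if (can_hoist_insn_p (insn, val, NULL))
    {
      rtx new = gen_reg_rtx (GET_MODE (val));
      return hoist_insn_to_edge (insn, e, val, new);
    }
  return NULL_RTX;
}
#endif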