gcc/rtlanal.c
1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "tm_p.h"
33 #include "flags.h"
34 #include "basic-block.h"
35 #include "real.h"
37 /* Forward declarations */
38 static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
39 static void set_of_1 PARAMS ((rtx, rtx, void *));
40 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
41 static int rtx_referenced_p_1 PARAMS ((rtx *, void *));
42 static int computed_jump_p_1 PARAMS ((rtx));
43 static void parms_set PARAMS ((rtx, rtx, void *));
44 static bool hoist_test_store PARAMS ((rtx, rtx, regset));
45 static void hoist_update_store PARAMS ((rtx, rtx *, rtx, rtx));
47 /* Bit flags that specify the machine subtype we are compiling for.
48 Bits are tested using macros TARGET_... defined in the tm.h file
49 and set by `-m...' switches. Must be defined in rtlanal.c. */
51 int target_flags;
53 /* Return 1 if the value of X is unstable
54 (would be different at a different point in the program).
55 The frame pointer, arg pointer, etc. are considered stable
56 (within one function) and so is anything marked `unchanging'. */
58 int
59 rtx_unstable_p (x)
60 rtx x;
62 RTX_CODE code = GET_CODE (x);
63 int i;
64 const char *fmt;
66 switch (code)
68 case MEM:
69 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
71 case QUEUED:
72 return 1;
74 case ADDRESSOF:
75 case CONST:
76 case CONST_INT:
77 case CONST_DOUBLE:
78 case CONST_VECTOR:
79 case SYMBOL_REF:
80 case LABEL_REF:
81 return 0;
83 case REG:
84 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
85 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
86 /* The arg pointer varies if it is not a fixed register. */
87 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
88 || RTX_UNCHANGING_P (x))
89 return 0;
90 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
91 /* ??? When call-clobbered, the value is stable modulo the restore
92 that must happen after a call. This currently screws up local-alloc
93 into believing that the restore is not needed. */
94 if (x == pic_offset_table_rtx)
95 return 0;
96 #endif
97 return 1;
99 case ASM_OPERANDS:
100 if (MEM_VOLATILE_P (x))
101 return 1;
103 /* FALLTHROUGH */
105 default:
106 break;
109 fmt = GET_RTX_FORMAT (code);
110 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
111 if (fmt[i] == 'e')
113 if (rtx_unstable_p (XEXP (x, i)))
114 return 1;
116 else if (fmt[i] == 'E')
118 int j;
119 for (j = 0; j < XVECLEN (x, i); j++)
120 if (rtx_unstable_p (XVECEXP (x, i, j)))
121 return 1;
124 return 0;
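/* Illustrative sketch, not part of the original rtlanal.c: a caller
   typically treats the value loaded from a MEM as reusable across
   program points only when rtx_unstable_p rejects neither the MEM nor
   (implicitly) its address.  The name `value_reusable_p' and its
   argument are hypothetical.  */

static int
value_reusable_p (mem)
     rtx mem;
{
  return GET_CODE (mem) == MEM && ! rtx_unstable_p (mem);
}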
127 /* Return 1 if X has a value that can vary even between two
128 executions of the program. 0 means X can be compared reliably
129 against certain constants or near-constants.
130 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
131 zero, we are slightly more conservative.
132 The frame pointer and the arg pointer are considered constant. */
135 rtx_varies_p (x, for_alias)
136 rtx x;
137 int for_alias;
139 RTX_CODE code = GET_CODE (x);
140 int i;
141 const char *fmt;
143 switch (code)
145 case MEM:
146 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
148 case QUEUED:
149 return 1;
151 case CONST:
152 case CONST_INT:
153 case CONST_DOUBLE:
154 case CONST_VECTOR:
155 case SYMBOL_REF:
156 case LABEL_REF:
157 return 0;
159 case ADDRESSOF:
160 /* This will resolve to some offset from the frame pointer. */
161 return 0;
163 case REG:
164 /* Note that we have to test for the actual rtx used for the frame
165 and arg pointers and not just the register number in case we have
166 eliminated the frame and/or arg pointer and are using it
167 for pseudos. */
168 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
169 /* The arg pointer varies if it is not a fixed register. */
170 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
171 return 0;
172 if (x == pic_offset_table_rtx
173 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
174 /* ??? When call-clobbered, the value is stable modulo the restore
175 that must happen after a call. This currently screws up
176 local-alloc into believing that the restore is not needed, so we
177 must return 0 only if we are called from alias analysis. */
178 && for_alias
179 #endif
181 return 0;
182 return 1;
184 case LO_SUM:
185 /* The operand 0 of a LO_SUM is considered constant
186 (in fact it is related specifically to operand 1)
187 during alias analysis. */
188 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
189 || rtx_varies_p (XEXP (x, 1), for_alias);
191 case ASM_OPERANDS:
192 if (MEM_VOLATILE_P (x))
193 return 1;
195 /* FALLTHROUGH */
197 default:
198 break;
201 fmt = GET_RTX_FORMAT (code);
202 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
203 if (fmt[i] == 'e')
205 if (rtx_varies_p (XEXP (x, i), for_alias))
206 return 1;
208 else if (fmt[i] == 'E')
210 int j;
211 for (j = 0; j < XVECLEN (x, i); j++)
212 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
213 return 1;
216 return 0;
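/* Illustrative sketch, not part of the original rtlanal.c: a pass that
   wants to remember a computed value across arbitrary code motion could
   require both that the value is stable within the function and that it
   can be compared reliably against constants; FOR_ALIAS is 0 here to get
   the more conservative answer.  The function name is hypothetical.  */

static int
value_safe_to_remember_p (x)
     rtx x;
{
  return ! rtx_unstable_p (x) && ! rtx_varies_p (x, 0);
}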
 219 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
222 rtx_addr_can_trap_p (x)
223 rtx x;
225 enum rtx_code code = GET_CODE (x);
227 switch (code)
229 case SYMBOL_REF:
230 return SYMBOL_REF_WEAK (x);
232 case LABEL_REF:
233 return 0;
235 case ADDRESSOF:
236 /* This will resolve to some offset from the frame pointer. */
237 return 0;
239 case REG:
240 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
241 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
242 || x == stack_pointer_rtx
243 /* The arg pointer varies if it is not a fixed register. */
244 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
245 return 0;
246 /* All of the virtual frame registers are stack references. */
247 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
248 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
249 return 0;
250 return 1;
252 case CONST:
253 return rtx_addr_can_trap_p (XEXP (x, 0));
255 case PLUS:
256 /* An address is assumed not to trap if it is an address that can't
257 trap plus a constant integer or it is the pic register plus a
258 constant. */
259 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
260 && GET_CODE (XEXP (x, 1)) == CONST_INT)
261 || (XEXP (x, 0) == pic_offset_table_rtx
262 && CONSTANT_P (XEXP (x, 1))));
264 case LO_SUM:
265 case PRE_MODIFY:
266 return rtx_addr_can_trap_p (XEXP (x, 1));
268 case PRE_DEC:
269 case PRE_INC:
270 case POST_DEC:
271 case POST_INC:
272 case POST_MODIFY:
273 return rtx_addr_can_trap_p (XEXP (x, 0));
275 default:
276 break;
 279 /* If it isn't one of the cases above, it can cause a trap. */
280 return 1;
283 /* Return true if X is an address that is known to not be zero. */
285 bool
286 nonzero_address_p (x)
287 rtx x;
289 enum rtx_code code = GET_CODE (x);
291 switch (code)
293 case SYMBOL_REF:
294 return !SYMBOL_REF_WEAK (x);
296 case LABEL_REF:
297 return true;
299 case ADDRESSOF:
300 /* This will resolve to some offset from the frame pointer. */
301 return true;
303 case REG:
304 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
305 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
306 || x == stack_pointer_rtx
307 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
308 return true;
309 /* All of the virtual frame registers are stack references. */
310 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
311 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
312 return true;
313 return false;
315 case CONST:
316 return nonzero_address_p (XEXP (x, 0));
318 case PLUS:
319 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
321 /* Pointers aren't allowed to wrap. If we've got a register
322 that is known to be a pointer, and a positive offset, then
323 the composite can't be zero. */
324 if (INTVAL (XEXP (x, 1)) > 0
325 && REG_P (XEXP (x, 0))
326 && REG_POINTER (XEXP (x, 0)))
327 return true;
329 return nonzero_address_p (XEXP (x, 0));
331 /* Handle PIC references. */
332 else if (XEXP (x, 0) == pic_offset_table_rtx
333 && CONSTANT_P (XEXP (x, 1)))
334 return true;
335 return false;
337 case PRE_MODIFY:
338 /* Similar to the above; allow positive offsets. Further, since
339 auto-inc is only allowed in memories, the register must be a
340 pointer. */
341 if (GET_CODE (XEXP (x, 1)) == CONST_INT
342 && INTVAL (XEXP (x, 1)) > 0)
343 return true;
344 return nonzero_address_p (XEXP (x, 0));
346 case PRE_INC:
347 /* Similarly. Further, the offset is always positive. */
348 return true;
350 case PRE_DEC:
351 case POST_DEC:
352 case POST_INC:
353 case POST_MODIFY:
354 return nonzero_address_p (XEXP (x, 0));
356 case LO_SUM:
357 return nonzero_address_p (XEXP (x, 1));
359 default:
360 break;
 363 /* If it isn't one of the cases above, it might be zero. */
364 return false;
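/* Illustrative sketch, not part of the original rtlanal.c: how an
   optimizer might decide that a dereference of ADDR can be performed
   speculatively -- the access cannot fault and the pointer cannot be
   null.  may_trap_p below applies the same rtx_addr_can_trap_p test to
   the address of a MEM.  All names here are hypothetical.  */

static int
speculative_deref_ok_p (addr)
     rtx addr;
{
  return ! rtx_addr_can_trap_p (addr) && nonzero_address_p (addr);
}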
367 /* Return 1 if X refers to a memory location whose address
368 cannot be compared reliably with constant addresses,
369 or if X refers to a BLKmode memory object.
370 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
371 zero, we are slightly more conservative. */
374 rtx_addr_varies_p (x, for_alias)
375 rtx x;
376 int for_alias;
378 enum rtx_code code;
379 int i;
380 const char *fmt;
382 if (x == 0)
383 return 0;
385 code = GET_CODE (x);
386 if (code == MEM)
387 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
389 fmt = GET_RTX_FORMAT (code);
390 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
391 if (fmt[i] == 'e')
393 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
394 return 1;
396 else if (fmt[i] == 'E')
398 int j;
399 for (j = 0; j < XVECLEN (x, i); j++)
400 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
401 return 1;
403 return 0;
406 /* Return the value of the integer term in X, if one is apparent;
407 otherwise return 0.
408 Only obvious integer terms are detected.
409 This is used in cse.c with the `related_value' field. */
411 HOST_WIDE_INT
412 get_integer_term (x)
413 rtx x;
415 if (GET_CODE (x) == CONST)
416 x = XEXP (x, 0);
418 if (GET_CODE (x) == MINUS
419 && GET_CODE (XEXP (x, 1)) == CONST_INT)
420 return - INTVAL (XEXP (x, 1));
421 if (GET_CODE (x) == PLUS
422 && GET_CODE (XEXP (x, 1)) == CONST_INT)
423 return INTVAL (XEXP (x, 1));
424 return 0;
427 /* If X is a constant, return the value sans apparent integer term;
428 otherwise return 0.
429 Only obvious integer terms are detected. */
432 get_related_value (x)
433 rtx x;
435 if (GET_CODE (x) != CONST)
436 return 0;
437 x = XEXP (x, 0);
438 if (GET_CODE (x) == PLUS
439 && GET_CODE (XEXP (x, 1)) == CONST_INT)
440 return XEXP (x, 0);
441 else if (GET_CODE (x) == MINUS
442 && GET_CODE (XEXP (x, 1)) == CONST_INT)
443 return XEXP (x, 0);
444 return 0;
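/* Illustrative sketch, not part of the original rtlanal.c:
   get_related_value and get_integer_term together decompose a
   (const (plus SYM N)) style constant into its base and integer term,
   as cse.c does for its `related_value' chains.  The helper name and
   its arguments are hypothetical.  */

static int
split_constant_term (x, base, term)
     rtx x;
     rtx *base;
     HOST_WIDE_INT *term;
{
  *base = get_related_value (x);
  *term = get_integer_term (x);
  return *base != 0;
}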
447 /* Given a tablejump insn INSN, return the RTL expression for the offset
448 into the jump table. If the offset cannot be determined, then return
449 NULL_RTX.
451 If EARLIEST is nonzero, it is a pointer to a place where the earliest
452 insn used in locating the offset was found. */
455 get_jump_table_offset (insn, earliest)
456 rtx insn;
457 rtx *earliest;
459 rtx label;
460 rtx table;
461 rtx set;
462 rtx old_insn;
463 rtx x;
464 rtx old_x;
465 rtx y;
466 rtx old_y;
467 int i;
469 if (!tablejump_p (insn, &label, &table) || !(set = single_set (insn)))
470 return NULL_RTX;
472 x = SET_SRC (set);
474 /* Some targets (eg, ARM) emit a tablejump that also
475 contains the out-of-range target. */
476 if (GET_CODE (x) == IF_THEN_ELSE
477 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
478 x = XEXP (x, 1);
480 /* Search backwards and locate the expression stored in X. */
481 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
482 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
485 /* If X is an expression using a relative address then strip
486 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
487 or the jump table label. */
488 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
489 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
491 for (i = 0; i < 2; i++)
493 old_insn = insn;
494 y = XEXP (x, i);
496 if (y == pc_rtx || y == pic_offset_table_rtx)
497 break;
499 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
500 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
503 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
504 break;
507 if (i >= 2)
508 return NULL_RTX;
510 x = XEXP (x, 1 - i);
512 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
513 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
517 /* Strip off any sign or zero extension. */
518 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
520 x = XEXP (x, 0);
522 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
523 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
527 /* If X isn't a MEM then this isn't a tablejump we understand. */
528 if (GET_CODE (x) != MEM)
529 return NULL_RTX;
531 /* Strip off the MEM. */
532 x = XEXP (x, 0);
534 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
535 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
 538 /* If X isn't a PLUS then this isn't a tablejump we understand. */
539 if (GET_CODE (x) != PLUS)
540 return NULL_RTX;
542 /* At this point we should have an expression representing the jump table
543 plus an offset. Examine each operand in order to determine which one
544 represents the jump table. Knowing that tells us that the other operand
545 must represent the offset. */
546 for (i = 0; i < 2; i++)
548 old_insn = insn;
549 y = XEXP (x, i);
551 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
552 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
555 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
556 && reg_mentioned_p (label, y))
557 break;
560 if (i >= 2)
561 return NULL_RTX;
563 x = XEXP (x, 1 - i);
565 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
566 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
567 for (i = 0; i < 2; i++)
568 if (XEXP (x, i) == pic_offset_table_rtx)
570 x = XEXP (x, 1 - i);
571 break;
574 if (earliest)
575 *earliest = insn;
577 /* Return the RTL expression representing the offset. */
578 return x;
581 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
582 a global register. */
584 static int
585 global_reg_mentioned_p_1 (loc, data)
586 rtx *loc;
587 void *data ATTRIBUTE_UNUSED;
589 int regno;
590 rtx x = *loc;
592 if (! x)
593 return 0;
595 switch (GET_CODE (x))
597 case SUBREG:
598 if (GET_CODE (SUBREG_REG (x)) == REG)
600 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
601 && global_regs[subreg_regno (x)])
602 return 1;
603 return 0;
605 break;
607 case REG:
608 regno = REGNO (x);
609 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
610 return 1;
611 return 0;
613 case SCRATCH:
614 case PC:
615 case CC0:
616 case CONST_INT:
617 case CONST_DOUBLE:
618 case CONST:
619 case LABEL_REF:
620 return 0;
622 case CALL:
623 /* A non-constant call might use a global register. */
624 return 1;
626 default:
627 break;
630 return 0;
633 /* Returns nonzero if X mentions a global register. */
636 global_reg_mentioned_p (x)
637 rtx x;
639 if (INSN_P (x))
641 if (GET_CODE (x) == CALL_INSN)
643 if (! CONST_OR_PURE_CALL_P (x))
644 return 1;
645 x = CALL_INSN_FUNCTION_USAGE (x);
646 if (x == 0)
647 return 0;
649 else
650 x = PATTERN (x);
653 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
656 /* Return the number of places FIND appears within X. If COUNT_DEST is
657 zero, we do not count occurrences inside the destination of a SET. */
660 count_occurrences (x, find, count_dest)
661 rtx x, find;
662 int count_dest;
664 int i, j;
665 enum rtx_code code;
666 const char *format_ptr;
667 int count;
669 if (x == find)
670 return 1;
672 code = GET_CODE (x);
674 switch (code)
676 case REG:
677 case CONST_INT:
678 case CONST_DOUBLE:
679 case CONST_VECTOR:
680 case SYMBOL_REF:
681 case CODE_LABEL:
682 case PC:
683 case CC0:
684 return 0;
686 case MEM:
687 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
688 return 1;
689 break;
691 case SET:
692 if (SET_DEST (x) == find && ! count_dest)
693 return count_occurrences (SET_SRC (x), find, count_dest);
694 break;
696 default:
697 break;
700 format_ptr = GET_RTX_FORMAT (code);
701 count = 0;
703 for (i = 0; i < GET_RTX_LENGTH (code); i++)
705 switch (*format_ptr++)
707 case 'e':
708 count += count_occurrences (XEXP (x, i), find, count_dest);
709 break;
711 case 'E':
712 for (j = 0; j < XVECLEN (x, i); j++)
713 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
714 break;
717 return count;
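/* Illustrative sketch, not part of the original rtlanal.c: counting the
   uses of a (shared) register rtx in the body of an insn, excluding its
   appearance as a SET destination.  Note that count_occurrences matches
   by pointer identity except for MEMs, so REG should be the pseudo's
   shared rtx.  The helper name is hypothetical.  */

static int
count_reg_uses (reg, insn)
     rtx reg, insn;
{
  return count_occurrences (PATTERN (insn), reg, 0);
}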
720 /* Nonzero if register REG appears somewhere within IN.
721 Also works if REG is not a register; in this case it checks
722 for a subexpression of IN that is Lisp "equal" to REG. */
725 reg_mentioned_p (reg, in)
726 rtx reg, in;
728 const char *fmt;
729 int i;
730 enum rtx_code code;
732 if (in == 0)
733 return 0;
735 if (reg == in)
736 return 1;
738 if (GET_CODE (in) == LABEL_REF)
739 return reg == XEXP (in, 0);
741 code = GET_CODE (in);
743 switch (code)
745 /* Compare registers by number. */
746 case REG:
747 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
749 /* These codes have no constituent expressions
750 and are unique. */
751 case SCRATCH:
752 case CC0:
753 case PC:
754 return 0;
756 case CONST_INT:
757 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
759 case CONST_VECTOR:
760 case CONST_DOUBLE:
761 /* These are kept unique for a given value. */
762 return 0;
764 default:
765 break;
768 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
769 return 1;
771 fmt = GET_RTX_FORMAT (code);
773 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
775 if (fmt[i] == 'E')
777 int j;
778 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
779 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
780 return 1;
782 else if (fmt[i] == 'e'
783 && reg_mentioned_p (reg, XEXP (in, i)))
784 return 1;
786 return 0;
789 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
790 no CODE_LABEL insn. */
793 no_labels_between_p (beg, end)
794 rtx beg, end;
796 rtx p;
797 if (beg == end)
798 return 0;
799 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
800 if (GET_CODE (p) == CODE_LABEL)
801 return 0;
802 return 1;
805 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
806 no JUMP_INSN insn. */
809 no_jumps_between_p (beg, end)
810 rtx beg, end;
812 rtx p;
813 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
814 if (GET_CODE (p) == JUMP_INSN)
815 return 0;
816 return 1;
819 /* Nonzero if register REG is used in an insn between
820 FROM_INSN and TO_INSN (exclusive of those two). */
823 reg_used_between_p (reg, from_insn, to_insn)
824 rtx reg, from_insn, to_insn;
826 rtx insn;
828 if (from_insn == to_insn)
829 return 0;
831 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
832 if (INSN_P (insn)
833 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
834 || (GET_CODE (insn) == CALL_INSN
835 && (find_reg_fusage (insn, USE, reg)
836 || find_reg_fusage (insn, CLOBBER, reg)))))
837 return 1;
838 return 0;
841 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
842 is entirely replaced by a new value and the only use is as a SET_DEST,
843 we do not consider it a reference. */
846 reg_referenced_p (x, body)
847 rtx x;
848 rtx body;
850 int i;
852 switch (GET_CODE (body))
854 case SET:
855 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
856 return 1;
858 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
859 of a REG that occupies all of the REG, the insn references X if
860 it is mentioned in the destination. */
861 if (GET_CODE (SET_DEST (body)) != CC0
862 && GET_CODE (SET_DEST (body)) != PC
863 && GET_CODE (SET_DEST (body)) != REG
864 && ! (GET_CODE (SET_DEST (body)) == SUBREG
865 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
866 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
867 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
868 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
869 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
870 && reg_overlap_mentioned_p (x, SET_DEST (body)))
871 return 1;
872 return 0;
874 case ASM_OPERANDS:
875 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
876 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
877 return 1;
878 return 0;
880 case CALL:
881 case USE:
882 case IF_THEN_ELSE:
883 return reg_overlap_mentioned_p (x, body);
885 case TRAP_IF:
886 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
888 case PREFETCH:
889 return reg_overlap_mentioned_p (x, XEXP (body, 0));
891 case UNSPEC:
892 case UNSPEC_VOLATILE:
893 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
894 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
895 return 1;
896 return 0;
898 case PARALLEL:
899 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
900 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
901 return 1;
902 return 0;
904 case CLOBBER:
905 if (GET_CODE (XEXP (body, 0)) == MEM)
906 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
907 return 1;
908 return 0;
910 case COND_EXEC:
911 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
912 return 1;
913 return reg_referenced_p (x, COND_EXEC_CODE (body));
915 default:
916 return 0;
920 /* Nonzero if register REG is referenced in an insn between
921 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
922 not count. */
925 reg_referenced_between_p (reg, from_insn, to_insn)
926 rtx reg, from_insn, to_insn;
928 rtx insn;
930 if (from_insn == to_insn)
931 return 0;
933 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
934 if (INSN_P (insn)
935 && (reg_referenced_p (reg, PATTERN (insn))
936 || (GET_CODE (insn) == CALL_INSN
937 && find_reg_fusage (insn, USE, reg))))
938 return 1;
939 return 0;
942 /* Nonzero if register REG is set or clobbered in an insn between
943 FROM_INSN and TO_INSN (exclusive of those two). */
946 reg_set_between_p (reg, from_insn, to_insn)
947 rtx reg, from_insn, to_insn;
949 rtx insn;
951 if (from_insn == to_insn)
952 return 0;
954 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
955 if (INSN_P (insn) && reg_set_p (reg, insn))
956 return 1;
957 return 0;
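/* Illustrative sketch, not part of the original rtlanal.c: a
   transformation that wants to move a computation of REG from AFTER back
   to BEFORE must make sure the register is neither referenced nor
   overwritten in between.  The helper name is hypothetical.  */

static int
reg_free_between_p (reg, before, after)
     rtx reg, before, after;
{
  return ! reg_used_between_p (reg, before, after)
         && ! reg_set_between_p (reg, before, after);
}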
960 /* Internals of reg_set_between_p. */
962 reg_set_p (reg, insn)
963 rtx reg, insn;
965 /* We can be passed an insn or part of one. If we are passed an insn,
966 check if a side-effect of the insn clobbers REG. */
967 if (INSN_P (insn)
968 && (FIND_REG_INC_NOTE (insn, reg)
969 || (GET_CODE (insn) == CALL_INSN
970 /* We'd like to test call_used_regs here, but rtlanal.c can't
971 reference that variable due to its use in genattrtab. So
972 we'll just be more conservative.
974 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
975 information holds all clobbered registers. */
976 && ((GET_CODE (reg) == REG
977 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
978 || GET_CODE (reg) == MEM
979 || find_reg_fusage (insn, CLOBBER, reg)))))
980 return 1;
982 return set_of (reg, insn) != NULL_RTX;
985 /* Similar to reg_set_between_p, but check all registers in X. Return 0
986 only if none of them are modified between START and END. Do not
987 consider non-registers one way or the other. */
990 regs_set_between_p (x, start, end)
991 rtx x;
992 rtx start, end;
994 enum rtx_code code = GET_CODE (x);
995 const char *fmt;
996 int i, j;
998 switch (code)
1000 case CONST_INT:
1001 case CONST_DOUBLE:
1002 case CONST_VECTOR:
1003 case CONST:
1004 case SYMBOL_REF:
1005 case LABEL_REF:
1006 case PC:
1007 case CC0:
1008 return 0;
1010 case REG:
1011 return reg_set_between_p (x, start, end);
1013 default:
1014 break;
1017 fmt = GET_RTX_FORMAT (code);
1018 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1020 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
1021 return 1;
1023 else if (fmt[i] == 'E')
1024 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1025 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
1026 return 1;
1029 return 0;
1032 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1033 only if none of them are modified between START and END. Return 1 if
 1034 X contains a MEM; this routine does use memory aliasing. */
1037 modified_between_p (x, start, end)
1038 rtx x;
1039 rtx start, end;
1041 enum rtx_code code = GET_CODE (x);
1042 const char *fmt;
1043 int i, j;
1044 rtx insn;
1046 if (start == end)
1047 return 0;
1049 switch (code)
1051 case CONST_INT:
1052 case CONST_DOUBLE:
1053 case CONST_VECTOR:
1054 case CONST:
1055 case SYMBOL_REF:
1056 case LABEL_REF:
1057 return 0;
1059 case PC:
1060 case CC0:
1061 return 1;
1063 case MEM:
1064 if (RTX_UNCHANGING_P (x))
1065 return 0;
1066 if (modified_between_p (XEXP (x, 0), start, end))
1067 return 1;
1068 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1069 if (memory_modified_in_insn_p (x, insn))
1070 return 1;
1071 return 0;
1072 break;
1074 case REG:
1075 return reg_set_between_p (x, start, end);
1077 default:
1078 break;
1081 fmt = GET_RTX_FORMAT (code);
1082 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1084 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1085 return 1;
1087 else if (fmt[i] == 'E')
1088 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1089 if (modified_between_p (XVECEXP (x, i, j), start, end))
1090 return 1;
1093 return 0;
1096 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1097 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1098 does use memory aliasing. */
1101 modified_in_p (x, insn)
1102 rtx x;
1103 rtx insn;
1105 enum rtx_code code = GET_CODE (x);
1106 const char *fmt;
1107 int i, j;
1109 switch (code)
1111 case CONST_INT:
1112 case CONST_DOUBLE:
1113 case CONST_VECTOR:
1114 case CONST:
1115 case SYMBOL_REF:
1116 case LABEL_REF:
1117 return 0;
1119 case PC:
1120 case CC0:
1121 return 1;
1123 case MEM:
1124 if (RTX_UNCHANGING_P (x))
1125 return 0;
1126 if (modified_in_p (XEXP (x, 0), insn))
1127 return 1;
1128 if (memory_modified_in_insn_p (x, insn))
1129 return 1;
1130 return 0;
1131 break;
1133 case REG:
1134 return reg_set_p (x, insn);
1136 default:
1137 break;
1140 fmt = GET_RTX_FORMAT (code);
1141 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1143 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1144 return 1;
1146 else if (fmt[i] == 'E')
1147 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1148 if (modified_in_p (XVECEXP (x, i, j), insn))
1149 return 1;
1152 return 0;
1155 /* Return true if anything in insn X is (anti,output,true) dependent on
1156 anything in insn Y. */
1159 insn_dependent_p (x, y)
1160 rtx x, y;
1162 rtx tmp;
1164 if (! INSN_P (x) || ! INSN_P (y))
1165 abort ();
1167 tmp = PATTERN (y);
1168 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1169 if (tmp == NULL_RTX)
1170 return 1;
1172 tmp = PATTERN (x);
1173 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1174 if (tmp == NULL_RTX)
1175 return 1;
1177 return 0;
1180 /* A helper routine for insn_dependent_p called through note_stores. */
1182 static void
1183 insn_dependent_p_1 (x, pat, data)
1184 rtx x;
1185 rtx pat ATTRIBUTE_UNUSED;
1186 void *data;
1188 rtx * pinsn = (rtx *) data;
1190 if (*pinsn && reg_mentioned_p (x, *pinsn))
1191 *pinsn = NULL_RTX;
1194 /* Helper function for set_of. */
1195 struct set_of_data
1197 rtx found;
1198 rtx pat;
1201 static void
1202 set_of_1 (x, pat, data1)
1203 rtx x;
1204 rtx pat;
1205 void *data1;
1207 struct set_of_data *data = (struct set_of_data *) (data1);
1208 if (rtx_equal_p (x, data->pat)
1209 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1210 data->found = pat;
 1213 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1214 (either directly or via STRICT_LOW_PART and similar modifiers). */
1216 set_of (pat, insn)
1217 rtx pat, insn;
1219 struct set_of_data data;
1220 data.found = NULL_RTX;
1221 data.pat = pat;
1222 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1223 return data.found;
1226 /* Given an INSN, return a SET expression if this insn has only a single SET.
 1227 It may also have CLOBBERs, USEs, or SETs whose output
1228 will not be used, which we ignore. */
1231 single_set_2 (insn, pat)
1232 rtx insn, pat;
1234 rtx set = NULL;
1235 int set_verified = 1;
1236 int i;
1238 if (GET_CODE (pat) == PARALLEL)
1240 for (i = 0; i < XVECLEN (pat, 0); i++)
1242 rtx sub = XVECEXP (pat, 0, i);
1243 switch (GET_CODE (sub))
1245 case USE:
1246 case CLOBBER:
1247 break;
1249 case SET:
 1250 /* We can consider insns having multiple sets, where all
 1251 but one are dead, as single set insns.  In the common case
 1252 only a single set is present in the pattern, so we want
 1253 to avoid checking for REG_UNUSED notes unless necessary.
 1255 When we reach a set the first time, we just expect it is
 1256 the single set we are looking for; only when more
 1257 sets are found in the insn do we check them. */
1258 if (!set_verified)
1260 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1261 && !side_effects_p (set))
1262 set = NULL;
1263 else
1264 set_verified = 1;
1266 if (!set)
1267 set = sub, set_verified = 0;
1268 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1269 || side_effects_p (sub))
1270 return NULL_RTX;
1271 break;
1273 default:
1274 return NULL_RTX;
1278 return set;
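/* Illustrative sketch, not part of the original rtlanal.c: most passes
   reach single_set_2 through the single_set macro, asking whether an
   insn is a plain register copy whose pieces can be inspected.  The
   helper name is hypothetical.  */

static rtx
simple_move_source (insn)
     rtx insn;
{
  rtx set = single_set (insn);

  if (set && GET_CODE (SET_DEST (set)) == REG)
    return SET_SRC (set);
  return NULL_RTX;
}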
1281 /* Given an INSN, return nonzero if it has more than one SET, else return
1282 zero. */
1285 multiple_sets (insn)
1286 rtx insn;
1288 int found;
1289 int i;
1291 /* INSN must be an insn. */
1292 if (! INSN_P (insn))
1293 return 0;
1295 /* Only a PARALLEL can have multiple SETs. */
1296 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1298 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1299 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1301 /* If we have already found a SET, then return now. */
1302 if (found)
1303 return 1;
1304 else
1305 found = 1;
1309 /* Either zero or one SET. */
1310 return 0;
1313 /* Return nonzero if the destination of SET equals the source
1314 and there are no side effects. */
1317 set_noop_p (set)
1318 rtx set;
1320 rtx src = SET_SRC (set);
1321 rtx dst = SET_DEST (set);
1323 if (dst == pc_rtx && src == pc_rtx)
1324 return 1;
1326 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1327 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1329 if (GET_CODE (dst) == SIGN_EXTRACT
1330 || GET_CODE (dst) == ZERO_EXTRACT)
1331 return rtx_equal_p (XEXP (dst, 0), src)
1332 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1333 && !side_effects_p (src);
1335 if (GET_CODE (dst) == STRICT_LOW_PART)
1336 dst = XEXP (dst, 0);
1338 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1340 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1341 return 0;
1342 src = SUBREG_REG (src);
1343 dst = SUBREG_REG (dst);
1346 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1347 && REGNO (src) == REGNO (dst));
1350 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1351 value to itself. */
1354 noop_move_p (insn)
1355 rtx insn;
1357 rtx pat = PATTERN (insn);
1359 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1360 return 1;
1362 /* Insns carrying these notes are useful later on. */
1363 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1364 return 0;
 1366 /* For now treat an insn with a REG_RETVAL note as
1367 a special insn which should not be considered a no-op. */
1368 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1369 return 0;
1371 if (GET_CODE (pat) == SET && set_noop_p (pat))
1372 return 1;
1374 if (GET_CODE (pat) == PARALLEL)
1376 int i;
1377 /* If nothing but SETs of registers to themselves,
1378 this insn can also be deleted. */
1379 for (i = 0; i < XVECLEN (pat, 0); i++)
1381 rtx tem = XVECEXP (pat, 0, i);
1383 if (GET_CODE (tem) == USE
1384 || GET_CODE (tem) == CLOBBER)
1385 continue;
1387 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1388 return 0;
1391 return 1;
1393 return 0;
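/* Illustrative sketch, not part of the original rtlanal.c: a cleanup
   pass might walk the insn stream and delete no-op moves.  get_insns and
   delete_insn are assumed to come from elsewhere in the compiler; the
   function name is hypothetical.  */

static void
delete_noop_moves_sketch ()
{
  rtx insn, next;

  for (insn = get_insns (); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (INSN_P (insn) && noop_move_p (insn))
        delete_insn (insn);
    }
}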
1397 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1398 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
 1399 If the object was modified, we hit a partial assignment to X, or we hit a
1400 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1401 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1402 be the src. */
1405 find_last_value (x, pinsn, valid_to, allow_hwreg)
1406 rtx x;
1407 rtx *pinsn;
1408 rtx valid_to;
1409 int allow_hwreg;
1411 rtx p;
1413 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1414 p = PREV_INSN (p))
1415 if (INSN_P (p))
1417 rtx set = single_set (p);
1418 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1420 if (set && rtx_equal_p (x, SET_DEST (set)))
1422 rtx src = SET_SRC (set);
1424 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1425 src = XEXP (note, 0);
1427 if ((valid_to == NULL_RTX
1428 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1429 /* Reject hard registers because we don't usually want
1430 to use them; we'd rather use a pseudo. */
1431 && (! (GET_CODE (src) == REG
1432 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1434 *pinsn = p;
1435 return src;
1439 /* If set in non-simple way, we don't have a value. */
1440 if (reg_set_p (x, p))
1441 break;
1444 return x;
1447 /* Return nonzero if register in range [REGNO, ENDREGNO)
1448 appears either explicitly or implicitly in X
1449 other than being stored into.
1451 References contained within the substructure at LOC do not count.
1452 LOC may be zero, meaning don't ignore anything. */
1455 refers_to_regno_p (regno, endregno, x, loc)
1456 unsigned int regno, endregno;
1457 rtx x;
1458 rtx *loc;
1460 int i;
1461 unsigned int x_regno;
1462 RTX_CODE code;
1463 const char *fmt;
1465 repeat:
1466 /* The contents of a REG_NONNEG note is always zero, so we must come here
1467 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1468 if (x == 0)
1469 return 0;
1471 code = GET_CODE (x);
1473 switch (code)
1475 case REG:
1476 x_regno = REGNO (x);
 1478 /* If we are modifying the stack, frame, or argument pointer, it will
1479 clobber a virtual register. In fact, we could be more precise,
1480 but it isn't worth it. */
1481 if ((x_regno == STACK_POINTER_REGNUM
1482 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1483 || x_regno == ARG_POINTER_REGNUM
1484 #endif
1485 || x_regno == FRAME_POINTER_REGNUM)
1486 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1487 return 1;
1489 return (endregno > x_regno
1490 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1491 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1492 : 1));
1494 case SUBREG:
1495 /* If this is a SUBREG of a hard reg, we can see exactly which
1496 registers are being modified. Otherwise, handle normally. */
1497 if (GET_CODE (SUBREG_REG (x)) == REG
1498 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1500 unsigned int inner_regno = subreg_regno (x);
1501 unsigned int inner_endregno
1502 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1503 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1505 return endregno > inner_regno && regno < inner_endregno;
1507 break;
1509 case CLOBBER:
1510 case SET:
1511 if (&SET_DEST (x) != loc
1512 /* Note setting a SUBREG counts as referring to the REG it is in for
1513 a pseudo but not for hard registers since we can
1514 treat each word individually. */
1515 && ((GET_CODE (SET_DEST (x)) == SUBREG
1516 && loc != &SUBREG_REG (SET_DEST (x))
1517 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1518 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1519 && refers_to_regno_p (regno, endregno,
1520 SUBREG_REG (SET_DEST (x)), loc))
1521 || (GET_CODE (SET_DEST (x)) != REG
1522 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1523 return 1;
1525 if (code == CLOBBER || loc == &SET_SRC (x))
1526 return 0;
1527 x = SET_SRC (x);
1528 goto repeat;
1530 default:
1531 break;
1534 /* X does not match, so try its subexpressions. */
1536 fmt = GET_RTX_FORMAT (code);
1537 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1539 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1541 if (i == 0)
1543 x = XEXP (x, 0);
1544 goto repeat;
1546 else
1547 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1548 return 1;
1550 else if (fmt[i] == 'E')
1552 int j;
1553 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1554 if (loc != &XVECEXP (x, i, j)
1555 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1556 return 1;
1559 return 0;
1562 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1563 we check if any register number in X conflicts with the relevant register
1564 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1565 contains a MEM (we don't bother checking for memory addresses that can't
 1566 conflict because we expect this to be a rare case.) */
1569 reg_overlap_mentioned_p (x, in)
1570 rtx x, in;
1572 unsigned int regno, endregno;
1574 /* Overly conservative. */
1575 if (GET_CODE (x) == STRICT_LOW_PART
1576 || GET_CODE (x) == ZERO_EXTRACT
1577 || GET_CODE (x) == SIGN_EXTRACT)
1578 x = XEXP (x, 0);
1580 /* If either argument is a constant, then modifying X can not affect IN. */
1581 if (CONSTANT_P (x) || CONSTANT_P (in))
1582 return 0;
1584 switch (GET_CODE (x))
1586 case SUBREG:
1587 regno = REGNO (SUBREG_REG (x));
1588 if (regno < FIRST_PSEUDO_REGISTER)
1589 regno = subreg_regno (x);
1590 goto do_reg;
1592 case REG:
1593 regno = REGNO (x);
1594 do_reg:
1595 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1596 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1597 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1599 case MEM:
1601 const char *fmt;
1602 int i;
1604 if (GET_CODE (in) == MEM)
1605 return 1;
1607 fmt = GET_RTX_FORMAT (GET_CODE (in));
1608 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1609 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1610 return 1;
1612 return 0;
1615 case SCRATCH:
1616 case PC:
1617 case CC0:
1618 return reg_mentioned_p (x, in);
1620 case PARALLEL:
1622 int i;
1624 /* If any register in here refers to it we return true. */
1625 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1626 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1627 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1628 return 1;
1629 return 0;
1632 default:
1633 break;
1636 abort ();
1639 /* Return the last value to which REG was set prior to INSN. If we can't
1640 find it easily, return 0.
1642 We only return a REG, SUBREG, or constant because it is too hard to
1643 check if a MEM remains unchanged. */
1646 reg_set_last (x, insn)
1647 rtx x;
1648 rtx insn;
1650 rtx orig_insn = insn;
 1652 /* Scan backwards from INSN.  Stop when we reach a CODE_LABEL, or when
 1653 X is a hard register and we reach a CALL_INSN, since the call may
 1654 clobber a hard register.
 1656 If we find a set of X, ensure that its SET_SRC remains unchanged. */
 1658 /* The comparison below uses <=, so besides the hard registers the very
 1659 first pseudo register number also makes the loop stop at a CALL_INSN. */
1660 for (;
1661 insn && GET_CODE (insn) != CODE_LABEL
1662 && ! (GET_CODE (insn) == CALL_INSN
1663 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1664 insn = PREV_INSN (insn))
1665 if (INSN_P (insn))
1667 rtx set = set_of (x, insn);
 1668 /* OK, this insn modifies our register. See if we understand it. */
1669 if (set)
1671 rtx last_value;
1672 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1673 return 0;
 1674 last_value = SET_SRC (set);
1675 if (CONSTANT_P (last_value)
1676 || ((GET_CODE (last_value) == REG
1677 || GET_CODE (last_value) == SUBREG)
1678 && ! reg_set_between_p (last_value,
1679 insn, orig_insn)))
1680 return last_value;
1681 else
1682 return 0;
1686 return 0;
1689 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1690 (X would be the pattern of an insn).
1691 FUN receives two arguments:
1692 the REG, MEM, CC0 or PC being stored in or clobbered,
1693 the SET or CLOBBER rtx that does the store.
1695 If the item being stored in or clobbered is a SUBREG of a hard register,
1696 the SUBREG will be passed. */
1698 void
1699 note_stores (x, fun, data)
1700 rtx x;
1701 void (*fun) PARAMS ((rtx, rtx, void *));
1702 void *data;
1704 int i;
1706 if (GET_CODE (x) == COND_EXEC)
1707 x = COND_EXEC_CODE (x);
1709 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1711 rtx dest = SET_DEST (x);
1713 while ((GET_CODE (dest) == SUBREG
1714 && (GET_CODE (SUBREG_REG (dest)) != REG
1715 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1716 || GET_CODE (dest) == ZERO_EXTRACT
1717 || GET_CODE (dest) == SIGN_EXTRACT
1718 || GET_CODE (dest) == STRICT_LOW_PART)
1719 dest = XEXP (dest, 0);
1721 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1722 each of whose first operand is a register. */
1723 if (GET_CODE (dest) == PARALLEL)
1725 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1726 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1727 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1729 else
1730 (*fun) (dest, x, data);
1733 else if (GET_CODE (x) == PARALLEL)
1734 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1735 note_stores (XVECEXP (x, 0, i), fun, data);
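/* Illustrative sketch, not part of the original rtlanal.c: a minimal
   note_stores callback that records whether a pattern writes any hard
   register, invoked as
     note_stores (PATTERN (insn), record_hard_reg_store, &found);
   in the same way insn_dependent_p above drives insn_dependent_p_1.
   The callback name and the `found' flag are hypothetical.  */

static void
record_hard_reg_store (dest, setter, data)
     rtx dest;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  if (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    *(int *) data = 1;
}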
 1738 /* Like note_stores, but call FUN for each expression that is being
1739 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1740 FUN for each expression, not any interior subexpressions. FUN receives a
1741 pointer to the expression and the DATA passed to this function.
1743 Note that this is not quite the same test as that done in reg_referenced_p
1744 since that considers something as being referenced if it is being
1745 partially set, while we do not. */
1747 void
1748 note_uses (pbody, fun, data)
1749 rtx *pbody;
1750 void (*fun) PARAMS ((rtx *, void *));
1751 void *data;
1753 rtx body = *pbody;
1754 int i;
1756 switch (GET_CODE (body))
1758 case COND_EXEC:
1759 (*fun) (&COND_EXEC_TEST (body), data);
1760 note_uses (&COND_EXEC_CODE (body), fun, data);
1761 return;
1763 case PARALLEL:
1764 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1765 note_uses (&XVECEXP (body, 0, i), fun, data);
1766 return;
1768 case USE:
1769 (*fun) (&XEXP (body, 0), data);
1770 return;
1772 case ASM_OPERANDS:
1773 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1774 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1775 return;
1777 case TRAP_IF:
1778 (*fun) (&TRAP_CONDITION (body), data);
1779 return;
1781 case PREFETCH:
1782 (*fun) (&XEXP (body, 0), data);
1783 return;
1785 case UNSPEC:
1786 case UNSPEC_VOLATILE:
1787 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1788 (*fun) (&XVECEXP (body, 0, i), data);
1789 return;
1791 case CLOBBER:
1792 if (GET_CODE (XEXP (body, 0)) == MEM)
1793 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1794 return;
1796 case SET:
1798 rtx dest = SET_DEST (body);
 1800 /* For a SET we visit everything in the source, plus any registers in a
 1801 memory expression in the destination and the operands of a ZERO_EXTRACT. */
1802 (*fun) (&SET_SRC (body), data);
1804 if (GET_CODE (dest) == ZERO_EXTRACT)
1806 (*fun) (&XEXP (dest, 1), data);
1807 (*fun) (&XEXP (dest, 2), data);
1810 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1811 dest = XEXP (dest, 0);
1813 if (GET_CODE (dest) == MEM)
1814 (*fun) (&XEXP (dest, 0), data);
1816 return;
1818 default:
1819 /* All the other possibilities never store. */
1820 (*fun) (pbody, data);
1821 return;
1825 /* Return nonzero if X's old contents don't survive after INSN.
1826 This will be true if X is (cc0) or if X is a register and
1827 X dies in INSN or because INSN entirely sets X.
1829 "Entirely set" means set directly and not through a SUBREG,
1830 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1831 Likewise, REG_INC does not count.
1833 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1834 but for this use that makes no difference, since regs don't overlap
1835 during their lifetimes. Therefore, this function may be used
1836 at any time after deaths have been computed (in flow.c).
1838 If REG is a hard reg that occupies multiple machine registers, this
1839 function will only return 1 if each of those registers will be replaced
1840 by INSN. */
1843 dead_or_set_p (insn, x)
1844 rtx insn;
1845 rtx x;
1847 unsigned int regno, last_regno;
1848 unsigned int i;
1850 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1851 if (GET_CODE (x) == CC0)
1852 return 1;
1854 if (GET_CODE (x) != REG)
1855 abort ();
1857 regno = REGNO (x);
1858 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1859 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1861 for (i = regno; i <= last_regno; i++)
1862 if (! dead_or_set_regno_p (insn, i))
1863 return 0;
1865 return 1;
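/* Illustrative sketch, not part of the original rtlanal.c: a
   peephole-style check that the source register of a move is no longer
   needed after INSN, so its hard register could be reused as a scratch.
   The helper name is hypothetical.  */

static int
src_reg_dies_here_p (insn)
     rtx insn;
{
  rtx set = single_set (insn);

  return set != 0
         && GET_CODE (SET_SRC (set)) == REG
         && dead_or_set_p (insn, SET_SRC (set));
}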
1868 /* Utility function for dead_or_set_p to check an individual register. Also
1869 called from flow.c. */
1872 dead_or_set_regno_p (insn, test_regno)
1873 rtx insn;
1874 unsigned int test_regno;
1876 unsigned int regno, endregno;
1877 rtx pattern;
1879 /* See if there is a death note for something that includes TEST_REGNO. */
1880 if (find_regno_note (insn, REG_DEAD, test_regno))
1881 return 1;
1883 if (GET_CODE (insn) == CALL_INSN
1884 && find_regno_fusage (insn, CLOBBER, test_regno))
1885 return 1;
1887 pattern = PATTERN (insn);
1889 if (GET_CODE (pattern) == COND_EXEC)
1890 pattern = COND_EXEC_CODE (pattern);
1892 if (GET_CODE (pattern) == SET)
1894 rtx dest = SET_DEST (pattern);
1896 /* A value is totally replaced if it is the destination or the
1897 destination is a SUBREG of REGNO that does not change the number of
1898 words in it. */
1899 if (GET_CODE (dest) == SUBREG
1900 && (((GET_MODE_SIZE (GET_MODE (dest))
1901 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1902 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1903 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1904 dest = SUBREG_REG (dest);
1906 if (GET_CODE (dest) != REG)
1907 return 0;
1909 regno = REGNO (dest);
1910 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1911 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1913 return (test_regno >= regno && test_regno < endregno);
1915 else if (GET_CODE (pattern) == PARALLEL)
1917 int i;
1919 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1921 rtx body = XVECEXP (pattern, 0, i);
1923 if (GET_CODE (body) == COND_EXEC)
1924 body = COND_EXEC_CODE (body);
1926 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1928 rtx dest = SET_DEST (body);
1930 if (GET_CODE (dest) == SUBREG
1931 && (((GET_MODE_SIZE (GET_MODE (dest))
1932 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1933 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1934 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1935 dest = SUBREG_REG (dest);
1937 if (GET_CODE (dest) != REG)
1938 continue;
1940 regno = REGNO (dest);
1941 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1942 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1944 if (test_regno >= regno && test_regno < endregno)
1945 return 1;
1950 return 0;
1953 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1954 If DATUM is nonzero, look for one whose datum is DATUM. */
1957 find_reg_note (insn, kind, datum)
1958 rtx insn;
1959 enum reg_note kind;
1960 rtx datum;
1962 rtx link;
1964 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1965 if (! INSN_P (insn))
1966 return 0;
1968 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1969 if (REG_NOTE_KIND (link) == kind
1970 && (datum == 0 || datum == XEXP (link, 0)))
1971 return link;
1972 return 0;
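/* Illustrative sketch, not part of the original rtlanal.c: fetching the
   value recorded by a REG_EQUAL note, a very common use of
   find_reg_note.  The helper name is hypothetical.  */

static rtx
insn_equal_value (insn)
     rtx insn;
{
  rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

  return note ? XEXP (note, 0) : NULL_RTX;
}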
1975 /* Return the reg-note of kind KIND in insn INSN which applies to register
1976 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1977 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1978 it might be the case that the note overlaps REGNO. */
1981 find_regno_note (insn, kind, regno)
1982 rtx insn;
1983 enum reg_note kind;
1984 unsigned int regno;
1986 rtx link;
1988 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1989 if (! INSN_P (insn))
1990 return 0;
1992 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1993 if (REG_NOTE_KIND (link) == kind
1994 /* Verify that it is a register, so that scratch and MEM won't cause a
1995 problem here. */
1996 && GET_CODE (XEXP (link, 0)) == REG
1997 && REGNO (XEXP (link, 0)) <= regno
1998 && ((REGNO (XEXP (link, 0))
1999 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
2000 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
2001 GET_MODE (XEXP (link, 0)))))
2002 > regno))
2003 return link;
2004 return 0;
2007 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
2008 has such a note. */
2011 find_reg_equal_equiv_note (insn)
2012 rtx insn;
2014 rtx link;
2016 if (!INSN_P (insn))
2017 return 0;
2018 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2019 if (REG_NOTE_KIND (link) == REG_EQUAL
2020 || REG_NOTE_KIND (link) == REG_EQUIV)
2022 if (single_set (insn) == 0)
2023 return 0;
2024 return link;
2026 return NULL;
2029 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
2030 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2033 find_reg_fusage (insn, code, datum)
2034 rtx insn;
2035 enum rtx_code code;
2036 rtx datum;
2038 /* If it's not a CALL_INSN, it can't possibly have a
2039 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
2040 if (GET_CODE (insn) != CALL_INSN)
2041 return 0;
2043 if (! datum)
2044 abort ();
2046 if (GET_CODE (datum) != REG)
2048 rtx link;
2050 for (link = CALL_INSN_FUNCTION_USAGE (insn);
2051 link;
2052 link = XEXP (link, 1))
2053 if (GET_CODE (XEXP (link, 0)) == code
2054 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
2055 return 1;
2057 else
2059 unsigned int regno = REGNO (datum);
2061 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2062 to pseudo registers, so don't bother checking. */
2064 if (regno < FIRST_PSEUDO_REGISTER)
2066 unsigned int end_regno
2067 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
2068 unsigned int i;
2070 for (i = regno; i < end_regno; i++)
2071 if (find_regno_fusage (insn, code, i))
2072 return 1;
2076 return 0;
2079 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2080 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2083 find_regno_fusage (insn, code, regno)
2084 rtx insn;
2085 enum rtx_code code;
2086 unsigned int regno;
2088 rtx link;
2090 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2091 to pseudo registers, so don't bother checking. */
2093 if (regno >= FIRST_PSEUDO_REGISTER
2094 || GET_CODE (insn) != CALL_INSN )
2095 return 0;
2097 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2099 unsigned int regnote;
2100 rtx op, reg;
2102 if (GET_CODE (op = XEXP (link, 0)) == code
2103 && GET_CODE (reg = XEXP (op, 0)) == REG
2104 && (regnote = REGNO (reg)) <= regno
2105 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2106 return 1;
2109 return 0;
2112 /* Return true if INSN is a call to a pure function. */
2115 pure_call_p (insn)
2116 rtx insn;
2118 rtx link;
2120 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2121 return 0;
2123 /* Look for the note that differentiates const and pure functions. */
2124 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2126 rtx u, m;
2128 if (GET_CODE (u = XEXP (link, 0)) == USE
2129 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2130 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2131 return 1;
2134 return 0;
2137 /* Remove register note NOTE from the REG_NOTES of INSN. */
2139 void
2140 remove_note (insn, note)
2141 rtx insn;
2142 rtx note;
2144 rtx link;
2146 if (note == NULL_RTX)
2147 return;
2149 if (REG_NOTES (insn) == note)
2151 REG_NOTES (insn) = XEXP (note, 1);
2152 return;
2155 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2156 if (XEXP (link, 1) == note)
2158 XEXP (link, 1) = XEXP (note, 1);
2159 return;
2162 abort ();
2165 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2166 return 1 if it is found. A simple equality test is used to determine if
2167 NODE matches. */
2170 in_expr_list_p (listp, node)
2171 rtx listp;
2172 rtx node;
2174 rtx x;
2176 for (x = listp; x; x = XEXP (x, 1))
2177 if (node == XEXP (x, 0))
2178 return 1;
2180 return 0;
2183 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2184 remove that entry from the list if it is found.
2186 A simple equality test is used to determine if NODE matches. */
2188 void
2189 remove_node_from_expr_list (node, listp)
2190 rtx node;
2191 rtx *listp;
2193 rtx temp = *listp;
2194 rtx prev = NULL_RTX;
2196 while (temp)
2198 if (node == XEXP (temp, 0))
2200 /* Splice the node out of the list. */
2201 if (prev)
2202 XEXP (prev, 1) = XEXP (temp, 1);
2203 else
2204 *listp = XEXP (temp, 1);
2206 return;
2209 prev = temp;
2210 temp = XEXP (temp, 1);
2214 /* Nonzero if X contains any volatile instructions. These are instructions
 2215 which may cause unpredictable machine state, and thus no
2216 instructions should be moved or combined across them. This includes
2217 only volatile asms and UNSPEC_VOLATILE instructions. */
2220 volatile_insn_p (x)
2221 rtx x;
2223 RTX_CODE code;
2225 code = GET_CODE (x);
2226 switch (code)
2228 case LABEL_REF:
2229 case SYMBOL_REF:
2230 case CONST_INT:
2231 case CONST:
2232 case CONST_DOUBLE:
2233 case CONST_VECTOR:
2234 case CC0:
2235 case PC:
2236 case REG:
2237 case SCRATCH:
2238 case CLOBBER:
2239 case ADDR_VEC:
2240 case ADDR_DIFF_VEC:
2241 case CALL:
2242 case MEM:
2243 return 0;
2245 case UNSPEC_VOLATILE:
2246 /* case TRAP_IF: This isn't clear yet. */
2247 return 1;
2249 case ASM_INPUT:
2250 case ASM_OPERANDS:
2251 if (MEM_VOLATILE_P (x))
2252 return 1;
2254 default:
2255 break;
2258 /* Recursively scan the operands of this expression. */
2261 const char *fmt = GET_RTX_FORMAT (code);
2262 int i;
2264 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2266 if (fmt[i] == 'e')
2268 if (volatile_insn_p (XEXP (x, i)))
2269 return 1;
2271 else if (fmt[i] == 'E')
2273 int j;
2274 for (j = 0; j < XVECLEN (x, i); j++)
2275 if (volatile_insn_p (XVECEXP (x, i, j)))
2276 return 1;
2280 return 0;
 2283 /* Nonzero if X contains any volatile memory references,
 2284 UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions. */
2287 volatile_refs_p (x)
2288 rtx x;
2290 RTX_CODE code;
2292 code = GET_CODE (x);
2293 switch (code)
2295 case LABEL_REF:
2296 case SYMBOL_REF:
2297 case CONST_INT:
2298 case CONST:
2299 case CONST_DOUBLE:
2300 case CONST_VECTOR:
2301 case CC0:
2302 case PC:
2303 case REG:
2304 case SCRATCH:
2305 case CLOBBER:
2306 case ADDR_VEC:
2307 case ADDR_DIFF_VEC:
2308 return 0;
2310 case UNSPEC_VOLATILE:
2311 return 1;
2313 case MEM:
2314 case ASM_INPUT:
2315 case ASM_OPERANDS:
2316 if (MEM_VOLATILE_P (x))
2317 return 1;
2319 default:
2320 break;
2323 /* Recursively scan the operands of this expression. */
2326 const char *fmt = GET_RTX_FORMAT (code);
2327 int i;
2329 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2331 if (fmt[i] == 'e')
2333 if (volatile_refs_p (XEXP (x, i)))
2334 return 1;
2336 else if (fmt[i] == 'E')
2338 int j;
2339 for (j = 0; j < XVECLEN (x, i); j++)
2340 if (volatile_refs_p (XVECEXP (x, i, j)))
2341 return 1;
2345 return 0;
2348 /* Similar to above, except that it also rejects register pre- and post-
2349 incrementing. */
2352 side_effects_p (x)
2353 rtx x;
2355 RTX_CODE code;
2357 code = GET_CODE (x);
2358 switch (code)
2360 case LABEL_REF:
2361 case SYMBOL_REF:
2362 case CONST_INT:
2363 case CONST:
2364 case CONST_DOUBLE:
2365 case CONST_VECTOR:
2366 case CC0:
2367 case PC:
2368 case REG:
2369 case SCRATCH:
2370 case ADDR_VEC:
2371 case ADDR_DIFF_VEC:
2372 return 0;
2374 case CLOBBER:
2375 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2376 when some combination can't be done. If we see one, don't think
2377 that we can simplify the expression. */
2378 return (GET_MODE (x) != VOIDmode);
2380 case PRE_INC:
2381 case PRE_DEC:
2382 case POST_INC:
2383 case POST_DEC:
2384 case PRE_MODIFY:
2385 case POST_MODIFY:
2386 case CALL:
2387 case UNSPEC_VOLATILE:
2388 /* case TRAP_IF: This isn't clear yet. */
2389 return 1;
2391 case MEM:
2392 case ASM_INPUT:
2393 case ASM_OPERANDS:
2394 if (MEM_VOLATILE_P (x))
2395 return 1;
2397 default:
2398 break;
2401 /* Recursively scan the operands of this expression. */
2404 const char *fmt = GET_RTX_FORMAT (code);
2405 int i;
2407 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2409 if (fmt[i] == 'e')
2411 if (side_effects_p (XEXP (x, i)))
2412 return 1;
2414 else if (fmt[i] == 'E')
2416 int j;
2417 for (j = 0; j < XVECLEN (x, i); j++)
2418 if (side_effects_p (XVECEXP (x, i, j)))
2419 return 1;
2423 return 0;
2426 /* Return nonzero if evaluating rtx X might cause a trap. */
2429 may_trap_p (x)
2430 rtx x;
2432 int i;
2433 enum rtx_code code;
2434 const char *fmt;
2436 if (x == 0)
2437 return 0;
2438 code = GET_CODE (x);
2439 switch (code)
2441 /* Handle these cases quickly. */
2442 case CONST_INT:
2443 case CONST_DOUBLE:
2444 case CONST_VECTOR:
2445 case SYMBOL_REF:
2446 case LABEL_REF:
2447 case CONST:
2448 case PC:
2449 case CC0:
2450 case REG:
2451 case SCRATCH:
2452 return 0;
2454 case ASM_INPUT:
2455 case UNSPEC_VOLATILE:
2456 case TRAP_IF:
2457 return 1;
2459 case ASM_OPERANDS:
2460 return MEM_VOLATILE_P (x);
2462 /* Memory ref can trap unless it's a static var or a stack slot. */
2463 case MEM:
2464 if (MEM_NOTRAP_P (x))
2465 return 0;
2466 return rtx_addr_can_trap_p (XEXP (x, 0));
2468 /* Division by a non-constant might trap. */
2469 case DIV:
2470 case MOD:
2471 case UDIV:
2472 case UMOD:
2473 if (HONOR_SNANS (GET_MODE (x)))
2474 return 1;
2475 if (! CONSTANT_P (XEXP (x, 1))
2476 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2477 && flag_trapping_math))
2478 return 1;
2479 /* This was const0_rtx, but by not using that,
2480 we can link this file into other programs. */
2481 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2482 return 1;
2483 break;
2485 case EXPR_LIST:
2486 /* An EXPR_LIST is used to represent a function call. This
2487 certainly may trap. */
2488 return 1;
2490 case GE:
2491 case GT:
2492 case LE:
2493 case LT:
2494 case COMPARE:
2495 /* Some floating point comparisons may trap. */
2496 if (!flag_trapping_math)
2497 break;
2498 /* ??? There is no machine independent way to check for tests that trap
2499 when COMPARE is used, though many targets do make this distinction.
2500 For instance, sparc uses CCFPE for compares which generate exceptions
2501 and CCFP for compares which do not generate exceptions. */
2502 if (HONOR_NANS (GET_MODE (x)))
2503 return 1;
2504 /* But often the compare has some CC mode, so check operand
2505 modes as well. */
2506 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2507 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2508 return 1;
2509 break;
2511 case EQ:
2512 case NE:
2513 if (HONOR_SNANS (GET_MODE (x)))
2514 return 1;
2515 /* Often comparison is CC mode, so check operand modes. */
2516 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2517 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2518 return 1;
2519 break;
2521 case FIX:
2522 /* Conversion of floating point might trap. */
2523 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2524 return 1;
2525 break;
2527 case NEG:
2528 case ABS:
2529 /* These operations don't trap even with floating point. */
2530 break;
2532 default:
2533 /* Any floating arithmetic may trap. */
2534 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2535 && flag_trapping_math)
2536 return 1;
2539 fmt = GET_RTX_FORMAT (code);
2540 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2542 if (fmt[i] == 'e')
2544 if (may_trap_p (XEXP (x, i)))
2545 return 1;
2547 else if (fmt[i] == 'E')
2549 int j;
2550 for (j = 0; j < XVECLEN (x, i); j++)
2551 if (may_trap_p (XVECEXP (x, i, j)))
2552 return 1;
2555 return 0;
2558 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2559 i.e., an inequality. */
2562 inequality_comparisons_p (x)
2563 rtx x;
2565 const char *fmt;
2566 int len, i;
2567 enum rtx_code code = GET_CODE (x);
2569 switch (code)
2571 case REG:
2572 case SCRATCH:
2573 case PC:
2574 case CC0:
2575 case CONST_INT:
2576 case CONST_DOUBLE:
2577 case CONST_VECTOR:
2578 case CONST:
2579 case LABEL_REF:
2580 case SYMBOL_REF:
2581 return 0;
2583 case LT:
2584 case LTU:
2585 case GT:
2586 case GTU:
2587 case LE:
2588 case LEU:
2589 case GE:
2590 case GEU:
2591 return 1;
2593 default:
2594 break;
2597 len = GET_RTX_LENGTH (code);
2598 fmt = GET_RTX_FORMAT (code);
2600 for (i = 0; i < len; i++)
2602 if (fmt[i] == 'e')
2604 if (inequality_comparisons_p (XEXP (x, i)))
2605 return 1;
2607 else if (fmt[i] == 'E')
2609 int j;
2610 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2611 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2612 return 1;
2616 return 0;
2619 /* Replace any occurrence of FROM in X with TO. The function does
2620 not descend into CONST_DOUBLEs when doing the replacement.
2622 Note that copying is not done, so X must not be shared unless all copies
2623 are to be modified. */
2626 replace_rtx (x, from, to)
2627 rtx x, from, to;
2629 int i, j;
2630 const char *fmt;
2632 /* The following prevents loops when we replace a MEM inside a
2633 CONST_DOUBLE with the same CONST_DOUBLE. */
2634 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2635 return x;
2637 if (x == from)
2638 return to;
2640 /* Allow this function to make replacements in EXPR_LISTs. */
2641 if (x == 0)
2642 return 0;
2644 if (GET_CODE (x) == SUBREG)
2646 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2648 if (GET_CODE (new) == CONST_INT)
2650 x = simplify_subreg (GET_MODE (x), new,
2651 GET_MODE (SUBREG_REG (x)),
2652 SUBREG_BYTE (x));
2653 if (! x)
2654 abort ();
2656 else
2657 SUBREG_REG (x) = new;
2659 return x;
2661 else if (GET_CODE (x) == ZERO_EXTEND)
2663 rtx new = replace_rtx (XEXP (x, 0), from, to);
2665 if (GET_CODE (new) == CONST_INT)
2667 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2668 new, GET_MODE (XEXP (x, 0)));
2669 if (! x)
2670 abort ();
2672 else
2673 XEXP (x, 0) = new;
2675 return x;
2678 fmt = GET_RTX_FORMAT (GET_CODE (x));
2679 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2681 if (fmt[i] == 'e')
2682 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2683 else if (fmt[i] == 'E')
2684 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2685 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2688 return x;
2691 /* Throughout the rtx X, replace many registers according to REG_MAP.
2692 Return the replacement for X (which may be X with altered contents).
2693 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2694 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2696 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2697 should not be mapped to pseudos or vice versa since validate_change
2698 is not called.
2700 If REPLACE_DEST is 1, replacements are also done in destinations;
2701 otherwise, only sources are replaced. */
2704 replace_regs (x, reg_map, nregs, replace_dest)
2705 rtx x;
2706 rtx *reg_map;
2707 unsigned int nregs;
2708 int replace_dest;
2710 enum rtx_code code;
2711 int i;
2712 const char *fmt;
2714 if (x == 0)
2715 return x;
2717 code = GET_CODE (x);
2718 switch (code)
2720 case SCRATCH:
2721 case PC:
2722 case CC0:
2723 case CONST_INT:
2724 case CONST_DOUBLE:
2725 case CONST_VECTOR:
2726 case CONST:
2727 case SYMBOL_REF:
2728 case LABEL_REF:
2729 return x;
2731 case REG:
2732 /* Verify that the register has an entry before trying to access it. */
2733 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2735 /* SUBREGs can't be shared. Always return a copy to ensure that if
2736 this replacement occurs more than once then each instance will
2737 get a distinct rtx. */
2738 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2739 return copy_rtx (reg_map[REGNO (x)]);
2740 return reg_map[REGNO (x)];
2742 return x;
2744 case SUBREG:
2745 /* Prevent making nested SUBREGs. */
2746 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2747 && reg_map[REGNO (SUBREG_REG (x))] != 0
2748 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2750 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2751 return simplify_gen_subreg (GET_MODE (x), map_val,
2752 GET_MODE (SUBREG_REG (x)),
2753 SUBREG_BYTE (x));
2755 break;
2757 case SET:
2758 if (replace_dest)
2759 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2761 else if (GET_CODE (SET_DEST (x)) == MEM
2762 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2763 /* Even if we are not to replace destinations, replace the register if it
2764 is CONTAINED in the destination (the destination being a memory reference
2765 or a STRICT_LOW_PART). */
2766 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2767 reg_map, nregs, 0);
2768 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2769 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2770 break;
2772 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2773 return x;
2775 default:
2776 break;
2779 fmt = GET_RTX_FORMAT (code);
2780 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2782 if (fmt[i] == 'e')
2783 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2784 else if (fmt[i] == 'E')
2786 int j;
2787 for (j = 0; j < XVECLEN (x, i); j++)
2788 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2789 nregs, replace_dest);
2792 return x;
2795 /* Replace occurrences of the old label in *X with the new one.
2796 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2799 replace_label (x, data)
2800 rtx *x;
2801 void *data;
2803 rtx l = *x;
2804 rtx tmp;
2805 rtx old_label = ((replace_label_data *) data)->r1;
2806 rtx new_label = ((replace_label_data *) data)->r2;
2807 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2809 if (l == NULL_RTX)
2810 return 0;
2812 if (GET_CODE (l) == MEM
2813 && (tmp = XEXP (l, 0)) != NULL_RTX
2814 && GET_CODE (tmp) == SYMBOL_REF
2815 && CONSTANT_POOL_ADDRESS_P (tmp))
2817 rtx c = get_pool_constant (tmp);
2818 if (rtx_referenced_p (old_label, c))
2820 rtx new_c, new_l;
2821 replace_label_data *d = (replace_label_data *) data;
2823 /* Create a copy of constant C; replace the label inside
2824 but do not update LABEL_NUSES because uses in constant pool
2825 are not counted. */
2826 new_c = copy_rtx (c);
2827 d->update_label_nuses = false;
2828 for_each_rtx (&new_c, replace_label, data);
2829 d->update_label_nuses = update_label_nuses;
2831 /* Add the new constant NEW_C to constant pool and replace
2832 the old reference to constant by new reference. */
2833 new_l = force_const_mem (get_pool_mode (tmp), new_c);
2834 *x = replace_rtx (l, l, new_l);
2836 return 0;
2839 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2840 field. This is not handled by for_each_rtx because it doesn't
2841 handle unprinted ('0') fields. */
2842 if (GET_CODE (l) == JUMP_INSN && JUMP_LABEL (l) == old_label)
2843 JUMP_LABEL (l) = new_label;
2845 if ((GET_CODE (l) == LABEL_REF
2846 || GET_CODE (l) == INSN_LIST)
2847 && XEXP (l, 0) == old_label)
2849 XEXP (l, 0) = new_label;
2850 if (update_label_nuses)
2852 ++LABEL_NUSES (new_label);
2853 --LABEL_NUSES (old_label);
2855 return 0;
2858 return 0;
2861 /* When *BODY is equal to X or X is directly referenced by *BODY,
2862 return nonzero so that FOR_EACH_RTX stops traversing and returns nonzero
2863 too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2865 static int
2866 rtx_referenced_p_1 (body, x)
2867 rtx *body;
2868 void *x;
2870 rtx y = (rtx) x;
2872 if (*body == NULL_RTX)
2873 return y == NULL_RTX;
2875 /* Return true if a label_ref *BODY refers to label Y. */
2876 if (GET_CODE (*body) == LABEL_REF && GET_CODE (y) == CODE_LABEL)
2877 return XEXP (*body, 0) == y;
2879 /* If *BODY is a reference to a pool constant, traverse the constant. */
2880 if (GET_CODE (*body) == SYMBOL_REF
2881 && CONSTANT_POOL_ADDRESS_P (*body))
2882 return rtx_referenced_p (y, get_pool_constant (*body));
2884 /* By default, compare the RTL expressions. */
2885 return rtx_equal_p (*body, y);
2888 /* Return true if X is referenced in BODY. */
2891 rtx_referenced_p (x, body)
2892 rtx x;
2893 rtx body;
2895 return for_each_rtx (&body, rtx_referenced_p_1, x);
2898 /* If INSN is a jump to a jump table insn, return true and store the label
2899 (which INSN jumps to) in *LABEL and the tablejump insn in *TABLE.
2900 LABEL and TABLE may be NULL. */
2902 bool
2903 tablejump_p (insn, label, table)
2904 rtx insn;
2905 rtx *label;
2906 rtx *table;
2908 rtx l, t;
2910 if (onlyjump_p (insn)
2911 && (l = JUMP_LABEL (insn)) != NULL_RTX
2912 && (t = NEXT_INSN (l)) != NULL_RTX
2913 && GET_CODE (t) == JUMP_INSN
2914 && (GET_CODE (PATTERN (t)) == ADDR_VEC
2915 || GET_CODE (PATTERN (t)) == ADDR_DIFF_VEC))
2917 if (label)
2918 *label = l;
2919 if (table)
2920 *table = t;
2921 return true;
2923 return false;
2926 /* A subroutine of computed_jump_p. Return 1 if X contains a REG or MEM or
2927 constant that is not in the constant pool and not in the condition
2928 of an IF_THEN_ELSE. */
2930 static int
2931 computed_jump_p_1 (x)
2932 rtx x;
2934 enum rtx_code code = GET_CODE (x);
2935 int i, j;
2936 const char *fmt;
2938 switch (code)
2940 case LABEL_REF:
2941 case PC:
2942 return 0;
2944 case CONST:
2945 case CONST_INT:
2946 case CONST_DOUBLE:
2947 case CONST_VECTOR:
2948 case SYMBOL_REF:
2949 case REG:
2950 return 1;
2952 case MEM:
2953 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2954 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2956 case IF_THEN_ELSE:
2957 return (computed_jump_p_1 (XEXP (x, 1))
2958 || computed_jump_p_1 (XEXP (x, 2)));
2960 default:
2961 break;
2964 fmt = GET_RTX_FORMAT (code);
2965 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2967 if (fmt[i] == 'e'
2968 && computed_jump_p_1 (XEXP (x, i)))
2969 return 1;
2971 else if (fmt[i] == 'E')
2972 for (j = 0; j < XVECLEN (x, i); j++)
2973 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2974 return 1;
2977 return 0;
2980 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2982 Tablejumps and casesi insns are not considered indirect jumps;
2983 we can recognize them by a (use (label_ref)). */
2986 computed_jump_p (insn)
2987 rtx insn;
2989 int i;
2990 if (GET_CODE (insn) == JUMP_INSN)
2992 rtx pat = PATTERN (insn);
2994 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2995 return 0;
2996 else if (GET_CODE (pat) == PARALLEL)
2998 int len = XVECLEN (pat, 0);
2999 int has_use_labelref = 0;
3001 for (i = len - 1; i >= 0; i--)
3002 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
3003 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
3004 == LABEL_REF))
3005 has_use_labelref = 1;
3007 if (! has_use_labelref)
3008 for (i = len - 1; i >= 0; i--)
3009 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
3010 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
3011 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
3012 return 1;
3014 else if (GET_CODE (pat) == SET
3015 && SET_DEST (pat) == pc_rtx
3016 && computed_jump_p_1 (SET_SRC (pat)))
3017 return 1;
3019 return 0;
3022 /* Traverse X via depth-first search, calling F for each
3023 sub-expression (including X itself). F is also passed the DATA.
3024 If F returns -1, do not traverse sub-expressions, but continue
3025 traversing the rest of the tree. If F ever returns any other
3026 nonzero value, stop the traversal, and return the value returned
3027 by F. Otherwise, return 0. This function does not traverse inside
3028 tree structure that contains RTX_EXPRs, or into sub-expressions
3029 whose format code is `0' since it is not known whether or not those
3030 codes are actually RTL.
3032 This routine is very general, and could (should?) be used to
3033 implement many of the other routines in this file. */
3036 for_each_rtx (x, f, data)
3037 rtx *x;
3038 rtx_function f;
3039 void *data;
3041 int result;
3042 int length;
3043 const char *format;
3044 int i;
3046 /* Call F on X. */
3047 result = (*f) (x, data);
3048 if (result == -1)
3049 /* Do not traverse sub-expressions. */
3050 return 0;
3051 else if (result != 0)
3052 /* Stop the traversal. */
3053 return result;
3055 if (*x == NULL_RTX)
3056 /* There are no sub-expressions. */
3057 return 0;
3059 length = GET_RTX_LENGTH (GET_CODE (*x));
3060 format = GET_RTX_FORMAT (GET_CODE (*x));
3062 for (i = 0; i < length; ++i)
3064 switch (format[i])
3066 case 'e':
3067 result = for_each_rtx (&XEXP (*x, i), f, data);
3068 if (result != 0)
3069 return result;
3070 break;
3072 case 'V':
3073 case 'E':
3074 if (XVEC (*x, i) != 0)
3076 int j;
3077 for (j = 0; j < XVECLEN (*x, i); ++j)
3079 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
3080 if (result != 0)
3081 return result;
3084 break;
3086 default:
3087 /* Nothing to do. */
3088 break;
3093 return 0;
3096 /* Searches X for any reference to REGNO, returning the rtx of the
3097 reference found if any. Otherwise, returns NULL_RTX. */
3100 regno_use_in (regno, x)
3101 unsigned int regno;
3102 rtx x;
3104 const char *fmt;
3105 int i, j;
3106 rtx tem;
3108 if (GET_CODE (x) == REG && REGNO (x) == regno)
3109 return x;
3111 fmt = GET_RTX_FORMAT (GET_CODE (x));
3112 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3114 if (fmt[i] == 'e')
3116 if ((tem = regno_use_in (regno, XEXP (x, i))))
3117 return tem;
3119 else if (fmt[i] == 'E')
3120 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3121 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3122 return tem;
3125 return NULL_RTX;
3128 /* Return a value indicating whether OP, an operand of a commutative
3129 operation, is preferred as the first or second operand. The higher
3130 the value, the stronger the preference for being the first operand.
3131 Negative values indicate a preference for being the second operand,
3132 positive values a preference for being the first operand. */
3135 commutative_operand_precedence (op)
3136 rtx op;
3138 /* Constants always become the second operand. Prefer "nice" constants. */
3139 if (GET_CODE (op) == CONST_INT)
3140 return -5;
3141 if (GET_CODE (op) == CONST_DOUBLE)
3142 return -4;
3143 if (CONSTANT_P (op))
3144 return -3;
3146 /* SUBREGs of objects should come second. */
3147 if (GET_CODE (op) == SUBREG
3148 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
3149 return -2;
3151 /* If only one operand is a `neg', `not',
3152 `mult', `plus', or `minus' expression, it will be the first
3153 operand. */
3154 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
3155 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
3156 || GET_CODE (op) == MINUS)
3157 return 2;
3159 /* Complex expressions should come first, so decrease the priority
3160 of objects. */
3161 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
3162 return -1;
3163 return 0;
3166 /* Return 1 iff it is necessary to swap the operands of a commutative
3167 operation in order to canonicalize the expression. */
3170 swap_commutative_operands_p (x, y)
3171 rtx x, y;
3173 return (commutative_operand_precedence (x)
3174 < commutative_operand_precedence (y));
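/* A hypothetical illustration (not from rtlanal.c): the way callers such as
   simplify-rtx.c canonicalize a commutative operation.  With the precedences
   above, a CONST_INT always ends up as the second operand.  The helper name
   is made up.  */

static void
example_canonicalize_operands (op0p, op1p)
     rtx *op0p, *op1p;
{
  if (swap_commutative_operands_p (*op0p, *op1p))
    {
      rtx tem = *op0p;
      *op0p = *op1p;
      *op1p = tem;
    }
}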
3177 /* Return 1 if X is an autoincrement side effect and the register is
3178 not the stack pointer. */
3180 auto_inc_p (x)
3181 rtx x;
3183 switch (GET_CODE (x))
3185 case PRE_INC:
3186 case POST_INC:
3187 case PRE_DEC:
3188 case POST_DEC:
3189 case PRE_MODIFY:
3190 case POST_MODIFY:
3191 /* There are no REG_INC notes for SP. */
3192 if (XEXP (x, 0) != stack_pointer_rtx)
3193 return 1;
3194 default:
3195 break;
3197 return 0;
3200 /* Return 1 if the sequence of instructions beginning with FROM and up
3201 to and including TO is safe to move. If NEW_TO is non-NULL, and
3202 the sequence is not already safe to move, but can be easily
3203 extended to a sequence which is safe, then NEW_TO will point to the
3204 end of the extended sequence.
3206 For now, this function only checks that the region contains whole
3207 exception regions, but it could be extended to check additional
3208 conditions as well. */
3211 insns_safe_to_move_p (from, to, new_to)
3212 rtx from;
3213 rtx to;
3214 rtx *new_to;
3216 int eh_region_count = 0;
3217 int past_to_p = 0;
3218 rtx r = from;
3220 /* By default, assume the end of the region will be what was
3221 suggested. */
3222 if (new_to)
3223 *new_to = to;
3225 while (r)
3227 if (GET_CODE (r) == NOTE)
3229 switch (NOTE_LINE_NUMBER (r))
3231 case NOTE_INSN_EH_REGION_BEG:
3232 ++eh_region_count;
3233 break;
3235 case NOTE_INSN_EH_REGION_END:
3236 if (eh_region_count == 0)
3237 /* This sequence of instructions contains the end of
3238 an exception region, but not the beginning. Moving
3239 it will cause chaos. */
3240 return 0;
3242 --eh_region_count;
3243 break;
3245 default:
3246 break;
3249 else if (past_to_p)
3250 /* If we've passed TO, and we see a non-note instruction, we
3251 can't extend the sequence to a movable sequence. */
3252 return 0;
3254 if (r == to)
3256 if (!new_to)
3257 /* It's OK to move the sequence if there were matched sets of
3258 exception region notes. */
3259 return eh_region_count == 0;
3261 past_to_p = 1;
3264 /* It's OK to move the sequence if there were matched sets of
3265 exception region notes. */
3266 if (past_to_p && eh_region_count == 0)
3268 *new_to = r;
3269 return 1;
3272 /* Go to the next instruction. */
3273 r = NEXT_INSN (r);
3276 return 0;
3279 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3281 loc_mentioned_in_p (loc, in)
3282 rtx *loc, in;
3284 enum rtx_code code = GET_CODE (in);
3285 const char *fmt = GET_RTX_FORMAT (code);
3286 int i, j;
3288 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3290 if (loc == &in->fld[i].rtx)
3291 return 1;
3292 if (fmt[i] == 'e')
3294 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3295 return 1;
3297 else if (fmt[i] == 'E')
3298 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3299 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3300 return 1;
3302 return 0;
3305 /* Given a subreg X, return the bit offset where the subreg begins
3306 (counting from the least significant bit of the reg). */
3308 unsigned int
3309 subreg_lsb (x)
3310 rtx x;
3312 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3313 enum machine_mode mode = GET_MODE (x);
3314 unsigned int bitpos;
3315 unsigned int byte;
3316 unsigned int word;
3318 /* A paradoxical subreg begins at bit position 0. */
3319 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3320 return 0;
3322 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3323 /* If the subreg crosses a word boundary ensure that
3324 it also begins and ends on a word boundary. */
3325 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3326 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3327 && (SUBREG_BYTE (x) % UNITS_PER_WORD
3328 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3329 abort ();
3331 if (WORDS_BIG_ENDIAN)
3332 word = (GET_MODE_SIZE (inner_mode)
3333 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3334 else
3335 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3336 bitpos = word * BITS_PER_WORD;
3338 if (BYTES_BIG_ENDIAN)
3339 byte = (GET_MODE_SIZE (inner_mode)
3340 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3341 else
3342 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3343 bitpos += byte * BITS_PER_UNIT;
3345 return bitpos;
3348 /* This function returns the regno offset of a subreg expression.
3349 xregno - A regno of an inner hard subreg_reg (or what will become one).
3350 xmode - The mode of xregno.
3351 offset - The byte offset.
3352 ymode - The mode of a top level SUBREG (or what may become one).
3353 RETURN - The regno offset which would be used. */
3354 unsigned int
3355 subreg_regno_offset (xregno, xmode, offset, ymode)
3356 unsigned int xregno;
3357 enum machine_mode xmode;
3358 unsigned int offset;
3359 enum machine_mode ymode;
3361 int nregs_xmode, nregs_ymode;
3362 int mode_multiple, nregs_multiple;
3363 int y_offset;
3365 if (xregno >= FIRST_PSEUDO_REGISTER)
3366 abort ();
3368 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3369 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3371 /* If this is a big endian paradoxical subreg, which uses more actual
3372 hard registers than the original register, we must return a negative
3373 offset so that we find the proper highpart of the register. */
3374 if (offset == 0
3375 && nregs_ymode > nregs_xmode
3376 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3377 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3378 return nregs_xmode - nregs_ymode;
3380 if (offset == 0 || nregs_xmode == nregs_ymode)
3381 return 0;
3383 /* size of ymode must not be greater than the size of xmode. */
3384 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3385 if (mode_multiple == 0)
3386 abort ();
3388 y_offset = offset / GET_MODE_SIZE (ymode);
3389 nregs_multiple = nregs_xmode / nregs_ymode;
3390 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
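/* A worked example of the arithmetic above (illustration only, assuming
   4-byte hard registers, so HARD_REGNO_NREGS gives 2 for DImode and 1 for
   SImode): for (subreg:SI (reg:DI R) 4) we get nregs_xmode = 2,
   nregs_ymode = 1, mode_multiple = 8 / 4 = 2, nregs_multiple = 2 / 1 = 2 and
   y_offset = 4 / 4 = 1, so the function returns (1 / (2 / 2)) * 1 == 1 and
   the subreg refers to hard register R + 1.  */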
3393 /* This function returns true when the offset is representable via
3394 subreg_offset in the given regno.
3395 xregno - A regno of an inner hard subreg_reg (or what will become one).
3396 xmode - The mode of xregno.
3397 offset - The byte offset.
3398 ymode - The mode of a top level SUBREG (or what may become one).
3399 RETURN - Whether the offset is representable. */
3400 bool
3401 subreg_offset_representable_p (xregno, xmode, offset, ymode)
3402 unsigned int xregno;
3403 enum machine_mode xmode;
3404 unsigned int offset;
3405 enum machine_mode ymode;
3407 int nregs_xmode, nregs_ymode;
3408 int mode_multiple, nregs_multiple;
3409 int y_offset;
3411 if (xregno >= FIRST_PSEUDO_REGISTER)
3412 abort ();
3414 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3415 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3417 /* paradoxical subregs are always valid. */
3418 if (offset == 0
3419 && nregs_ymode > nregs_xmode
3420 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3421 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3422 return true;
3424 /* Lowpart subregs are always valid. */
3425 if (offset == subreg_lowpart_offset (ymode, xmode))
3426 return true;
3428 #ifdef ENABLE_CHECKING
3429 /* This should always pass, otherwise we don't know how to verify the
3430 constraint. These conditions may be relaxed but subreg_offset would
3431 need to be redesigned. */
3432 if (GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)
3433 || GET_MODE_SIZE (ymode) % nregs_ymode
3434 || nregs_xmode % nregs_ymode)
3435 abort ();
3436 #endif
3438 /* The XMODE value can be seen as a vector of NREGS_XMODE
3439 values. The subreg must represent a lowpart of a given field.
3440 Compute which field it is. */
3441 offset -= subreg_lowpart_offset (ymode,
3442 mode_for_size (GET_MODE_BITSIZE (xmode)
3443 / nregs_xmode,
3444 MODE_INT, 0));
3446 /* size of ymode must not be greater than the size of xmode. */
3447 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3448 if (mode_multiple == 0)
3449 abort ();
3451 y_offset = offset / GET_MODE_SIZE (ymode);
3452 nregs_multiple = nregs_xmode / nregs_ymode;
3453 #ifdef ENABLE_CHECKING
3454 if (offset % GET_MODE_SIZE (ymode)
3455 || mode_multiple % nregs_multiple)
3456 abort ();
3457 #endif
3458 return (!(y_offset % (mode_multiple / nregs_multiple)));
3461 /* Return the final regno that a subreg expression refers to. */
3462 unsigned int
3463 subreg_regno (x)
3464 rtx x;
3466 unsigned int ret;
3467 rtx subreg = SUBREG_REG (x);
3468 int regno = REGNO (subreg);
3470 ret = regno + subreg_regno_offset (regno,
3471 GET_MODE (subreg),
3472 SUBREG_BYTE (x),
3473 GET_MODE (x));
3474 return ret;
3477 struct parms_set_data
3479 int nregs;
3480 HARD_REG_SET regs;
3483 /* Helper function for noticing stores to parameter registers. */
3484 static void
3485 parms_set (x, pat, data)
3486 rtx x, pat ATTRIBUTE_UNUSED;
3487 void *data;
3489 struct parms_set_data *d = data;
3490 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3491 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3493 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3494 d->nregs--;
3498 /* Look backward for the first parameter to be loaded.
3499 Do not skip BOUNDARY. */
3501 find_first_parameter_load (call_insn, boundary)
3502 rtx call_insn, boundary;
3504 struct parms_set_data parm;
3505 rtx p, before;
3507 /* Since different machines initialize their parameter registers
3508 in different orders, assume nothing. Collect the set of all
3509 parameter registers. */
3510 CLEAR_HARD_REG_SET (parm.regs);
3511 parm.nregs = 0;
3512 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3513 if (GET_CODE (XEXP (p, 0)) == USE
3514 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3516 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3517 abort ();
3519 /* We only care about registers which can hold function
3520 arguments. */
3521 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3522 continue;
3524 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3525 parm.nregs++;
3527 before = call_insn;
3529 /* Search backward for the first set of a register in this set. */
3530 while (parm.nregs && before != boundary)
3532 before = PREV_INSN (before);
3534 /* It is possible that some loads got CSEed from one call to
3535 another. Stop in that case. */
3536 if (GET_CODE (before) == CALL_INSN)
3537 break;
3539 /* Our caller must either ensure that we will find all sets
3540 (in case the code has not been optimized yet), or take care
3541 of possible labels by setting BOUNDARY to the preceding
3542 CODE_LABEL. */
3543 if (GET_CODE (before) == CODE_LABEL)
3545 if (before != boundary)
3546 abort ();
3547 break;
3550 if (INSN_P (before))
3551 note_stores (PATTERN (before), parms_set, &parm);
3553 return before;
3556 /* Return true if we should avoid inserting code between INSN and the
3557 preceding call instruction. */
3559 bool
3560 keep_with_call_p (insn)
3561 rtx insn;
3563 rtx set;
3565 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3567 if (GET_CODE (SET_DEST (set)) == REG
3568 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3569 && fixed_regs[REGNO (SET_DEST (set))]
3570 && general_operand (SET_SRC (set), VOIDmode))
3571 return true;
3572 if (GET_CODE (SET_SRC (set)) == REG
3573 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3574 && GET_CODE (SET_DEST (set)) == REG
3575 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3576 return true;
3577 /* There may be a stack pop just after the call and before the store
3578 of the return register. Search for the actual store when deciding
3579 if we can break or not. */
3580 if (SET_DEST (set) == stack_pointer_rtx)
3582 rtx i2 = next_nonnote_insn (insn);
3583 if (i2 && keep_with_call_p (i2))
3584 return true;
3587 return false;
3590 /* Return true when a store to register X can be hoisted to the place
3591 with LIVE registers (LIVE can be NULL). VAL contains the destination
3592 whose value will be used. */
3594 static bool
3595 hoist_test_store (x, val, live)
3596 rtx x, val;
3597 regset live;
3599 if (GET_CODE (x) == SCRATCH)
3600 return true;
3602 if (rtx_equal_p (x, val))
3603 return true;
3605 /* Allow a SUBREG of X provided it is not writing just part of a multireg
3606 pseudo; otherwise we would need to update all users to take the hoisted
3607 store into account. The caller may represent that by specifying the whole SUBREG as VAL. */
3609 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3611 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3612 && GET_MODE_BITSIZE (GET_MODE (x)) <
3613 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3614 return false;
3615 return true;
3617 if (GET_CODE (x) == SUBREG)
3618 x = SUBREG_REG (x);
3620 /* Anything except a register store is not hoistable. This includes
3621 partial stores to registers. */
3623 if (!REG_P (x))
3624 return false;
3626 /* Pseudo registers can always be replaced by another pseudo to avoid
3627 the side effect; for hard registers we must ensure that they are dead.
3628 Eventually we may want to add code that tries to turn pseudos into hard
3629 registers, but that is unlikely to be useful. */
3631 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3633 int regno = REGNO (x);
3634 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3636 if (!live)
3637 return false;
3638 if (REGNO_REG_SET_P (live, regno))
3639 return false;
3640 while (--n > 0)
3641 if (REGNO_REG_SET_P (live, regno + n))
3642 return false;
3644 return true;
3648 /* Return true if INSN can be hoisted to a place with LIVE hard registers
3649 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3650 and used by the hoisting pass. */
3652 bool
3653 can_hoist_insn_p (insn, val, live)
3654 rtx insn, val;
3655 regset live;
3657 rtx pat = PATTERN (insn);
3658 int i;
3660 /* It is probably not worth the complexity to handle multiple-set
3661 stores. */
3662 if (!single_set (insn))
3663 return false;
3664 /* We could move a CALL_INSN, but we would need to check that all
3665 caller-clobbered regs are dead; give up for now. */
3666 if (GET_CODE (insn) == CALL_INSN)
3667 return false;
3668 /* In future we will handle hoisting of libcall sequences, but
3669 give up for now. */
3670 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3671 return false;
3672 switch (GET_CODE (pat))
3674 case SET:
3675 if (!hoist_test_store (SET_DEST (pat), val, live))
3676 return false;
3677 break;
3678 case USE:
3679 /* USEs have tricky semantics, so do not move them. */
3680 return false;
3681 break;
3682 case CLOBBER:
3683 if (!hoist_test_store (XEXP (pat, 0), val, live))
3684 return false;
3685 break;
3686 case PARALLEL:
3687 for (i = 0; i < XVECLEN (pat, 0); i++)
3689 rtx x = XVECEXP (pat, 0, i);
3690 switch (GET_CODE (x))
3692 case SET:
3693 if (!hoist_test_store (SET_DEST (x), val, live))
3694 return false;
3695 break;
3696 case USE:
3697 /* We need to fix callers to really ensure availability
3698 of all values the insn uses, but for now it is safe to prohibit
3699 hoisting of any insn having such hidden uses. */
3700 return false;
3701 break;
3702 case CLOBBER:
3703 if (!hoist_test_store (SET_DEST (x), val, live))
3704 return false;
3705 break;
3706 default:
3707 break;
3710 break;
3711 default:
3712 abort ();
3714 return true;
3717 /* Update the store after hoisting - replace all stores to pseudo registers
3718 by new ones to avoid clobbering of values, except for the store to VAL, which
3719 will be updated to NEW. */
3721 static void
3722 hoist_update_store (insn, xp, val, new)
3723 rtx insn, *xp, val, new;
3725 rtx x = *xp;
3727 if (GET_CODE (x) == SCRATCH)
3728 return;
3730 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3731 validate_change (insn, xp,
3732 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3733 SUBREG_BYTE (x)), 1);
3734 if (rtx_equal_p (x, val))
3736 validate_change (insn, xp, new, 1);
3737 return;
3739 if (GET_CODE (x) == SUBREG)
3741 xp = &SUBREG_REG (x);
3742 x = *xp;
3745 if (!REG_P (x))
3746 abort ();
3748 /* We've verified that hard registers are dead, so we may keep the side
3749 effect. Otherwise replace it by a new pseudo. */
3750 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3751 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3752 REG_NOTES (insn)
3753 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3756 /* Create a copy of INSN after AFTER, replacing the store of VAL with a store
3757 to NEW and each other side effect to a pseudo register by a new pseudo register. */
3760 hoist_insn_after (insn, after, val, new)
3761 rtx insn, after, val, new;
3763 rtx pat;
3764 int i;
3765 rtx note;
3767 insn = emit_copy_of_insn_after (insn, after);
3768 pat = PATTERN (insn);
3770 /* Remove REG_UNUSED notes as we will re-emit them. */
3771 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3772 remove_note (insn, note);
3774 /* To get this working, callers must make sure to move everything referenced
3775 by the REG_EQUAL/REG_EQUIV notes too. Let's remove them; it is probably
3776 easier. */
3777 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3778 remove_note (insn, note);
3779 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3780 remove_note (insn, note);
3782 /* Remove REG_DEAD notes as they might not be valid anymore in case
3783 we create redundancy. */
3784 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3785 remove_note (insn, note);
3786 switch (GET_CODE (pat))
3788 case SET:
3789 hoist_update_store (insn, &SET_DEST (pat), val, new);
3790 break;
3791 case USE:
3792 break;
3793 case CLOBBER:
3794 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3795 break;
3796 case PARALLEL:
3797 for (i = 0; i < XVECLEN (pat, 0); i++)
3799 rtx x = XVECEXP (pat, 0, i);
3800 switch (GET_CODE (x))
3802 case SET:
3803 hoist_update_store (insn, &SET_DEST (x), val, new);
3804 break;
3805 case USE:
3806 break;
3807 case CLOBBER:
3808 hoist_update_store (insn, &SET_DEST (x), val, new);
3809 break;
3810 default:
3811 break;
3814 break;
3815 default:
3816 abort ();
3818 if (!apply_change_group ())
3819 abort ();
3821 return insn;
3825 hoist_insn_to_edge (insn, e, val, new)
3826 rtx insn, val, new;
3827 edge e;
3829 rtx new_insn;
3831 /* We cannot insert instructions on an abnormal critical edge.
3832 It will be easier to find the culprit if we die now. */
3833 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3834 abort ();
3836 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3837 stuff. We also emit CALL_INSNs and friends. */
3838 if (e->insns == NULL_RTX)
3840 start_sequence ();
3841 emit_note (NULL, NOTE_INSN_DELETED);
3843 else
3844 push_to_sequence (e->insns);
3846 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3848 e->insns = get_insns ();
3849 end_sequence ();
3850 return new_insn;