1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "tm_p.h"
33 #include "flags.h"
34 #include "basic-block.h"
35 #include "real.h"
37 /* Forward declarations */
38 static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
39 static void set_of_1 PARAMS ((rtx, rtx, void *));
40 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
41 static int computed_jump_p_1 PARAMS ((rtx));
42 static void parms_set PARAMS ((rtx, rtx, void *));
43 static bool hoist_test_store PARAMS ((rtx, rtx, regset));
44 static void hoist_update_store PARAMS ((rtx, rtx *, rtx, rtx));
46 /* Bit flags that specify the machine subtype we are compiling for.
47 Bits are tested using macros TARGET_... defined in the tm.h file
48 and set by `-m...' switches. Must be defined in rtlanal.c. */
50 int target_flags;
52 /* Return 1 if the value of X is unstable
53 (would be different at a different point in the program).
54 The frame pointer, arg pointer, etc. are considered stable
55 (within one function) and so is anything marked `unchanging'. */
57 int
58 rtx_unstable_p (x)
59 rtx x;
61 RTX_CODE code = GET_CODE (x);
62 int i;
63 const char *fmt;
65 switch (code)
67 case MEM:
68 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
70 case QUEUED:
71 return 1;
73 case ADDRESSOF:
74 case CONST:
75 case CONST_INT:
76 case CONST_DOUBLE:
77 case CONST_VECTOR:
78 case SYMBOL_REF:
79 case LABEL_REF:
80 return 0;
82 case REG:
83 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
84 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
85 /* The arg pointer varies if it is not a fixed register. */
86 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
87 || RTX_UNCHANGING_P (x))
88 return 0;
89 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
90 /* ??? When call-clobbered, the value is stable modulo the restore
91 that must happen after a call. This currently screws up local-alloc
92 into believing that the restore is not needed. */
93 if (x == pic_offset_table_rtx)
94 return 0;
95 #endif
96 return 1;
98 case ASM_OPERANDS:
99 if (MEM_VOLATILE_P (x))
100 return 1;
102 /* FALLTHROUGH */
104 default:
105 break;
108 fmt = GET_RTX_FORMAT (code);
109 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
110 if (fmt[i] == 'e')
112 if (rtx_unstable_p (XEXP (x, i)))
113 return 1;
115 else if (fmt[i] == 'E')
117 int j;
118 for (j = 0; j < XVECLEN (x, i); j++)
119 if (rtx_unstable_p (XVECEXP (x, i, j)))
120 return 1;
123 return 0;
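/* Illustrative sketch (not part of the original file): a pass that wants to
   reuse the value of a memory reference at a different point in the same
   function might guard the transformation with rtx_unstable_p.  The helper
   name `can_reuse_mem_value_p' is hypothetical.  */
#if 0
static int
can_reuse_mem_value_p (mem)
     rtx mem;
{
  /* An unchanging MEM whose address is stable keeps its value within
     one function.  */
  return GET_CODE (mem) == MEM && ! rtx_unstable_p (mem);
}
#endif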
126 /* Return 1 if X has a value that can vary even between two
127 executions of the program. 0 means X can be compared reliably
128 against certain constants or near-constants.
129 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
130 zero, we are slightly more conservative.
131 The frame pointer and the arg pointer are considered constant. */
134 rtx_varies_p (x, for_alias)
135 rtx x;
136 int for_alias;
138 RTX_CODE code = GET_CODE (x);
139 int i;
140 const char *fmt;
142 switch (code)
144 case MEM:
145 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
147 case QUEUED:
148 return 1;
150 case CONST:
151 case CONST_INT:
152 case CONST_DOUBLE:
153 case CONST_VECTOR:
154 case SYMBOL_REF:
155 case LABEL_REF:
156 return 0;
158 case ADDRESSOF:
159 /* This will resolve to some offset from the frame pointer. */
160 return 0;
162 case REG:
163 /* Note that we have to test for the actual rtx used for the frame
164 and arg pointers and not just the register number in case we have
165 eliminated the frame and/or arg pointer and are using it
166 for pseudos. */
167 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
168 /* The arg pointer varies if it is not a fixed register. */
169 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
170 return 0;
171 if (x == pic_offset_table_rtx
172 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
173 /* ??? When call-clobbered, the value is stable modulo the restore
174 that must happen after a call. This currently screws up
175 local-alloc into believing that the restore is not needed, so we
176 must return 0 only if we are called from alias analysis. */
177 && for_alias
178 #endif
180 return 0;
181 return 1;
183 case LO_SUM:
184 /* The operand 0 of a LO_SUM is considered constant
185 (in fact it is related specifically to operand 1)
186 during alias analysis. */
187 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
188 || rtx_varies_p (XEXP (x, 1), for_alias);
190 case ASM_OPERANDS:
191 if (MEM_VOLATILE_P (x))
192 return 1;
194 /* FALLTHROUGH */
196 default:
197 break;
200 fmt = GET_RTX_FORMAT (code);
201 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
202 if (fmt[i] == 'e')
204 if (rtx_varies_p (XEXP (x, i), for_alias))
205 return 1;
207 else if (fmt[i] == 'E')
209 int j;
210 for (j = 0; j < XVECLEN (x, i); j++)
211 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
212 return 1;
215 return 0;
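/* Illustrative sketch (not part of the original file): rtx_unstable_p asks
   whether a value can change within one function, while rtx_varies_p asks
   whether it can differ between executions of the program; alias analysis
   passes FOR_ALIAS == 1 to get the slightly less conservative answer.  The
   helper name `address_comparable_to_constant_p' is hypothetical.  */
#if 0
static int
address_comparable_to_constant_p (addr, in_alias_analysis)
     rtx addr;
     int in_alias_analysis;
{
  return ! rtx_varies_p (addr, in_alias_analysis);
}
#endif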
218 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
221 rtx_addr_can_trap_p (x)
222 rtx x;
224 enum rtx_code code = GET_CODE (x);
226 switch (code)
228 case SYMBOL_REF:
229 return SYMBOL_REF_WEAK (x);
231 case LABEL_REF:
232 return 0;
234 case ADDRESSOF:
235 /* This will resolve to some offset from the frame pointer. */
236 return 0;
238 case REG:
239 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
240 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
241 || x == stack_pointer_rtx
242 /* The arg pointer varies if it is not a fixed register. */
243 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
244 return 0;
245 /* All of the virtual frame registers are stack references. */
246 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
247 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
248 return 0;
249 return 1;
251 case CONST:
252 return rtx_addr_can_trap_p (XEXP (x, 0));
254 case PLUS:
255 /* An address is assumed not to trap if it is an address that can't
256 trap plus a constant integer or it is the pic register plus a
257 constant. */
258 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
259 && GET_CODE (XEXP (x, 1)) == CONST_INT)
260 || (XEXP (x, 0) == pic_offset_table_rtx
261 && CONSTANT_P (XEXP (x, 1))));
263 case LO_SUM:
264 case PRE_MODIFY:
265 return rtx_addr_can_trap_p (XEXP (x, 1));
267 case PRE_DEC:
268 case PRE_INC:
269 case POST_DEC:
270 case POST_INC:
271 case POST_MODIFY:
272 return rtx_addr_can_trap_p (XEXP (x, 0));
274 default:
275 break;
278 /* If it isn't one of the cases above, it can cause a trap. */
279 return 1;
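/* Illustrative sketch (not part of the original file): a pass that wants to
   execute a load speculatively could test the address with
   rtx_addr_can_trap_p first.  The helper name `load_is_speculatable_p' is
   hypothetical.  */
#if 0
static int
load_is_speculatable_p (mem)
     rtx mem;
{
  return GET_CODE (mem) == MEM
	 && ! MEM_VOLATILE_P (mem)
	 && ! rtx_addr_can_trap_p (XEXP (mem, 0));
}
#endif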
282 /* Return true if X is an address that is known to not be zero. */
284 bool
285 nonzero_address_p (x)
286 rtx x;
288 enum rtx_code code = GET_CODE (x);
290 switch (code)
292 case SYMBOL_REF:
293 return !SYMBOL_REF_WEAK (x);
295 case LABEL_REF:
296 return true;
298 case ADDRESSOF:
299 /* This will resolve to some offset from the frame pointer. */
300 return true;
302 case REG:
303 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
304 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
305 || x == stack_pointer_rtx
306 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
307 return true;
308 /* All of the virtual frame registers are stack references. */
309 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
310 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
311 return true;
312 return false;
314 case CONST:
315 return nonzero_address_p (XEXP (x, 0));
317 case PLUS:
318 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
320 /* Pointers aren't allowed to wrap. If we've got a register
321 that is known to be a pointer, and a positive offset, then
322 the composite can't be zero. */
323 if (INTVAL (XEXP (x, 1)) > 0
324 && REG_P (XEXP (x, 0))
325 && REG_POINTER (XEXP (x, 0)))
326 return true;
328 return nonzero_address_p (XEXP (x, 0));
330 /* Handle PIC references. */
331 else if (XEXP (x, 0) == pic_offset_table_rtx
332 && CONSTANT_P (XEXP (x, 1)))
333 return true;
334 return false;
336 case PRE_MODIFY:
337 /* Similar to the above; allow positive offsets. Further, since
338 auto-inc is only allowed in memories, the register must be a
339 pointer. */
340 if (GET_CODE (XEXP (x, 1)) == CONST_INT
341 && INTVAL (XEXP (x, 1)) > 0)
342 return true;
343 return nonzero_address_p (XEXP (x, 0));
345 case PRE_INC:
346 /* Similarly. Further, the offset is always positive. */
347 return true;
349 case PRE_DEC:
350 case POST_DEC:
351 case POST_INC:
352 case POST_MODIFY:
353 return nonzero_address_p (XEXP (x, 0));
355 case LO_SUM:
356 return nonzero_address_p (XEXP (x, 1));
358 default:
359 break;
362 /* If it isn't one of the cases above, it might be zero. */
363 return false;
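/* Illustrative sketch (not part of the original file): when the address of a
   MEM is known to be nonzero, a comparison of that address against zero can
   be folded away.  The helper name `addr_compare_to_zero_known_false_p' is
   hypothetical.  */
#if 0
static int
addr_compare_to_zero_known_false_p (addr)
     rtx addr;
{
  /* (eq ADDR (const_int 0)) is known to be false when ADDR cannot be zero.  */
  return nonzero_address_p (addr);
}
#endif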
366 /* Return 1 if X refers to a memory location whose address
367 cannot be compared reliably with constant addresses,
368 or if X refers to a BLKmode memory object.
369 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
370 zero, we are slightly more conservative. */
373 rtx_addr_varies_p (x, for_alias)
374 rtx x;
375 int for_alias;
377 enum rtx_code code;
378 int i;
379 const char *fmt;
381 if (x == 0)
382 return 0;
384 code = GET_CODE (x);
385 if (code == MEM)
386 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
388 fmt = GET_RTX_FORMAT (code);
389 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
390 if (fmt[i] == 'e')
392 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
393 return 1;
395 else if (fmt[i] == 'E')
397 int j;
398 for (j = 0; j < XVECLEN (x, i); j++)
399 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
400 return 1;
402 return 0;
405 /* Return the value of the integer term in X, if one is apparent;
406 otherwise return 0.
407 Only obvious integer terms are detected.
408 This is used in cse.c with the `related_value' field. */
410 HOST_WIDE_INT
411 get_integer_term (x)
412 rtx x;
414 if (GET_CODE (x) == CONST)
415 x = XEXP (x, 0);
417 if (GET_CODE (x) == MINUS
418 && GET_CODE (XEXP (x, 1)) == CONST_INT)
419 return - INTVAL (XEXP (x, 1));
420 if (GET_CODE (x) == PLUS
421 && GET_CODE (XEXP (x, 1)) == CONST_INT)
422 return INTVAL (XEXP (x, 1));
423 return 0;
426 /* If X is a constant, return the value sans apparent integer term;
427 otherwise return 0.
428 Only obvious integer terms are detected. */
431 get_related_value (x)
432 rtx x;
434 if (GET_CODE (x) != CONST)
435 return 0;
436 x = XEXP (x, 0);
437 if (GET_CODE (x) == PLUS
438 && GET_CODE (XEXP (x, 1)) == CONST_INT)
439 return XEXP (x, 0);
440 else if (GET_CODE (x) == MINUS
441 && GET_CODE (XEXP (x, 1)) == CONST_INT)
442 return XEXP (x, 0);
443 return 0;
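/* Illustrative sketch (not part of the original file): callers such as cse.c
   use these two helpers together to split (const (plus SYM N)) into the
   symbolic part and the integer term.  The helper name
   `split_symbolic_constant' is hypothetical.  */
#if 0
static void
split_symbolic_constant (x, pbase, poffset)
     rtx x;
     rtx *pbase;
     HOST_WIDE_INT *poffset;
{
  *pbase = get_related_value (x);	/* SYM part, or 0 if X has no such form.  */
  *poffset = get_integer_term (x);	/* N, or 0 if no integer term is apparent.  */
}
#endif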
446 /* Given a tablejump insn INSN, return the RTL expression for the offset
447 into the jump table. If the offset cannot be determined, then return
448 NULL_RTX.
450 If EARLIEST is nonzero, it is a pointer to a place where the earliest
451 insn used in locating the offset was found. */
454 get_jump_table_offset (insn, earliest)
455 rtx insn;
456 rtx *earliest;
458 rtx label;
459 rtx table;
460 rtx set;
461 rtx old_insn;
462 rtx x;
463 rtx old_x;
464 rtx y;
465 rtx old_y;
466 int i;
468 if (GET_CODE (insn) != JUMP_INSN
469 || ! (label = JUMP_LABEL (insn))
470 || ! (table = NEXT_INSN (label))
471 || GET_CODE (table) != JUMP_INSN
472 || (GET_CODE (PATTERN (table)) != ADDR_VEC
473 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
474 || ! (set = single_set (insn)))
475 return NULL_RTX;
477 x = SET_SRC (set);
479 /* Some targets (e.g., ARM) emit a tablejump that also
480 contains the out-of-range target. */
481 if (GET_CODE (x) == IF_THEN_ELSE
482 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
483 x = XEXP (x, 1);
485 /* Search backwards and locate the expression stored in X. */
486 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
487 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
490 /* If X is an expression using a relative address then strip
491 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
492 or the jump table label. */
493 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
494 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
496 for (i = 0; i < 2; i++)
498 old_insn = insn;
499 y = XEXP (x, i);
501 if (y == pc_rtx || y == pic_offset_table_rtx)
502 break;
504 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
505 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
508 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
509 break;
512 if (i >= 2)
513 return NULL_RTX;
515 x = XEXP (x, 1 - i);
517 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
518 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
522 /* Strip off any sign or zero extension. */
523 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
525 x = XEXP (x, 0);
527 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
528 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
532 /* If X isn't a MEM then this isn't a tablejump we understand. */
533 if (GET_CODE (x) != MEM)
534 return NULL_RTX;
536 /* Strip off the MEM. */
537 x = XEXP (x, 0);
539 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
540 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
543 /* If X isn't a PLUS then this isn't a tablejump we understand. */
544 if (GET_CODE (x) != PLUS)
545 return NULL_RTX;
547 /* At this point we should have an expression representing the jump table
548 plus an offset. Examine each operand in order to determine which one
549 represents the jump table. Knowing that tells us that the other operand
550 must represent the offset. */
551 for (i = 0; i < 2; i++)
553 old_insn = insn;
554 y = XEXP (x, i);
556 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
557 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
560 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
561 && reg_mentioned_p (label, y))
562 break;
565 if (i >= 2)
566 return NULL_RTX;
568 x = XEXP (x, 1 - i);
570 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
571 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
572 for (i = 0; i < 2; i++)
573 if (XEXP (x, i) == pic_offset_table_rtx)
575 x = XEXP (x, 1 - i);
576 break;
579 if (earliest)
580 *earliest = insn;
582 /* Return the RTL expression representing the offset. */
583 return x;
586 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
587 a global register. */
589 static int
590 global_reg_mentioned_p_1 (loc, data)
591 rtx *loc;
592 void *data ATTRIBUTE_UNUSED;
594 int regno;
595 rtx x = *loc;
597 if (! x)
598 return 0;
600 switch (GET_CODE (x))
602 case SUBREG:
603 if (GET_CODE (SUBREG_REG (x)) == REG)
605 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
606 && global_regs[subreg_regno (x)])
607 return 1;
608 return 0;
610 break;
612 case REG:
613 regno = REGNO (x);
614 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
615 return 1;
616 return 0;
618 case SCRATCH:
619 case PC:
620 case CC0:
621 case CONST_INT:
622 case CONST_DOUBLE:
623 case CONST:
624 case LABEL_REF:
625 return 0;
627 case CALL:
628 /* A non-constant call might use a global register. */
629 return 1;
631 default:
632 break;
635 return 0;
638 /* Returns nonzero if X mentions a global register. */
641 global_reg_mentioned_p (x)
642 rtx x;
645 if (INSN_P (x))
647 if (GET_CODE (x) == CALL_INSN)
649 if (! CONST_OR_PURE_CALL_P (x))
650 return 1;
651 x = CALL_INSN_FUNCTION_USAGE (x);
652 if (x == 0)
653 return 0;
655 else
656 x = PATTERN (x);
659 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
662 /* Return the number of places FIND appears within X. If COUNT_DEST is
663 zero, we do not count occurrences inside the destination of a SET. */
666 count_occurrences (x, find, count_dest)
667 rtx x, find;
668 int count_dest;
670 int i, j;
671 enum rtx_code code;
672 const char *format_ptr;
673 int count;
675 if (x == find)
676 return 1;
678 code = GET_CODE (x);
680 switch (code)
682 case REG:
683 case CONST_INT:
684 case CONST_DOUBLE:
685 case CONST_VECTOR:
686 case SYMBOL_REF:
687 case CODE_LABEL:
688 case PC:
689 case CC0:
690 return 0;
692 case MEM:
693 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
694 return 1;
695 break;
697 case SET:
698 if (SET_DEST (x) == find && ! count_dest)
699 return count_occurrences (SET_SRC (x), find, count_dest);
700 break;
702 default:
703 break;
706 format_ptr = GET_RTX_FORMAT (code);
707 count = 0;
709 for (i = 0; i < GET_RTX_LENGTH (code); i++)
711 switch (*format_ptr++)
713 case 'e':
714 count += count_occurrences (XEXP (x, i), find, count_dest);
715 break;
717 case 'E':
718 for (j = 0; j < XVECLEN (x, i); j++)
719 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
720 break;
723 return count;
726 /* Nonzero if register REG appears somewhere within IN.
727 Also works if REG is not a register; in this case it checks
728 for a subexpression of IN that is Lisp "equal" to REG. */
731 reg_mentioned_p (reg, in)
732 rtx reg, in;
734 const char *fmt;
735 int i;
736 enum rtx_code code;
738 if (in == 0)
739 return 0;
741 if (reg == in)
742 return 1;
744 if (GET_CODE (in) == LABEL_REF)
745 return reg == XEXP (in, 0);
747 code = GET_CODE (in);
749 switch (code)
751 /* Compare registers by number. */
752 case REG:
753 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
755 /* These codes have no constituent expressions
756 and are unique. */
757 case SCRATCH:
758 case CC0:
759 case PC:
760 return 0;
762 case CONST_INT:
763 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
765 case CONST_VECTOR:
766 case CONST_DOUBLE:
767 /* These are kept unique for a given value. */
768 return 0;
770 default:
771 break;
774 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
775 return 1;
777 fmt = GET_RTX_FORMAT (code);
779 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
781 if (fmt[i] == 'E')
783 int j;
784 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
785 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
786 return 1;
788 else if (fmt[i] == 'e'
789 && reg_mentioned_p (reg, XEXP (in, i)))
790 return 1;
792 return 0;
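/* Illustrative sketch (not part of the original file): reg_mentioned_p is the
   usual way to ask whether a pseudo appears anywhere in an insn's pattern.
   The helper name `insn_mentions_reg_p' is hypothetical.  */
#if 0
static int
insn_mentions_reg_p (insn, reg)
     rtx insn, reg;
{
  return INSN_P (insn) && reg_mentioned_p (reg, PATTERN (insn));
}
#endif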
795 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
796 no CODE_LABEL insn. */
799 no_labels_between_p (beg, end)
800 rtx beg, end;
802 rtx p;
803 if (beg == end)
804 return 0;
805 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
806 if (GET_CODE (p) == CODE_LABEL)
807 return 0;
808 return 1;
811 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
812 no JUMP_INSN insn. */
815 no_jumps_between_p (beg, end)
816 rtx beg, end;
818 rtx p;
819 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
820 if (GET_CODE (p) == JUMP_INSN)
821 return 0;
822 return 1;
825 /* Nonzero if register REG is used in an insn between
826 FROM_INSN and TO_INSN (exclusive of those two). */
829 reg_used_between_p (reg, from_insn, to_insn)
830 rtx reg, from_insn, to_insn;
832 rtx insn;
834 if (from_insn == to_insn)
835 return 0;
837 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
838 if (INSN_P (insn)
839 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
840 || (GET_CODE (insn) == CALL_INSN
841 && (find_reg_fusage (insn, USE, reg)
842 || find_reg_fusage (insn, CLOBBER, reg)))))
843 return 1;
844 return 0;
847 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
848 is entirely replaced by a new value and the only use is as a SET_DEST,
849 we do not consider it a reference. */
852 reg_referenced_p (x, body)
853 rtx x;
854 rtx body;
856 int i;
858 switch (GET_CODE (body))
860 case SET:
861 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
862 return 1;
864 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
865 of a REG that occupies all of the REG, the insn references X if
866 it is mentioned in the destination. */
867 if (GET_CODE (SET_DEST (body)) != CC0
868 && GET_CODE (SET_DEST (body)) != PC
869 && GET_CODE (SET_DEST (body)) != REG
870 && ! (GET_CODE (SET_DEST (body)) == SUBREG
871 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
872 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
873 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
874 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
875 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
876 && reg_overlap_mentioned_p (x, SET_DEST (body)))
877 return 1;
878 return 0;
880 case ASM_OPERANDS:
881 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
882 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
883 return 1;
884 return 0;
886 case CALL:
887 case USE:
888 case IF_THEN_ELSE:
889 return reg_overlap_mentioned_p (x, body);
891 case TRAP_IF:
892 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
894 case PREFETCH:
895 return reg_overlap_mentioned_p (x, XEXP (body, 0));
897 case UNSPEC:
898 case UNSPEC_VOLATILE:
899 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
900 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
901 return 1;
902 return 0;
904 case PARALLEL:
905 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
906 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
907 return 1;
908 return 0;
910 case CLOBBER:
911 if (GET_CODE (XEXP (body, 0)) == MEM)
912 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
913 return 1;
914 return 0;
916 case COND_EXEC:
917 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
918 return 1;
919 return reg_referenced_p (x, COND_EXEC_CODE (body));
921 default:
922 return 0;
926 /* Nonzero if register REG is referenced in an insn between
927 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
928 not count. */
931 reg_referenced_between_p (reg, from_insn, to_insn)
932 rtx reg, from_insn, to_insn;
934 rtx insn;
936 if (from_insn == to_insn)
937 return 0;
939 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
940 if (INSN_P (insn)
941 && (reg_referenced_p (reg, PATTERN (insn))
942 || (GET_CODE (insn) == CALL_INSN
943 && find_reg_fusage (insn, USE, reg))))
944 return 1;
945 return 0;
948 /* Nonzero if register REG is set or clobbered in an insn between
949 FROM_INSN and TO_INSN (exclusive of those two). */
952 reg_set_between_p (reg, from_insn, to_insn)
953 rtx reg, from_insn, to_insn;
955 rtx insn;
957 if (from_insn == to_insn)
958 return 0;
960 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
961 if (INSN_P (insn) && reg_set_p (reg, insn))
962 return 1;
963 return 0;
966 /* Internals of reg_set_between_p. */
968 reg_set_p (reg, insn)
969 rtx reg, insn;
971 /* We can be passed an insn or part of one. If we are passed an insn,
972 check if a side-effect of the insn clobbers REG. */
973 if (INSN_P (insn)
974 && (FIND_REG_INC_NOTE (insn, reg)
975 || (GET_CODE (insn) == CALL_INSN
976 /* We'd like to test call_used_regs here, but rtlanal.c can't
977 reference that variable due to its use in genattrtab. So
978 we'll just be more conservative.
980 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
981 information holds all clobbered registers. */
982 && ((GET_CODE (reg) == REG
983 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
984 || GET_CODE (reg) == MEM
985 || find_reg_fusage (insn, CLOBBER, reg)))))
986 return 1;
988 return set_of (reg, insn) != NULL_RTX;
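/* Illustrative sketch (not part of the original file): combining these
   predicates answers the common question "does REG still hold at TO the
   value it had after FROM?" for two insns in the same basic block.  The
   helper name `reg_value_reaches_p' is hypothetical.  */
#if 0
static int
reg_value_reaches_p (reg, from, to)
     rtx reg, from, to;
{
  return ! reg_set_between_p (reg, from, to);
}
#endif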
991 /* Similar to reg_set_between_p, but check all registers in X. Return 0
992 only if none of them are modified between START and END. Do not
993 consider non-registers one way or the other. */
996 regs_set_between_p (x, start, end)
997 rtx x;
998 rtx start, end;
1000 enum rtx_code code = GET_CODE (x);
1001 const char *fmt;
1002 int i, j;
1004 switch (code)
1006 case CONST_INT:
1007 case CONST_DOUBLE:
1008 case CONST_VECTOR:
1009 case CONST:
1010 case SYMBOL_REF:
1011 case LABEL_REF:
1012 case PC:
1013 case CC0:
1014 return 0;
1016 case REG:
1017 return reg_set_between_p (x, start, end);
1019 default:
1020 break;
1023 fmt = GET_RTX_FORMAT (code);
1024 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1026 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
1027 return 1;
1029 else if (fmt[i] == 'E')
1030 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1031 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
1032 return 1;
1035 return 0;
1038 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1039 only if none of them are modified between START and END. Return 1 if
1040 X contains a MEM; this routine does use memory aliasing. */
1043 modified_between_p (x, start, end)
1044 rtx x;
1045 rtx start, end;
1047 enum rtx_code code = GET_CODE (x);
1048 const char *fmt;
1049 int i, j;
1050 rtx insn;
1052 if (start == end)
1053 return 0;
1055 switch (code)
1057 case CONST_INT:
1058 case CONST_DOUBLE:
1059 case CONST_VECTOR:
1060 case CONST:
1061 case SYMBOL_REF:
1062 case LABEL_REF:
1063 return 0;
1065 case PC:
1066 case CC0:
1067 return 1;
1069 case MEM:
1070 if (RTX_UNCHANGING_P (x))
1071 return 0;
1072 if (modified_between_p (XEXP (x, 0), start, end))
1073 return 1;
1074 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1075 if (memory_modified_in_insn_p (x, insn))
1076 return 1;
1077 return 0;
1078 break;
1080 case REG:
1081 return reg_set_between_p (x, start, end);
1083 default:
1084 break;
1087 fmt = GET_RTX_FORMAT (code);
1088 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1090 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1091 return 1;
1093 else if (fmt[i] == 'E')
1094 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1095 if (modified_between_p (XVECEXP (x, i, j), start, end))
1096 return 1;
1099 return 0;
1102 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1103 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1104 does use memory aliasing. */
1107 modified_in_p (x, insn)
1108 rtx x;
1109 rtx insn;
1111 enum rtx_code code = GET_CODE (x);
1112 const char *fmt;
1113 int i, j;
1115 switch (code)
1117 case CONST_INT:
1118 case CONST_DOUBLE:
1119 case CONST_VECTOR:
1120 case CONST:
1121 case SYMBOL_REF:
1122 case LABEL_REF:
1123 return 0;
1125 case PC:
1126 case CC0:
1127 return 1;
1129 case MEM:
1130 if (RTX_UNCHANGING_P (x))
1131 return 0;
1132 if (modified_in_p (XEXP (x, 0), insn))
1133 return 1;
1134 if (memory_modified_in_insn_p (x, insn))
1135 return 1;
1136 return 0;
1137 break;
1139 case REG:
1140 return reg_set_p (x, insn);
1142 default:
1143 break;
1146 fmt = GET_RTX_FORMAT (code);
1147 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1149 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1150 return 1;
1152 else if (fmt[i] == 'E')
1153 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1154 if (modified_in_p (XVECEXP (x, i, j), insn))
1155 return 1;
1158 return 0;
1161 /* Return true if anything in insn X is (anti,output,true) dependent on
1162 anything in insn Y. */
1165 insn_dependent_p (x, y)
1166 rtx x, y;
1168 rtx tmp;
1170 if (! INSN_P (x) || ! INSN_P (y))
1171 abort ();
1173 tmp = PATTERN (y);
1174 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1175 if (tmp == NULL_RTX)
1176 return 1;
1178 tmp = PATTERN (x);
1179 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1180 if (tmp == NULL_RTX)
1181 return 1;
1183 return 0;
1186 /* A helper routine for insn_dependent_p called through note_stores. */
1188 static void
1189 insn_dependent_p_1 (x, pat, data)
1190 rtx x;
1191 rtx pat ATTRIBUTE_UNUSED;
1192 void *data;
1194 rtx * pinsn = (rtx *) data;
1196 if (*pinsn && reg_mentioned_p (x, *pinsn))
1197 *pinsn = NULL_RTX;
1200 /* Helper function for set_of. */
1201 struct set_of_data
1203 rtx found;
1204 rtx pat;
1207 static void
1208 set_of_1 (x, pat, data1)
1209 rtx x;
1210 rtx pat;
1211 void *data1;
1213 struct set_of_data *data = (struct set_of_data *) (data1);
1214 if (rtx_equal_p (x, data->pat)
1215 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1216 data->found = pat;
1219 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1220 (either directly or via STRICT_LOW_PART and similar modifiers). */
1222 set_of (pat, insn)
1223 rtx pat, insn;
1225 struct set_of_data data;
1226 data.found = NULL_RTX;
1227 data.pat = pat;
1228 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1229 return data.found;
1232 /* Given an INSN, return a SET expression if this insn has only a single SET.
1233 It may also have CLOBBERs, USEs, or SETs whose output
1234 will not be used, which we ignore. */
1237 single_set_2 (insn, pat)
1238 rtx insn, pat;
1240 rtx set = NULL;
1241 int set_verified = 1;
1242 int i;
1244 if (GET_CODE (pat) == PARALLEL)
1246 for (i = 0; i < XVECLEN (pat, 0); i++)
1248 rtx sub = XVECEXP (pat, 0, i);
1249 switch (GET_CODE (sub))
1251 case USE:
1252 case CLOBBER:
1253 break;
1255 case SET:
1256 /* We can consider an insn with multiple sets, all but one of which
1257 are dead, to be a single-set insn. In the common case only a
1258 single set is present in the pattern, so we want to avoid checking
1259 for REG_UNUSED notes unless necessary.
1261 When we reach a set for the first time, we just assume it is the
1262 single set we are looking for; only when more sets are found in
1263 the insn do we check them. */
1264 if (!set_verified)
1266 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1267 && !side_effects_p (set))
1268 set = NULL;
1269 else
1270 set_verified = 1;
1272 if (!set)
1273 set = sub, set_verified = 0;
1274 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1275 || side_effects_p (sub))
1276 return NULL_RTX;
1277 break;
1279 default:
1280 return NULL_RTX;
1284 return set;
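/* Illustrative sketch (not part of the original file): most callers go
   through the single_set macro from rtl.h (as this file does in
   get_jump_table_offset and find_last_value), which handles the common
   one-SET pattern directly and falls back to single_set_2 otherwise.  A
   typical use is recognizing a plain register copy; the helper name
   `simple_reg_copy_p' is hypothetical.  */
#if 0
static int
simple_reg_copy_p (insn)
     rtx insn;
{
  rtx set = single_set (insn);
  return (set != 0
	  && GET_CODE (SET_DEST (set)) == REG
	  && GET_CODE (SET_SRC (set)) == REG);
}
#endif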
1287 /* Given an INSN, return nonzero if it has more than one SET, else return
1288 zero. */
1291 multiple_sets (insn)
1292 rtx insn;
1294 int found;
1295 int i;
1297 /* INSN must be an insn. */
1298 if (! INSN_P (insn))
1299 return 0;
1301 /* Only a PARALLEL can have multiple SETs. */
1302 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1304 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1305 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1307 /* If we have already found a SET, then return now. */
1308 if (found)
1309 return 1;
1310 else
1311 found = 1;
1315 /* Either zero or one SET. */
1316 return 0;
1319 /* Return nonzero if the destination of SET equals the source
1320 and there are no side effects. */
1323 set_noop_p (set)
1324 rtx set;
1326 rtx src = SET_SRC (set);
1327 rtx dst = SET_DEST (set);
1329 if (side_effects_p (src) || side_effects_p (dst))
1330 return 0;
1332 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1333 return rtx_equal_p (dst, src);
1335 if (dst == pc_rtx && src == pc_rtx)
1336 return 1;
1338 if (GET_CODE (dst) == SIGN_EXTRACT
1339 || GET_CODE (dst) == ZERO_EXTRACT)
1340 return rtx_equal_p (XEXP (dst, 0), src)
1341 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1343 if (GET_CODE (dst) == STRICT_LOW_PART)
1344 dst = XEXP (dst, 0);
1346 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1348 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1349 return 0;
1350 src = SUBREG_REG (src);
1351 dst = SUBREG_REG (dst);
1354 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1355 && REGNO (src) == REGNO (dst));
1358 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1359 value to itself. */
1362 noop_move_p (insn)
1363 rtx insn;
1365 rtx pat = PATTERN (insn);
1367 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1368 return 1;
1370 /* Insns carrying these notes are useful later on. */
1371 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1372 return 0;
1374 /* For now treat an insn with a REG_RETVAL note as a
1375 special insn which should not be considered a no-op. */
1376 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1377 return 0;
1379 if (GET_CODE (pat) == SET && set_noop_p (pat))
1380 return 1;
1382 if (GET_CODE (pat) == PARALLEL)
1384 int i;
1385 /* If nothing but SETs of registers to themselves,
1386 this insn can also be deleted. */
1387 for (i = 0; i < XVECLEN (pat, 0); i++)
1389 rtx tem = XVECEXP (pat, 0, i);
1391 if (GET_CODE (tem) == USE
1392 || GET_CODE (tem) == CLOBBER)
1393 continue;
1395 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1396 return 0;
1399 return 1;
1401 return 0;
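/* Illustrative sketch (not part of the original file): dead-code cleanup
   passes typically walk the insn chain and delete no-op moves.  get_insns
   and delete_insn live in other parts of the compiler and are assumptions
   here; the function name is hypothetical.  */
#if 0
static void
delete_noop_moves_sketch ()
{
  rtx insn;
  for (insn = get_insns (); insn != 0; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && noop_move_p (insn))
      delete_insn (insn);
}
#endif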
1405 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1406 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1407 If the object was modified, if we hit a partial assignment to X, or hit a
1408 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1409 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1410 be the src. */
1413 find_last_value (x, pinsn, valid_to, allow_hwreg)
1414 rtx x;
1415 rtx *pinsn;
1416 rtx valid_to;
1417 int allow_hwreg;
1419 rtx p;
1421 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1422 p = PREV_INSN (p))
1423 if (INSN_P (p))
1425 rtx set = single_set (p);
1426 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1428 if (set && rtx_equal_p (x, SET_DEST (set)))
1430 rtx src = SET_SRC (set);
1432 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1433 src = XEXP (note, 0);
1435 if ((valid_to == NULL_RTX
1436 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1437 /* Reject hard registers because we don't usually want
1438 to use them; we'd rather use a pseudo. */
1439 && (! (GET_CODE (src) == REG
1440 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1442 *pinsn = p;
1443 return src;
1447 /* If set in a non-simple way, we don't have a value. */
1448 if (reg_set_p (x, p))
1449 break;
1452 return x;
1455 /* Return nonzero if register in range [REGNO, ENDREGNO)
1456 appears either explicitly or implicitly in X
1457 other than being stored into.
1459 References contained within the substructure at LOC do not count.
1460 LOC may be zero, meaning don't ignore anything. */
1463 refers_to_regno_p (regno, endregno, x, loc)
1464 unsigned int regno, endregno;
1465 rtx x;
1466 rtx *loc;
1468 int i;
1469 unsigned int x_regno;
1470 RTX_CODE code;
1471 const char *fmt;
1473 repeat:
1474 /* The contents of a REG_NONNEG note is always zero, so we must come here
1475 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1476 if (x == 0)
1477 return 0;
1479 code = GET_CODE (x);
1481 switch (code)
1483 case REG:
1484 x_regno = REGNO (x);
1486 /* If we are modifying the stack, frame, or argument pointer, it will
1487 clobber a virtual register. In fact, we could be more precise,
1488 but it isn't worth it. */
1489 if ((x_regno == STACK_POINTER_REGNUM
1490 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1491 || x_regno == ARG_POINTER_REGNUM
1492 #endif
1493 || x_regno == FRAME_POINTER_REGNUM)
1494 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1495 return 1;
1497 return (endregno > x_regno
1498 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1499 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1500 : 1));
1502 case SUBREG:
1503 /* If this is a SUBREG of a hard reg, we can see exactly which
1504 registers are being modified. Otherwise, handle normally. */
1505 if (GET_CODE (SUBREG_REG (x)) == REG
1506 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1508 unsigned int inner_regno = subreg_regno (x);
1509 unsigned int inner_endregno
1510 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1511 ? HARD_REGNO_NREGS (inner_regno, GET_MODE (x)) : 1);
1513 return endregno > inner_regno && regno < inner_endregno;
1515 break;
1517 case CLOBBER:
1518 case SET:
1519 if (&SET_DEST (x) != loc
1520 /* Note setting a SUBREG counts as referring to the REG it is in for
1521 a pseudo but not for hard registers since we can
1522 treat each word individually. */
1523 && ((GET_CODE (SET_DEST (x)) == SUBREG
1524 && loc != &SUBREG_REG (SET_DEST (x))
1525 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1526 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1527 && refers_to_regno_p (regno, endregno,
1528 SUBREG_REG (SET_DEST (x)), loc))
1529 || (GET_CODE (SET_DEST (x)) != REG
1530 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1531 return 1;
1533 if (code == CLOBBER || loc == &SET_SRC (x))
1534 return 0;
1535 x = SET_SRC (x);
1536 goto repeat;
1538 default:
1539 break;
1542 /* X does not match, so try its subexpressions. */
1544 fmt = GET_RTX_FORMAT (code);
1545 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1547 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1549 if (i == 0)
1551 x = XEXP (x, 0);
1552 goto repeat;
1554 else
1555 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1556 return 1;
1558 else if (fmt[i] == 'E')
1560 int j;
1561 for (j = XVECLEN (x, i) - 1; j >=0; j--)
1562 if (loc != &XVECEXP (x, i, j)
1563 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1564 return 1;
1567 return 0;
1570 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1571 we check if any register number in X conflicts with the relevant register
1572 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1573 contains a MEM (we don't bother checking for memory addresses that can't
1574 conflict because we expect this to be a rare case). */
1577 reg_overlap_mentioned_p (x, in)
1578 rtx x, in;
1580 unsigned int regno, endregno;
1582 /* Overly conservative. */
1583 if (GET_CODE (x) == STRICT_LOW_PART)
1584 x = XEXP (x, 0);
1586 /* If either argument is a constant, then modifying X can not affect IN. */
1587 if (CONSTANT_P (x) || CONSTANT_P (in))
1588 return 0;
1590 switch (GET_CODE (x))
1592 case SUBREG:
1593 regno = REGNO (SUBREG_REG (x));
1594 if (regno < FIRST_PSEUDO_REGISTER)
1595 regno = subreg_regno (x);
1596 goto do_reg;
1598 case REG:
1599 regno = REGNO (x);
1600 do_reg:
1601 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1602 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1603 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1605 case MEM:
1607 const char *fmt;
1608 int i;
1610 if (GET_CODE (in) == MEM)
1611 return 1;
1613 fmt = GET_RTX_FORMAT (GET_CODE (in));
1614 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1615 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1616 return 1;
1618 return 0;
1621 case SCRATCH:
1622 case PC:
1623 case CC0:
1624 return reg_mentioned_p (x, in);
1626 case PARALLEL:
1628 int i;
1630 /* If any register in here refers to it we return true. */
1631 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1632 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1633 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1634 return 1;
1635 return 0;
1638 default:
1639 break;
1642 abort ();
1645 /* Return the last value to which REG was set prior to INSN. If we can't
1646 find it easily, return 0.
1648 We only return a REG, SUBREG, or constant because it is too hard to
1649 check if a MEM remains unchanged. */
1652 reg_set_last (x, insn)
1653 rtx x;
1654 rtx insn;
1656 rtx orig_insn = insn;
1658 /* Scan backwards from INSN. Stop when we reach a label, or when X is a
1659 hard register and we reach a CALL_INSN.
1662 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1664 /* The <= test below is conservative: it also treats the first pseudo
1665 register number like a hard register, which only makes us stop sooner. */
1666 for (;
1667 insn && GET_CODE (insn) != CODE_LABEL
1668 && ! (GET_CODE (insn) == CALL_INSN
1669 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1670 insn = PREV_INSN (insn))
1671 if (INSN_P (insn))
1673 rtx set = set_of (x, insn);
1674 /* OK, this insn modifies our register. See if we understand it. */
1675 if (set)
1677 rtx last_value;
1678 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1679 return 0;
1680 last_value = SET_SRC (set);
1681 if (CONSTANT_P (last_value)
1682 || ((GET_CODE (last_value) == REG
1683 || GET_CODE (last_value) == SUBREG)
1684 && ! reg_set_between_p (last_value,
1685 insn, orig_insn)))
1686 return last_value;
1687 else
1688 return 0;
1692 return 0;
1695 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1696 (X would be the pattern of an insn).
1697 FUN receives three arguments:
1698 the REG, MEM, CC0 or PC being stored in or clobbered,
1699 the SET or CLOBBER rtx that does the store, and DATA.
1701 If the item being stored in or clobbered is a SUBREG of a hard register,
1702 the SUBREG will be passed. */
1704 void
1705 note_stores (x, fun, data)
1706 rtx x;
1707 void (*fun) PARAMS ((rtx, rtx, void *));
1708 void *data;
1710 int i;
1712 if (GET_CODE (x) == COND_EXEC)
1713 x = COND_EXEC_CODE (x);
1715 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1717 rtx dest = SET_DEST (x);
1719 while ((GET_CODE (dest) == SUBREG
1720 && (GET_CODE (SUBREG_REG (dest)) != REG
1721 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1722 || GET_CODE (dest) == ZERO_EXTRACT
1723 || GET_CODE (dest) == SIGN_EXTRACT
1724 || GET_CODE (dest) == STRICT_LOW_PART)
1725 dest = XEXP (dest, 0);
1727 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1728 each of whose first operand is a register. */
1729 if (GET_CODE (dest) == PARALLEL)
1731 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1732 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1733 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1735 else
1736 (*fun) (dest, x, data);
1739 else if (GET_CODE (x) == PARALLEL)
1740 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1741 note_stores (XVECEXP (x, 0, i), fun, data);
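/* Illustrative sketch (not part of the original file): a typical note_stores
   callback records which hard registers a pattern sets or clobbers.  The
   callback name and the way it ignores SUBREG destinations are choices made
   for this sketch.  */
#if 0
static void
record_hard_reg_stores (dest, set, data)
     rtx dest;
     rtx set ATTRIBUTE_UNUSED;
     void *data;
{
  HARD_REG_SET *pset = (HARD_REG_SET *) data;
  if (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    SET_HARD_REG_BIT (*pset, REGNO (dest));
}
/* Used as: note_stores (PATTERN (insn), record_hard_reg_stores, &stored_regs);  */
#endif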
1744 /* Like note_stores, but call FUN for each expression that is being
1745 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1746 FUN for each expression, not any interior subexpressions. FUN receives a
1747 pointer to the expression and the DATA passed to this function.
1749 Note that this is not quite the same test as that done in reg_referenced_p
1750 since that considers something as being referenced if it is being
1751 partially set, while we do not. */
1753 void
1754 note_uses (pbody, fun, data)
1755 rtx *pbody;
1756 void (*fun) PARAMS ((rtx *, void *));
1757 void *data;
1759 rtx body = *pbody;
1760 int i;
1762 switch (GET_CODE (body))
1764 case COND_EXEC:
1765 (*fun) (&COND_EXEC_TEST (body), data);
1766 note_uses (&COND_EXEC_CODE (body), fun, data);
1767 return;
1769 case PARALLEL:
1770 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1771 note_uses (&XVECEXP (body, 0, i), fun, data);
1772 return;
1774 case USE:
1775 (*fun) (&XEXP (body, 0), data);
1776 return;
1778 case ASM_OPERANDS:
1779 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1780 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1781 return;
1783 case TRAP_IF:
1784 (*fun) (&TRAP_CONDITION (body), data);
1785 return;
1787 case PREFETCH:
1788 (*fun) (&XEXP (body, 0), data);
1789 return;
1791 case UNSPEC:
1792 case UNSPEC_VOLATILE:
1793 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1794 (*fun) (&XVECEXP (body, 0, i), data);
1795 return;
1797 case CLOBBER:
1798 if (GET_CODE (XEXP (body, 0)) == MEM)
1799 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1800 return;
1802 case SET:
1804 rtx dest = SET_DEST (body);
1806 /* For a SET we call FUN on the source, on any registers used in a memory
1807 destination's address, and on the operands of a ZERO_EXTRACT destination. */
1808 (*fun) (&SET_SRC (body), data);
1810 if (GET_CODE (dest) == ZERO_EXTRACT)
1812 (*fun) (&XEXP (dest, 1), data);
1813 (*fun) (&XEXP (dest, 2), data);
1816 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1817 dest = XEXP (dest, 0);
1819 if (GET_CODE (dest) == MEM)
1820 (*fun) (&XEXP (dest, 0), data);
1822 return;
1824 default:
1825 /* All the other possibilities never store. */
1826 (*fun) (pbody, data);
1827 return;
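/* Illustrative sketch (not part of the original file): note_uses hands the
   callback a pointer to each used expression, which makes it convenient for
   in-place replacement.  The structure and callback names below are
   hypothetical.  */
#if 0
struct replace_use_data
{
  rtx from, to;
};
static void
replace_use (px, data)
     rtx *px;
     void *data;
{
  struct replace_use_data *d = (struct replace_use_data *) data;
  if (rtx_equal_p (*px, d->from))
    *px = d->to;
}
/* Used as: note_uses (&PATTERN (insn), replace_use, &d);  */
#endif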
1831 /* Return nonzero if X's old contents don't survive after INSN.
1832 This will be true if X is (cc0) or if X is a register and
1833 X dies in INSN or because INSN entirely sets X.
1835 "Entirely set" means set directly and not through a SUBREG,
1836 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1837 Likewise, REG_INC does not count.
1839 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1840 but for this use that makes no difference, since regs don't overlap
1841 during their lifetimes. Therefore, this function may be used
1842 at any time after deaths have been computed (in flow.c).
1844 If REG is a hard reg that occupies multiple machine registers, this
1845 function will only return 1 if each of those registers will be replaced
1846 by INSN. */
1849 dead_or_set_p (insn, x)
1850 rtx insn;
1851 rtx x;
1853 unsigned int regno, last_regno;
1854 unsigned int i;
1856 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1857 if (GET_CODE (x) == CC0)
1858 return 1;
1860 if (GET_CODE (x) != REG)
1861 abort ();
1863 regno = REGNO (x);
1864 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1865 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1867 for (i = regno; i <= last_regno; i++)
1868 if (! dead_or_set_regno_p (insn, i))
1869 return 0;
1871 return 1;
1874 /* Utility function for dead_or_set_p to check an individual register. Also
1875 called from flow.c. */
1878 dead_or_set_regno_p (insn, test_regno)
1879 rtx insn;
1880 unsigned int test_regno;
1882 unsigned int regno, endregno;
1883 rtx pattern;
1885 /* See if there is a death note for something that includes TEST_REGNO. */
1886 if (find_regno_note (insn, REG_DEAD, test_regno))
1887 return 1;
1889 if (GET_CODE (insn) == CALL_INSN
1890 && find_regno_fusage (insn, CLOBBER, test_regno))
1891 return 1;
1893 pattern = PATTERN (insn);
1895 if (GET_CODE (pattern) == COND_EXEC)
1896 pattern = COND_EXEC_CODE (pattern);
1898 if (GET_CODE (pattern) == SET)
1900 rtx dest = SET_DEST (pattern);
1902 /* A value is totally replaced if it is the destination or the
1903 destination is a SUBREG of REGNO that does not change the number of
1904 words in it. */
1905 if (GET_CODE (dest) == SUBREG
1906 && (((GET_MODE_SIZE (GET_MODE (dest))
1907 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1908 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1909 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1910 dest = SUBREG_REG (dest);
1912 if (GET_CODE (dest) != REG)
1913 return 0;
1915 regno = REGNO (dest);
1916 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1917 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1919 return (test_regno >= regno && test_regno < endregno);
1921 else if (GET_CODE (pattern) == PARALLEL)
1923 int i;
1925 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1927 rtx body = XVECEXP (pattern, 0, i);
1929 if (GET_CODE (body) == COND_EXEC)
1930 body = COND_EXEC_CODE (body);
1932 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1934 rtx dest = SET_DEST (body);
1936 if (GET_CODE (dest) == SUBREG
1937 && (((GET_MODE_SIZE (GET_MODE (dest))
1938 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1939 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1940 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1941 dest = SUBREG_REG (dest);
1943 if (GET_CODE (dest) != REG)
1944 continue;
1946 regno = REGNO (dest);
1947 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1948 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1950 if (test_regno >= regno && test_regno < endregno)
1951 return 1;
1956 return 0;
1959 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1960 If DATUM is nonzero, look for one whose datum is DATUM. */
1963 find_reg_note (insn, kind, datum)
1964 rtx insn;
1965 enum reg_note kind;
1966 rtx datum;
1968 rtx link;
1970 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1971 if (! INSN_P (insn))
1972 return 0;
1974 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1975 if (REG_NOTE_KIND (link) == kind
1976 && (datum == 0 || datum == XEXP (link, 0)))
1977 return link;
1978 return 0;
1981 /* Return the reg-note of kind KIND in insn INSN which applies to register
1982 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1983 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1984 it might be the case that the note overlaps REGNO. */
1987 find_regno_note (insn, kind, regno)
1988 rtx insn;
1989 enum reg_note kind;
1990 unsigned int regno;
1992 rtx link;
1994 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1995 if (! INSN_P (insn))
1996 return 0;
1998 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1999 if (REG_NOTE_KIND (link) == kind
2000 /* Verify that it is a register, so that scratch and MEM won't cause a
2001 problem here. */
2002 && GET_CODE (XEXP (link, 0)) == REG
2003 && REGNO (XEXP (link, 0)) <= regno
2004 && ((REGNO (XEXP (link, 0))
2005 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
2006 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
2007 GET_MODE (XEXP (link, 0)))))
2008 > regno))
2009 return link;
2010 return 0;
2013 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
2014 has such a note. */
2017 find_reg_equal_equiv_note (insn)
2018 rtx insn;
2020 rtx note;
2022 if (single_set (insn) == 0)
2023 return 0;
2024 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
2025 return note;
2026 else
2027 return find_reg_note (insn, REG_EQUAL, NULL_RTX);
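/* Illustrative sketch (not part of the original file): a common pattern is to
   prefer the REG_EQUAL/REG_EQUIV value over the raw SET_SRC when examining a
   single-set insn.  The helper name `insn_known_value' is hypothetical.  */
#if 0
static rtx
insn_known_value (insn)
     rtx insn;
{
  rtx set = single_set (insn);
  rtx note = find_reg_equal_equiv_note (insn);
  if (set == 0)
    return NULL_RTX;
  return note ? XEXP (note, 0) : SET_SRC (set);
}
#endif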
2030 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
2031 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2034 find_reg_fusage (insn, code, datum)
2035 rtx insn;
2036 enum rtx_code code;
2037 rtx datum;
2039 /* If it's not a CALL_INSN, it can't possibly have a
2040 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
2041 if (GET_CODE (insn) != CALL_INSN)
2042 return 0;
2044 if (! datum)
2045 abort ();
2047 if (GET_CODE (datum) != REG)
2049 rtx link;
2051 for (link = CALL_INSN_FUNCTION_USAGE (insn);
2052 link;
2053 link = XEXP (link, 1))
2054 if (GET_CODE (XEXP (link, 0)) == code
2055 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
2056 return 1;
2058 else
2060 unsigned int regno = REGNO (datum);
2062 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2063 to pseudo registers, so don't bother checking. */
2065 if (regno < FIRST_PSEUDO_REGISTER)
2067 unsigned int end_regno
2068 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
2069 unsigned int i;
2071 for (i = regno; i < end_regno; i++)
2072 if (find_regno_fusage (insn, code, i))
2073 return 1;
2077 return 0;
2080 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2081 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2084 find_regno_fusage (insn, code, regno)
2085 rtx insn;
2086 enum rtx_code code;
2087 unsigned int regno;
2089 rtx link;
2091 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2092 to pseudo registers, so don't bother checking. */
2094 if (regno >= FIRST_PSEUDO_REGISTER
2095 || GET_CODE (insn) != CALL_INSN )
2096 return 0;
2098 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2100 unsigned int regnote;
2101 rtx op, reg;
2103 if (GET_CODE (op = XEXP (link, 0)) == code
2104 && GET_CODE (reg = XEXP (op, 0)) == REG
2105 && (regnote = REGNO (reg)) <= regno
2106 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2107 return 1;
2110 return 0;
2113 /* Return true if INSN is a call to a pure function. */
2116 pure_call_p (insn)
2117 rtx insn;
2119 rtx link;
2121 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2122 return 0;
2124 /* Look for the note that differentiates const and pure functions. */
2125 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2127 rtx u, m;
2129 if (GET_CODE (u = XEXP (link, 0)) == USE
2130 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2131 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2132 return 1;
2135 return 0;
2138 /* Remove register note NOTE from the REG_NOTES of INSN. */
2140 void
2141 remove_note (insn, note)
2142 rtx insn;
2143 rtx note;
2145 rtx link;
2147 if (note == NULL_RTX)
2148 return;
2150 if (REG_NOTES (insn) == note)
2152 REG_NOTES (insn) = XEXP (note, 1);
2153 return;
2156 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2157 if (XEXP (link, 1) == note)
2159 XEXP (link, 1) = XEXP (note, 1);
2160 return;
2163 abort ();
2166 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2167 return 1 if it is found. A simple equality test is used to determine if
2168 NODE matches. */
2171 in_expr_list_p (listp, node)
2172 rtx listp;
2173 rtx node;
2175 rtx x;
2177 for (x = listp; x; x = XEXP (x, 1))
2178 if (node == XEXP (x, 0))
2179 return 1;
2181 return 0;
2184 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2185 remove that entry from the list if it is found.
2187 A simple equality test is used to determine if NODE matches. */
2189 void
2190 remove_node_from_expr_list (node, listp)
2191 rtx node;
2192 rtx *listp;
2194 rtx temp = *listp;
2195 rtx prev = NULL_RTX;
2197 while (temp)
2199 if (node == XEXP (temp, 0))
2201 /* Splice the node out of the list. */
2202 if (prev)
2203 XEXP (prev, 1) = XEXP (temp, 1);
2204 else
2205 *listp = XEXP (temp, 1);
2207 return;
2210 prev = temp;
2211 temp = XEXP (temp, 1);
2215 /* Nonzero if X contains any volatile instructions. These are instructions
2216 which may cause unpredictable machine state, and thus no
2217 instructions should be moved or combined across them. This includes
2218 only volatile asms and UNSPEC_VOLATILE instructions. */
2221 volatile_insn_p (x)
2222 rtx x;
2224 RTX_CODE code;
2226 code = GET_CODE (x);
2227 switch (code)
2229 case LABEL_REF:
2230 case SYMBOL_REF:
2231 case CONST_INT:
2232 case CONST:
2233 case CONST_DOUBLE:
2234 case CONST_VECTOR:
2235 case CC0:
2236 case PC:
2237 case REG:
2238 case SCRATCH:
2239 case CLOBBER:
2240 case ADDR_VEC:
2241 case ADDR_DIFF_VEC:
2242 case CALL:
2243 case MEM:
2244 return 0;
2246 case UNSPEC_VOLATILE:
2247 /* case TRAP_IF: This isn't clear yet. */
2248 return 1;
2250 case ASM_INPUT:
2251 case ASM_OPERANDS:
2252 if (MEM_VOLATILE_P (x))
2253 return 1;
2255 default:
2256 break;
2259 /* Recursively scan the operands of this expression. */
2262 const char *fmt = GET_RTX_FORMAT (code);
2263 int i;
2265 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2267 if (fmt[i] == 'e')
2269 if (volatile_insn_p (XEXP (x, i)))
2270 return 1;
2272 else if (fmt[i] == 'E')
2274 int j;
2275 for (j = 0; j < XVECLEN (x, i); j++)
2276 if (volatile_insn_p (XVECEXP (x, i, j)))
2277 return 1;
2281 return 0;
2284 /* Nonzero if X contains any volatile memory references
2285 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2288 volatile_refs_p (x)
2289 rtx x;
2291 RTX_CODE code;
2293 code = GET_CODE (x);
2294 switch (code)
2296 case LABEL_REF:
2297 case SYMBOL_REF:
2298 case CONST_INT:
2299 case CONST:
2300 case CONST_DOUBLE:
2301 case CONST_VECTOR:
2302 case CC0:
2303 case PC:
2304 case REG:
2305 case SCRATCH:
2306 case CLOBBER:
2307 case ADDR_VEC:
2308 case ADDR_DIFF_VEC:
2309 return 0;
2311 case UNSPEC_VOLATILE:
2312 return 1;
2314 case MEM:
2315 case ASM_INPUT:
2316 case ASM_OPERANDS:
2317 if (MEM_VOLATILE_P (x))
2318 return 1;
2320 default:
2321 break;
2324 /* Recursively scan the operands of this expression. */
2327 const char *fmt = GET_RTX_FORMAT (code);
2328 int i;
2330 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2332 if (fmt[i] == 'e')
2334 if (volatile_refs_p (XEXP (x, i)))
2335 return 1;
2337 else if (fmt[i] == 'E')
2339 int j;
2340 for (j = 0; j < XVECLEN (x, i); j++)
2341 if (volatile_refs_p (XVECEXP (x, i, j)))
2342 return 1;
2346 return 0;
2349 /* Similar to above, except that it also rejects register pre- and post-
2350 incrementing. */
2353 side_effects_p (x)
2354 rtx x;
2356 RTX_CODE code;
2358 code = GET_CODE (x);
2359 switch (code)
2361 case LABEL_REF:
2362 case SYMBOL_REF:
2363 case CONST_INT:
2364 case CONST:
2365 case CONST_DOUBLE:
2366 case CONST_VECTOR:
2367 case CC0:
2368 case PC:
2369 case REG:
2370 case SCRATCH:
2371 case ADDR_VEC:
2372 case ADDR_DIFF_VEC:
2373 return 0;
2375 case CLOBBER:
2376 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2377 when some combination can't be done. If we see one, don't think
2378 that we can simplify the expression. */
2379 return (GET_MODE (x) != VOIDmode);
2381 case PRE_INC:
2382 case PRE_DEC:
2383 case POST_INC:
2384 case POST_DEC:
2385 case PRE_MODIFY:
2386 case POST_MODIFY:
2387 case CALL:
2388 case UNSPEC_VOLATILE:
2389 /* case TRAP_IF: This isn't clear yet. */
2390 return 1;
2392 case MEM:
2393 case ASM_INPUT:
2394 case ASM_OPERANDS:
2395 if (MEM_VOLATILE_P (x))
2396 return 1;
2398 default:
2399 break;
2402 /* Recursively scan the operands of this expression. */
2405 const char *fmt = GET_RTX_FORMAT (code);
2406 int i;
2408 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2410 if (fmt[i] == 'e')
2412 if (side_effects_p (XEXP (x, i)))
2413 return 1;
2415 else if (fmt[i] == 'E')
2417 int j;
2418 for (j = 0; j < XVECLEN (x, i); j++)
2419 if (side_effects_p (XVECEXP (x, i, j)))
2420 return 1;
2424 return 0;
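/* A minimal usage sketch: a hypothetical dead-code check might delete a
   single_set insn only when its source has no side effects.  DEST_IS_DEAD
   stands in for a real liveness test and is hypothetical.  */
#if 0
  rtx set = single_set (insn);
  if (set != NULL_RTX && dest_is_dead && ! side_effects_p (SET_SRC (set)))
    delete_insn (insn);
#endif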
2427 /* Return nonzero if evaluating rtx X might cause a trap. */
2430 may_trap_p (x)
2431 rtx x;
2433 int i;
2434 enum rtx_code code;
2435 const char *fmt;
2437 if (x == 0)
2438 return 0;
2439 code = GET_CODE (x);
2440 switch (code)
2442 /* Handle these cases quickly. */
2443 case CONST_INT:
2444 case CONST_DOUBLE:
2445 case CONST_VECTOR:
2446 case SYMBOL_REF:
2447 case LABEL_REF:
2448 case CONST:
2449 case PC:
2450 case CC0:
2451 case REG:
2452 case SCRATCH:
2453 return 0;
2455 case ASM_INPUT:
2456 case UNSPEC_VOLATILE:
2457 case TRAP_IF:
2458 return 1;
2460 case ASM_OPERANDS:
2461 return MEM_VOLATILE_P (x);
2463 /* Memory ref can trap unless it's a static var or a stack slot. */
2464 case MEM:
2465 return rtx_addr_can_trap_p (XEXP (x, 0));
2467 /* Division by a non-constant might trap. */
2468 case DIV:
2469 case MOD:
2470 case UDIV:
2471 case UMOD:
2472 if (HONOR_SNANS (GET_MODE (x)))
2473 return 1;
2474 if (! CONSTANT_P (XEXP (x, 1))
2475 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2476 && flag_trapping_math))
2477 return 1;
2478 /* This was const0_rtx, but by not using that,
2479 we can link this file into other programs. */
2480 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2481 return 1;
2482 break;
2484 case EXPR_LIST:
2485 /* An EXPR_LIST is used to represent a function call. This
2486 certainly may trap. */
2487 return 1;
2489 case GE:
2490 case GT:
2491 case LE:
2492 case LT:
2493 case COMPARE:
2494 /* Some floating point comparisons may trap. */
2495 if (!flag_trapping_math)
2496 break;
2497 /* ??? There is no machine independent way to check for tests that trap
2498 when COMPARE is used, though many targets do make this distinction.
2499 For instance, sparc uses CCFPE for compares which generate exceptions
2500 and CCFP for compares which do not generate exceptions. */
2501 if (HONOR_NANS (GET_MODE (x)))
2502 return 1;
2503 /* But often the compare has some CC mode, so check operand
2504 modes as well. */
2505 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2506 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2507 return 1;
2508 break;
2510 case EQ:
2511 case NE:
2512 if (HONOR_SNANS (GET_MODE (x)))
2513 return 1;
2514 /* Often comparison is CC mode, so check operand modes. */
2515 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2516 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2517 return 1;
2518 break;
2520 case FIX:
2521 /* Conversion of floating point might trap. */
2522 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2523 return 1;
2524 break;
2526 case NEG:
2527 case ABS:
2528 /* These operations don't trap even with floating point. */
2529 break;
2531 default:
2532 /* Any floating arithmetic may trap. */
2533 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2534 && flag_trapping_math)
2535 return 1;
2538 fmt = GET_RTX_FORMAT (code);
2539 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2541 if (fmt[i] == 'e')
2543 if (may_trap_p (XEXP (x, i)))
2544 return 1;
2546 else if (fmt[i] == 'E')
2548 int j;
2549 for (j = 0; j < XVECLEN (x, i); j++)
2550 if (may_trap_p (XVECEXP (x, i, j)))
2551 return 1;
2554 return 0;
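/* A minimal usage sketch: a hypothetical speculation or hoisting pass would
   normally require both of the tests below before evaluating X earlier than
   the original program would have.  SPECULATE is a hypothetical flag.  */
#if 0
  if (! may_trap_p (x) && ! side_effects_p (x))
    /* X can be evaluated unconditionally.  */
    speculate = 1;
#endif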
2557 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2558 i.e., an inequality. */
2561 inequality_comparisons_p (x)
2562 rtx x;
2564 const char *fmt;
2565 int len, i;
2566 enum rtx_code code = GET_CODE (x);
2568 switch (code)
2570 case REG:
2571 case SCRATCH:
2572 case PC:
2573 case CC0:
2574 case CONST_INT:
2575 case CONST_DOUBLE:
2576 case CONST_VECTOR:
2577 case CONST:
2578 case LABEL_REF:
2579 case SYMBOL_REF:
2580 return 0;
2582 case LT:
2583 case LTU:
2584 case GT:
2585 case GTU:
2586 case LE:
2587 case LEU:
2588 case GE:
2589 case GEU:
2590 return 1;
2592 default:
2593 break;
2596 len = GET_RTX_LENGTH (code);
2597 fmt = GET_RTX_FORMAT (code);
2599 for (i = 0; i < len; i++)
2601 if (fmt[i] == 'e')
2603 if (inequality_comparisons_p (XEXP (x, i)))
2604 return 1;
2606 else if (fmt[i] == 'E')
2608 int j;
2609 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2610 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2611 return 1;
2615 return 0;
2618 /* Replace any occurrence of FROM in X with TO. The function does
2619 not descend into CONST_DOUBLEs when replacing.
2621 Note that copying is not done, so X must not be shared unless all copies
2622 are to be modified. */
2624 rtx
2625 replace_rtx (x, from, to)
2626 rtx x, from, to;
2628 int i, j;
2629 const char *fmt;
2631 /* The following prevents loops from occurring when we change a MEM in
2632 a CONST_DOUBLE into the same CONST_DOUBLE. */
2633 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2634 return x;
2636 if (x == from)
2637 return to;
2639 /* Allow this function to make replacements in EXPR_LISTs. */
2640 if (x == 0)
2641 return 0;
2643 if (GET_CODE (x) == SUBREG)
2645 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2647 if (GET_CODE (new) == CONST_INT)
2649 x = simplify_subreg (GET_MODE (x), new,
2650 GET_MODE (SUBREG_REG (x)),
2651 SUBREG_BYTE (x));
2652 if (! x)
2653 abort ();
2655 else
2656 SUBREG_REG (x) = new;
2658 return x;
2660 else if (GET_CODE (x) == ZERO_EXTEND)
2662 rtx new = replace_rtx (XEXP (x, 0), from, to);
2664 if (GET_CODE (new) == CONST_INT)
2666 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2667 new, GET_MODE (XEXP (x, 0)));
2668 if (! x)
2669 abort ();
2671 else
2672 XEXP (x, 0) = new;
2674 return x;
2677 fmt = GET_RTX_FORMAT (GET_CODE (x));
2678 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2680 if (fmt[i] == 'e')
2681 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2682 else if (fmt[i] == 'E')
2683 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2684 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2687 return x;
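/* A minimal usage sketch: because replace_rtx modifies X in place, a caller
   that must keep the original pattern intact copies it first.  OLD_REG and
   NEW_REG are hypothetical locals.  */
#if 0
  rtx copy = copy_rtx (PATTERN (insn));
  copy = replace_rtx (copy, old_reg, new_reg);
#endif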
2690 /* Throughout the rtx X, replace many registers according to REG_MAP.
2691 Return the replacement for X (which may be X with altered contents).
2692 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2693 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2695 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2696 should not be mapped to pseudos or vice versa since validate_change
2697 is not called.
2699 If REPLACE_DEST is 1, replacements are also done in destinations;
2700 otherwise, only sources are replaced. */
2702 rtx
2703 replace_regs (x, reg_map, nregs, replace_dest)
2704 rtx x;
2705 rtx *reg_map;
2706 unsigned int nregs;
2707 int replace_dest;
2709 enum rtx_code code;
2710 int i;
2711 const char *fmt;
2713 if (x == 0)
2714 return x;
2716 code = GET_CODE (x);
2717 switch (code)
2719 case SCRATCH:
2720 case PC:
2721 case CC0:
2722 case CONST_INT:
2723 case CONST_DOUBLE:
2724 case CONST_VECTOR:
2725 case CONST:
2726 case SYMBOL_REF:
2727 case LABEL_REF:
2728 return x;
2730 case REG:
2731 /* Verify that the register has an entry before trying to access it. */
2732 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2734 /* SUBREGs can't be shared. Always return a copy to ensure that if
2735 this replacement occurs more than once then each instance will
2736 get a distinct rtx. */
2737 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2738 return copy_rtx (reg_map[REGNO (x)]);
2739 return reg_map[REGNO (x)];
2741 return x;
2743 case SUBREG:
2744 /* Prevent making nested SUBREGs. */
2745 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2746 && reg_map[REGNO (SUBREG_REG (x))] != 0
2747 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2749 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2750 return simplify_gen_subreg (GET_MODE (x), map_val,
2751 GET_MODE (SUBREG_REG (x)),
2752 SUBREG_BYTE (x));
2754 break;
2756 case SET:
2757 if (replace_dest)
2758 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2760 else if (GET_CODE (SET_DEST (x)) == MEM
2761 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2762 /* Even if we are not to replace destinations, replace the register if it
2763 is CONTAINED in the destination (the destination is memory or a
2764 STRICT_LOW_PART). */
2765 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2766 reg_map, nregs, 0);
2767 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2768 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2769 break;
2771 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2772 return x;
2774 default:
2775 break;
2778 fmt = GET_RTX_FORMAT (code);
2779 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2781 if (fmt[i] == 'e')
2782 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2783 else if (fmt[i] == 'E')
2785 int j;
2786 for (j = 0; j < XVECLEN (x, i); j++)
2787 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2788 nregs, replace_dest);
2791 return x;
2794 /* A subroutine of computed_jump_p; return 1 if X contains a REG or MEM or
2795 constant that is not in the constant pool and not in the condition
2796 of an IF_THEN_ELSE. */
2798 static int
2799 computed_jump_p_1 (x)
2800 rtx x;
2802 enum rtx_code code = GET_CODE (x);
2803 int i, j;
2804 const char *fmt;
2806 switch (code)
2808 case LABEL_REF:
2809 case PC:
2810 return 0;
2812 case CONST:
2813 case CONST_INT:
2814 case CONST_DOUBLE:
2815 case CONST_VECTOR:
2816 case SYMBOL_REF:
2817 case REG:
2818 return 1;
2820 case MEM:
2821 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2822 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2824 case IF_THEN_ELSE:
2825 return (computed_jump_p_1 (XEXP (x, 1))
2826 || computed_jump_p_1 (XEXP (x, 2)));
2828 default:
2829 break;
2832 fmt = GET_RTX_FORMAT (code);
2833 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2835 if (fmt[i] == 'e'
2836 && computed_jump_p_1 (XEXP (x, i)))
2837 return 1;
2839 else if (fmt[i] == 'E')
2840 for (j = 0; j < XVECLEN (x, i); j++)
2841 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2842 return 1;
2845 return 0;
2848 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2850 Tablejumps and casesi insns are not considered indirect jumps;
2851 we can recognize them by a (use (label_ref)). */
2854 computed_jump_p (insn)
2855 rtx insn;
2857 int i;
2858 if (GET_CODE (insn) == JUMP_INSN)
2860 rtx pat = PATTERN (insn);
2862 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2863 return 0;
2864 else if (GET_CODE (pat) == PARALLEL)
2866 int len = XVECLEN (pat, 0);
2867 int has_use_labelref = 0;
2869 for (i = len - 1; i >= 0; i--)
2870 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2871 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2872 == LABEL_REF))
2873 has_use_labelref = 1;
2875 if (! has_use_labelref)
2876 for (i = len - 1; i >= 0; i--)
2877 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2878 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2879 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2880 return 1;
2882 else if (GET_CODE (pat) == SET
2883 && SET_DEST (pat) == pc_rtx
2884 && computed_jump_p_1 (SET_SRC (pat)))
2885 return 1;
2887 return 0;
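/* A minimal usage sketch: a hypothetical pass that rewrites branch targets
   might simply skip indirect jumps, since their destination cannot be
   determined statically.  Tablejumps are not affected, as they are
   recognized by their (use (label_ref ...)).  */
#if 0
  if (GET_CODE (insn) == JUMP_INSN && computed_jump_p (insn))
    continue;
#endif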
2890 /* Traverse X via depth-first search, calling F for each
2891 sub-expression (including X itself). F is also passed the DATA.
2892 If F returns -1, do not traverse sub-expressions, but continue
2893 traversing the rest of the tree. If F ever returns any other
2894 nonzero value, stop the traversal, and return the value returned
2895 by F. Otherwise, return 0. This function does not traverse inside
2896 tree structure that contains RTX_EXPRs, or into sub-expressions
2897 whose format code is `0' since it is not known whether or not those
2898 codes are actually RTL.
2900 This routine is very general, and could (should?) be used to
2901 implement many of the other routines in this file. */
2904 for_each_rtx (x, f, data)
2905 rtx *x;
2906 rtx_function f;
2907 void *data;
2909 int result;
2910 int length;
2911 const char *format;
2912 int i;
2914 /* Call F on X. */
2915 result = (*f) (x, data);
2916 if (result == -1)
2917 /* Do not traverse sub-expressions. */
2918 return 0;
2919 else if (result != 0)
2920 /* Stop the traversal. */
2921 return result;
2923 if (*x == NULL_RTX)
2924 /* There are no sub-expressions. */
2925 return 0;
2927 length = GET_RTX_LENGTH (GET_CODE (*x));
2928 format = GET_RTX_FORMAT (GET_CODE (*x));
2930 for (i = 0; i < length; ++i)
2932 switch (format[i])
2934 case 'e':
2935 result = for_each_rtx (&XEXP (*x, i), f, data);
2936 if (result != 0)
2937 return result;
2938 break;
2940 case 'V':
2941 case 'E':
2942 if (XVEC (*x, i) != 0)
2944 int j;
2945 for (j = 0; j < XVECLEN (*x, i); ++j)
2947 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2948 if (result != 0)
2949 return result;
2952 break;
2954 default:
2955 /* Nothing to do. */
2956 break;
2961 return 0;
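/* A minimal sketch of a for_each_rtx callback; COUNT_MEMS_1 and NMEMS are
   hypothetical names, not part of this file.  Returning 0 keeps the walk
   going, while returning -1 would skip the sub-expressions of the current
   rtx.  */
#if 0
static int
count_mems_1 (x, data)
     rtx *x;
     void *data;
{
  if (*x != NULL_RTX && GET_CODE (*x) == MEM)
    (*(int *) data)++;
  return 0;
}

/* In a hypothetical caller:
     int nmems = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &nmems);  */
#endif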
2964 /* Searches X for any reference to REGNO, returning the rtx of the
2965 reference found if any. Otherwise, returns NULL_RTX. */
2967 rtx
2968 regno_use_in (regno, x)
2969 unsigned int regno;
2970 rtx x;
2972 const char *fmt;
2973 int i, j;
2974 rtx tem;
2976 if (GET_CODE (x) == REG && REGNO (x) == regno)
2977 return x;
2979 fmt = GET_RTX_FORMAT (GET_CODE (x));
2980 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2982 if (fmt[i] == 'e')
2984 if ((tem = regno_use_in (regno, XEXP (x, i))))
2985 return tem;
2987 else if (fmt[i] == 'E')
2988 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2989 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2990 return tem;
2993 return NULL_RTX;
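/* A minimal usage sketch with hypothetical HARD_REG and NEW_REG locals:
   if the register is referenced anywhere in the pattern, rewrite that
   reference in place.  */
#if 0
  rtx use = regno_use_in (REGNO (hard_reg), PATTERN (insn));
  if (use != NULL_RTX)
    PATTERN (insn) = replace_rtx (PATTERN (insn), use, new_reg);
#endif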
2996 /* Return a value indicating whether OP, an operand of a commutative
2997 operation, is preferred as the first or second operand. The higher
2998 the value, the stronger the preference for being the first operand.
2999 We use negative values to indicate a preference for the second operand
3000 and positive values for the first operand. */
3003 commutative_operand_precedence (op)
3004 rtx op;
3006 /* Constants always come second. Prefer "nice" constants. */
3007 if (GET_CODE (op) == CONST_INT)
3008 return -5;
3009 if (GET_CODE (op) == CONST_DOUBLE)
3010 return -4;
3011 if (CONSTANT_P (op))
3012 return -3;
3014 /* SUBREGs of objects should come second. */
3015 if (GET_CODE (op) == SUBREG
3016 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
3017 return -2;
3019 /* If only one operand is a `neg', `not',
3020 `mult', `plus', or `minus' expression, it will be the first
3021 operand. */
3022 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
3023 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
3024 || GET_CODE (op) == MINUS)
3025 return 2;
3027 /* Complex expressions should come first, so decrease the priority
3028 of objects. */
3029 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
3030 return -1;
3031 return 0;
3034 /* Return 1 iff it is necessary to swap operands of commutative operation
3035 in order to canonicalize expression. */
3038 swap_commutative_operands_p (x, y)
3039 rtx x, y;
3041 return (commutative_operand_precedence (x)
3042 < commutative_operand_precedence (y));
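/* A minimal usage sketch: canonicalize a commutative expression so that the
   operand with the higher precedence ends up first.  OP0 and OP1 are
   hypothetical locals.  */
#if 0
  if (swap_commutative_operands_p (op0, op1))
    {
      rtx tem = op0;
      op0 = op1;
      op1 = tem;
    }
#endif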
3045 /* Return 1 if X is an autoincrement side effect and the register is
3046 not the stack pointer. */
3048 auto_inc_p (x)
3049 rtx x;
3051 switch (GET_CODE (x))
3053 case PRE_INC:
3054 case POST_INC:
3055 case PRE_DEC:
3056 case POST_DEC:
3057 case PRE_MODIFY:
3058 case POST_MODIFY:
3059 /* There are no REG_INC notes for SP. */
3060 if (XEXP (x, 0) != stack_pointer_rtx)
3061 return 1;
3062 default:
3063 break;
3065 return 0;
3068 /* Return 1 if the sequence of instructions beginning with FROM and up
3069 to and including TO is safe to move. If NEW_TO is non-NULL, and
3070 the sequence is not already safe to move, but can be easily
3071 extended to a sequence which is safe, then NEW_TO will point to the
3072 end of the extended sequence.
3074 For now, this function only checks that the region contains whole
3075 exception regions, but it could be extended to check additional
3076 conditions as well. */
3079 insns_safe_to_move_p (from, to, new_to)
3080 rtx from;
3081 rtx to;
3082 rtx *new_to;
3084 int eh_region_count = 0;
3085 int past_to_p = 0;
3086 rtx r = from;
3088 /* By default, assume the end of the region will be what was
3089 suggested. */
3090 if (new_to)
3091 *new_to = to;
3093 while (r)
3095 if (GET_CODE (r) == NOTE)
3097 switch (NOTE_LINE_NUMBER (r))
3099 case NOTE_INSN_EH_REGION_BEG:
3100 ++eh_region_count;
3101 break;
3103 case NOTE_INSN_EH_REGION_END:
3104 if (eh_region_count == 0)
3105 /* This sequence of instructions contains the end of
3106 an exception region, but not the beginning. Moving
3107 it will cause chaos. */
3108 return 0;
3110 --eh_region_count;
3111 break;
3113 default:
3114 break;
3117 else if (past_to_p)
3118 /* If we've passed TO, and we see a non-note instruction, we
3119 can't extend the sequence to a movable sequence. */
3120 return 0;
3122 if (r == to)
3124 if (!new_to)
3125 /* It's OK to move the sequence if there were matched sets of
3126 exception region notes. */
3127 return eh_region_count == 0;
3129 past_to_p = 1;
3132 /* It's OK to move the sequence if there were matched sets of
3133 exception region notes. */
3134 if (past_to_p && eh_region_count == 0)
3136 *new_to = r;
3137 return 1;
3140 /* Go to the next instruction. */
3141 r = NEXT_INSN (r);
3144 return 0;
3147 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3149 loc_mentioned_in_p (loc, in)
3150 rtx *loc, in;
3152 enum rtx_code code = GET_CODE (in);
3153 const char *fmt = GET_RTX_FORMAT (code);
3154 int i, j;
3156 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3158 if (loc == &in->fld[i].rtx)
3159 return 1;
3160 if (fmt[i] == 'e')
3162 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3163 return 1;
3165 else if (fmt[i] == 'E')
3166 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3167 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3168 return 1;
3170 return 0;
3173 /* Given a subreg X, return the bit offset where the subreg begins
3174 (counting from the least significant bit of the reg). */
3176 unsigned int
3177 subreg_lsb (x)
3178 rtx x;
3180 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3181 enum machine_mode mode = GET_MODE (x);
3182 unsigned int bitpos;
3183 unsigned int byte;
3184 unsigned int word;
3186 /* A paradoxical subreg begins at bit position 0. */
3187 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3188 return 0;
3190 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3191 /* If the subreg crosses a word boundary ensure that
3192 it also begins and ends on a word boundary. */
3193 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3194 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3195 && (SUBREG_BYTE (x) % UNITS_PER_WORD
3196 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3197 abort ();
3199 if (WORDS_BIG_ENDIAN)
3200 word = (GET_MODE_SIZE (inner_mode)
3201 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3202 else
3203 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3204 bitpos = word * BITS_PER_WORD;
3206 if (BYTES_BIG_ENDIAN)
3207 byte = (GET_MODE_SIZE (inner_mode)
3208 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3209 else
3210 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3211 bitpos += byte * BITS_PER_UNIT;
3213 return bitpos;
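/* A worked example, assuming 32-bit words, 8-bit units and a little-endian
   target (both WORDS_BIG_ENDIAN and BYTES_BIG_ENDIAN false): for
   (subreg:SI (reg:DI R) 4), word = 4 / 4 = 1 and byte = 4 % 4 = 0, so the
   subreg begins at bit 1 * 32 + 0 * 8 = 32 of the DImode value.  */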
3216 /* This function returns the regno offset of a subreg expression.
3217 xregno - A regno of an inner hard subreg_reg (or what will become one).
3218 xmode - The mode of xregno.
3219 offset - The byte offset.
3220 ymode - The mode of a top level SUBREG (or what may become one).
3221 RETURN - The regno offset which would be used. */
3222 unsigned int
3223 subreg_regno_offset (xregno, xmode, offset, ymode)
3224 unsigned int xregno;
3225 enum machine_mode xmode;
3226 unsigned int offset;
3227 enum machine_mode ymode;
3229 int nregs_xmode, nregs_ymode;
3230 int mode_multiple, nregs_multiple;
3231 int y_offset;
3233 if (xregno >= FIRST_PSEUDO_REGISTER)
3234 abort ();
3236 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3237 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3239 /* If this is a big endian paradoxical subreg, which uses more actual
3240 hard registers than the original register, we must return a negative
3241 offset so that we find the proper highpart of the register. */
3242 if (offset == 0
3243 && nregs_ymode > nregs_xmode
3244 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3245 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3246 return nregs_xmode - nregs_ymode;
3248 if (offset == 0 || nregs_xmode == nregs_ymode)
3249 return 0;
3251 /* The size of ymode must not be greater than the size of xmode. */
3252 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3253 if (mode_multiple == 0)
3254 abort ();
3256 y_offset = offset / GET_MODE_SIZE (ymode);
3257 nregs_multiple = nregs_xmode / nregs_ymode;
3258 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3261 /* Return the final regno that a subreg expression refers to. */
3262 unsigned int
3263 subreg_regno (x)
3264 rtx x;
3266 unsigned int ret;
3267 rtx subreg = SUBREG_REG (x);
3268 int regno = REGNO (subreg);
3270 ret = regno + subreg_regno_offset (regno,
3271 GET_MODE (subreg),
3272 SUBREG_BYTE (x),
3273 GET_MODE (x));
3274 return ret;
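/* A worked example, assuming 32-bit hard registers, UNITS_PER_WORD == 4 and
   a little-endian target: for (subreg:SI (reg:DI 10) 4), nregs_xmode = 2,
   nregs_ymode = 1, mode_multiple = 8 / 4 = 2, y_offset = 4 / 4 = 1 and
   nregs_multiple = 2, so subreg_regno_offset returns (1 / (2 / 2)) * 1 = 1
   and subreg_regno yields hard register 11.  */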
3277 struct parms_set_data
3279 int nregs;
3280 HARD_REG_SET regs;
3283 /* Helper function for noticing stores to parameter registers. */
3284 static void
3285 parms_set (x, pat, data)
3286 rtx x, pat ATTRIBUTE_UNUSED;
3287 void *data;
3289 struct parms_set_data *d = data;
3290 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3291 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3293 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3294 d->nregs--;
3298 /* Look backward for first parameter to be loaded.
3299 Do not skip BOUNDARY. */
3300 rtx
3301 find_first_parameter_load (call_insn, boundary)
3302 rtx call_insn, boundary;
3304 struct parms_set_data parm;
3305 rtx p, before;
3307 /* Since different machines initialize their parameter registers
3308 in different orders, assume nothing. Collect the set of all
3309 parameter registers. */
3310 CLEAR_HARD_REG_SET (parm.regs);
3311 parm.nregs = 0;
3312 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3313 if (GET_CODE (XEXP (p, 0)) == USE
3314 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3316 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3317 abort ();
3319 /* We only care about registers which can hold function
3320 arguments. */
3321 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3322 continue;
3324 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3325 parm.nregs++;
3327 before = call_insn;
3329 /* Search backward for the first set of a register in this set. */
3330 while (parm.nregs && before != boundary)
3332 before = PREV_INSN (before);
3334 /* It is possible that some loads got CSEed from one call to
3335 another. Stop in that case. */
3336 if (GET_CODE (before) == CALL_INSN)
3337 break;
3339 /* Our caller must either ensure that we will find all sets
3340 (in case the code has not been optimized yet), or take care
3341 of possible labels by setting BOUNDARY to the preceding
3342 CODE_LABEL. */
3343 if (GET_CODE (before) == CODE_LABEL)
3345 if (before != boundary)
3346 abort ();
3347 break;
3350 if (INSN_P (before))
3351 note_stores (PATTERN (before), parms_set, &parm);
3353 return before;
3356 /* Return true if we should avoid inserting code between INSN and preceding
3357 call instruction. */
3359 bool
3360 keep_with_call_p (insn)
3361 rtx insn;
3363 rtx set;
3365 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3367 if (GET_CODE (SET_DEST (set)) == REG
3368 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3369 && fixed_regs[REGNO (SET_DEST (set))]
3370 && general_operand (SET_SRC (set), VOIDmode))
3371 return true;
3372 if (GET_CODE (SET_SRC (set)) == REG
3373 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3374 && GET_CODE (SET_DEST (set)) == REG
3375 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3376 return true;
3377 /* There may be a stack pop just after the call and before the store
3378 of the return register. Search for the actual store when deciding
3379 if we can break or not. */
3380 if (SET_DEST (set) == stack_pointer_rtx)
3382 rtx i2 = next_nonnote_insn (insn);
3383 if (i2 && keep_with_call_p (i2))
3384 return true;
3387 return false;
3390 /* Return true when a store to register X can be hoisted to the place
3391 with LIVE registers (LIVE can be NULL). Value VAL contains the
3392 destination whose value will be used. */
3394 static bool
3395 hoist_test_store (x, val, live)
3396 rtx x, val;
3397 regset live;
3399 if (GET_CODE (x) == SCRATCH)
3400 return true;
3402 if (rtx_equal_p (x, val))
3403 return true;
3405 /* Allow a subreg of X as long as it is not writing just part of a multireg
3406 pseudo; otherwise we would need to update all users of the hoisted store
3407 too. The caller may represent that by specifying the whole subreg as VAL. */
3409 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3411 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3412 && GET_MODE_BITSIZE (GET_MODE (x)) <
3413 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3414 return false;
3415 return true;
3417 if (GET_CODE (x) == SUBREG)
3418 x = SUBREG_REG (x);
3420 /* Anything except a register store is not hoistable. This includes
3421 partial stores to registers. */
3423 if (!REG_P (x))
3424 return false;
3426 /* Pseudo registers can always be replaced by another pseudo to avoid
3427 the side effect; for hard registers we must ensure that they are dead.
3428 Eventually we may want to add code to try to turn pseudos into hard
3429 registers, but it is unlikely to be useful. */
3431 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3433 int regno = REGNO (x);
3434 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3436 if (!live)
3437 return false;
3438 if (REGNO_REG_SET_P (live, regno))
3439 return false;
3440 while (--n > 0)
3441 if (REGNO_REG_SET_P (live, regno + n))
3442 return false;
3444 return true;
3448 /* Return true if INSN can be hoisted to a place with LIVE hard registers
3449 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3450 and used by the hoisting pass. */
3452 bool
3453 can_hoist_insn_p (insn, val, live)
3454 rtx insn, val;
3455 regset live;
3457 rtx pat = PATTERN (insn);
3458 int i;
3460 /* It is probably not worth the complexity to handle multiple
3461 set stores. */
3462 if (!single_set (insn))
3463 return false;
3464 /* We could move a CALL_INSN, but we would need to check that all
3465 caller-clobbered regs are dead, so give up on them for now. */
3466 if (GET_CODE (insn) == CALL_INSN)
3467 return false;
3468 /* In future we will handle hoisting of libcall sequences, but
3469 give up for now. */
3470 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3471 return false;
3472 switch (GET_CODE (pat))
3474 case SET:
3475 if (!hoist_test_store (SET_DEST (pat), val, live))
3476 return false;
3477 break;
3478 case USE:
3479 /* USEs have tricky semantics, so do not move them. */
3480 return false;
3481 break;
3482 case CLOBBER:
3483 if (!hoist_test_store (XEXP (pat, 0), val, live))
3484 return false;
3485 break;
3486 case PARALLEL:
3487 for (i = 0; i < XVECLEN (pat, 0); i++)
3489 rtx x = XVECEXP (pat, 0, i);
3490 switch (GET_CODE (x))
3492 case SET:
3493 if (!hoist_test_store (SET_DEST (x), val, live))
3494 return false;
3495 break;
3496 case USE:
3497 /* We need to fix callers to really ensure the availability
3498 of all values the insn uses, but for now it is safe to prohibit
3499 hoisting of any insn having such hidden uses. */
3500 return false;
3501 break;
3502 case CLOBBER:
3503 if (!hoist_test_store (SET_DEST (x), val, live))
3504 return false;
3505 break;
3506 default:
3507 break;
3510 break;
3511 default:
3512 abort ();
3514 return true;
3517 /* Update the store after hoisting - replace all stores to pseudo registers
3518 by new ones to avoid clobbering of values, except for the store to VAL,
3519 which will be updated to NEW. */
3521 static void
3522 hoist_update_store (insn, xp, val, new)
3523 rtx insn, *xp, val, new;
3525 rtx x = *xp;
3527 if (GET_CODE (x) == SCRATCH)
3528 return;
3530 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3531 validate_change (insn, xp,
3532 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3533 SUBREG_BYTE (x)), 1);
3534 if (rtx_equal_p (x, val))
3536 validate_change (insn, xp, new, 1);
3537 return;
3539 if (GET_CODE (x) == SUBREG)
3541 xp = &SUBREG_REG (x);
3542 x = *xp;
3545 if (!REG_P (x))
3546 abort ();
3548 /* We've verified that hard registers are dead, so we may keep the side
3549 effect. Otherwise replace it by a new pseudo. */
3550 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3551 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3552 REG_NOTES (insn)
3553 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3556 /* Create a copy of INSN after AFTER, replacing the store of VAL with NEW
3557 and every other side effect to a pseudo register with a new pseudo register. */
3559 rtx
3560 hoist_insn_after (insn, after, val, new)
3561 rtx insn, after, val, new;
3563 rtx pat;
3564 int i;
3565 rtx note;
3567 insn = emit_copy_of_insn_after (insn, after);
3568 pat = PATTERN (insn);
3570 /* Remove REG_UNUSED notes as we will re-emit them. */
3571 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3572 remove_note (insn, note);
3574 /* To get this working, callers must ensure that everything referenced
3575 by REG_EQUAL/REG_EQUIV notes is moved too. Let's just remove them;
3576 it is probably easier. */
3577 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3578 remove_note (insn, note);
3579 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3580 remove_note (insn, note);
3582 /* Remove REG_DEAD notes as they might not be valid anymore in case
3583 we create redundancy. */
3584 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3585 remove_note (insn, note);
3586 switch (GET_CODE (pat))
3588 case SET:
3589 hoist_update_store (insn, &SET_DEST (pat), val, new);
3590 break;
3591 case USE:
3592 break;
3593 case CLOBBER:
3594 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3595 break;
3596 case PARALLEL:
3597 for (i = 0; i < XVECLEN (pat, 0); i++)
3599 rtx x = XVECEXP (pat, 0, i);
3600 switch (GET_CODE (x))
3602 case SET:
3603 hoist_update_store (insn, &SET_DEST (x), val, new);
3604 break;
3605 case USE:
3606 break;
3607 case CLOBBER:
3608 hoist_update_store (insn, &SET_DEST (x), val, new);
3609 break;
3610 default:
3611 break;
3614 break;
3615 default:
3616 abort ();
3618 if (!apply_change_group ())
3619 abort ();
3621 return insn;
3624 rtx
3625 hoist_insn_to_edge (insn, e, val, new)
3626 rtx insn, val, new;
3627 edge e;
3629 rtx new_insn;
3631 /* We cannot insert instructions on an abnormal critical edge.
3632 It will be easier to find the culprit if we die now. */
3633 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3634 abort ();
3636 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3637 stuff. We also emit CALL_INSNs and friends. */
3638 if (e->insns == NULL_RTX)
3640 start_sequence ();
3641 emit_note (NULL, NOTE_INSN_DELETED);
3643 else
3644 push_to_sequence (e->insns);
3646 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3648 e->insns = get_insns ();
3649 end_sequence ();
3650 return new_insn;
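/* A minimal sketch of how the hoisting helpers above fit together; the
   surrounding pass, INSN, VAL, LIVE and the edge E come from that
   hypothetical pass, and NEW_REG is a fresh pseudo.  */
#if 0
  if (can_hoist_insn_p (insn, val, live))
    {
      rtx new_reg = gen_reg_rtx (GET_MODE (val));
      hoist_insn_to_edge (insn, e, val, new_reg);
    }
#endif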