gcc/rtlanal.c
1 /* Analyze RTL for C-Compiler
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 2, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING. If not, write to the Free
19 Software Foundation, 59 Temple Place - Suite 330, Boston, MA
20 02111-1307, USA. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "toplev.h"
28 #include "rtl.h"
29 #include "hard-reg-set.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "tm_p.h"
33 #include "flags.h"
34 #include "basic-block.h"
35 #include "real.h"
37 /* Forward declarations */
38 static int global_reg_mentioned_p_1 PARAMS ((rtx *, void *));
39 static void set_of_1 PARAMS ((rtx, rtx, void *));
40 static void insn_dependent_p_1 PARAMS ((rtx, rtx, void *));
41 static int computed_jump_p_1 PARAMS ((rtx));
42 static void parms_set PARAMS ((rtx, rtx, void *));
43 static bool hoist_test_store PARAMS ((rtx, rtx, regset));
44 static void hoist_update_store PARAMS ((rtx, rtx *, rtx, rtx));
46 /* Bit flags that specify the machine subtype we are compiling for.
47 Bits are tested using macros TARGET_... defined in the tm.h file
48 and set by `-m...' switches. Must be defined in rtlanal.c. */
50 int target_flags;
52 /* Return 1 if the value of X is unstable
53 (would be different at a different point in the program).
54 The frame pointer, arg pointer, etc. are considered stable
55 (within one function) and so is anything marked `unchanging'. */
57 int
58 rtx_unstable_p (x)
59 rtx x;
61 RTX_CODE code = GET_CODE (x);
62 int i;
63 const char *fmt;
65 switch (code)
67 case MEM:
68 return ! RTX_UNCHANGING_P (x) || rtx_unstable_p (XEXP (x, 0));
70 case QUEUED:
71 return 1;
73 case ADDRESSOF:
74 case CONST:
75 case CONST_INT:
76 case CONST_DOUBLE:
77 case CONST_VECTOR:
78 case SYMBOL_REF:
79 case LABEL_REF:
80 return 0;
82 case REG:
83 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
84 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
85 /* The arg pointer varies if it is not a fixed register. */
86 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM])
87 || RTX_UNCHANGING_P (x))
88 return 0;
89 #ifndef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
90 /* ??? When call-clobbered, the value is stable modulo the restore
91 that must happen after a call. This currently screws up local-alloc
92 into believing that the restore is not needed. */
93 if (x == pic_offset_table_rtx)
94 return 0;
95 #endif
96 return 1;
98 case ASM_OPERANDS:
99 if (MEM_VOLATILE_P (x))
100 return 1;
102 /* FALLTHROUGH */
104 default:
105 break;
108 fmt = GET_RTX_FORMAT (code);
109 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
110 if (fmt[i] == 'e')
112 if (rtx_unstable_p (XEXP (x, i)))
113 return 1;
115 else if (fmt[i] == 'E')
117 int j;
118 for (j = 0; j < XVECLEN (x, i); j++)
119 if (rtx_unstable_p (XVECEXP (x, i, j)))
120 return 1;
123 return 0;
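#if 0
/* Usage sketch (hypothetical, not part of GCC): a pass that wants to
   reuse the source of a single-set insn elsewhere in the function could
   first ask whether that value is stable.  The helper name below is
   illustrative only.  */
static int
example_src_is_stable (insn)
     rtx insn;
{
  rtx set = single_set (insn);

  /* Reject insns that are not simple sets, and sets whose source could
     evaluate to a different value at another point in the function.  */
  return set != 0 && ! rtx_unstable_p (SET_SRC (set));
}
#endif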
126 /* Return 1 if X has a value that can vary even between two
127 executions of the program. 0 means X can be compared reliably
128 against certain constants or near-constants.
129 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
130 zero, we are slightly more conservative.
131 The frame pointer and the arg pointer are considered constant. */
134 rtx_varies_p (x, for_alias)
135 rtx x;
136 int for_alias;
138 RTX_CODE code = GET_CODE (x);
139 int i;
140 const char *fmt;
142 switch (code)
144 case MEM:
145 return ! RTX_UNCHANGING_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
147 case QUEUED:
148 return 1;
150 case CONST:
151 case CONST_INT:
152 case CONST_DOUBLE:
153 case CONST_VECTOR:
154 case SYMBOL_REF:
155 case LABEL_REF:
156 return 0;
158 case ADDRESSOF:
159 /* This will resolve to some offset from the frame pointer. */
160 return 0;
162 case REG:
163 /* Note that we have to test for the actual rtx used for the frame
164 and arg pointers and not just the register number in case we have
165 eliminated the frame and/or arg pointer and are using it
166 for pseudos. */
167 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
168 /* The arg pointer varies if it is not a fixed register. */
169 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
170 return 0;
171 if (x == pic_offset_table_rtx
172 #ifdef PIC_OFFSET_TABLE_REG_CALL_CLOBBERED
173 /* ??? When call-clobbered, the value is stable modulo the restore
174 that must happen after a call. This currently screws up
175 local-alloc into believing that the restore is not needed, so we
176 must return 0 only if we are called from alias analysis. */
177 && for_alias
178 #endif
180 return 0;
181 return 1;
183 case LO_SUM:
184 /* The operand 0 of a LO_SUM is considered constant
185 (in fact it is related specifically to operand 1)
186 during alias analysis. */
187 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
188 || rtx_varies_p (XEXP (x, 1), for_alias);
190 case ASM_OPERANDS:
191 if (MEM_VOLATILE_P (x))
192 return 1;
194 /* FALLTHROUGH */
196 default:
197 break;
200 fmt = GET_RTX_FORMAT (code);
201 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
202 if (fmt[i] == 'e')
204 if (rtx_varies_p (XEXP (x, i), for_alias))
205 return 1;
207 else if (fmt[i] == 'E')
209 int j;
210 for (j = 0; j < XVECLEN (x, i); j++)
211 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
212 return 1;
215 return 0;
218 /* Return 0 if the use of X as an address in a MEM can cause a trap. */
221 rtx_addr_can_trap_p (x)
222 rtx x;
224 enum rtx_code code = GET_CODE (x);
226 switch (code)
228 case SYMBOL_REF:
229 return SYMBOL_REF_WEAK (x);
231 case LABEL_REF:
232 return 0;
234 case ADDRESSOF:
235 /* This will resolve to some offset from the frame pointer. */
236 return 0;
238 case REG:
239 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
240 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
241 || x == stack_pointer_rtx
242 /* The arg pointer varies if it is not a fixed register. */
243 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
244 return 0;
245 /* All of the virtual frame registers are stack references. */
246 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
247 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
248 return 0;
249 return 1;
251 case CONST:
252 return rtx_addr_can_trap_p (XEXP (x, 0));
254 case PLUS:
255 /* An address is assumed not to trap if it is an address that can't
256 trap plus a constant integer or it is the pic register plus a
257 constant. */
258 return ! ((! rtx_addr_can_trap_p (XEXP (x, 0))
259 && GET_CODE (XEXP (x, 1)) == CONST_INT)
260 || (XEXP (x, 0) == pic_offset_table_rtx
261 && CONSTANT_P (XEXP (x, 1))));
263 case LO_SUM:
264 case PRE_MODIFY:
265 return rtx_addr_can_trap_p (XEXP (x, 1));
267 case PRE_DEC:
268 case PRE_INC:
269 case POST_DEC:
270 case POST_INC:
271 case POST_MODIFY:
272 return rtx_addr_can_trap_p (XEXP (x, 0));
274 default:
275 break;
278 /* If it isn't one of the cases above, it can cause a trap. */
279 return 1;
282 /* Return true if X is an address that is known to not be zero. */
284 bool
285 nonzero_address_p (x)
286 rtx x;
288 enum rtx_code code = GET_CODE (x);
290 switch (code)
292 case SYMBOL_REF:
293 return !SYMBOL_REF_WEAK (x);
295 case LABEL_REF:
296 return true;
298 case ADDRESSOF:
299 /* This will resolve to some offset from the frame pointer. */
300 return true;
302 case REG:
303 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
304 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
305 || x == stack_pointer_rtx
306 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
307 return true;
308 /* All of the virtual frame registers are stack references. */
309 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
310 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
311 return true;
312 return false;
314 case CONST:
315 return nonzero_address_p (XEXP (x, 0));
317 case PLUS:
318 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
320 /* Pointers aren't allowed to wrap. If we've got a register
321 that is known to be a pointer, and a positive offset, then
322 the composite can't be zero. */
323 if (INTVAL (XEXP (x, 1)) > 0
324 && REG_P (XEXP (x, 0))
325 && REG_POINTER (XEXP (x, 0)))
326 return true;
328 return nonzero_address_p (XEXP (x, 0));
330 /* Handle PIC references. */
331 else if (XEXP (x, 0) == pic_offset_table_rtx
332 && CONSTANT_P (XEXP (x, 1)))
333 return true;
334 return false;
336 case PRE_MODIFY:
337 /* Similar to the above; allow positive offsets. Further, since
338 auto-inc is only allowed in memories, the register must be a
339 pointer. */
340 if (GET_CODE (XEXP (x, 1)) == CONST_INT
341 && INTVAL (XEXP (x, 1)) > 0)
342 return true;
343 return nonzero_address_p (XEXP (x, 0));
345 case PRE_INC:
346 /* Similarly. Further, the offset is always positive. */
347 return true;
349 case PRE_DEC:
350 case POST_DEC:
351 case POST_INC:
352 case POST_MODIFY:
353 return nonzero_address_p (XEXP (x, 0));
355 case LO_SUM:
356 return nonzero_address_p (XEXP (x, 1));
358 default:
359 break;
362 /* If it isn't one of the cases above, it might be zero. */
363 return false;
366 /* Return 1 if X refers to a memory location whose address
367 cannot be compared reliably with constant addresses,
368 or if X refers to a BLKmode memory object.
369 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
370 zero, we are slightly more conservative. */
373 rtx_addr_varies_p (x, for_alias)
374 rtx x;
375 int for_alias;
377 enum rtx_code code;
378 int i;
379 const char *fmt;
381 if (x == 0)
382 return 0;
384 code = GET_CODE (x);
385 if (code == MEM)
386 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
388 fmt = GET_RTX_FORMAT (code);
389 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
390 if (fmt[i] == 'e')
392 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
393 return 1;
395 else if (fmt[i] == 'E')
397 int j;
398 for (j = 0; j < XVECLEN (x, i); j++)
399 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
400 return 1;
402 return 0;
405 /* Return the value of the integer term in X, if one is apparent;
406 otherwise return 0.
407 Only obvious integer terms are detected.
408 This is used in cse.c with the `related_value' field. */
410 HOST_WIDE_INT
411 get_integer_term (x)
412 rtx x;
414 if (GET_CODE (x) == CONST)
415 x = XEXP (x, 0);
417 if (GET_CODE (x) == MINUS
418 && GET_CODE (XEXP (x, 1)) == CONST_INT)
419 return - INTVAL (XEXP (x, 1));
420 if (GET_CODE (x) == PLUS
421 && GET_CODE (XEXP (x, 1)) == CONST_INT)
422 return INTVAL (XEXP (x, 1));
423 return 0;
426 /* If X is a constant, return the value sans apparent integer term;
427 otherwise return 0.
428 Only obvious integer terms are detected. */
431 get_related_value (x)
432 rtx x;
434 if (GET_CODE (x) != CONST)
435 return 0;
436 x = XEXP (x, 0);
437 if (GET_CODE (x) == PLUS
438 && GET_CODE (XEXP (x, 1)) == CONST_INT)
439 return XEXP (x, 0);
440 else if (GET_CODE (x) == MINUS
441 && GET_CODE (XEXP (x, 1)) == CONST_INT)
442 return XEXP (x, 0);
443 return 0;
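#if 0
/* Usage sketch (hypothetical): for an address of the form
   (const (plus (symbol_ref "x") (const_int 4))), get_integer_term
   returns 4 and get_related_value returns the symbol_ref, so related
   addresses can be grouped by base and distinguished by offset.  The
   helper name below is illustrative only.  */
static void
example_split_address (x, base, offset)
     rtx x;
     rtx *base;
     HOST_WIDE_INT *offset;
{
  *offset = get_integer_term (x);
  *base = get_related_value (x);	/* 0 if X is not a CONST.  */
}
#endif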
446 /* Given a tablejump insn INSN, return the RTL expression for the offset
447 into the jump table. If the offset cannot be determined, then return
448 NULL_RTX.
450 If EARLIEST is nonzero, it is a pointer to a place where the earliest
451 insn used in locating the offset was found. */
454 get_jump_table_offset (insn, earliest)
455 rtx insn;
456 rtx *earliest;
458 rtx label;
459 rtx table;
460 rtx set;
461 rtx old_insn;
462 rtx x;
463 rtx old_x;
464 rtx y;
465 rtx old_y;
466 int i;
468 if (GET_CODE (insn) != JUMP_INSN
469 || ! (label = JUMP_LABEL (insn))
470 || ! (table = NEXT_INSN (label))
471 || GET_CODE (table) != JUMP_INSN
472 || (GET_CODE (PATTERN (table)) != ADDR_VEC
473 && GET_CODE (PATTERN (table)) != ADDR_DIFF_VEC)
474 || ! (set = single_set (insn)))
475 return NULL_RTX;
477 x = SET_SRC (set);
479 /* Some targets (e.g., ARM) emit a tablejump that also
480 contains the out-of-range target. */
481 if (GET_CODE (x) == IF_THEN_ELSE
482 && GET_CODE (XEXP (x, 2)) == LABEL_REF)
483 x = XEXP (x, 1);
485 /* Search backwards and locate the expression stored in X. */
486 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
487 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
490 /* If X is an expression using a relative address then strip
491 off the addition / subtraction of PC, PIC_OFFSET_TABLE_REGNUM,
492 or the jump table label. */
493 if (GET_CODE (PATTERN (table)) == ADDR_DIFF_VEC
494 && (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS))
496 for (i = 0; i < 2; i++)
498 old_insn = insn;
499 y = XEXP (x, i);
501 if (y == pc_rtx || y == pic_offset_table_rtx)
502 break;
504 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
505 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
508 if ((GET_CODE (y) == LABEL_REF && XEXP (y, 0) == label))
509 break;
512 if (i >= 2)
513 return NULL_RTX;
515 x = XEXP (x, 1 - i);
517 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
518 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
522 /* Strip off any sign or zero extension. */
523 if (GET_CODE (x) == SIGN_EXTEND || GET_CODE (x) == ZERO_EXTEND)
525 x = XEXP (x, 0);
527 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
528 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
532 /* If X isn't a MEM then this isn't a tablejump we understand. */
533 if (GET_CODE (x) != MEM)
534 return NULL_RTX;
536 /* Strip off the MEM. */
537 x = XEXP (x, 0);
539 for (old_x = NULL_RTX; GET_CODE (x) == REG && x != old_x;
540 old_x = x, x = find_last_value (x, &insn, NULL_RTX, 0))
543 /* If X isn't a PLUS then this isn't a tablejump we understand. */
544 if (GET_CODE (x) != PLUS)
545 return NULL_RTX;
547 /* At this point we should have an expression representing the jump table
548 plus an offset. Examine each operand in order to determine which one
549 represents the jump table. Knowing that tells us that the other operand
550 must represent the offset. */
551 for (i = 0; i < 2; i++)
553 old_insn = insn;
554 y = XEXP (x, i);
556 for (old_y = NULL_RTX; GET_CODE (y) == REG && y != old_y;
557 old_y = y, y = find_last_value (y, &old_insn, NULL_RTX, 0))
560 if ((GET_CODE (y) == CONST || GET_CODE (y) == LABEL_REF)
561 && reg_mentioned_p (label, y))
562 break;
565 if (i >= 2)
566 return NULL_RTX;
568 x = XEXP (x, 1 - i);
570 /* Strip off the addition / subtraction of PIC_OFFSET_TABLE_REGNUM. */
571 if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS)
572 for (i = 0; i < 2; i++)
573 if (XEXP (x, i) == pic_offset_table_rtx)
575 x = XEXP (x, 1 - i);
576 break;
579 if (earliest)
580 *earliest = insn;
582 /* Return the RTL expression representing the offset. */
583 return x;
586 /* A subroutine of global_reg_mentioned_p, returns 1 if *LOC mentions
587 a global register. */
589 static int
590 global_reg_mentioned_p_1 (loc, data)
591 rtx *loc;
592 void *data ATTRIBUTE_UNUSED;
594 int regno;
595 rtx x = *loc;
597 if (! x)
598 return 0;
600 switch (GET_CODE (x))
602 case SUBREG:
603 if (GET_CODE (SUBREG_REG (x)) == REG)
605 if (REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER
606 && global_regs[subreg_regno (x)])
607 return 1;
608 return 0;
610 break;
612 case REG:
613 regno = REGNO (x);
614 if (regno < FIRST_PSEUDO_REGISTER && global_regs[regno])
615 return 1;
616 return 0;
618 case SCRATCH:
619 case PC:
620 case CC0:
621 case CONST_INT:
622 case CONST_DOUBLE:
623 case CONST:
624 case LABEL_REF:
625 return 0;
627 case CALL:
628 /* A non-constant call might use a global register. */
629 return 1;
631 default:
632 break;
635 return 0;
638 /* Returns nonzero if X mentions a global register. */
641 global_reg_mentioned_p (x)
642 rtx x;
645 if (INSN_P (x))
647 if (GET_CODE (x) == CALL_INSN)
649 if (! CONST_OR_PURE_CALL_P (x))
650 return 1;
651 x = CALL_INSN_FUNCTION_USAGE (x);
652 if (x == 0)
653 return 0;
655 else
656 x = PATTERN (x);
659 return for_each_rtx (&x, global_reg_mentioned_p_1, NULL);
662 /* Return the number of places FIND appears within X. If COUNT_DEST is
663 zero, we do not count occurrences inside the destination of a SET. */
666 count_occurrences (x, find, count_dest)
667 rtx x, find;
668 int count_dest;
670 int i, j;
671 enum rtx_code code;
672 const char *format_ptr;
673 int count;
675 if (x == find)
676 return 1;
678 code = GET_CODE (x);
680 switch (code)
682 case REG:
683 case CONST_INT:
684 case CONST_DOUBLE:
685 case CONST_VECTOR:
686 case SYMBOL_REF:
687 case CODE_LABEL:
688 case PC:
689 case CC0:
690 return 0;
692 case MEM:
693 if (GET_CODE (find) == MEM && rtx_equal_p (x, find))
694 return 1;
695 break;
697 case SET:
698 if (SET_DEST (x) == find && ! count_dest)
699 return count_occurrences (SET_SRC (x), find, count_dest);
700 break;
702 default:
703 break;
706 format_ptr = GET_RTX_FORMAT (code);
707 count = 0;
709 for (i = 0; i < GET_RTX_LENGTH (code); i++)
711 switch (*format_ptr++)
713 case 'e':
714 count += count_occurrences (XEXP (x, i), find, count_dest);
715 break;
717 case 'E':
718 for (j = 0; j < XVECLEN (x, i); j++)
719 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
720 break;
723 return count;
726 /* Nonzero if register REG appears somewhere within IN.
727 Also works if REG is not a register; in this case it checks
728 for a subexpression of IN that is Lisp "equal" to REG. */
731 reg_mentioned_p (reg, in)
732 rtx reg, in;
734 const char *fmt;
735 int i;
736 enum rtx_code code;
738 if (in == 0)
739 return 0;
741 if (reg == in)
742 return 1;
744 if (GET_CODE (in) == LABEL_REF)
745 return reg == XEXP (in, 0);
747 code = GET_CODE (in);
749 switch (code)
751 /* Compare registers by number. */
752 case REG:
753 return GET_CODE (reg) == REG && REGNO (in) == REGNO (reg);
755 /* These codes have no constituent expressions
756 and are unique. */
757 case SCRATCH:
758 case CC0:
759 case PC:
760 return 0;
762 case CONST_INT:
763 return GET_CODE (reg) == CONST_INT && INTVAL (in) == INTVAL (reg);
765 case CONST_VECTOR:
766 case CONST_DOUBLE:
767 /* These are kept unique for a given value. */
768 return 0;
770 default:
771 break;
774 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
775 return 1;
777 fmt = GET_RTX_FORMAT (code);
779 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
781 if (fmt[i] == 'E')
783 int j;
784 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
785 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
786 return 1;
788 else if (fmt[i] == 'e'
789 && reg_mentioned_p (reg, XEXP (in, i)))
790 return 1;
792 return 0;
795 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
796 no CODE_LABEL insn. */
799 no_labels_between_p (beg, end)
800 rtx beg, end;
802 rtx p;
803 if (beg == end)
804 return 0;
805 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
806 if (GET_CODE (p) == CODE_LABEL)
807 return 0;
808 return 1;
811 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
812 no JUMP_INSN insn. */
815 no_jumps_between_p (beg, end)
816 rtx beg, end;
818 rtx p;
819 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
820 if (GET_CODE (p) == JUMP_INSN)
821 return 0;
822 return 1;
825 /* Nonzero if register REG is used in an insn between
826 FROM_INSN and TO_INSN (exclusive of those two). */
829 reg_used_between_p (reg, from_insn, to_insn)
830 rtx reg, from_insn, to_insn;
832 rtx insn;
834 if (from_insn == to_insn)
835 return 0;
837 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
838 if (INSN_P (insn)
839 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
840 || (GET_CODE (insn) == CALL_INSN
841 && (find_reg_fusage (insn, USE, reg)
842 || find_reg_fusage (insn, CLOBBER, reg)))))
843 return 1;
844 return 0;
847 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
848 is entirely replaced by a new value and the only use is as a SET_DEST,
849 we do not consider it a reference. */
852 reg_referenced_p (x, body)
853 rtx x;
854 rtx body;
856 int i;
858 switch (GET_CODE (body))
860 case SET:
861 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
862 return 1;
864 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
865 of a REG that occupies all of the REG, the insn references X if
866 it is mentioned in the destination. */
867 if (GET_CODE (SET_DEST (body)) != CC0
868 && GET_CODE (SET_DEST (body)) != PC
869 && GET_CODE (SET_DEST (body)) != REG
870 && ! (GET_CODE (SET_DEST (body)) == SUBREG
871 && GET_CODE (SUBREG_REG (SET_DEST (body))) == REG
872 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
873 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
874 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
875 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
876 && reg_overlap_mentioned_p (x, SET_DEST (body)))
877 return 1;
878 return 0;
880 case ASM_OPERANDS:
881 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
882 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
883 return 1;
884 return 0;
886 case CALL:
887 case USE:
888 case IF_THEN_ELSE:
889 return reg_overlap_mentioned_p (x, body);
891 case TRAP_IF:
892 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
894 case PREFETCH:
895 return reg_overlap_mentioned_p (x, XEXP (body, 0));
897 case UNSPEC:
898 case UNSPEC_VOLATILE:
899 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
900 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
901 return 1;
902 return 0;
904 case PARALLEL:
905 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
906 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
907 return 1;
908 return 0;
910 case CLOBBER:
911 if (GET_CODE (XEXP (body, 0)) == MEM)
912 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
913 return 1;
914 return 0;
916 case COND_EXEC:
917 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
918 return 1;
919 return reg_referenced_p (x, COND_EXEC_CODE (body));
921 default:
922 return 0;
926 /* Nonzero if register REG is referenced in an insn between
927 FROM_INSN and TO_INSN (exclusive of those two). Sets of REG do
928 not count. */
931 reg_referenced_between_p (reg, from_insn, to_insn)
932 rtx reg, from_insn, to_insn;
934 rtx insn;
936 if (from_insn == to_insn)
937 return 0;
939 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
940 if (INSN_P (insn)
941 && (reg_referenced_p (reg, PATTERN (insn))
942 || (GET_CODE (insn) == CALL_INSN
943 && find_reg_fusage (insn, USE, reg))))
944 return 1;
945 return 0;
948 /* Nonzero if register REG is set or clobbered in an insn between
949 FROM_INSN and TO_INSN (exclusive of those two). */
952 reg_set_between_p (reg, from_insn, to_insn)
953 rtx reg, from_insn, to_insn;
955 rtx insn;
957 if (from_insn == to_insn)
958 return 0;
960 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
961 if (INSN_P (insn) && reg_set_p (reg, insn))
962 return 1;
963 return 0;
966 /* Internals of reg_set_between_p. */
968 reg_set_p (reg, insn)
969 rtx reg, insn;
971 /* We can be passed an insn or part of one. If we are passed an insn,
972 check if a side-effect of the insn clobbers REG. */
973 if (INSN_P (insn)
974 && (FIND_REG_INC_NOTE (insn, reg)
975 || (GET_CODE (insn) == CALL_INSN
976 /* We'd like to test call_used_regs here, but rtlanal.c can't
977 reference that variable due to its use in genattrtab. So
978 we'll just be more conservative.
980 ??? Unless we could ensure that the CALL_INSN_FUNCTION_USAGE
981 information holds all clobbered registers. */
982 && ((GET_CODE (reg) == REG
983 && REGNO (reg) < FIRST_PSEUDO_REGISTER)
984 || GET_CODE (reg) == MEM
985 || find_reg_fusage (insn, CLOBBER, reg)))))
986 return 1;
988 return set_of (reg, insn) != NULL_RTX;
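#if 0
/* Usage sketch (hypothetical): before moving an insn that uses REG past
   a range of other insns, a pass might verify that the intervening code
   neither sets REG nor uses it.  The helper name below is illustrative
   only.  */
static int
example_safe_to_move_past (reg, from_insn, to_insn)
     rtx reg, from_insn, to_insn;
{
  return ! reg_set_between_p (reg, from_insn, to_insn)
	 && ! reg_used_between_p (reg, from_insn, to_insn);
}
#endif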
991 /* Similar to reg_set_between_p, but check all registers in X. Return 0
992 only if none of them are modified between START and END. Do not
993 consider non-registers one way or the other. */
996 regs_set_between_p (x, start, end)
997 rtx x;
998 rtx start, end;
1000 enum rtx_code code = GET_CODE (x);
1001 const char *fmt;
1002 int i, j;
1004 switch (code)
1006 case CONST_INT:
1007 case CONST_DOUBLE:
1008 case CONST_VECTOR:
1009 case CONST:
1010 case SYMBOL_REF:
1011 case LABEL_REF:
1012 case PC:
1013 case CC0:
1014 return 0;
1016 case REG:
1017 return reg_set_between_p (x, start, end);
1019 default:
1020 break;
1023 fmt = GET_RTX_FORMAT (code);
1024 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1026 if (fmt[i] == 'e' && regs_set_between_p (XEXP (x, i), start, end))
1027 return 1;
1029 else if (fmt[i] == 'E')
1030 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1031 if (regs_set_between_p (XVECEXP (x, i, j), start, end))
1032 return 1;
1035 return 0;
1038 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1039 only if none of them are modified between START and END. Return 1 if
1040 X contains a MEM; this routine does use memory aliasing. */
1043 modified_between_p (x, start, end)
1044 rtx x;
1045 rtx start, end;
1047 enum rtx_code code = GET_CODE (x);
1048 const char *fmt;
1049 int i, j;
1050 rtx insn;
1052 if (start == end)
1053 return 0;
1055 switch (code)
1057 case CONST_INT:
1058 case CONST_DOUBLE:
1059 case CONST_VECTOR:
1060 case CONST:
1061 case SYMBOL_REF:
1062 case LABEL_REF:
1063 return 0;
1065 case PC:
1066 case CC0:
1067 return 1;
1069 case MEM:
1070 if (RTX_UNCHANGING_P (x))
1071 return 0;
1072 if (modified_between_p (XEXP (x, 0), start, end))
1073 return 1;
1074 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1075 if (memory_modified_in_insn_p (x, insn))
1076 return 1;
1077 return 0;
1078 break;
1080 case REG:
1081 return reg_set_between_p (x, start, end);
1083 default:
1084 break;
1087 fmt = GET_RTX_FORMAT (code);
1088 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1090 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1091 return 1;
1093 else if (fmt[i] == 'E')
1094 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1095 if (modified_between_p (XVECEXP (x, i, j), start, end))
1096 return 1;
1099 return 0;
1102 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1103 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1104 does use memory aliasing. */
1107 modified_in_p (x, insn)
1108 rtx x;
1109 rtx insn;
1111 enum rtx_code code = GET_CODE (x);
1112 const char *fmt;
1113 int i, j;
1115 switch (code)
1117 case CONST_INT:
1118 case CONST_DOUBLE:
1119 case CONST_VECTOR:
1120 case CONST:
1121 case SYMBOL_REF:
1122 case LABEL_REF:
1123 return 0;
1125 case PC:
1126 case CC0:
1127 return 1;
1129 case MEM:
1130 if (RTX_UNCHANGING_P (x))
1131 return 0;
1132 if (modified_in_p (XEXP (x, 0), insn))
1133 return 1;
1134 if (memory_modified_in_insn_p (x, insn))
1135 return 1;
1136 return 0;
1137 break;
1139 case REG:
1140 return reg_set_p (x, insn);
1142 default:
1143 break;
1146 fmt = GET_RTX_FORMAT (code);
1147 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1149 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1150 return 1;
1152 else if (fmt[i] == 'E')
1153 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1154 if (modified_in_p (XVECEXP (x, i, j), insn))
1155 return 1;
1158 return 0;
1161 /* Return true if anything in insn X is (anti,output,true) dependent on
1162 anything in insn Y. */
1165 insn_dependent_p (x, y)
1166 rtx x, y;
1168 rtx tmp;
1170 if (! INSN_P (x) || ! INSN_P (y))
1171 abort ();
1173 tmp = PATTERN (y);
1174 note_stores (PATTERN (x), insn_dependent_p_1, &tmp);
1175 if (tmp == NULL_RTX)
1176 return 1;
1178 tmp = PATTERN (x);
1179 note_stores (PATTERN (y), insn_dependent_p_1, &tmp);
1180 if (tmp == NULL_RTX)
1181 return 1;
1183 return 0;
1186 /* A helper routine for insn_dependent_p called through note_stores. */
1188 static void
1189 insn_dependent_p_1 (x, pat, data)
1190 rtx x;
1191 rtx pat ATTRIBUTE_UNUSED;
1192 void *data;
1194 rtx * pinsn = (rtx *) data;
1196 if (*pinsn && reg_mentioned_p (x, *pinsn))
1197 *pinsn = NULL_RTX;
1200 /* Helper function for set_of. */
1201 struct set_of_data
1203 rtx found;
1204 rtx pat;
1207 static void
1208 set_of_1 (x, pat, data1)
1209 rtx x;
1210 rtx pat;
1211 void *data1;
1213 struct set_of_data *data = (struct set_of_data *) (data1);
1214 if (rtx_equal_p (x, data->pat)
1215 || (GET_CODE (x) != MEM && reg_overlap_mentioned_p (data->pat, x)))
1216 data->found = pat;
1219 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1220 (either directly or via STRICT_LOW_PART and similar modifiers). */
1222 set_of (pat, insn)
1223 rtx pat, insn;
1225 struct set_of_data data;
1226 data.found = NULL_RTX;
1227 data.pat = pat;
1228 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1229 return data.found;
1232 /* Given an INSN, return a SET expression if this insn has only a single SET.
1233 It may also have CLOBBERs, USEs, or SETs whose output
1234 will not be used, which we ignore. */
1237 single_set_2 (insn, pat)
1238 rtx insn, pat;
1240 rtx set = NULL;
1241 int set_verified = 1;
1242 int i;
1244 if (GET_CODE (pat) == PARALLEL)
1246 for (i = 0; i < XVECLEN (pat, 0); i++)
1248 rtx sub = XVECEXP (pat, 0, i);
1249 switch (GET_CODE (sub))
1251 case USE:
1252 case CLOBBER:
1253 break;
1255 case SET:
1256 /* We can consider insns having multiple sets, where all
1257 but one are dead, as single set insns. In the common case
1258 only a single set is present in the pattern, so we want
1259 to avoid checking for REG_UNUSED notes unless necessary.
1261 When we reach a set for the first time, we just expect it to be
1262 the single set we are looking for; only when more
1263 sets are found in the insn do we check them. */
1264 if (!set_verified)
1266 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1267 && !side_effects_p (set))
1268 set = NULL;
1269 else
1270 set_verified = 1;
1272 if (!set)
1273 set = sub, set_verified = 0;
1274 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1275 || side_effects_p (sub))
1276 return NULL_RTX;
1277 break;
1279 default:
1280 return NULL_RTX;
1284 return set;
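#if 0
/* Usage sketch (hypothetical): this routine is normally reached through
   the single_set wrapper; a typical caller checks whether an insn is a
   plain register copy into REG.  The helper name below is illustrative
   only.  */
static int
example_is_copy_into (insn, reg)
     rtx insn, reg;
{
  rtx set = single_set (insn);

  return set != 0
	 && rtx_equal_p (SET_DEST (set), reg)
	 && GET_CODE (SET_SRC (set)) == REG;
}
#endif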
1287 /* Given an INSN, return nonzero if it has more than one SET, else return
1288 zero. */
1291 multiple_sets (insn)
1292 rtx insn;
1294 int found;
1295 int i;
1297 /* INSN must be an insn. */
1298 if (! INSN_P (insn))
1299 return 0;
1301 /* Only a PARALLEL can have multiple SETs. */
1302 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1304 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1305 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1307 /* If we have already found a SET, then return now. */
1308 if (found)
1309 return 1;
1310 else
1311 found = 1;
1315 /* Either zero or one SET. */
1316 return 0;
1319 /* Return nonzero if the destination of SET equals the source
1320 and there are no side effects. */
1323 set_noop_p (set)
1324 rtx set;
1326 rtx src = SET_SRC (set);
1327 rtx dst = SET_DEST (set);
1329 if (side_effects_p (src) || side_effects_p (dst))
1330 return 0;
1332 if (GET_CODE (dst) == MEM && GET_CODE (src) == MEM)
1333 return rtx_equal_p (dst, src);
1335 if (dst == pc_rtx && src == pc_rtx)
1336 return 1;
1338 if (GET_CODE (dst) == SIGN_EXTRACT
1339 || GET_CODE (dst) == ZERO_EXTRACT)
1340 return rtx_equal_p (XEXP (dst, 0), src)
1341 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx;
1343 if (GET_CODE (dst) == STRICT_LOW_PART)
1344 dst = XEXP (dst, 0);
1346 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1348 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1349 return 0;
1350 src = SUBREG_REG (src);
1351 dst = SUBREG_REG (dst);
1354 return (GET_CODE (src) == REG && GET_CODE (dst) == REG
1355 && REGNO (src) == REGNO (dst));
1358 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1359 value to itself. */
1362 noop_move_p (insn)
1363 rtx insn;
1365 rtx pat = PATTERN (insn);
1367 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1368 return 1;
1370 /* Insns carrying these notes are useful later on. */
1371 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1372 return 0;
1374 /* For now treat an insn with a REG_RETVAL note as a
1375 special insn which should not be considered a no-op. */
1376 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
1377 return 0;
1379 if (GET_CODE (pat) == SET && set_noop_p (pat))
1380 return 1;
1382 if (GET_CODE (pat) == PARALLEL)
1384 int i;
1385 /* If nothing but SETs of registers to themselves,
1386 this insn can also be deleted. */
1387 for (i = 0; i < XVECLEN (pat, 0); i++)
1389 rtx tem = XVECEXP (pat, 0, i);
1391 if (GET_CODE (tem) == USE
1392 || GET_CODE (tem) == CLOBBER)
1393 continue;
1395 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1396 return 0;
1399 return 1;
1401 return 0;
1405 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1406 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1407 If the object was modified, if we hit a partial assignment to X, or hit a
1408 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1409 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1410 be the src. */
1413 find_last_value (x, pinsn, valid_to, allow_hwreg)
1414 rtx x;
1415 rtx *pinsn;
1416 rtx valid_to;
1417 int allow_hwreg;
1419 rtx p;
1421 for (p = PREV_INSN (*pinsn); p && GET_CODE (p) != CODE_LABEL;
1422 p = PREV_INSN (p))
1423 if (INSN_P (p))
1425 rtx set = single_set (p);
1426 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1428 if (set && rtx_equal_p (x, SET_DEST (set)))
1430 rtx src = SET_SRC (set);
1432 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1433 src = XEXP (note, 0);
1435 if ((valid_to == NULL_RTX
1436 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1437 /* Reject hard registers because we don't usually want
1438 to use them; we'd rather use a pseudo. */
1439 && (! (GET_CODE (src) == REG
1440 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1442 *pinsn = p;
1443 return src;
1447 /* If set in non-simple way, we don't have a value. */
1448 if (reg_set_p (x, p))
1449 break;
1452 return x;
1455 /* Return nonzero if register in range [REGNO, ENDREGNO)
1456 appears either explicitly or implicitly in X
1457 other than being stored into.
1459 References contained within the substructure at LOC do not count.
1460 LOC may be zero, meaning don't ignore anything. */
1463 refers_to_regno_p (regno, endregno, x, loc)
1464 unsigned int regno, endregno;
1465 rtx x;
1466 rtx *loc;
1468 int i;
1469 unsigned int x_regno;
1470 RTX_CODE code;
1471 const char *fmt;
1473 repeat:
1474 /* The contents of a REG_NONNEG note are always zero, so we must come here
1475 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1476 if (x == 0)
1477 return 0;
1479 code = GET_CODE (x);
1481 switch (code)
1483 case REG:
1484 x_regno = REGNO (x);
1486 /* If we are modifying the stack, frame, or argument pointer, it will
1487 clobber a virtual register. In fact, we could be more precise,
1488 but it isn't worth it. */
1489 if ((x_regno == STACK_POINTER_REGNUM
1490 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1491 || x_regno == ARG_POINTER_REGNUM
1492 #endif
1493 || x_regno == FRAME_POINTER_REGNUM)
1494 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1495 return 1;
1497 return (endregno > x_regno
1498 && regno < x_regno + (x_regno < FIRST_PSEUDO_REGISTER
1499 ? HARD_REGNO_NREGS (x_regno, GET_MODE (x))
1500 : 1));
1502 case SUBREG:
1503 /* If this is a SUBREG of a hard reg, we can see exactly which
1504 registers are being modified. Otherwise, handle normally. */
1505 if (GET_CODE (SUBREG_REG (x)) == REG
1506 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1508 unsigned int inner_regno = subreg_regno (x);
1509 unsigned int inner_endregno
1510 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1511 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1513 return endregno > inner_regno && regno < inner_endregno;
1515 break;
1517 case CLOBBER:
1518 case SET:
1519 if (&SET_DEST (x) != loc
1520 /* Note setting a SUBREG counts as referring to the REG it is in for
1521 a pseudo but not for hard registers since we can
1522 treat each word individually. */
1523 && ((GET_CODE (SET_DEST (x)) == SUBREG
1524 && loc != &SUBREG_REG (SET_DEST (x))
1525 && GET_CODE (SUBREG_REG (SET_DEST (x))) == REG
1526 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1527 && refers_to_regno_p (regno, endregno,
1528 SUBREG_REG (SET_DEST (x)), loc))
1529 || (GET_CODE (SET_DEST (x)) != REG
1530 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1531 return 1;
1533 if (code == CLOBBER || loc == &SET_SRC (x))
1534 return 0;
1535 x = SET_SRC (x);
1536 goto repeat;
1538 default:
1539 break;
1542 /* X does not match, so try its subexpressions. */
1544 fmt = GET_RTX_FORMAT (code);
1545 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1547 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1549 if (i == 0)
1551 x = XEXP (x, 0);
1552 goto repeat;
1554 else
1555 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1556 return 1;
1558 else if (fmt[i] == 'E')
1560 int j;
1561 for (j = XVECLEN (x, i) - 1; j >=0; j--)
1562 if (loc != &XVECEXP (x, i, j)
1563 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1564 return 1;
1567 return 0;
1570 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1571 we check if any register number in X conflicts with the relevant register
1572 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1573 contains a MEM (we don't bother checking for memory addresses that can't
1574 conflict because we expect this to be a rare case). */
1577 reg_overlap_mentioned_p (x, in)
1578 rtx x, in;
1580 unsigned int regno, endregno;
1582 /* Overly conservative. */
1583 if (GET_CODE (x) == STRICT_LOW_PART)
1584 x = XEXP (x, 0);
1586 /* If either argument is a constant, then modifying X can not affect IN. */
1587 if (CONSTANT_P (x) || CONSTANT_P (in))
1588 return 0;
1590 switch (GET_CODE (x))
1592 case SUBREG:
1593 regno = REGNO (SUBREG_REG (x));
1594 if (regno < FIRST_PSEUDO_REGISTER)
1595 regno = subreg_regno (x);
1596 goto do_reg;
1598 case REG:
1599 regno = REGNO (x);
1600 do_reg:
1601 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1602 ? HARD_REGNO_NREGS (regno, GET_MODE (x)) : 1);
1603 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1605 case MEM:
1607 const char *fmt;
1608 int i;
1610 if (GET_CODE (in) == MEM)
1611 return 1;
1613 fmt = GET_RTX_FORMAT (GET_CODE (in));
1614 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1615 if (fmt[i] == 'e' && reg_overlap_mentioned_p (x, XEXP (in, i)))
1616 return 1;
1618 return 0;
1621 case SCRATCH:
1622 case PC:
1623 case CC0:
1624 return reg_mentioned_p (x, in);
1626 case PARALLEL:
1628 int i;
1630 /* If any register in here refers to it we return true. */
1631 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1632 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1633 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1634 return 1;
1635 return 0;
1638 default:
1639 break;
1642 abort ();
1645 /* Return the last value to which REG was set prior to INSN. If we can't
1646 find it easily, return 0.
1648 We only return a REG, SUBREG, or constant because it is too hard to
1649 check if a MEM remains unchanged. */
1652 reg_set_last (x, insn)
1653 rtx x;
1654 rtx insn;
1656 rtx orig_insn = insn;
1658 /* Scan backwards until reg_set_last_1 changed one of the above flags.
1659 Stop when we reach a label or X is a hard reg and we reach a
1660 CALL_INSN (if reg_set_last_last_regno is a hard reg).
1662 If we find a set of X, ensure that its SET_SRC remains unchanged. */
1664 /* We compare with <= here, because reg_set_last_last_regno
1665 is actually the number of the first reg *not* in X. */
1666 for (;
1667 insn && GET_CODE (insn) != CODE_LABEL
1668 && ! (GET_CODE (insn) == CALL_INSN
1669 && REGNO (x) <= FIRST_PSEUDO_REGISTER);
1670 insn = PREV_INSN (insn))
1671 if (INSN_P (insn))
1673 rtx set = set_of (x, insn);
1674 /* OK, this insn modifies our register. See if we understand it. */
1675 if (set)
1677 rtx last_value;
1678 if (GET_CODE (set) != SET || SET_DEST (set) != x)
1679 return 0;
1680 last_value = SET_SRC (set);
1681 if (CONSTANT_P (last_value)
1682 || ((GET_CODE (last_value) == REG
1683 || GET_CODE (last_value) == SUBREG)
1684 && ! reg_set_between_p (last_value,
1685 insn, orig_insn)))
1686 return last_value;
1687 else
1688 return 0;
1692 return 0;
1695 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1696 (X would be the pattern of an insn).
1697 FUN receives two arguments:
1698 the REG, MEM, CC0 or PC being stored in or clobbered,
1699 the SET or CLOBBER rtx that does the store.
1701 If the item being stored in or clobbered is a SUBREG of a hard register,
1702 the SUBREG will be passed. */
1704 void
1705 note_stores (x, fun, data)
1706 rtx x;
1707 void (*fun) PARAMS ((rtx, rtx, void *));
1708 void *data;
1710 int i;
1712 if (GET_CODE (x) == COND_EXEC)
1713 x = COND_EXEC_CODE (x);
1715 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1717 rtx dest = SET_DEST (x);
1719 while ((GET_CODE (dest) == SUBREG
1720 && (GET_CODE (SUBREG_REG (dest)) != REG
1721 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1722 || GET_CODE (dest) == ZERO_EXTRACT
1723 || GET_CODE (dest) == SIGN_EXTRACT
1724 || GET_CODE (dest) == STRICT_LOW_PART)
1725 dest = XEXP (dest, 0);
1727 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1728 each of whose first operand is a register. */
1729 if (GET_CODE (dest) == PARALLEL)
1731 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1732 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1733 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1735 else
1736 (*fun) (dest, x, data);
1739 else if (GET_CODE (x) == PARALLEL)
1740 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1741 note_stores (XVECEXP (x, 0, i), fun, data);
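#if 0
/* Usage sketch (hypothetical): note_stores is normally used with a small
   callback; here the callback records whether the pattern writes any
   hard register.  The helper names below are illustrative only.  */
static void
example_note_hard_reg_store (dest, setter, data)
     rtx dest;
     rtx setter ATTRIBUTE_UNUSED;
     void *data;
{
  if (GET_CODE (dest) == REG && REGNO (dest) < FIRST_PSEUDO_REGISTER)
    *(int *) data = 1;
}

static int
example_sets_hard_reg_p (insn)
     rtx insn;
{
  int found = 0;

  note_stores (PATTERN (insn), example_note_hard_reg_store, &found);
  return found;
}
#endif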
1744 /* Like note_stores, but call FUN for each expression that is being
1745 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1746 FUN for each expression, not any interior subexpressions. FUN receives a
1747 pointer to the expression and the DATA passed to this function.
1749 Note that this is not quite the same test as that done in reg_referenced_p
1750 since that considers something as being referenced if it is being
1751 partially set, while we do not. */
1753 void
1754 note_uses (pbody, fun, data)
1755 rtx *pbody;
1756 void (*fun) PARAMS ((rtx *, void *));
1757 void *data;
1759 rtx body = *pbody;
1760 int i;
1762 switch (GET_CODE (body))
1764 case COND_EXEC:
1765 (*fun) (&COND_EXEC_TEST (body), data);
1766 note_uses (&COND_EXEC_CODE (body), fun, data);
1767 return;
1769 case PARALLEL:
1770 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1771 note_uses (&XVECEXP (body, 0, i), fun, data);
1772 return;
1774 case USE:
1775 (*fun) (&XEXP (body, 0), data);
1776 return;
1778 case ASM_OPERANDS:
1779 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1780 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1781 return;
1783 case TRAP_IF:
1784 (*fun) (&TRAP_CONDITION (body), data);
1785 return;
1787 case PREFETCH:
1788 (*fun) (&XEXP (body, 0), data);
1789 return;
1791 case UNSPEC:
1792 case UNSPEC_VOLATILE:
1793 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1794 (*fun) (&XVECEXP (body, 0, i), data);
1795 return;
1797 case CLOBBER:
1798 if (GET_CODE (XEXP (body, 0)) == MEM)
1799 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1800 return;
1802 case SET:
1804 rtx dest = SET_DEST (body);
1806 /* For sets we process everything in the source, plus the registers
1807 mentioned in a memory destination and the operands of a ZERO_EXTRACT. */
1808 (*fun) (&SET_SRC (body), data);
1810 if (GET_CODE (dest) == ZERO_EXTRACT)
1812 (*fun) (&XEXP (dest, 1), data);
1813 (*fun) (&XEXP (dest, 2), data);
1816 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1817 dest = XEXP (dest, 0);
1819 if (GET_CODE (dest) == MEM)
1820 (*fun) (&XEXP (dest, 0), data);
1822 return;
1824 default:
1825 /* All the other possibilities never store. */
1826 (*fun) (pbody, data);
1827 return;
1831 /* Return nonzero if X's old contents don't survive after INSN.
1832 This will be true if X is (cc0), or if X is a register and
1833 either X dies in INSN or INSN entirely sets X.
1835 "Entirely set" means set directly and not through a SUBREG,
1836 ZERO_EXTRACT or SIGN_EXTRACT, so no trace of the old contents remains.
1837 Likewise, REG_INC does not count.
1839 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1840 but for this use that makes no difference, since regs don't overlap
1841 during their lifetimes. Therefore, this function may be used
1842 at any time after deaths have been computed (in flow.c).
1844 If REG is a hard reg that occupies multiple machine registers, this
1845 function will only return 1 if each of those registers will be replaced
1846 by INSN. */
1849 dead_or_set_p (insn, x)
1850 rtx insn;
1851 rtx x;
1853 unsigned int regno, last_regno;
1854 unsigned int i;
1856 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1857 if (GET_CODE (x) == CC0)
1858 return 1;
1860 if (GET_CODE (x) != REG)
1861 abort ();
1863 regno = REGNO (x);
1864 last_regno = (regno >= FIRST_PSEUDO_REGISTER ? regno
1865 : regno + HARD_REGNO_NREGS (regno, GET_MODE (x)) - 1);
1867 for (i = regno; i <= last_regno; i++)
1868 if (! dead_or_set_regno_p (insn, i))
1869 return 0;
1871 return 1;
1874 /* Utility function for dead_or_set_p to check an individual register. Also
1875 called from flow.c. */
1878 dead_or_set_regno_p (insn, test_regno)
1879 rtx insn;
1880 unsigned int test_regno;
1882 unsigned int regno, endregno;
1883 rtx pattern;
1885 /* See if there is a death note for something that includes TEST_REGNO. */
1886 if (find_regno_note (insn, REG_DEAD, test_regno))
1887 return 1;
1889 if (GET_CODE (insn) == CALL_INSN
1890 && find_regno_fusage (insn, CLOBBER, test_regno))
1891 return 1;
1893 pattern = PATTERN (insn);
1895 if (GET_CODE (pattern) == COND_EXEC)
1896 pattern = COND_EXEC_CODE (pattern);
1898 if (GET_CODE (pattern) == SET)
1900 rtx dest = SET_DEST (pattern);
1902 /* A value is totally replaced if it is the destination or the
1903 destination is a SUBREG of REGNO that does not change the number of
1904 words in it. */
1905 if (GET_CODE (dest) == SUBREG
1906 && (((GET_MODE_SIZE (GET_MODE (dest))
1907 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1908 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1909 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1910 dest = SUBREG_REG (dest);
1912 if (GET_CODE (dest) != REG)
1913 return 0;
1915 regno = REGNO (dest);
1916 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1917 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1919 return (test_regno >= regno && test_regno < endregno);
1921 else if (GET_CODE (pattern) == PARALLEL)
1923 int i;
1925 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1927 rtx body = XVECEXP (pattern, 0, i);
1929 if (GET_CODE (body) == COND_EXEC)
1930 body = COND_EXEC_CODE (body);
1932 if (GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1934 rtx dest = SET_DEST (body);
1936 if (GET_CODE (dest) == SUBREG
1937 && (((GET_MODE_SIZE (GET_MODE (dest))
1938 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1939 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1940 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1941 dest = SUBREG_REG (dest);
1943 if (GET_CODE (dest) != REG)
1944 continue;
1946 regno = REGNO (dest);
1947 endregno = (regno >= FIRST_PSEUDO_REGISTER ? regno + 1
1948 : regno + HARD_REGNO_NREGS (regno, GET_MODE (dest)));
1950 if (test_regno >= regno && test_regno < endregno)
1951 return 1;
1956 return 0;
1959 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1960 If DATUM is nonzero, look for one whose datum is DATUM. */
1963 find_reg_note (insn, kind, datum)
1964 rtx insn;
1965 enum reg_note kind;
1966 rtx datum;
1968 rtx link;
1970 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1971 if (! INSN_P (insn))
1972 return 0;
1974 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1975 if (REG_NOTE_KIND (link) == kind
1976 && (datum == 0 || datum == XEXP (link, 0)))
1977 return link;
1978 return 0;
1981 /* Return the reg-note of kind KIND in insn INSN which applies to register
1982 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1983 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1984 it might be the case that the note overlaps REGNO. */
1987 find_regno_note (insn, kind, regno)
1988 rtx insn;
1989 enum reg_note kind;
1990 unsigned int regno;
1992 rtx link;
1994 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1995 if (! INSN_P (insn))
1996 return 0;
1998 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1999 if (REG_NOTE_KIND (link) == kind
2000 /* Verify that it is a register, so that scratch and MEM won't cause a
2001 problem here. */
2002 && GET_CODE (XEXP (link, 0)) == REG
2003 && REGNO (XEXP (link, 0)) <= regno
2004 && ((REGNO (XEXP (link, 0))
2005 + (REGNO (XEXP (link, 0)) >= FIRST_PSEUDO_REGISTER ? 1
2006 : HARD_REGNO_NREGS (REGNO (XEXP (link, 0)),
2007 GET_MODE (XEXP (link, 0)))))
2008 > regno))
2009 return link;
2010 return 0;
2013 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
2014 has such a note. */
2017 find_reg_equal_equiv_note (insn)
2018 rtx insn;
2020 rtx note;
2022 if (single_set (insn) == 0)
2023 return 0;
2024 else if ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)) != 0)
2025 return note;
2026 else
2027 return find_reg_note (insn, REG_EQUAL, NULL_RTX);
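#if 0
/* Usage sketch (hypothetical): a REG_EQUAL/REG_EQUIV note, when present,
   gives an equivalent expression for the value stored by a single-set
   insn; callers often prefer it to the raw SET_SRC when it is constant.
   The helper name below is illustrative only.  */
static rtx
example_known_value (insn)
     rtx insn;
{
  rtx set = single_set (insn);
  rtx note = find_reg_equal_equiv_note (insn);

  if (note != 0 && CONSTANT_P (XEXP (note, 0)))
    return XEXP (note, 0);
  return set != 0 ? SET_SRC (set) : NULL_RTX;
}
#endif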
2030 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
2031 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2034 find_reg_fusage (insn, code, datum)
2035 rtx insn;
2036 enum rtx_code code;
2037 rtx datum;
2039 /* If it's not a CALL_INSN, it can't possibly have a
2040 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
2041 if (GET_CODE (insn) != CALL_INSN)
2042 return 0;
2044 if (! datum)
2045 abort ();
2047 if (GET_CODE (datum) != REG)
2049 rtx link;
2051 for (link = CALL_INSN_FUNCTION_USAGE (insn);
2052 link;
2053 link = XEXP (link, 1))
2054 if (GET_CODE (XEXP (link, 0)) == code
2055 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
2056 return 1;
2058 else
2060 unsigned int regno = REGNO (datum);
2062 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2063 to pseudo registers, so don't bother checking. */
2065 if (regno < FIRST_PSEUDO_REGISTER)
2067 unsigned int end_regno
2068 = regno + HARD_REGNO_NREGS (regno, GET_MODE (datum));
2069 unsigned int i;
2071 for (i = regno; i < end_regno; i++)
2072 if (find_regno_fusage (insn, code, i))
2073 return 1;
2077 return 0;
2080 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2081 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2084 find_regno_fusage (insn, code, regno)
2085 rtx insn;
2086 enum rtx_code code;
2087 unsigned int regno;
2089 rtx link;
2091 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2092 to pseudo registers, so don't bother checking. */
2094 if (regno >= FIRST_PSEUDO_REGISTER
2095 || GET_CODE (insn) != CALL_INSN )
2096 return 0;
2098 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2100 unsigned int regnote;
2101 rtx op, reg;
2103 if (GET_CODE (op = XEXP (link, 0)) == code
2104 && GET_CODE (reg = XEXP (op, 0)) == REG
2105 && (regnote = REGNO (reg)) <= regno
2106 && regnote + HARD_REGNO_NREGS (regnote, GET_MODE (reg)) > regno)
2107 return 1;
2110 return 0;
2113 /* Return true if INSN is a call to a pure function. */
2116 pure_call_p (insn)
2117 rtx insn;
2119 rtx link;
2121 if (GET_CODE (insn) != CALL_INSN || ! CONST_OR_PURE_CALL_P (insn))
2122 return 0;
2124 /* Look for the note that differentiates const and pure functions. */
2125 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2127 rtx u, m;
2129 if (GET_CODE (u = XEXP (link, 0)) == USE
2130 && GET_CODE (m = XEXP (u, 0)) == MEM && GET_MODE (m) == BLKmode
2131 && GET_CODE (XEXP (m, 0)) == SCRATCH)
2132 return 1;
2135 return 0;
2138 /* Remove register note NOTE from the REG_NOTES of INSN. */
2140 void
2141 remove_note (insn, note)
2142 rtx insn;
2143 rtx note;
2145 rtx link;
2147 if (note == NULL_RTX)
2148 return;
2150 if (REG_NOTES (insn) == note)
2152 REG_NOTES (insn) = XEXP (note, 1);
2153 return;
2156 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2157 if (XEXP (link, 1) == note)
2159 XEXP (link, 1) = XEXP (note, 1);
2160 return;
2163 abort ();
2166 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2167 return 1 if it is found. A simple equality test is used to determine if
2168 NODE matches. */
2171 in_expr_list_p (listp, node)
2172 rtx listp;
2173 rtx node;
2175 rtx x;
2177 for (x = listp; x; x = XEXP (x, 1))
2178 if (node == XEXP (x, 0))
2179 return 1;
2181 return 0;
2184 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2185 remove that entry from the list if it is found.
2187 A simple equality test is used to determine if NODE matches. */
2189 void
2190 remove_node_from_expr_list (node, listp)
2191 rtx node;
2192 rtx *listp;
2194 rtx temp = *listp;
2195 rtx prev = NULL_RTX;
2197 while (temp)
2199 if (node == XEXP (temp, 0))
2201 /* Splice the node out of the list. */
2202 if (prev)
2203 XEXP (prev, 1) = XEXP (temp, 1);
2204 else
2205 *listp = XEXP (temp, 1);
2207 return;
2210 prev = temp;
2211 temp = XEXP (temp, 1);
2215 /* Nonzero if X contains any volatile instructions. These are instructions
2216 which may cause unpredictable machine state, and thus no
2217 instructions should be moved or combined across them. This includes
2218 only volatile asms and UNSPEC_VOLATILE instructions. */
2221 volatile_insn_p (x)
2222 rtx x;
2224 RTX_CODE code;
2226 code = GET_CODE (x);
2227 switch (code)
2229 case LABEL_REF:
2230 case SYMBOL_REF:
2231 case CONST_INT:
2232 case CONST:
2233 case CONST_DOUBLE:
2234 case CONST_VECTOR:
2235 case CC0:
2236 case PC:
2237 case REG:
2238 case SCRATCH:
2239 case CLOBBER:
2240 case ADDR_VEC:
2241 case ADDR_DIFF_VEC:
2242 case CALL:
2243 case MEM:
2244 return 0;
2246 case UNSPEC_VOLATILE:
2247 /* case TRAP_IF: This isn't clear yet. */
2248 return 1;
2250 case ASM_INPUT:
2251 case ASM_OPERANDS:
2252 if (MEM_VOLATILE_P (x))
2253 return 1;
2255 default:
2256 break;
2259 /* Recursively scan the operands of this expression. */
2262 const char *fmt = GET_RTX_FORMAT (code);
2263 int i;
2265 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2267 if (fmt[i] == 'e')
2269 if (volatile_insn_p (XEXP (x, i)))
2270 return 1;
2272 else if (fmt[i] == 'E')
2274 int j;
2275 for (j = 0; j < XVECLEN (x, i); j++)
2276 if (volatile_insn_p (XVECEXP (x, i, j)))
2277 return 1;
2281 return 0;
2284 /* Nonzero if X contains any volatile memory references,
2285 UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions. */
2288 volatile_refs_p (x)
2289 rtx x;
2291 RTX_CODE code;
2293 code = GET_CODE (x);
2294 switch (code)
2296 case LABEL_REF:
2297 case SYMBOL_REF:
2298 case CONST_INT:
2299 case CONST:
2300 case CONST_DOUBLE:
2301 case CONST_VECTOR:
2302 case CC0:
2303 case PC:
2304 case REG:
2305 case SCRATCH:
2306 case CLOBBER:
2307 case ADDR_VEC:
2308 case ADDR_DIFF_VEC:
2309 return 0;
2311 case UNSPEC_VOLATILE:
2312 return 1;
2314 case MEM:
2315 case ASM_INPUT:
2316 case ASM_OPERANDS:
2317 if (MEM_VOLATILE_P (x))
2318 return 1;
2320 default:
2321 break;
2324 /* Recursively scan the operands of this expression. */
2327 const char *fmt = GET_RTX_FORMAT (code);
2328 int i;
2330 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2332 if (fmt[i] == 'e')
2334 if (volatile_refs_p (XEXP (x, i)))
2335 return 1;
2337 else if (fmt[i] == 'E')
2339 int j;
2340 for (j = 0; j < XVECLEN (x, i); j++)
2341 if (volatile_refs_p (XVECEXP (x, i, j)))
2342 return 1;
2346 return 0;
2349 /* Similar to above, except that it also rejects register pre- and post-
2350 incrementing. */
2353 side_effects_p (x)
2354 rtx x;
2356 RTX_CODE code;
2358 code = GET_CODE (x);
2359 switch (code)
2361 case LABEL_REF:
2362 case SYMBOL_REF:
2363 case CONST_INT:
2364 case CONST:
2365 case CONST_DOUBLE:
2366 case CONST_VECTOR:
2367 case CC0:
2368 case PC:
2369 case REG:
2370 case SCRATCH:
2371 case ADDR_VEC:
2372 case ADDR_DIFF_VEC:
2373 return 0;
2375 case CLOBBER:
2376 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2377 when some combination can't be done. If we see one, don't think
2378 that we can simplify the expression. */
2379 return (GET_MODE (x) != VOIDmode);
2381 case PRE_INC:
2382 case PRE_DEC:
2383 case POST_INC:
2384 case POST_DEC:
2385 case PRE_MODIFY:
2386 case POST_MODIFY:
2387 case CALL:
2388 case UNSPEC_VOLATILE:
2389 /* case TRAP_IF: This isn't clear yet. */
2390 return 1;
2392 case MEM:
2393 case ASM_INPUT:
2394 case ASM_OPERANDS:
2395 if (MEM_VOLATILE_P (x))
2396 return 1;
2398 default:
2399 break;
2402 /* Recursively scan the operands of this expression. */
2405 const char *fmt = GET_RTX_FORMAT (code);
2406 int i;
2408 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2410 if (fmt[i] == 'e')
2412 if (side_effects_p (XEXP (x, i)))
2413 return 1;
2415 else if (fmt[i] == 'E')
2417 int j;
2418 for (j = 0; j < XVECLEN (x, i); j++)
2419 if (side_effects_p (XVECEXP (x, i, j)))
2420 return 1;
2424 return 0;
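/* Illustrative contrast of the three predicates above (added comment,
   not in the original sources).  Assuming MEM_X is a MEM with
   MEM_VOLATILE_P set and INC_X is a (post_inc (reg)) side effect:

     volatile_insn_p (MEM_X)  returns 0  (only volatile asm and
                                          UNSPEC_VOLATILE count here)
     volatile_refs_p (MEM_X)  returns 1
     side_effects_p (MEM_X)   returns 1
     side_effects_p (INC_X)   returns 1  (auto-increment is rejected
                                          only by this last predicate)

   MEM_X and INC_X are hypothetical names used only in this example.  */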
2427 /* Return nonzero if evaluating rtx X might cause a trap. */
2429 int
2430 may_trap_p (x)
2431 rtx x;
2433 int i;
2434 enum rtx_code code;
2435 const char *fmt;
2437 if (x == 0)
2438 return 0;
2439 code = GET_CODE (x);
2440 switch (code)
2442 /* Handle these cases quickly. */
2443 case CONST_INT:
2444 case CONST_DOUBLE:
2445 case CONST_VECTOR:
2446 case SYMBOL_REF:
2447 case LABEL_REF:
2448 case CONST:
2449 case PC:
2450 case CC0:
2451 case REG:
2452 case SCRATCH:
2453 return 0;
2455 case ASM_INPUT:
2456 case UNSPEC_VOLATILE:
2457 case TRAP_IF:
2458 return 1;
2460 case ASM_OPERANDS:
2461 return MEM_VOLATILE_P (x);
2463 /* Memory ref can trap unless it's a static var or a stack slot. */
2464 case MEM:
2465 return rtx_addr_can_trap_p (XEXP (x, 0));
2467 /* Division by a non-constant might trap. */
2468 case DIV:
2469 case MOD:
2470 case UDIV:
2471 case UMOD:
2472 if (HONOR_SNANS (GET_MODE (x)))
2473 return 1;
2474 if (! CONSTANT_P (XEXP (x, 1))
2475 || (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2476 && flag_trapping_math))
2477 return 1;
2478 /* This was const0_rtx, but by not using that,
2479 we can link this file into other programs. */
2480 if (GET_CODE (XEXP (x, 1)) == CONST_INT && INTVAL (XEXP (x, 1)) == 0)
2481 return 1;
2482 break;
2484 case EXPR_LIST:
2485 /* An EXPR_LIST is used to represent a function call. This
2486 certainly may trap. */
2487 return 1;
2489 case GE:
2490 case GT:
2491 case LE:
2492 case LT:
2493 case COMPARE:
2494 /* Some floating point comparisons may trap. */
2495 if (!flag_trapping_math)
2496 break;
2497 /* ??? There is no machine independent way to check for tests that trap
2498 when COMPARE is used, though many targets do make this distinction.
2499 For instance, sparc uses CCFPE for compares which generate exceptions
2500 and CCFP for compares which do not generate exceptions. */
2501 if (HONOR_NANS (GET_MODE (x)))
2502 return 1;
2503 /* But often the compare has some CC mode, so check operand
2504 modes as well. */
2505 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2506 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2507 return 1;
2508 break;
2510 case EQ:
2511 case NE:
2512 if (HONOR_SNANS (GET_MODE (x)))
2513 return 1;
2514 /* Often the comparison is in CC mode, so check operand modes. */
2515 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2516 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2517 return 1;
2518 break;
2520 case NEG:
2521 case ABS:
2522 /* These operations don't trap even with floating point. */
2523 break;
2525 default:
2526 /* Any floating arithmetic may trap. */
2527 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_FLOAT
2528 && flag_trapping_math)
2529 return 1;
2532 fmt = GET_RTX_FORMAT (code);
2533 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2535 if (fmt[i] == 'e')
2537 if (may_trap_p (XEXP (x, i)))
2538 return 1;
2540 else if (fmt[i] == 'E')
2542 int j;
2543 for (j = 0; j < XVECLEN (x, i); j++)
2544 if (may_trap_p (XVECEXP (x, i, j)))
2545 return 1;
2548 return 0;
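/* Usage sketch (illustrative only; move_before_branch is a made-up
   helper name): a pass that wants to speculate an insn above a branch
   would typically guard the transformation on both predicates:

     if (INSN_P (insn)
         && ! may_trap_p (PATTERN (insn))
         && ! side_effects_p (PATTERN (insn)))
       move_before_branch (insn);

   Both must be checked, since a volatile MEM in a stack slot or an
   auto-increment is not considered trapping by may_trap_p.  */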
2551 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2552 i.e., an inequality. */
2554 int
2555 inequality_comparisons_p (x)
2556 rtx x;
2558 const char *fmt;
2559 int len, i;
2560 enum rtx_code code = GET_CODE (x);
2562 switch (code)
2564 case REG:
2565 case SCRATCH:
2566 case PC:
2567 case CC0:
2568 case CONST_INT:
2569 case CONST_DOUBLE:
2570 case CONST_VECTOR:
2571 case CONST:
2572 case LABEL_REF:
2573 case SYMBOL_REF:
2574 return 0;
2576 case LT:
2577 case LTU:
2578 case GT:
2579 case GTU:
2580 case LE:
2581 case LEU:
2582 case GE:
2583 case GEU:
2584 return 1;
2586 default:
2587 break;
2590 len = GET_RTX_LENGTH (code);
2591 fmt = GET_RTX_FORMAT (code);
2593 for (i = 0; i < len; i++)
2595 if (fmt[i] == 'e')
2597 if (inequality_comparisons_p (XEXP (x, i)))
2598 return 1;
2600 else if (fmt[i] == 'E')
2602 int j;
2603 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2604 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2605 return 1;
2609 return 0;
2612 /* Replace any occurrence of FROM in X with TO. The function does
2613 not descend into CONST_DOUBLE when doing the replacement.
2615 Note that copying is not done, so X must not be shared unless all
2616 copies are to be modified. */
2618 rtx
2619 replace_rtx (x, from, to)
2620 rtx x, from, to;
2622 int i, j;
2623 const char *fmt;
2625 /* The following prevents loops from occurring when we change a MEM
2626 in a CONST_DOUBLE onto the same CONST_DOUBLE. */
2627 if (x != 0 && GET_CODE (x) == CONST_DOUBLE)
2628 return x;
2630 if (x == from)
2631 return to;
2633 /* Allow this function to make replacements in EXPR_LISTs. */
2634 if (x == 0)
2635 return 0;
2637 if (GET_CODE (x) == SUBREG)
2639 rtx new = replace_rtx (SUBREG_REG (x), from, to);
2641 if (GET_CODE (new) == CONST_INT)
2643 x = simplify_subreg (GET_MODE (x), new,
2644 GET_MODE (SUBREG_REG (x)),
2645 SUBREG_BYTE (x));
2646 if (! x)
2647 abort ();
2649 else
2650 SUBREG_REG (x) = new;
2652 return x;
2654 else if (GET_CODE (x) == ZERO_EXTEND)
2656 rtx new = replace_rtx (XEXP (x, 0), from, to);
2658 if (GET_CODE (new) == CONST_INT)
2660 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2661 new, GET_MODE (XEXP (x, 0)));
2662 if (! x)
2663 abort ();
2665 else
2666 XEXP (x, 0) = new;
2668 return x;
2671 fmt = GET_RTX_FORMAT (GET_CODE (x));
2672 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2674 if (fmt[i] == 'e')
2675 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2676 else if (fmt[i] == 'E')
2677 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2678 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2681 return x;
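/* Usage sketch (illustrative; OLD_REG and NEW_REG are hypothetical
   rtxes): because replace_rtx modifies X destructively, callers that
   work on shared RTL normally copy first, and the return value must be
   used since X itself is replaced when it equals FROM:

     rtx pat = copy_rtx (PATTERN (insn));
     pat = replace_rtx (pat, old_reg, new_reg);
*/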
2684 /* Throughout the rtx X, replace many registers according to REG_MAP.
2685 Return the replacement for X (which may be X with altered contents).
2686 REG_MAP[R] is the replacement for register R, or 0 for don't replace.
2687 NREGS is the length of REG_MAP; regs >= NREGS are not mapped.
2689 We only support REG_MAP entries of REG or SUBREG. Also, hard registers
2690 should not be mapped to pseudos or vice versa since validate_change
2691 is not called.
2693 If REPLACE_DEST is 1, replacements are also done in destinations;
2694 otherwise, only sources are replaced. */
2696 rtx
2697 replace_regs (x, reg_map, nregs, replace_dest)
2698 rtx x;
2699 rtx *reg_map;
2700 unsigned int nregs;
2701 int replace_dest;
2703 enum rtx_code code;
2704 int i;
2705 const char *fmt;
2707 if (x == 0)
2708 return x;
2710 code = GET_CODE (x);
2711 switch (code)
2713 case SCRATCH:
2714 case PC:
2715 case CC0:
2716 case CONST_INT:
2717 case CONST_DOUBLE:
2718 case CONST_VECTOR:
2719 case CONST:
2720 case SYMBOL_REF:
2721 case LABEL_REF:
2722 return x;
2724 case REG:
2725 /* Verify that the register has an entry before trying to access it. */
2726 if (REGNO (x) < nregs && reg_map[REGNO (x)] != 0)
2728 /* SUBREGs can't be shared. Always return a copy to ensure that if
2729 this replacement occurs more than once then each instance will
2730 get a distinct rtx. */
2731 if (GET_CODE (reg_map[REGNO (x)]) == SUBREG)
2732 return copy_rtx (reg_map[REGNO (x)]);
2733 return reg_map[REGNO (x)];
2735 return x;
2737 case SUBREG:
2738 /* Prevent making nested SUBREGs. */
2739 if (GET_CODE (SUBREG_REG (x)) == REG && REGNO (SUBREG_REG (x)) < nregs
2740 && reg_map[REGNO (SUBREG_REG (x))] != 0
2741 && GET_CODE (reg_map[REGNO (SUBREG_REG (x))]) == SUBREG)
2743 rtx map_val = reg_map[REGNO (SUBREG_REG (x))];
2744 return simplify_gen_subreg (GET_MODE (x), map_val,
2745 GET_MODE (SUBREG_REG (x)),
2746 SUBREG_BYTE (x));
2748 break;
2750 case SET:
2751 if (replace_dest)
2752 SET_DEST (x) = replace_regs (SET_DEST (x), reg_map, nregs, 0);
2754 else if (GET_CODE (SET_DEST (x)) == MEM
2755 || GET_CODE (SET_DEST (x)) == STRICT_LOW_PART)
2756 /* Even if we are not to replace destinations, replace the register
2757 if it is CONTAINED in the destination (the destination is memory
2758 or STRICT_LOW_PART). */
2759 XEXP (SET_DEST (x), 0) = replace_regs (XEXP (SET_DEST (x), 0),
2760 reg_map, nregs, 0);
2761 else if (GET_CODE (SET_DEST (x)) == ZERO_EXTRACT)
2762 /* Similarly, for ZERO_EXTRACT we replace all operands. */
2763 break;
2765 SET_SRC (x) = replace_regs (SET_SRC (x), reg_map, nregs, 0);
2766 return x;
2768 default:
2769 break;
2772 fmt = GET_RTX_FORMAT (code);
2773 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2775 if (fmt[i] == 'e')
2776 XEXP (x, i) = replace_regs (XEXP (x, i), reg_map, nregs, replace_dest);
2777 else if (fmt[i] == 'E')
2779 int j;
2780 for (j = 0; j < XVECLEN (x, i); j++)
2781 XVECEXP (x, i, j) = replace_regs (XVECEXP (x, i, j), reg_map,
2782 nregs, replace_dest);
2785 return x;
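/* Usage sketch (illustrative; OLD and NEW are hypothetical pseudos):
   REG_MAP is indexed by register number and zero entries mean "leave
   this register alone":

     rtx *reg_map = (rtx *) xcalloc (max_reg_num (), sizeof (rtx));
     reg_map[REGNO (old)] = new;
     PATTERN (insn) = replace_regs (PATTERN (insn), reg_map,
                                    max_reg_num (), 1);
     free (reg_map);
*/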
2788 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2789 constant that is not in the constant pool and not in the condition
2790 of an IF_THEN_ELSE. */
2792 static int
2793 computed_jump_p_1 (x)
2794 rtx x;
2796 enum rtx_code code = GET_CODE (x);
2797 int i, j;
2798 const char *fmt;
2800 switch (code)
2802 case LABEL_REF:
2803 case PC:
2804 return 0;
2806 case CONST:
2807 case CONST_INT:
2808 case CONST_DOUBLE:
2809 case CONST_VECTOR:
2810 case SYMBOL_REF:
2811 case REG:
2812 return 1;
2814 case MEM:
2815 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2816 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2818 case IF_THEN_ELSE:
2819 return (computed_jump_p_1 (XEXP (x, 1))
2820 || computed_jump_p_1 (XEXP (x, 2)));
2822 default:
2823 break;
2826 fmt = GET_RTX_FORMAT (code);
2827 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2829 if (fmt[i] == 'e'
2830 && computed_jump_p_1 (XEXP (x, i)))
2831 return 1;
2833 else if (fmt[i] == 'E')
2834 for (j = 0; j < XVECLEN (x, i); j++)
2835 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2836 return 1;
2839 return 0;
2842 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2844 Tablejumps and casesi insns are not considered indirect jumps;
2845 we can recognize them by a (use (label_ref)). */
2847 int
2848 computed_jump_p (insn)
2849 rtx insn;
2851 int i;
2852 if (GET_CODE (insn) == JUMP_INSN)
2854 rtx pat = PATTERN (insn);
2856 if (find_reg_note (insn, REG_LABEL, NULL_RTX))
2857 return 0;
2858 else if (GET_CODE (pat) == PARALLEL)
2860 int len = XVECLEN (pat, 0);
2861 int has_use_labelref = 0;
2863 for (i = len - 1; i >= 0; i--)
2864 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2865 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2866 == LABEL_REF))
2867 has_use_labelref = 1;
2869 if (! has_use_labelref)
2870 for (i = len - 1; i >= 0; i--)
2871 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2872 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2873 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2874 return 1;
2876 else if (GET_CODE (pat) == SET
2877 && SET_DEST (pat) == pc_rtx
2878 && computed_jump_p_1 (SET_SRC (pat)))
2879 return 1;
2881 return 0;
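/* Illustrative examples (added commentary; register and label numbers
   are made up):

     (set (pc) (reg:SI 100))

   is a computed jump, assuming the insn carries no REG_LABEL note,
   while a tablejump such as

     (parallel [(set (pc) (mem:SI (plus:SI (reg:SI 100)
                                           (label_ref 23))))
                (use (label_ref 23))])

   is not, because of the (use (label_ref)).  */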
2884 /* Traverse X via depth-first search, calling F for each
2885 sub-expression (including X itself). F is also passed the DATA.
2886 If F returns -1, do not traverse sub-expressions, but continue
2887 traversing the rest of the tree. If F ever returns any other
2888 nonzero value, stop the traversal, and return the value returned
2889 by F. Otherwise, return 0. This function does not traverse inside
2890 tree structure that contains RTX_EXPRs, or into sub-expressions
2891 whose format code is `0' since it is not known whether or not those
2892 codes are actually RTL.
2894 This routine is very general, and could (should?) be used to
2895 implement many of the other routines in this file. */
2897 int
2898 for_each_rtx (x, f, data)
2899 rtx *x;
2900 rtx_function f;
2901 void *data;
2903 int result;
2904 int length;
2905 const char *format;
2906 int i;
2908 /* Call F on X. */
2909 result = (*f) (x, data);
2910 if (result == -1)
2911 /* Do not traverse sub-expressions. */
2912 return 0;
2913 else if (result != 0)
2914 /* Stop the traversal. */
2915 return result;
2917 if (*x == NULL_RTX)
2918 /* There are no sub-expressions. */
2919 return 0;
2921 length = GET_RTX_LENGTH (GET_CODE (*x));
2922 format = GET_RTX_FORMAT (GET_CODE (*x));
2924 for (i = 0; i < length; ++i)
2926 switch (format[i])
2928 case 'e':
2929 result = for_each_rtx (&XEXP (*x, i), f, data);
2930 if (result != 0)
2931 return result;
2932 break;
2934 case 'V':
2935 case 'E':
2936 if (XVEC (*x, i) != 0)
2938 int j;
2939 for (j = 0; j < XVECLEN (*x, i); ++j)
2941 result = for_each_rtx (&XVECEXP (*x, i, j), f, data);
2942 if (result != 0)
2943 return result;
2946 break;
2948 default:
2949 /* Nothing to do. */
2950 break;
2955 return 0;
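/* Usage sketch (illustrative; count_mems_1 is a made-up name): a
   callback that counts MEM references, pruning the walk below each MEM
   it finds by returning -1:

     static int
     count_mems_1 (x, data)
          rtx *x;
          void *data;
     {
       if (*x != 0 && GET_CODE (*x) == MEM)
         {
           ++*(int *) data;
           return -1;
         }
       return 0;
     }

     int count = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &count);
*/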
2958 /* Searches X for any reference to REGNO, returning the rtx of the
2959 reference found if any. Otherwise, returns NULL_RTX. */
2961 rtx
2962 regno_use_in (regno, x)
2963 unsigned int regno;
2964 rtx x;
2966 const char *fmt;
2967 int i, j;
2968 rtx tem;
2970 if (GET_CODE (x) == REG && REGNO (x) == regno)
2971 return x;
2973 fmt = GET_RTX_FORMAT (GET_CODE (x));
2974 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2976 if (fmt[i] == 'e')
2978 if ((tem = regno_use_in (regno, XEXP (x, i))))
2979 return tem;
2981 else if (fmt[i] == 'E')
2982 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2983 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
2984 return tem;
2987 return NULL_RTX;
2990 /* Return a value indicating whether OP, an operand of a commutative
2991 operation, is preferred as the first or second operand. The higher
2992 the value, the stronger the preference for being the first operand.
2993 We use negative values to indicate a preference for the second operand
2994 and positive values for the first operand. */
2996 int
2997 commutative_operand_precedence (op)
2998 rtx op;
3000 /* Constants always come second. Prefer "nice" constants. */
3001 if (GET_CODE (op) == CONST_INT)
3002 return -5;
3003 if (GET_CODE (op) == CONST_DOUBLE)
3004 return -4;
3005 if (CONSTANT_P (op))
3006 return -3;
3008 /* SUBREGs of objects should come second. */
3009 if (GET_CODE (op) == SUBREG
3010 && GET_RTX_CLASS (GET_CODE (SUBREG_REG (op))) == 'o')
3011 return -2;
3013 /* If only one operand is a `neg', `not',
3014 `mult', `plus', or `minus' expression, it will be the first
3015 operand. */
3016 if (GET_CODE (op) == NEG || GET_CODE (op) == NOT
3017 || GET_CODE (op) == MULT || GET_CODE (op) == PLUS
3018 || GET_CODE (op) == MINUS)
3019 return 2;
3021 /* Complex expressions should come first, so decrease the priority
3022 of objects. */
3023 if (GET_RTX_CLASS (GET_CODE (op)) == 'o')
3024 return -1;
3025 return 0;
3028 /* Return 1 iff it is necessary to swap operands of commutative operation
3029 in order to canonicalize expression. */
3031 int
3032 swap_commutative_operands_p (x, y)
3033 rtx x, y;
3035 return (commutative_operand_precedence (x)
3036 < commutative_operand_precedence (y));
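/* Usage sketch (illustrative): callers canonicalize a commutative
   operation by swapping its operands when this predicate says so:

     if (swap_commutative_operands_p (op0, op1))
       tem = op0, op0 = op1, op1 = tem;

   so that, for instance, a CONST_INT always ends up as the second
   operand of a PLUS.  */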
3039 /* Return 1 if X is an autoincrement side effect and the register is
3040 not the stack pointer. */
3041 int
3042 auto_inc_p (x)
3043 rtx x;
3045 switch (GET_CODE (x))
3047 case PRE_INC:
3048 case POST_INC:
3049 case PRE_DEC:
3050 case POST_DEC:
3051 case PRE_MODIFY:
3052 case POST_MODIFY:
3053 /* There are no REG_INC notes for SP. */
3054 if (XEXP (x, 0) != stack_pointer_rtx)
3055 return 1;
3056 default:
3057 break;
3059 return 0;
3062 /* Return 1 if the sequence of instructions beginning with FROM and up
3063 to and including TO is safe to move. If NEW_TO is non-NULL, and
3064 the sequence is not already safe to move, but can be easily
3065 extended to a sequence which is safe, then NEW_TO will point to the
3066 end of the extended sequence.
3068 For now, this function only checks that the region contains whole
3069 exception regions, but it could be extended to check additional
3070 conditions as well. */
3072 int
3073 insns_safe_to_move_p (from, to, new_to)
3074 rtx from;
3075 rtx to;
3076 rtx *new_to;
3078 int eh_region_count = 0;
3079 int past_to_p = 0;
3080 rtx r = from;
3082 /* By default, assume the end of the region will be what was
3083 suggested. */
3084 if (new_to)
3085 *new_to = to;
3087 while (r)
3089 if (GET_CODE (r) == NOTE)
3091 switch (NOTE_LINE_NUMBER (r))
3093 case NOTE_INSN_EH_REGION_BEG:
3094 ++eh_region_count;
3095 break;
3097 case NOTE_INSN_EH_REGION_END:
3098 if (eh_region_count == 0)
3099 /* This sequence of instructions contains the end of
3100 an exception region, but not the beginning. Moving
3101 it will cause chaos. */
3102 return 0;
3104 --eh_region_count;
3105 break;
3107 default:
3108 break;
3111 else if (past_to_p)
3112 /* If we've passed TO, and we see a non-note instruction, we
3113 can't extend the sequence to a movable sequence. */
3114 return 0;
3116 if (r == to)
3118 if (!new_to)
3119 /* It's OK to move the sequence if there were matched sets of
3120 exception region notes. */
3121 return eh_region_count == 0;
3123 past_to_p = 1;
3126 /* It's OK to move the sequence if there were matched sets of
3127 exception region notes. */
3128 if (past_to_p && eh_region_count == 0)
3130 *new_to = r;
3131 return 1;
3134 /* Go to the next instruction. */
3135 r = NEXT_INSN (r);
3138 return 0;
3141 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3142 int
3143 loc_mentioned_in_p (loc, in)
3144 rtx *loc, in;
3146 enum rtx_code code = GET_CODE (in);
3147 const char *fmt = GET_RTX_FORMAT (code);
3148 int i, j;
3150 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3152 if (loc == &in->fld[i].rtx)
3153 return 1;
3154 if (fmt[i] == 'e')
3156 if (loc_mentioned_in_p (loc, XEXP (in, i)))
3157 return 1;
3159 else if (fmt[i] == 'E')
3160 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3161 if (loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3162 return 1;
3164 return 0;
3167 /* Given a subreg X, return the bit offset where the subreg begins
3168 (counting from the least significant bit of the reg). */
3170 unsigned int
3171 subreg_lsb (x)
3172 rtx x;
3174 enum machine_mode inner_mode = GET_MODE (SUBREG_REG (x));
3175 enum machine_mode mode = GET_MODE (x);
3176 unsigned int bitpos;
3177 unsigned int byte;
3178 unsigned int word;
3180 /* A paradoxical subreg begins at bit position 0. */
3181 if (GET_MODE_BITSIZE (mode) > GET_MODE_BITSIZE (inner_mode))
3182 return 0;
3184 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3185 /* If the subreg crosses a word boundary ensure that
3186 it also begins and ends on a word boundary. */
3187 if ((SUBREG_BYTE (x) % UNITS_PER_WORD
3188 + GET_MODE_SIZE (mode)) > UNITS_PER_WORD
3189 && (SUBREG_BYTE (x) % UNITS_PER_WORD
3190 || GET_MODE_SIZE (mode) % UNITS_PER_WORD))
3191 abort ();
3193 if (WORDS_BIG_ENDIAN)
3194 word = (GET_MODE_SIZE (inner_mode)
3195 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) / UNITS_PER_WORD;
3196 else
3197 word = SUBREG_BYTE (x) / UNITS_PER_WORD;
3198 bitpos = word * BITS_PER_WORD;
3200 if (BYTES_BIG_ENDIAN)
3201 byte = (GET_MODE_SIZE (inner_mode)
3202 - (SUBREG_BYTE (x) + GET_MODE_SIZE (mode))) % UNITS_PER_WORD;
3203 else
3204 byte = SUBREG_BYTE (x) % UNITS_PER_WORD;
3205 bitpos += byte * BITS_PER_UNIT;
3207 return bitpos;
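/* Worked example (added for illustration, assuming 32-bit words and
   BITS_PER_UNIT == 8): for (subreg:SI (reg:DI 100) 4) on a
   little-endian target, word == 1 and byte == 0, so the subreg starts
   at bit 32 (the high-order half); on a big-endian target both are 0,
   so it starts at bit 0 (the low-order half).  */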
3210 /* This function returns the regno offset of a subreg expression.
3211 xregno - A regno of an inner hard subreg_reg (or what will become one).
3212 xmode - The mode of xregno.
3213 offset - The byte offset.
3214 ymode - The mode of a top level SUBREG (or what may become one).
3215 RETURN - The regno offset which would be used. */
3216 unsigned int
3217 subreg_regno_offset (xregno, xmode, offset, ymode)
3218 unsigned int xregno;
3219 enum machine_mode xmode;
3220 unsigned int offset;
3221 enum machine_mode ymode;
3223 int nregs_xmode, nregs_ymode;
3224 int mode_multiple, nregs_multiple;
3225 int y_offset;
3227 if (xregno >= FIRST_PSEUDO_REGISTER)
3228 abort ();
3230 nregs_xmode = HARD_REGNO_NREGS (xregno, xmode);
3231 nregs_ymode = HARD_REGNO_NREGS (xregno, ymode);
3233 /* If this is a big endian paradoxical subreg, which uses more actual
3234 hard registers than the original register, we must return a negative
3235 offset so that we find the proper highpart of the register. */
3236 if (offset == 0
3237 && nregs_ymode > nregs_xmode
3238 && (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3239 ? WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN))
3240 return nregs_xmode - nregs_ymode;
3242 if (offset == 0 || nregs_xmode == nregs_ymode)
3243 return 0;
3245 /* The size of ymode must not be greater than the size of xmode. */
3246 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3247 if (mode_multiple == 0)
3248 abort ();
3250 y_offset = offset / GET_MODE_SIZE (ymode);
3251 nregs_multiple = nregs_xmode / nregs_ymode;
3252 return (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
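/* Worked example (added for illustration, assuming 32-bit hard
   registers, so HARD_REGNO_NREGS gives 2 for DImode and 1 for SImode):
   for xregno == 10, xmode == DImode, offset == 4 and ymode == SImode we
   get nregs_xmode == 2, nregs_ymode == 1, mode_multiple == 2,
   nregs_multiple == 2 and y_offset == 1, so the result is 1 and
   (subreg:SI (reg:DI 10) 4) lives in hard register 11.  Register 10 is
   a hypothetical hard register number.  */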
3255 /* Return the final regno that a subreg expression refers to. */
3256 unsigned int
3257 subreg_regno (x)
3258 rtx x;
3260 unsigned int ret;
3261 rtx subreg = SUBREG_REG (x);
3262 int regno = REGNO (subreg);
3264 ret = regno + subreg_regno_offset (regno,
3265 GET_MODE (subreg),
3266 SUBREG_BYTE (x),
3267 GET_MODE (x));
3268 return ret;
3271 struct parms_set_data
3273 int nregs;
3274 HARD_REG_SET regs;
3277 /* Helper function for noticing stores to parameter registers. */
3278 static void
3279 parms_set (x, pat, data)
3280 rtx x, pat ATTRIBUTE_UNUSED;
3281 void *data;
3283 struct parms_set_data *d = data;
3284 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3285 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3287 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3288 d->nregs--;
3292 /* Look backward for the first parameter to be loaded.
3293 Do not skip BOUNDARY. */
3294 rtx
3295 find_first_parameter_load (call_insn, boundary)
3296 rtx call_insn, boundary;
3298 struct parms_set_data parm;
3299 rtx p, before;
3301 /* Since different machines initialize their parameter registers
3302 in different orders, assume nothing. Collect the set of all
3303 parameter registers. */
3304 CLEAR_HARD_REG_SET (parm.regs);
3305 parm.nregs = 0;
3306 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3307 if (GET_CODE (XEXP (p, 0)) == USE
3308 && GET_CODE (XEXP (XEXP (p, 0), 0)) == REG)
3310 if (REGNO (XEXP (XEXP (p, 0), 0)) >= FIRST_PSEUDO_REGISTER)
3311 abort ();
3313 /* We only care about registers which can hold function
3314 arguments. */
3315 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3316 continue;
3318 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3319 parm.nregs++;
3321 before = call_insn;
3323 /* Search backward for the first set of a register in this set. */
3324 while (parm.nregs && before != boundary)
3326 before = PREV_INSN (before);
3328 /* It is possible that some loads got CSEed from one call to
3329 another. Stop in that case. */
3330 if (GET_CODE (before) == CALL_INSN)
3331 break;
3333 /* Our caller must either ensure that we will find all sets
3334 (in case the code has not been optimized yet), or take care
3335 of possible labels by setting BOUNDARY to the preceding
3336 CODE_LABEL. */
3337 if (GET_CODE (before) == CODE_LABEL)
3339 if (before != boundary)
3340 abort ();
3341 break;
3344 if (INSN_P (before))
3345 note_stores (PATTERN (before), parms_set, &parm);
3347 return before;
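/* Usage sketch (illustrative; bb_head and new_insn are hypothetical):
   a pass that wants to emit code before the argument setup of a call
   might do

     rtx first = find_first_parameter_load (call_insn, bb_head);
     emit_insn_before (new_insn, first);

   where bb_head would typically be the CODE_LABEL starting the block
   that contains CALL_INSN, matching the BOUNDARY requirement described
   above.  */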
3350 /* Return true if we should avoid inserting code between INSN and the
3351 preceding call instruction. */
3353 bool
3354 keep_with_call_p (insn)
3355 rtx insn;
3357 rtx set;
3359 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3361 if (GET_CODE (SET_DEST (set)) == REG
3362 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3363 && fixed_regs[REGNO (SET_DEST (set))]
3364 && general_operand (SET_SRC (set), VOIDmode))
3365 return true;
3366 if (GET_CODE (SET_SRC (set)) == REG
3367 && FUNCTION_VALUE_REGNO_P (REGNO (SET_SRC (set)))
3368 && GET_CODE (SET_DEST (set)) == REG
3369 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3370 return true;
3371 /* There may be a stack pop just after the call and before the store
3372 of the return register. Search for the actual store when deciding
3373 if we can break or not. */
3374 if (SET_DEST (set) == stack_pointer_rtx)
3376 rtx i2 = next_nonnote_insn (insn);
3377 if (i2 && keep_with_call_p (i2))
3378 return true;
3381 return false;
3384 /* Return true when a store to register X can be hoisted to the place
3385 with LIVE registers (LIVE can be NULL). VAL contains the
3386 destination whose value will be used. */
3388 static bool
3389 hoist_test_store (x, val, live)
3390 rtx x, val;
3391 regset live;
3393 if (GET_CODE (x) == SCRATCH)
3394 return true;
3396 if (rtx_equal_p (x, val))
3397 return true;
3399 /* Allow a SUBREG of X in case it is not writing just part of a multireg
3400 pseudo; otherwise we would need to update all users to handle hoisting
3401 of the store too. The caller may indicate that by passing the whole SUBREG as VAL. */
3403 if (GET_CODE (x) == SUBREG && rtx_equal_p (SUBREG_REG (x), val))
3405 if (GET_MODE_SIZE (GET_MODE (SUBREG_REG (x))) > UNITS_PER_WORD
3406 && GET_MODE_BITSIZE (GET_MODE (x)) <
3407 GET_MODE_BITSIZE (GET_MODE (SUBREG_REG (x))))
3408 return false;
3409 return true;
3411 if (GET_CODE (x) == SUBREG)
3412 x = SUBREG_REG (x);
3414 /* Anything except a register store is not hoistable. This includes
3415 partial stores to registers. */
3417 if (!REG_P (x))
3418 return false;
3420 /* Pseudo registers can always be replaced by another pseudo to avoid
3421 the side effect; for hard registers we must ensure that they are dead.
3422 Eventually we may want to add code to try to turn pseudos into hard
3423 registers, but it is unlikely to be useful. */
3425 if (REGNO (x) < FIRST_PSEUDO_REGISTER)
3427 int regno = REGNO (x);
3428 int n = HARD_REGNO_NREGS (regno, GET_MODE (x));
3430 if (!live)
3431 return false;
3432 if (REGNO_REG_SET_P (live, regno))
3433 return false;
3434 while (--n > 0)
3435 if (REGNO_REG_SET_P (live, regno + n))
3436 return false;
3438 return true;
3442 /* Return true if INSN can be hoisted to a place with LIVE hard registers
3443 (LIVE can be NULL when unknown). VAL is expected to be stored by the insn
3444 and used by the hoisting pass. */
3446 bool
3447 can_hoist_insn_p (insn, val, live)
3448 rtx insn, val;
3449 regset live;
3451 rtx pat = PATTERN (insn);
3452 int i;
3454 /* It is probably not worth the complexity to handle multiple
3455 set stores. */
3456 if (!single_set (insn))
3457 return false;
3458 /* We could move a CALL_INSN, but we would need to check that all
3459 caller-clobbered regs are dead, so punt for now. */
3460 if (GET_CODE (insn) == CALL_INSN)
3461 return false;
3462 /* In the future we may handle hoisting of libcall sequences, but
3463 give up for now. */
3464 if (find_reg_note (insn, REG_RETVAL, NULL_RTX))
3465 return false;
3466 switch (GET_CODE (pat))
3468 case SET:
3469 if (!hoist_test_store (SET_DEST (pat), val, live))
3470 return false;
3471 break;
3472 case USE:
3473 /* USEs have tricky semantics, so do not move them. */
3474 return false;
3475 break;
3476 case CLOBBER:
3477 if (!hoist_test_store (XEXP (pat, 0), val, live))
3478 return false;
3479 break;
3480 case PARALLEL:
3481 for (i = 0; i < XVECLEN (pat, 0); i++)
3483 rtx x = XVECEXP (pat, 0, i);
3484 switch (GET_CODE (x))
3486 case SET:
3487 if (!hoist_test_store (SET_DEST (x), val, live))
3488 return false;
3489 break;
3490 case USE:
3491 /* We would need to fix callers to really ensure availability
3492 of all values the insn uses, but for now it is safe to prohibit
3493 hoisting of any insn having such hidden uses. */
3494 return false;
3495 break;
3496 case CLOBBER:
3497 if (!hoist_test_store (SET_DEST (x), val, live))
3498 return false;
3499 break;
3500 default:
3501 break;
3504 break;
3505 default:
3506 abort ();
3508 return true;
3511 /* Update the store after hoisting - replace all stores to pseudo registers
3512 by new ones to avoid clobbering values, except for the store to VAL, which
3513 will be updated to NEW. */
3515 static void
3516 hoist_update_store (insn, xp, val, new)
3517 rtx insn, *xp, val, new;
3519 rtx x = *xp;
3521 if (GET_CODE (x) == SCRATCH)
3522 return;
3524 if (GET_CODE (x) == SUBREG && SUBREG_REG (x) == val)
3525 validate_change (insn, xp,
3526 simplify_gen_subreg (GET_MODE (x), new, GET_MODE (new),
3527 SUBREG_BYTE (x)), 1);
3528 if (rtx_equal_p (x, val))
3530 validate_change (insn, xp, new, 1);
3531 return;
3533 if (GET_CODE (x) == SUBREG)
3535 xp = &SUBREG_REG (x);
3536 x = *xp;
3539 if (!REG_P (x))
3540 abort ();
3542 /* We've verified that hard registers are dead, so we may keep the side
3543 effect. Otherwise replace it by a new pseudo. */
3544 if (REGNO (x) >= FIRST_PSEUDO_REGISTER)
3545 validate_change (insn, xp, gen_reg_rtx (GET_MODE (x)), 1);
3546 REG_NOTES (insn)
3547 = alloc_EXPR_LIST (REG_UNUSED, *xp, REG_NOTES (insn));
3550 /* Create a copy of INSN after AFTER, replacing the store of VAL with NEW
3551 and every other side effect to a pseudo register with a new pseudo. */
3553 rtx
3554 hoist_insn_after (insn, after, val, new)
3555 rtx insn, after, val, new;
3557 rtx pat;
3558 int i;
3559 rtx note;
3561 insn = emit_copy_of_insn_after (insn, after);
3562 pat = PATTERN (insn);
3564 /* Remove REG_UNUSED notes as we will re-emit them. */
3565 while ((note = find_reg_note (insn, REG_UNUSED, NULL_RTX)))
3566 remove_note (insn, note);
3568 /* To get this working, callers must ensure that everything referenced
3569 by REG_EQUAL/REG_EQUIV notes is moved too. Let's remove them; it is
3570 probably easier. */
3571 while ((note = find_reg_note (insn, REG_EQUAL, NULL_RTX)))
3572 remove_note (insn, note);
3573 while ((note = find_reg_note (insn, REG_EQUIV, NULL_RTX)))
3574 remove_note (insn, note);
3576 /* Remove REG_DEAD notes as they might not be valid anymore in case
3577 we create redundancy. */
3578 while ((note = find_reg_note (insn, REG_DEAD, NULL_RTX)))
3579 remove_note (insn, note);
3580 switch (GET_CODE (pat))
3582 case SET:
3583 hoist_update_store (insn, &SET_DEST (pat), val, new);
3584 break;
3585 case USE:
3586 break;
3587 case CLOBBER:
3588 hoist_update_store (insn, &XEXP (pat, 0), val, new);
3589 break;
3590 case PARALLEL:
3591 for (i = 0; i < XVECLEN (pat, 0); i++)
3593 rtx x = XVECEXP (pat, 0, i);
3594 switch (GET_CODE (x))
3596 case SET:
3597 hoist_update_store (insn, &SET_DEST (x), val, new);
3598 break;
3599 case USE:
3600 break;
3601 case CLOBBER:
3602 hoist_update_store (insn, &SET_DEST (x), val, new);
3603 break;
3604 default:
3605 break;
3608 break;
3609 default:
3610 abort ();
3612 if (!apply_change_group ())
3613 abort ();
3615 return insn;
3618 rtx
3619 hoist_insn_to_edge (insn, e, val, new)
3620 rtx insn, val, new;
3621 edge e;
3623 rtx new_insn;
3625 /* We cannot insert instructions on an abnormal critical edge.
3626 It will be easier to find the culprit if we die now. */
3627 if ((e->flags & EDGE_ABNORMAL) && EDGE_CRITICAL_P (e))
3628 abort ();
3630 /* Do not use emit_insn_on_edge as we want to preserve notes and similar
3631 stuff. We also emit CALL_INSNs and friends. */
3632 if (e->insns == NULL_RTX)
3634 start_sequence ();
3635 emit_note (NULL, NOTE_INSN_DELETED);
3637 else
3638 push_to_sequence (e->insns);
3640 new_insn = hoist_insn_after (insn, get_last_insn (), val, new);
3642 e->insns = get_insns ();
3643 end_sequence ();
3644 return new_insn;
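/* Usage sketch (illustrative; live_at_edge is a hypothetical regset):
   a hoisting pass would pair the test and the emitter roughly as

     if (can_hoist_insn_p (insn, val, live_at_edge))
       hoist_insn_to_edge (insn, e, val, gen_reg_rtx (GET_MODE (val)));

   so that the hoisted copy stores into a fresh pseudo of VAL's mode,
   which the pass can then substitute for VAL in the original insn.  */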