[official-gcc.git] / gcc / rtlanal.c
1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010,
4 2011, 2012 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 3, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING3. If not see
20 <http://www.gnu.org/licenses/>. */
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "tm.h"
27 #include "diagnostic-core.h"
28 #include "hard-reg-set.h"
29 #include "rtl.h"
30 #include "insn-config.h"
31 #include "recog.h"
32 #include "target.h"
33 #include "output.h"
34 #include "tm_p.h"
35 #include "flags.h"
36 #include "regs.h"
37 #include "function.h"
38 #include "df.h"
39 #include "tree.h"
40 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
41 #include "addresses.h"
43 /* Forward declarations */
44 static void set_of_1 (rtx, const_rtx, void *);
45 static bool covers_regno_p (const_rtx, unsigned int);
46 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
47 static int rtx_referenced_p_1 (rtx *, void *);
48 static int computed_jump_p_1 (const_rtx);
49 static void parms_set (rtx, const_rtx, void *);
51 static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
52 const_rtx, enum machine_mode,
53 unsigned HOST_WIDE_INT);
54 static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
55 const_rtx, enum machine_mode,
56 unsigned HOST_WIDE_INT);
57 static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
58 enum machine_mode,
59 unsigned int);
60 static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
61 enum machine_mode, unsigned int);
63 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
64 -1 if a code has no such operand. */
65 static int non_rtx_starting_operands[NUM_RTX_CODE];
67 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
68 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
69 SIGN_EXTEND then while narrowing we also have to enforce the
70 representation and sign-extend the value to mode DESTINATION_REP.
72 If the value is already sign-extended to DESTINATION_REP mode we
73 can just switch to DESTINATION mode on it. For each pair of
74 integral modes SOURCE and DESTINATION, when truncating from SOURCE
75 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
76 contains the number of high-order bits in SOURCE that have to be
77 copies of the sign-bit so that we can do this mode-switch to
78 DESTINATION. */
80 static unsigned int
81 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
83 /* Return 1 if the value of X is unstable
84 (would be different at a different point in the program).
85 The frame pointer, arg pointer, etc. are considered stable
86 (within one function) and so is anything marked `unchanging'. */
88 int
89 rtx_unstable_p (const_rtx x)
91 const RTX_CODE code = GET_CODE (x);
92 int i;
93 const char *fmt;
95 switch (code)
97 case MEM:
98 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
100 case CONST:
101 CASE_CONST_ANY:
102 case SYMBOL_REF:
103 case LABEL_REF:
104 return 0;
106 case REG:
107 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
108 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
109 /* The arg pointer varies if it is not a fixed register. */
110 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
111 return 0;
112 /* ??? When call-clobbered, the value is stable modulo the restore
113 that must happen after a call. This currently screws up local-alloc
114 into believing that the restore is not needed. */
115 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
116 return 0;
117 return 1;
119 case ASM_OPERANDS:
120 if (MEM_VOLATILE_P (x))
121 return 1;
123 /* Fall through. */
125 default:
126 break;
129 fmt = GET_RTX_FORMAT (code);
130 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
131 if (fmt[i] == 'e')
133 if (rtx_unstable_p (XEXP (x, i)))
134 return 1;
136 else if (fmt[i] == 'E')
138 int j;
139 for (j = 0; j < XVECLEN (x, i); j++)
140 if (rtx_unstable_p (XVECEXP (x, i, j)))
141 return 1;
144 return 0;
147 /* Return 1 if X has a value that can vary even between two
148 executions of the program. 0 means X can be compared reliably
149 against certain constants or near-constants.
150 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
151 zero, we are slightly more conservative.
152 The frame pointer and the arg pointer are considered constant. */
154 bool
155 rtx_varies_p (const_rtx x, bool for_alias)
157 RTX_CODE code;
158 int i;
159 const char *fmt;
161 if (!x)
162 return 0;
164 code = GET_CODE (x);
165 switch (code)
167 case MEM:
168 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
170 case CONST:
171 CASE_CONST_ANY:
172 case SYMBOL_REF:
173 case LABEL_REF:
174 return 0;
176 case REG:
177 /* Note that we have to test for the actual rtx used for the frame
178 and arg pointers and not just the register number in case we have
179 eliminated the frame and/or arg pointer and are using it
180 for pseudos. */
181 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
182 /* The arg pointer varies if it is not a fixed register. */
183 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
184 return 0;
185 if (x == pic_offset_table_rtx
186 /* ??? When call-clobbered, the value is stable modulo the restore
187 that must happen after a call. This currently screws up
188 local-alloc into believing that the restore is not needed, so we
189 must return 0 only if we are called from alias analysis. */
190 && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
191 return 0;
192 return 1;
194 case LO_SUM:
195 /* The operand 0 of a LO_SUM is considered constant
196 (in fact it is related specifically to operand 1)
197 during alias analysis. */
198 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
199 || rtx_varies_p (XEXP (x, 1), for_alias);
201 case ASM_OPERANDS:
202 if (MEM_VOLATILE_P (x))
203 return 1;
205 /* Fall through. */
207 default:
208 break;
211 fmt = GET_RTX_FORMAT (code);
212 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
213 if (fmt[i] == 'e')
215 if (rtx_varies_p (XEXP (x, i), for_alias))
216 return 1;
218 else if (fmt[i] == 'E')
220 int j;
221 for (j = 0; j < XVECLEN (x, i); j++)
222 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
223 return 1;
226 return 0;
229 /* Return nonzero if the use of X as an address in a MEM can cause a trap.
230 MODE is the mode of the MEM (not that of X) and UNALIGNED_MEMS controls
231 whether nonzero is returned for unaligned memory accesses on strict
232 alignment machines. */
234 static int
235 rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
236 enum machine_mode mode, bool unaligned_mems)
238 enum rtx_code code = GET_CODE (x);
240 if (STRICT_ALIGNMENT
241 && unaligned_mems
242 && GET_MODE_SIZE (mode) != 0)
244 HOST_WIDE_INT actual_offset = offset;
245 #ifdef SPARC_STACK_BOUNDARY_HACK
246 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
247 the real alignment of %sp. However, when it does this, the
248 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
249 if (SPARC_STACK_BOUNDARY_HACK
250 && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
251 actual_offset -= STACK_POINTER_OFFSET;
252 #endif
254 if (actual_offset % GET_MODE_SIZE (mode) != 0)
255 return 1;
258 switch (code)
260 case SYMBOL_REF:
261 if (SYMBOL_REF_WEAK (x))
262 return 1;
263 if (!CONSTANT_POOL_ADDRESS_P (x))
265 tree decl;
266 HOST_WIDE_INT decl_size;
268 if (offset < 0)
269 return 1;
270 if (size == 0)
271 size = GET_MODE_SIZE (mode);
272 if (size == 0)
273 return offset != 0;
275 /* If the size of the access or of the symbol is unknown,
276 assume the worst. */
277 decl = SYMBOL_REF_DECL (x);
279 /* Else check that the access is in bounds. TODO: restructure
280 expr_size/tree_expr_size/int_expr_size and just use the latter. */
281 if (!decl)
282 decl_size = -1;
283 else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
284 decl_size = (host_integerp (DECL_SIZE_UNIT (decl), 0)
285 ? tree_low_cst (DECL_SIZE_UNIT (decl), 0)
286 : -1);
287 else if (TREE_CODE (decl) == STRING_CST)
288 decl_size = TREE_STRING_LENGTH (decl);
289 else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
290 decl_size = int_size_in_bytes (TREE_TYPE (decl));
291 else
292 decl_size = -1;
294 return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
297 return 0;
299 case LABEL_REF:
300 return 0;
302 case REG:
303 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
304 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
305 || x == stack_pointer_rtx
306 /* The arg pointer varies if it is not a fixed register. */
307 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
308 return 0;
309 /* All of the virtual frame registers are stack references. */
310 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
311 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
312 return 0;
313 return 1;
315 case CONST:
316 return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
317 mode, unaligned_mems);
319 case PLUS:
320 /* An address is assumed not to trap if:
321 - it is the pic register plus a constant. */
322 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
323 return 0;
325 /* - or it is an address that can't trap plus a constant integer,
326 with the proper remainder modulo the mode size if we are
327 considering unaligned memory references. */
328 if (CONST_INT_P (XEXP (x, 1))
329 && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
330 size, mode, unaligned_mems))
331 return 0;
333 return 1;
335 case LO_SUM:
336 case PRE_MODIFY:
337 return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
338 mode, unaligned_mems);
340 case PRE_DEC:
341 case PRE_INC:
342 case POST_DEC:
343 case POST_INC:
344 case POST_MODIFY:
345 return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
346 mode, unaligned_mems);
348 default:
349 break;
 352   /* If it isn't one of the cases above, it can cause a trap. */
353 return 1;
356 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
359 rtx_addr_can_trap_p (const_rtx x)
361 return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
364 /* Return true if X is an address that is known to not be zero. */
366 bool
367 nonzero_address_p (const_rtx x)
369 const enum rtx_code code = GET_CODE (x);
371 switch (code)
373 case SYMBOL_REF:
374 return !SYMBOL_REF_WEAK (x);
376 case LABEL_REF:
377 return true;
379 case REG:
380 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
381 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
382 || x == stack_pointer_rtx
383 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
384 return true;
385 /* All of the virtual frame registers are stack references. */
386 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
387 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
388 return true;
389 return false;
391 case CONST:
392 return nonzero_address_p (XEXP (x, 0));
394 case PLUS:
395 if (CONST_INT_P (XEXP (x, 1)))
396 return nonzero_address_p (XEXP (x, 0));
397 /* Handle PIC references. */
398 else if (XEXP (x, 0) == pic_offset_table_rtx
399 && CONSTANT_P (XEXP (x, 1)))
400 return true;
401 return false;
403 case PRE_MODIFY:
404 /* Similar to the above; allow positive offsets. Further, since
405 auto-inc is only allowed in memories, the register must be a
406 pointer. */
407 if (CONST_INT_P (XEXP (x, 1))
408 && INTVAL (XEXP (x, 1)) > 0)
409 return true;
410 return nonzero_address_p (XEXP (x, 0));
412 case PRE_INC:
413 /* Similarly. Further, the offset is always positive. */
414 return true;
416 case PRE_DEC:
417 case POST_DEC:
418 case POST_INC:
419 case POST_MODIFY:
420 return nonzero_address_p (XEXP (x, 0));
422 case LO_SUM:
423 return nonzero_address_p (XEXP (x, 1));
425 default:
426 break;
 429   /* If it isn't one of the cases above, it might be zero. */
430 return false;
433 /* Return 1 if X refers to a memory location whose address
434 cannot be compared reliably with constant addresses,
435 or if X refers to a BLKmode memory object.
436 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
437 zero, we are slightly more conservative. */
439 bool
440 rtx_addr_varies_p (const_rtx x, bool for_alias)
442 enum rtx_code code;
443 int i;
444 const char *fmt;
446 if (x == 0)
447 return 0;
449 code = GET_CODE (x);
450 if (code == MEM)
451 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
453 fmt = GET_RTX_FORMAT (code);
454 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
455 if (fmt[i] == 'e')
457 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
458 return 1;
460 else if (fmt[i] == 'E')
462 int j;
463 for (j = 0; j < XVECLEN (x, i); j++)
464 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
465 return 1;
467 return 0;
470 /* Return the CALL in X if there is one. */
473 get_call_rtx_from (rtx x)
475 if (INSN_P (x))
476 x = PATTERN (x);
477 if (GET_CODE (x) == PARALLEL)
478 x = XVECEXP (x, 0, 0);
479 if (GET_CODE (x) == SET)
480 x = SET_SRC (x);
481 if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
482 return x;
483 return NULL_RTX;
486 /* Return the value of the integer term in X, if one is apparent;
487 otherwise return 0.
488 Only obvious integer terms are detected.
489 This is used in cse.c with the `related_value' field. */
491 HOST_WIDE_INT
492 get_integer_term (const_rtx x)
494 if (GET_CODE (x) == CONST)
495 x = XEXP (x, 0);
497 if (GET_CODE (x) == MINUS
498 && CONST_INT_P (XEXP (x, 1)))
499 return - INTVAL (XEXP (x, 1));
500 if (GET_CODE (x) == PLUS
501 && CONST_INT_P (XEXP (x, 1)))
502 return INTVAL (XEXP (x, 1));
503 return 0;
506 /* If X is a constant, return the value sans apparent integer term;
507 otherwise return 0.
508 Only obvious integer terms are detected. */
511 get_related_value (const_rtx x)
513 if (GET_CODE (x) != CONST)
514 return 0;
515 x = XEXP (x, 0);
516 if (GET_CODE (x) == PLUS
517 && CONST_INT_P (XEXP (x, 1)))
518 return XEXP (x, 0);
519 else if (GET_CODE (x) == MINUS
520 && CONST_INT_P (XEXP (x, 1)))
521 return XEXP (x, 0);
522 return 0;
525 /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
526 to somewhere in the same object or object_block as SYMBOL. */
528 bool
529 offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
531 tree decl;
533 if (GET_CODE (symbol) != SYMBOL_REF)
534 return false;
536 if (offset == 0)
537 return true;
539 if (offset > 0)
541 if (CONSTANT_POOL_ADDRESS_P (symbol)
542 && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
543 return true;
545 decl = SYMBOL_REF_DECL (symbol);
546 if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
547 return true;
550 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
551 && SYMBOL_REF_BLOCK (symbol)
552 && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
553 && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
554 < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
555 return true;
557 return false;
560 /* Split X into a base and a constant offset, storing them in *BASE_OUT
561 and *OFFSET_OUT respectively. */
563 void
564 split_const (rtx x, rtx *base_out, rtx *offset_out)
566 if (GET_CODE (x) == CONST)
568 x = XEXP (x, 0);
569 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
571 *base_out = XEXP (x, 0);
572 *offset_out = XEXP (x, 1);
573 return;
576 *base_out = x;
577 *offset_out = const0_rtx;
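
/* Illustrative usage sketch, not part of the original file: a hypothetical
   helper showing the typical way split_const is consumed, here to decide
   whether X is a SYMBOL_REF plus a compile-time constant offset.  The name
   symbol_plus_offset_p is an assumption introduced only for this example.  */

static bool ATTRIBUTE_UNUSED
symbol_plus_offset_p (rtx x, rtx *symbol_out, HOST_WIDE_INT *offset_out)
{
  rtx base, offset;

  /* split_const never fails; it falls back to *BASE_OUT = X and
     *OFFSET_OUT = const0_rtx.  */
  split_const (x, &base, &offset);
  if (GET_CODE (base) != SYMBOL_REF)
    return false;

  *symbol_out = base;
  *offset_out = INTVAL (offset);
  return true;
}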
580 /* Return the number of places FIND appears within X. If COUNT_DEST is
581 zero, we do not count occurrences inside the destination of a SET. */
584 count_occurrences (const_rtx x, const_rtx find, int count_dest)
586 int i, j;
587 enum rtx_code code;
588 const char *format_ptr;
589 int count;
591 if (x == find)
592 return 1;
594 code = GET_CODE (x);
596 switch (code)
598 case REG:
599 CASE_CONST_ANY:
600 case SYMBOL_REF:
601 case CODE_LABEL:
602 case PC:
603 case CC0:
604 return 0;
606 case EXPR_LIST:
607 count = count_occurrences (XEXP (x, 0), find, count_dest);
608 if (XEXP (x, 1))
609 count += count_occurrences (XEXP (x, 1), find, count_dest);
610 return count;
612 case MEM:
613 if (MEM_P (find) && rtx_equal_p (x, find))
614 return 1;
615 break;
617 case SET:
618 if (SET_DEST (x) == find && ! count_dest)
619 return count_occurrences (SET_SRC (x), find, count_dest);
620 break;
622 default:
623 break;
626 format_ptr = GET_RTX_FORMAT (code);
627 count = 0;
629 for (i = 0; i < GET_RTX_LENGTH (code); i++)
631 switch (*format_ptr++)
633 case 'e':
634 count += count_occurrences (XEXP (x, i), find, count_dest);
635 break;
637 case 'E':
638 for (j = 0; j < XVECLEN (x, i); j++)
639 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
640 break;
643 return count;
647 /* Return TRUE if OP is a register or subreg of a register that
648 holds an unsigned quantity. Otherwise, return FALSE. */
650 bool
651 unsigned_reg_p (rtx op)
653 if (REG_P (op)
654 && REG_EXPR (op)
655 && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
656 return true;
658 if (GET_CODE (op) == SUBREG
659 && SUBREG_PROMOTED_UNSIGNED_P (op))
660 return true;
662 return false;
666 /* Nonzero if register REG appears somewhere within IN.
667 Also works if REG is not a register; in this case it checks
668 for a subexpression of IN that is Lisp "equal" to REG. */
671 reg_mentioned_p (const_rtx reg, const_rtx in)
673 const char *fmt;
674 int i;
675 enum rtx_code code;
677 if (in == 0)
678 return 0;
680 if (reg == in)
681 return 1;
683 if (GET_CODE (in) == LABEL_REF)
684 return reg == XEXP (in, 0);
686 code = GET_CODE (in);
688 switch (code)
690 /* Compare registers by number. */
691 case REG:
692 return REG_P (reg) && REGNO (in) == REGNO (reg);
694 /* These codes have no constituent expressions
695 and are unique. */
696 case SCRATCH:
697 case CC0:
698 case PC:
699 return 0;
701 CASE_CONST_ANY:
702 /* These are kept unique for a given value. */
703 return 0;
705 default:
706 break;
709 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
710 return 1;
712 fmt = GET_RTX_FORMAT (code);
714 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
716 if (fmt[i] == 'E')
718 int j;
719 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
720 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
721 return 1;
723 else if (fmt[i] == 'e'
724 && reg_mentioned_p (reg, XEXP (in, i)))
725 return 1;
727 return 0;
730 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
731 no CODE_LABEL insn. */
734 no_labels_between_p (const_rtx beg, const_rtx end)
736 rtx p;
737 if (beg == end)
738 return 0;
739 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
740 if (LABEL_P (p))
741 return 0;
742 return 1;
745 /* Nonzero if register REG is used in an insn between
746 FROM_INSN and TO_INSN (exclusive of those two). */
749 reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
751 rtx insn;
753 if (from_insn == to_insn)
754 return 0;
756 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
757 if (NONDEBUG_INSN_P (insn)
758 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
759 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
760 return 1;
761 return 0;
764 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
765 is entirely replaced by a new value and the only use is as a SET_DEST,
766 we do not consider it a reference. */
769 reg_referenced_p (const_rtx x, const_rtx body)
771 int i;
773 switch (GET_CODE (body))
775 case SET:
776 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
777 return 1;
779 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
780 of a REG that occupies all of the REG, the insn references X if
781 it is mentioned in the destination. */
782 if (GET_CODE (SET_DEST (body)) != CC0
783 && GET_CODE (SET_DEST (body)) != PC
784 && !REG_P (SET_DEST (body))
785 && ! (GET_CODE (SET_DEST (body)) == SUBREG
786 && REG_P (SUBREG_REG (SET_DEST (body)))
787 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
788 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
789 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
790 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
791 && reg_overlap_mentioned_p (x, SET_DEST (body)))
792 return 1;
793 return 0;
795 case ASM_OPERANDS:
796 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
797 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
798 return 1;
799 return 0;
801 case CALL:
802 case USE:
803 case IF_THEN_ELSE:
804 return reg_overlap_mentioned_p (x, body);
806 case TRAP_IF:
807 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
809 case PREFETCH:
810 return reg_overlap_mentioned_p (x, XEXP (body, 0));
812 case UNSPEC:
813 case UNSPEC_VOLATILE:
814 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
815 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
816 return 1;
817 return 0;
819 case PARALLEL:
820 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
821 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
822 return 1;
823 return 0;
825 case CLOBBER:
826 if (MEM_P (XEXP (body, 0)))
827 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
828 return 1;
829 return 0;
831 case COND_EXEC:
832 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
833 return 1;
834 return reg_referenced_p (x, COND_EXEC_CODE (body));
836 default:
837 return 0;
841 /* Nonzero if register REG is set or clobbered in an insn between
842 FROM_INSN and TO_INSN (exclusive of those two). */
845 reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
847 const_rtx insn;
849 if (from_insn == to_insn)
850 return 0;
852 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
853 if (INSN_P (insn) && reg_set_p (reg, insn))
854 return 1;
855 return 0;
858 /* Internals of reg_set_between_p. */
860 reg_set_p (const_rtx reg, const_rtx insn)
862 /* We can be passed an insn or part of one. If we are passed an insn,
863 check if a side-effect of the insn clobbers REG. */
864 if (INSN_P (insn)
865 && (FIND_REG_INC_NOTE (insn, reg)
866 || (CALL_P (insn)
867 && ((REG_P (reg)
868 && REGNO (reg) < FIRST_PSEUDO_REGISTER
869 && overlaps_hard_reg_set_p (regs_invalidated_by_call,
870 GET_MODE (reg), REGNO (reg)))
871 || MEM_P (reg)
872 || find_reg_fusage (insn, CLOBBER, reg)))))
873 return 1;
875 return set_of (reg, insn) != NULL_RTX;
878 /* Similar to reg_set_between_p, but check all registers in X. Return 0
879 only if none of them are modified between START and END. Return 1 if
880 X contains a MEM; this routine does use memory aliasing. */
883 modified_between_p (const_rtx x, const_rtx start, const_rtx end)
885 const enum rtx_code code = GET_CODE (x);
886 const char *fmt;
887 int i, j;
888 rtx insn;
890 if (start == end)
891 return 0;
893 switch (code)
895 CASE_CONST_ANY:
896 case CONST:
897 case SYMBOL_REF:
898 case LABEL_REF:
899 return 0;
901 case PC:
902 case CC0:
903 return 1;
905 case MEM:
906 if (modified_between_p (XEXP (x, 0), start, end))
907 return 1;
908 if (MEM_READONLY_P (x))
909 return 0;
910 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
911 if (memory_modified_in_insn_p (x, insn))
912 return 1;
913 return 0;
914 break;
916 case REG:
917 return reg_set_between_p (x, start, end);
919 default:
920 break;
923 fmt = GET_RTX_FORMAT (code);
924 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
926 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
927 return 1;
929 else if (fmt[i] == 'E')
930 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
931 if (modified_between_p (XVECEXP (x, i, j), start, end))
932 return 1;
935 return 0;
938 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
939 of them are modified in INSN. Return 1 if X contains a MEM; this routine
940 does use memory aliasing. */
943 modified_in_p (const_rtx x, const_rtx insn)
945 const enum rtx_code code = GET_CODE (x);
946 const char *fmt;
947 int i, j;
949 switch (code)
951 CASE_CONST_ANY:
952 case CONST:
953 case SYMBOL_REF:
954 case LABEL_REF:
955 return 0;
957 case PC:
958 case CC0:
959 return 1;
961 case MEM:
962 if (modified_in_p (XEXP (x, 0), insn))
963 return 1;
964 if (MEM_READONLY_P (x))
965 return 0;
966 if (memory_modified_in_insn_p (x, insn))
967 return 1;
968 return 0;
969 break;
971 case REG:
972 return reg_set_p (x, insn);
974 default:
975 break;
978 fmt = GET_RTX_FORMAT (code);
979 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
981 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
982 return 1;
984 else if (fmt[i] == 'E')
985 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
986 if (modified_in_p (XVECEXP (x, i, j), insn))
987 return 1;
990 return 0;
993 /* Helper function for set_of. */
994 struct set_of_data
996 const_rtx found;
997 const_rtx pat;
1000 static void
1001 set_of_1 (rtx x, const_rtx pat, void *data1)
1003 struct set_of_data *const data = (struct set_of_data *) (data1);
1004 if (rtx_equal_p (x, data->pat)
1005 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
1006 data->found = pat;
1009 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1010 (either directly or via STRICT_LOW_PART and similar modifiers). */
1011 const_rtx
1012 set_of (const_rtx pat, const_rtx insn)
1014 struct set_of_data data;
1015 data.found = NULL_RTX;
1016 data.pat = pat;
1017 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1018 return data.found;
1021 /* This function, called through note_stores, collects sets and
1022 clobbers of hard registers in a HARD_REG_SET, which is pointed to
1023 by DATA. */
1024 void
1025 record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1027 HARD_REG_SET *pset = (HARD_REG_SET *)data;
1028 if (REG_P (x) && HARD_REGISTER_P (x))
1029 add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
1032 /* Examine INSN, and compute the set of hard registers written by it.
1033 Store it in *PSET. Should only be called after reload. */
1034 void
1035 find_all_hard_reg_sets (const_rtx insn, HARD_REG_SET *pset)
1037 rtx link;
1039 CLEAR_HARD_REG_SET (*pset);
1040 note_stores (PATTERN (insn), record_hard_reg_sets, pset);
1041 if (CALL_P (insn))
1042 IOR_HARD_REG_SET (*pset, call_used_reg_set);
1043 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1044 if (REG_NOTE_KIND (link) == REG_INC)
1045 record_hard_reg_sets (XEXP (link, 0), NULL, pset);
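
/* Illustrative usage sketch, not part of the original file: a hypothetical
   wrapper asking whether INSN writes hard register REGNO, built on
   find_all_hard_reg_sets above (and therefore, like it, only meaningful
   after reload).  The name insn_writes_hard_reg_p is an assumption
   introduced only for this example.  */

static bool ATTRIBUTE_UNUSED
insn_writes_hard_reg_p (const_rtx insn, unsigned int regno)
{
  HARD_REG_SET written;

  find_all_hard_reg_sets (insn, &written);
  return TEST_HARD_REG_BIT (written, regno) != 0;
}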
1048 /* A for_each_rtx subroutine of record_hard_reg_uses. */
1049 static int
1050 record_hard_reg_uses_1 (rtx *px, void *data)
1052 rtx x = *px;
1053 HARD_REG_SET *pused = (HARD_REG_SET *)data;
1055 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1057 int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
1058 while (nregs-- > 0)
1059 SET_HARD_REG_BIT (*pused, REGNO (x) + nregs);
1061 return 0;
1064 /* Like record_hard_reg_sets, but called through note_uses. */
1065 void
1066 record_hard_reg_uses (rtx *px, void *data)
1068 for_each_rtx (px, record_hard_reg_uses_1, data);
1071 /* Given an INSN, return a SET expression if this insn has only a single SET.
1072    It may also have CLOBBERs, USEs, or SETs whose output
1073 will not be used, which we ignore. */
1076 single_set_2 (const_rtx insn, const_rtx pat)
1078 rtx set = NULL;
1079 int set_verified = 1;
1080 int i;
1082 if (GET_CODE (pat) == PARALLEL)
1084 for (i = 0; i < XVECLEN (pat, 0); i++)
1086 rtx sub = XVECEXP (pat, 0, i);
1087 switch (GET_CODE (sub))
1089 case USE:
1090 case CLOBBER:
1091 break;
1093 case SET:
1094	  /* We can consider insns having multiple sets, where all
1095	     but one are dead, as single set insns.  In the common case
1096	     only a single set is present in the pattern, so we want
1097	     to avoid checking for REG_UNUSED notes unless necessary.
1099	     When we reach a set the first time, we just expect it to be
1100	     the single set we are looking for; only when more
1101	     sets are found in the insn do we check them.  */
1102 if (!set_verified)
1104 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1105 && !side_effects_p (set))
1106 set = NULL;
1107 else
1108 set_verified = 1;
1110 if (!set)
1111 set = sub, set_verified = 0;
1112 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1113 || side_effects_p (sub))
1114 return NULL_RTX;
1115 break;
1117 default:
1118 return NULL_RTX;
1122 return set;
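
/* Illustrative usage sketch, not part of the original file: single_set
   (declared in rtl.h and implemented via single_set_2 above) is the usual
   entry point; the hypothetical predicate below uses it to recognize a
   plain register-to-register copy.  The name simple_reg_copy_p is an
   assumption introduced only for this example.  */

static bool ATTRIBUTE_UNUSED
simple_reg_copy_p (const_rtx insn)
{
  rtx set = single_set (insn);

  return (set != NULL_RTX
	  && REG_P (SET_DEST (set))
	  && REG_P (SET_SRC (set)));
}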
1125 /* Given an INSN, return nonzero if it has more than one SET, else return
1126 zero. */
1129 multiple_sets (const_rtx insn)
1131 int found;
1132 int i;
1134 /* INSN must be an insn. */
1135 if (! INSN_P (insn))
1136 return 0;
1138 /* Only a PARALLEL can have multiple SETs. */
1139 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1141 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1142 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1144 /* If we have already found a SET, then return now. */
1145 if (found)
1146 return 1;
1147 else
1148 found = 1;
1152 /* Either zero or one SET. */
1153 return 0;
1156 /* Return nonzero if the destination of SET equals the source
1157 and there are no side effects. */
1160 set_noop_p (const_rtx set)
1162 rtx src = SET_SRC (set);
1163 rtx dst = SET_DEST (set);
1165 if (dst == pc_rtx && src == pc_rtx)
1166 return 1;
1168 if (MEM_P (dst) && MEM_P (src))
1169 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1171 if (GET_CODE (dst) == ZERO_EXTRACT)
1172 return rtx_equal_p (XEXP (dst, 0), src)
1173 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1174 && !side_effects_p (src);
1176 if (GET_CODE (dst) == STRICT_LOW_PART)
1177 dst = XEXP (dst, 0);
1179 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1181 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1182 return 0;
1183 src = SUBREG_REG (src);
1184 dst = SUBREG_REG (dst);
1187 return (REG_P (src) && REG_P (dst)
1188 && REGNO (src) == REGNO (dst));
1191 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1192 value to itself. */
1195 noop_move_p (const_rtx insn)
1197 rtx pat = PATTERN (insn);
1199 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1200 return 1;
1202 /* Insns carrying these notes are useful later on. */
1203 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1204 return 0;
1206 if (GET_CODE (pat) == SET && set_noop_p (pat))
1207 return 1;
1209 if (GET_CODE (pat) == PARALLEL)
1211 int i;
1212 /* If nothing but SETs of registers to themselves,
1213 this insn can also be deleted. */
1214 for (i = 0; i < XVECLEN (pat, 0); i++)
1216 rtx tem = XVECEXP (pat, 0, i);
1218 if (GET_CODE (tem) == USE
1219 || GET_CODE (tem) == CLOBBER)
1220 continue;
1222 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1223 return 0;
1226 return 1;
1228 return 0;
1232 /* Return the last thing that X was assigned from before *PINSN. If VALID_TO
1233 is not NULL_RTX then verify that the object is not modified up to VALID_TO.
1234 If the object was modified, if we hit a partial assignment to X, or hit a
1235 CODE_LABEL first, return X. If we found an assignment, update *PINSN to
1236 point to it. ALLOW_HWREG is set to 1 if hardware registers are allowed to
1237 be the src. */
1240 find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
1242 rtx p;
1244 for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
1245 p = PREV_INSN (p))
1246 if (INSN_P (p))
1248 rtx set = single_set (p);
1249 rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);
1251 if (set && rtx_equal_p (x, SET_DEST (set)))
1253 rtx src = SET_SRC (set);
1255 if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
1256 src = XEXP (note, 0);
1258 if ((valid_to == NULL_RTX
1259 || ! modified_between_p (src, PREV_INSN (p), valid_to))
1260 /* Reject hard registers because we don't usually want
1261 to use them; we'd rather use a pseudo. */
1262 && (! (REG_P (src)
1263 && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
1265 *pinsn = p;
1266 return src;
1270 /* If set in non-simple way, we don't have a value. */
1271 if (reg_set_p (x, p))
1272 break;
1275 return x;
1278 /* Return nonzero if register in range [REGNO, ENDREGNO)
1279 appears either explicitly or implicitly in X
1280 other than being stored into.
1282 References contained within the substructure at LOC do not count.
1283 LOC may be zero, meaning don't ignore anything. */
1286 refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1287 rtx *loc)
1289 int i;
1290 unsigned int x_regno;
1291 RTX_CODE code;
1292 const char *fmt;
1294 repeat:
1295  /* The contents of a REG_NONNEG note are always zero, so we must come here
1296 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1297 if (x == 0)
1298 return 0;
1300 code = GET_CODE (x);
1302 switch (code)
1304 case REG:
1305 x_regno = REGNO (x);
1307      /* If we are modifying the stack, frame, or argument pointer, it will
1308 clobber a virtual register. In fact, we could be more precise,
1309 but it isn't worth it. */
1310 if ((x_regno == STACK_POINTER_REGNUM
1311 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1312 || x_regno == ARG_POINTER_REGNUM
1313 #endif
1314 || x_regno == FRAME_POINTER_REGNUM)
1315 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1316 return 1;
1318 return endregno > x_regno && regno < END_REGNO (x);
1320 case SUBREG:
1321 /* If this is a SUBREG of a hard reg, we can see exactly which
1322 registers are being modified. Otherwise, handle normally. */
1323 if (REG_P (SUBREG_REG (x))
1324 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1326 unsigned int inner_regno = subreg_regno (x);
1327 unsigned int inner_endregno
1328 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1329 ? subreg_nregs (x) : 1);
1331 return endregno > inner_regno && regno < inner_endregno;
1333 break;
1335 case CLOBBER:
1336 case SET:
1337 if (&SET_DEST (x) != loc
1338 /* Note setting a SUBREG counts as referring to the REG it is in for
1339 a pseudo but not for hard registers since we can
1340 treat each word individually. */
1341 && ((GET_CODE (SET_DEST (x)) == SUBREG
1342 && loc != &SUBREG_REG (SET_DEST (x))
1343 && REG_P (SUBREG_REG (SET_DEST (x)))
1344 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1345 && refers_to_regno_p (regno, endregno,
1346 SUBREG_REG (SET_DEST (x)), loc))
1347 || (!REG_P (SET_DEST (x))
1348 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1349 return 1;
1351 if (code == CLOBBER || loc == &SET_SRC (x))
1352 return 0;
1353 x = SET_SRC (x);
1354 goto repeat;
1356 default:
1357 break;
1360 /* X does not match, so try its subexpressions. */
1362 fmt = GET_RTX_FORMAT (code);
1363 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1365 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1367 if (i == 0)
1369 x = XEXP (x, 0);
1370 goto repeat;
1372 else
1373 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1374 return 1;
1376 else if (fmt[i] == 'E')
1378 int j;
1379 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1380 if (loc != &XVECEXP (x, i, j)
1381 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1382 return 1;
1385 return 0;
1388 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1389 we check if any register number in X conflicts with the relevant register
1390 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1391 contains a MEM (we don't bother checking for memory addresses that can't
1392    conflict because we expect this to be a rare case).  */
1395 reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1397 unsigned int regno, endregno;
1399 /* If either argument is a constant, then modifying X can not
1400      affect IN.  Here we look at IN; we can profitably combine
1401 CONSTANT_P (x) with the switch statement below. */
1402 if (CONSTANT_P (in))
1403 return 0;
1405 recurse:
1406 switch (GET_CODE (x))
1408 case STRICT_LOW_PART:
1409 case ZERO_EXTRACT:
1410 case SIGN_EXTRACT:
1411 /* Overly conservative. */
1412 x = XEXP (x, 0);
1413 goto recurse;
1415 case SUBREG:
1416 regno = REGNO (SUBREG_REG (x));
1417 if (regno < FIRST_PSEUDO_REGISTER)
1418 regno = subreg_regno (x);
1419 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1420 ? subreg_nregs (x) : 1);
1421 goto do_reg;
1423 case REG:
1424 regno = REGNO (x);
1425 endregno = END_REGNO (x);
1426 do_reg:
1427 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1429 case MEM:
1431 const char *fmt;
1432 int i;
1434 if (MEM_P (in))
1435 return 1;
1437 fmt = GET_RTX_FORMAT (GET_CODE (in));
1438 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1439 if (fmt[i] == 'e')
1441 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1442 return 1;
1444 else if (fmt[i] == 'E')
1446 int j;
1447 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1448 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1449 return 1;
1452 return 0;
1455 case SCRATCH:
1456 case PC:
1457 case CC0:
1458 return reg_mentioned_p (x, in);
1460 case PARALLEL:
1462 int i;
1464 /* If any register in here refers to it we return true. */
1465 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1466 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1467 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1468 return 1;
1469 return 0;
1472 default:
1473 gcc_assert (CONSTANT_P (x));
1474 return 0;
1478 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1479 (X would be the pattern of an insn). DATA is an arbitrary pointer,
1480 ignored by note_stores, but passed to FUN.
1482 FUN receives three arguments:
1483 1. the REG, MEM, CC0 or PC being stored in or clobbered,
1484 2. the SET or CLOBBER rtx that does the store,
1485 3. the pointer DATA provided to note_stores.
1487 If the item being stored in or clobbered is a SUBREG of a hard register,
1488 the SUBREG will be passed. */
1490 void
1491 note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
1493 int i;
1495 if (GET_CODE (x) == COND_EXEC)
1496 x = COND_EXEC_CODE (x);
1498 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1500 rtx dest = SET_DEST (x);
1502 while ((GET_CODE (dest) == SUBREG
1503 && (!REG_P (SUBREG_REG (dest))
1504 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1505 || GET_CODE (dest) == ZERO_EXTRACT
1506 || GET_CODE (dest) == STRICT_LOW_PART)
1507 dest = XEXP (dest, 0);
1509 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1510 each of whose first operand is a register. */
1511 if (GET_CODE (dest) == PARALLEL)
1513 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1514 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1515 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1517 else
1518 (*fun) (dest, x, data);
1521 else if (GET_CODE (x) == PARALLEL)
1522 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1523 note_stores (XVECEXP (x, 0, i), fun, data);
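
/* Illustrative usage sketch, not part of the original file: a hypothetical
   note_stores callback that counts stores and clobbers of hard registers
   in an insn pattern, in the same style as record_hard_reg_sets above.
   The names count_hard_reg_stores_1 and count_hard_reg_stores are
   assumptions introduced only for this example.  */

static void
count_hard_reg_stores_1 (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  int *count = (int *) data;

  if (REG_P (x) && HARD_REGISTER_P (x))
    (*count)++;
}

static int ATTRIBUTE_UNUSED
count_hard_reg_stores (const_rtx insn)
{
  int count = 0;

  note_stores (PATTERN (insn), count_hard_reg_stores_1, &count);
  return count;
}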
1526 /* Like note_stores, but call FUN for each expression that is being
1527 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1528 FUN for each expression, not any interior subexpressions. FUN receives a
1529 pointer to the expression and the DATA passed to this function.
1531 Note that this is not quite the same test as that done in reg_referenced_p
1532 since that considers something as being referenced if it is being
1533 partially set, while we do not. */
1535 void
1536 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1538 rtx body = *pbody;
1539 int i;
1541 switch (GET_CODE (body))
1543 case COND_EXEC:
1544 (*fun) (&COND_EXEC_TEST (body), data);
1545 note_uses (&COND_EXEC_CODE (body), fun, data);
1546 return;
1548 case PARALLEL:
1549 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1550 note_uses (&XVECEXP (body, 0, i), fun, data);
1551 return;
1553 case SEQUENCE:
1554 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1555 note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1556 return;
1558 case USE:
1559 (*fun) (&XEXP (body, 0), data);
1560 return;
1562 case ASM_OPERANDS:
1563 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1564 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1565 return;
1567 case TRAP_IF:
1568 (*fun) (&TRAP_CONDITION (body), data);
1569 return;
1571 case PREFETCH:
1572 (*fun) (&XEXP (body, 0), data);
1573 return;
1575 case UNSPEC:
1576 case UNSPEC_VOLATILE:
1577 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1578 (*fun) (&XVECEXP (body, 0, i), data);
1579 return;
1581 case CLOBBER:
1582 if (MEM_P (XEXP (body, 0)))
1583 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1584 return;
1586 case SET:
1588 rtx dest = SET_DEST (body);
1590	/* For sets we process everything in the source, plus the address of a
1591	   MEM destination and the operands of a ZERO_EXTRACT destination.  */
1592 (*fun) (&SET_SRC (body), data);
1594 if (GET_CODE (dest) == ZERO_EXTRACT)
1596 (*fun) (&XEXP (dest, 1), data);
1597 (*fun) (&XEXP (dest, 2), data);
1600 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1601 dest = XEXP (dest, 0);
1603 if (MEM_P (dest))
1604 (*fun) (&XEXP (dest, 0), data);
1606 return;
1608 default:
1609 /* All the other possibilities never store. */
1610 (*fun) (pbody, data);
1611 return;
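
/* Illustrative usage sketch, not part of the original file: a hypothetical
   note_uses callback counting how many of the use expressions handed out
   by note_uses are MEMs.  Since note_uses does not visit interior
   subexpressions, MEMs nested inside other uses are not counted.  The
   names count_mem_uses_1 and count_mem_uses are assumptions introduced
   only for this example.  */

static void
count_mem_uses_1 (rtx *loc, void *data)
{
  int *count = (int *) data;

  if (MEM_P (*loc))
    (*count)++;
}

static int ATTRIBUTE_UNUSED
count_mem_uses (rtx insn)
{
  int count = 0;

  note_uses (&PATTERN (insn), count_mem_uses_1, &count);
  return count;
}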
1615 /* Return nonzero if X's old contents don't survive after INSN.
1616 This will be true if X is (cc0) or if X is a register and
1617 X dies in INSN or because INSN entirely sets X.
1619 "Entirely set" means set directly and not through a SUBREG, or
1620 ZERO_EXTRACT, so no trace of the old contents remains.
1621 Likewise, REG_INC does not count.
1623 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1624 but for this use that makes no difference, since regs don't overlap
1625 during their lifetimes. Therefore, this function may be used
1626 at any time after deaths have been computed.
1628 If REG is a hard reg that occupies multiple machine registers, this
1629 function will only return 1 if each of those registers will be replaced
1630 by INSN. */
1633 dead_or_set_p (const_rtx insn, const_rtx x)
1635 unsigned int regno, end_regno;
1636 unsigned int i;
1638 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1639 if (GET_CODE (x) == CC0)
1640 return 1;
1642 gcc_assert (REG_P (x));
1644 regno = REGNO (x);
1645 end_regno = END_REGNO (x);
1646 for (i = regno; i < end_regno; i++)
1647 if (! dead_or_set_regno_p (insn, i))
1648 return 0;
1650 return 1;
1653 /* Return TRUE iff DEST is a register or subreg of a register and
1654 doesn't change the number of words of the inner register, and any
1655 part of the register is TEST_REGNO. */
1657 static bool
1658 covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1660 unsigned int regno, endregno;
1662 if (GET_CODE (dest) == SUBREG
1663 && (((GET_MODE_SIZE (GET_MODE (dest))
1664 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1665 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1666 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1667 dest = SUBREG_REG (dest);
1669 if (!REG_P (dest))
1670 return false;
1672 regno = REGNO (dest);
1673 endregno = END_REGNO (dest);
1674 return (test_regno >= regno && test_regno < endregno);
1677 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1678 any member matches the covers_regno_no_parallel_p criteria. */
1680 static bool
1681 covers_regno_p (const_rtx dest, unsigned int test_regno)
1683 if (GET_CODE (dest) == PARALLEL)
1685 /* Some targets place small structures in registers for return
1686 values of functions, and those registers are wrapped in
1687 PARALLELs that we may see as the destination of a SET. */
1688 int i;
1690 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1692 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1693 if (inner != NULL_RTX
1694 && covers_regno_no_parallel_p (inner, test_regno))
1695 return true;
1698 return false;
1700 else
1701 return covers_regno_no_parallel_p (dest, test_regno);
1704 /* Utility function for dead_or_set_p to check an individual register. */
1707 dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1709 const_rtx pattern;
1711 /* See if there is a death note for something that includes TEST_REGNO. */
1712 if (find_regno_note (insn, REG_DEAD, test_regno))
1713 return 1;
1715 if (CALL_P (insn)
1716 && find_regno_fusage (insn, CLOBBER, test_regno))
1717 return 1;
1719 pattern = PATTERN (insn);
1721 /* If a COND_EXEC is not executed, the value survives. */
1722 if (GET_CODE (pattern) == COND_EXEC)
1723 return 0;
1725 if (GET_CODE (pattern) == SET)
1726 return covers_regno_p (SET_DEST (pattern), test_regno);
1727 else if (GET_CODE (pattern) == PARALLEL)
1729 int i;
1731 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1733 rtx body = XVECEXP (pattern, 0, i);
1735 if (GET_CODE (body) == COND_EXEC)
1736 body = COND_EXEC_CODE (body);
1738 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1739 && covers_regno_p (SET_DEST (body), test_regno))
1740 return 1;
1744 return 0;
1747 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1748 If DATUM is nonzero, look for one whose datum is DATUM. */
1751 find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1753 rtx link;
1755 gcc_checking_assert (insn);
1757 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1758 if (! INSN_P (insn))
1759 return 0;
1760 if (datum == 0)
1762 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1763 if (REG_NOTE_KIND (link) == kind)
1764 return link;
1765 return 0;
1768 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1769 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1770 return link;
1771 return 0;
1774 /* Return the reg-note of kind KIND in insn INSN which applies to register
1775 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1776 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1777 it might be the case that the note overlaps REGNO. */
1780 find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1782 rtx link;
1784 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1785 if (! INSN_P (insn))
1786 return 0;
1788 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1789 if (REG_NOTE_KIND (link) == kind
1790 /* Verify that it is a register, so that scratch and MEM won't cause a
1791 problem here. */
1792 && REG_P (XEXP (link, 0))
1793 && REGNO (XEXP (link, 0)) <= regno
1794 && END_REGNO (XEXP (link, 0)) > regno)
1795 return link;
1796 return 0;
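
/* Illustrative usage sketch, not part of the original file: a hypothetical
   predicate built on find_regno_note, testing whether INSN carries a
   REG_DEAD note covering hard register REGNO.  The name
   regno_dies_in_insn_p is an assumption introduced only for this
   example.  */

static bool ATTRIBUTE_UNUSED
regno_dies_in_insn_p (const_rtx insn, unsigned int regno)
{
  return find_regno_note (insn, REG_DEAD, regno) != NULL_RTX;
}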
1799 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1800 has such a note. */
1803 find_reg_equal_equiv_note (const_rtx insn)
1805 rtx link;
1807 if (!INSN_P (insn))
1808 return 0;
1810 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1811 if (REG_NOTE_KIND (link) == REG_EQUAL
1812 || REG_NOTE_KIND (link) == REG_EQUIV)
1814 /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1815 insns that have multiple sets. Checking single_set to
1816 make sure of this is not the proper check, as explained
1817 in the comment in set_unique_reg_note.
1819 This should be changed into an assert. */
1820 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1821 return 0;
1822 return link;
1824 return NULL;
1827 /* Check whether INSN is a single_set whose source is known to be
1828 equivalent to a constant. Return that constant if so, otherwise
1829 return null. */
1832 find_constant_src (const_rtx insn)
1834 rtx note, set, x;
1836 set = single_set (insn);
1837 if (set)
1839 x = avoid_constant_pool_reference (SET_SRC (set));
1840 if (CONSTANT_P (x))
1841 return x;
1844 note = find_reg_equal_equiv_note (insn);
1845 if (note && CONSTANT_P (XEXP (note, 0)))
1846 return XEXP (note, 0);
1848 return NULL_RTX;
1851 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1852 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1855 find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1857 /* If it's not a CALL_INSN, it can't possibly have a
1858 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1859 if (!CALL_P (insn))
1860 return 0;
1862 gcc_assert (datum);
1864 if (!REG_P (datum))
1866 rtx link;
1868 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1869 link;
1870 link = XEXP (link, 1))
1871 if (GET_CODE (XEXP (link, 0)) == code
1872 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1873 return 1;
1875 else
1877 unsigned int regno = REGNO (datum);
1879 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1880 to pseudo registers, so don't bother checking. */
1882 if (regno < FIRST_PSEUDO_REGISTER)
1884 unsigned int end_regno = END_HARD_REGNO (datum);
1885 unsigned int i;
1887 for (i = regno; i < end_regno; i++)
1888 if (find_regno_fusage (insn, code, i))
1889 return 1;
1893 return 0;
1896 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
1897 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1900 find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
1902 rtx link;
1904 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
1905 to pseudo registers, so don't bother checking. */
1907 if (regno >= FIRST_PSEUDO_REGISTER
1908 || !CALL_P (insn) )
1909 return 0;
1911 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1913 rtx op, reg;
1915 if (GET_CODE (op = XEXP (link, 0)) == code
1916 && REG_P (reg = XEXP (op, 0))
1917 && REGNO (reg) <= regno
1918 && END_HARD_REGNO (reg) > regno)
1919 return 1;
1922 return 0;
1926 /* Allocate a register note with kind KIND and datum DATUM. LIST is
1927 stored as the pointer to the next register note. */
1930 alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
1932 rtx note;
1934 switch (kind)
1936 case REG_CC_SETTER:
1937 case REG_CC_USER:
1938 case REG_LABEL_TARGET:
1939 case REG_LABEL_OPERAND:
1940 case REG_TM:
1941 /* These types of register notes use an INSN_LIST rather than an
1942 EXPR_LIST, so that copying is done right and dumps look
1943 better. */
1944 note = alloc_INSN_LIST (datum, list);
1945 PUT_REG_NOTE_KIND (note, kind);
1946 break;
1948 default:
1949 note = alloc_EXPR_LIST (kind, datum, list);
1950 break;
1953 return note;
1956 /* Add register note with kind KIND and datum DATUM to INSN. */
1958 void
1959 add_reg_note (rtx insn, enum reg_note kind, rtx datum)
1961 REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
1964 /* Remove register note NOTE from the REG_NOTES of INSN. */
1966 void
1967 remove_note (rtx insn, const_rtx note)
1969 rtx link;
1971 if (note == NULL_RTX)
1972 return;
1974 if (REG_NOTES (insn) == note)
1975 REG_NOTES (insn) = XEXP (note, 1);
1976 else
1977 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1978 if (XEXP (link, 1) == note)
1980 XEXP (link, 1) = XEXP (note, 1);
1981 break;
1984 switch (REG_NOTE_KIND (note))
1986 case REG_EQUAL:
1987 case REG_EQUIV:
1988 df_notes_rescan (insn);
1989 break;
1990 default:
1991 break;
1995 /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
1997 void
1998 remove_reg_equal_equiv_notes (rtx insn)
2000 rtx *loc;
2002 loc = &REG_NOTES (insn);
2003 while (*loc)
2005 enum reg_note kind = REG_NOTE_KIND (*loc);
2006 if (kind == REG_EQUAL || kind == REG_EQUIV)
2007 *loc = XEXP (*loc, 1);
2008 else
2009 loc = &XEXP (*loc, 1);
2013 /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */
2015 void
2016 remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
2018 df_ref eq_use;
2020 if (!df)
2021 return;
2023 /* This loop is a little tricky. We cannot just go down the chain because
2024 it is being modified by some actions in the loop. So we just iterate
2025 over the head. We plan to drain the list anyway. */
2026 while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
2028 rtx insn = DF_REF_INSN (eq_use);
2029 rtx note = find_reg_equal_equiv_note (insn);
2031 /* This assert is generally triggered when someone deletes a REG_EQUAL
2032 or REG_EQUIV note by hacking the list manually rather than calling
2033 remove_note. */
2034 gcc_assert (note);
2036 remove_note (insn, note);
2040 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2041 return 1 if it is found. A simple equality test is used to determine if
2042 NODE matches. */
2045 in_expr_list_p (const_rtx listp, const_rtx node)
2047 const_rtx x;
2049 for (x = listp; x; x = XEXP (x, 1))
2050 if (node == XEXP (x, 0))
2051 return 1;
2053 return 0;
2056 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2057 remove that entry from the list if it is found.
2059 A simple equality test is used to determine if NODE matches. */
2061 void
2062 remove_node_from_expr_list (const_rtx node, rtx *listp)
2064 rtx temp = *listp;
2065 rtx prev = NULL_RTX;
2067 while (temp)
2069 if (node == XEXP (temp, 0))
2071 /* Splice the node out of the list. */
2072 if (prev)
2073 XEXP (prev, 1) = XEXP (temp, 1);
2074 else
2075 *listp = XEXP (temp, 1);
2077 return;
2080 prev = temp;
2081 temp = XEXP (temp, 1);
2085 /* Nonzero if X contains any volatile instructions. These are instructions
2086    which may cause unpredictable machine state, and thus no
2087 instructions should be moved or combined across them. This includes
2088 only volatile asms and UNSPEC_VOLATILE instructions. */
2091 volatile_insn_p (const_rtx x)
2093 const RTX_CODE code = GET_CODE (x);
2094 switch (code)
2096 case LABEL_REF:
2097 case SYMBOL_REF:
2098 case CONST:
2099 CASE_CONST_ANY:
2100 case CC0:
2101 case PC:
2102 case REG:
2103 case SCRATCH:
2104 case CLOBBER:
2105 case ADDR_VEC:
2106 case ADDR_DIFF_VEC:
2107 case CALL:
2108 case MEM:
2109 return 0;
2111 case UNSPEC_VOLATILE:
2112 /* case TRAP_IF: This isn't clear yet. */
2113 return 1;
2115 case ASM_INPUT:
2116 case ASM_OPERANDS:
2117 if (MEM_VOLATILE_P (x))
2118 return 1;
2120 default:
2121 break;
2124 /* Recursively scan the operands of this expression. */
2127 const char *const fmt = GET_RTX_FORMAT (code);
2128 int i;
2130 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2132 if (fmt[i] == 'e')
2134 if (volatile_insn_p (XEXP (x, i)))
2135 return 1;
2137 else if (fmt[i] == 'E')
2139 int j;
2140 for (j = 0; j < XVECLEN (x, i); j++)
2141 if (volatile_insn_p (XVECEXP (x, i, j)))
2142 return 1;
2146 return 0;
2149 /* Nonzero if X contains any volatile memory references
2150 UNSPEC_VOLATILE operations or volatile ASM_OPERANDS expressions. */
2153 volatile_refs_p (const_rtx x)
2155 const RTX_CODE code = GET_CODE (x);
2156 switch (code)
2158 case LABEL_REF:
2159 case SYMBOL_REF:
2160 case CONST:
2161 CASE_CONST_ANY:
2162 case CC0:
2163 case PC:
2164 case REG:
2165 case SCRATCH:
2166 case CLOBBER:
2167 case ADDR_VEC:
2168 case ADDR_DIFF_VEC:
2169 return 0;
2171 case UNSPEC_VOLATILE:
2172 return 1;
2174 case MEM:
2175 case ASM_INPUT:
2176 case ASM_OPERANDS:
2177 if (MEM_VOLATILE_P (x))
2178 return 1;
2180 default:
2181 break;
2184 /* Recursively scan the operands of this expression. */
2187 const char *const fmt = GET_RTX_FORMAT (code);
2188 int i;
2190 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2192 if (fmt[i] == 'e')
2194 if (volatile_refs_p (XEXP (x, i)))
2195 return 1;
2197 else if (fmt[i] == 'E')
2199 int j;
2200 for (j = 0; j < XVECLEN (x, i); j++)
2201 if (volatile_refs_p (XVECEXP (x, i, j)))
2202 return 1;
2206 return 0;
2209 /* Similar to above, except that it also rejects register pre- and post-
2210 incrementing. */
2213 side_effects_p (const_rtx x)
2215 const RTX_CODE code = GET_CODE (x);
2216 switch (code)
2218 case LABEL_REF:
2219 case SYMBOL_REF:
2220 case CONST:
2221 CASE_CONST_ANY:
2222 case CC0:
2223 case PC:
2224 case REG:
2225 case SCRATCH:
2226 case ADDR_VEC:
2227 case ADDR_DIFF_VEC:
2228 case VAR_LOCATION:
2229 return 0;
2231 case CLOBBER:
2232 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2233 when some combination can't be done. If we see one, don't think
2234 that we can simplify the expression. */
2235 return (GET_MODE (x) != VOIDmode);
2237 case PRE_INC:
2238 case PRE_DEC:
2239 case POST_INC:
2240 case POST_DEC:
2241 case PRE_MODIFY:
2242 case POST_MODIFY:
2243 case CALL:
2244 case UNSPEC_VOLATILE:
2245 /* case TRAP_IF: This isn't clear yet. */
2246 return 1;
2248 case MEM:
2249 case ASM_INPUT:
2250 case ASM_OPERANDS:
2251 if (MEM_VOLATILE_P (x))
2252 return 1;
2254 default:
2255 break;
2258 /* Recursively scan the operands of this expression. */
2261 const char *fmt = GET_RTX_FORMAT (code);
2262 int i;
2264 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2266 if (fmt[i] == 'e')
2268 if (side_effects_p (XEXP (x, i)))
2269 return 1;
2271 else if (fmt[i] == 'E')
2273 int j;
2274 for (j = 0; j < XVECLEN (x, i); j++)
2275 if (side_effects_p (XVECEXP (x, i, j)))
2276 return 1;
2280 return 0;
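/* The three predicates above form a hierarchy: anything flagged by
   volatile_insn_p is also flagged by volatile_refs_p, and anything flagged
   by volatile_refs_p is also flagged by side_effects_p.  The sketch below
   is illustrative only (it is not part of this file and the helper name is
   made up); it shows the difference on a post-increment memory reference,
   which only side_effects_p rejects.  */
#if 0
static void
example_side_effect_predicates (rtx addr_reg)
{
  /* (mem:SI (post_inc:P (reg))) -- a non-volatile load whose address
     update is nevertheless a side effect.  */
  rtx mem = gen_rtx_MEM (SImode, gen_rtx_POST_INC (Pmode, addr_reg));

  gcc_assert (!volatile_insn_p (mem));  /* No volatile asm or UNSPEC_VOLATILE.  */
  gcc_assert (!volatile_refs_p (mem));  /* The MEM itself is not volatile.  */
  gcc_assert (side_effects_p (mem));    /* But the auto-increment is a side effect.  */
}
#endif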
2283 /* Return nonzero if evaluating rtx X might cause a trap.
2284 FLAGS controls how to consider MEMs. A nonzero value means the context
2285 of the access may have changed from the original, such that the
2286 address may have become invalid. */
2289 may_trap_p_1 (const_rtx x, unsigned flags)
2291 int i;
2292 enum rtx_code code;
2293 const char *fmt;
2295 /* We make no distinction currently, but this function is part of
2296 the internal target-hooks ABI so we keep the parameter as
2297 "unsigned flags". */
2298 bool code_changed = flags != 0;
2300 if (x == 0)
2301 return 0;
2302 code = GET_CODE (x);
2303 switch (code)
2305 /* Handle these cases quickly. */
2306 CASE_CONST_ANY:
2307 case SYMBOL_REF:
2308 case LABEL_REF:
2309 case CONST:
2310 case PC:
2311 case CC0:
2312 case REG:
2313 case SCRATCH:
2314 return 0;
2316 case UNSPEC:
2317 case UNSPEC_VOLATILE:
2318 return targetm.unspec_may_trap_p (x, flags);
2320 case ASM_INPUT:
2321 case TRAP_IF:
2322 return 1;
2324 case ASM_OPERANDS:
2325 return MEM_VOLATILE_P (x);
2327 /* Memory ref can trap unless it's a static var or a stack slot. */
2328 case MEM:
2329 /* Recognize specific pattern of stack checking probes. */
2330 if (flag_stack_check
2331 && MEM_VOLATILE_P (x)
2332 && XEXP (x, 0) == stack_pointer_rtx)
2333 return 1;
2334 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2335 reference; moving it out of context such as when moving code
2336 when optimizing, might cause its address to become invalid. */
2337 code_changed
2338 || !MEM_NOTRAP_P (x))
2340 HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
2341 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2342 GET_MODE (x), code_changed);
2345 return 0;
2347 /* Division by a non-constant might trap. */
2348 case DIV:
2349 case MOD:
2350 case UDIV:
2351 case UMOD:
2352 if (HONOR_SNANS (GET_MODE (x)))
2353 return 1;
2354 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2355 return flag_trapping_math;
2356 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2357 return 1;
2358 break;
2360 case EXPR_LIST:
2361 /* An EXPR_LIST is used to represent a function call. This
2362 certainly may trap. */
2363 return 1;
2365 case GE:
2366 case GT:
2367 case LE:
2368 case LT:
2369 case LTGT:
2370 case COMPARE:
2371 /* Some floating point comparisons may trap. */
2372 if (!flag_trapping_math)
2373 break;
2374 /* ??? There is no machine-independent way to check for tests that trap
2375 when COMPARE is used, though many targets do make this distinction.
2376 For instance, sparc uses CCFPE for compares which generate exceptions
2377 and CCFP for compares which do not generate exceptions. */
2378 if (HONOR_NANS (GET_MODE (x)))
2379 return 1;
2380 /* But often the compare has some CC mode, so check operand
2381 modes as well. */
2382 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2383 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2384 return 1;
2385 break;
2387 case EQ:
2388 case NE:
2389 if (HONOR_SNANS (GET_MODE (x)))
2390 return 1;
2391 /* Often the comparison is in CC mode, so check the operand modes. */
2392 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2393 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2394 return 1;
2395 break;
2397 case FIX:
2398 /* Conversion of floating point might trap. */
2399 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2400 return 1;
2401 break;
2403 case NEG:
2404 case ABS:
2405 case SUBREG:
2406 /* These operations don't trap even with floating point. */
2407 break;
2409 default:
2410 /* Any floating-point arithmetic may trap. */
2411 if (SCALAR_FLOAT_MODE_P (GET_MODE (x))
2412 && flag_trapping_math)
2413 return 1;
2416 fmt = GET_RTX_FORMAT (code);
2417 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2419 if (fmt[i] == 'e')
2421 if (may_trap_p_1 (XEXP (x, i), flags))
2422 return 1;
2424 else if (fmt[i] == 'E')
2426 int j;
2427 for (j = 0; j < XVECLEN (x, i); j++)
2428 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2429 return 1;
2432 return 0;
2435 /* Return nonzero if evaluating rtx X might cause a trap. */
2438 may_trap_p (const_rtx x)
2440 return may_trap_p_1 (x, 0);
2443 /* Same as above, but additionally return nonzero if evaluating rtx X might
2444 cause a fault. We define a fault for the purpose of this function as an
2445 erroneous execution condition that cannot be encountered during the normal
2446 execution of a valid program; the typical example is an unaligned memory
2447 access on a strict alignment machine. The compiler guarantees that it
2448 doesn't generate code that will fault from a valid program, but this
2449 guarantee doesn't mean anything for individual instructions. Consider
2450 the following example:
2452 struct S { int d; union { char *cp; int *ip; }; };
2454 int foo(struct S *s)
2456 if (s->d == 1)
2457 return *s->ip;
2458 else
2459 return *s->cp;
2462 on a strict alignment machine. In a valid program, foo will never be
2463 invoked on a structure for which d is equal to 1 and the underlying
2464 field of the union is not aligned on a 4-byte boundary, but the
2465 expression *s->ip might cause a fault if considered individually.
2467 At the RTL level, potentially problematic expressions will almost always
2468 verify may_trap_p; for example, the above dereference can be emitted as
2469 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2470 However, suppose that foo is inlined in a caller that causes s->cp to
2471 point to a local character variable and guarantees that s->d is not set
2472 to 1; foo may have been effectively translated into pseudo-RTL as:
2474 if ((reg:SI) == 1)
2475 (set (reg:SI) (mem:SI (%fp - 7)))
2476 else
2477 (set (reg:QI) (mem:QI (%fp - 7)))
2479 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2480 memory reference to a stack slot, but it will certainly cause a fault
2481 on a strict alignment machine. */
2484 may_trap_or_fault_p (const_rtx x)
2486 return may_trap_p_1 (x, 1);
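/* A minimal sketch (not part of this file; the helper name is made up) that
   mirrors the (mem:SI (%fp - 7)) example from the comment above: a
   frame-based slot is normally not may_trap_p, yet may_trap_or_fault_p can
   still hold because the misaligned offset could fault once the reference
   is moved out of its original context on a strict-alignment target.  */
#if 0
static void
example_trap_predicates (void)
{
  rtx addr = gen_rtx_PLUS (Pmode, frame_pointer_rtx, GEN_INT (-7));
  rtx mem = gen_rtx_MEM (SImode, addr);
  bool traps = may_trap_p (mem);             /* Usually false here.  */
  bool faults = may_trap_or_fault_p (mem);   /* May well be true here.  */
  (void) traps;
  (void) faults;
}
#endif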
2489 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2490 i.e., an inequality. */
2493 inequality_comparisons_p (const_rtx x)
2495 const char *fmt;
2496 int len, i;
2497 const enum rtx_code code = GET_CODE (x);
2499 switch (code)
2501 case REG:
2502 case SCRATCH:
2503 case PC:
2504 case CC0:
2505 CASE_CONST_ANY:
2506 case CONST:
2507 case LABEL_REF:
2508 case SYMBOL_REF:
2509 return 0;
2511 case LT:
2512 case LTU:
2513 case GT:
2514 case GTU:
2515 case LE:
2516 case LEU:
2517 case GE:
2518 case GEU:
2519 return 1;
2521 default:
2522 break;
2525 len = GET_RTX_LENGTH (code);
2526 fmt = GET_RTX_FORMAT (code);
2528 for (i = 0; i < len; i++)
2530 if (fmt[i] == 'e')
2532 if (inequality_comparisons_p (XEXP (x, i)))
2533 return 1;
2535 else if (fmt[i] == 'E')
2537 int j;
2538 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2539 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2540 return 1;
2544 return 0;
2547 /* Replace any occurrence of FROM in X with TO. The function does
2548 not descend into CONST_DOUBLE expressions when replacing.
2550 Note that copying is not done, so X must not be shared unless all copies
2551 are to be modified. */
2554 replace_rtx (rtx x, rtx from, rtx to)
2556 int i, j;
2557 const char *fmt;
2559 if (x == from)
2560 return to;
2562 /* Allow this function to make replacements in EXPR_LISTs. */
2563 if (x == 0)
2564 return 0;
2566 if (GET_CODE (x) == SUBREG)
2568 rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
2570 if (CONST_INT_P (new_rtx))
2572 x = simplify_subreg (GET_MODE (x), new_rtx,
2573 GET_MODE (SUBREG_REG (x)),
2574 SUBREG_BYTE (x));
2575 gcc_assert (x);
2577 else
2578 SUBREG_REG (x) = new_rtx;
2580 return x;
2582 else if (GET_CODE (x) == ZERO_EXTEND)
2584 rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
2586 if (CONST_INT_P (new_rtx))
2588 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2589 new_rtx, GET_MODE (XEXP (x, 0)));
2590 gcc_assert (x);
2592 else
2593 XEXP (x, 0) = new_rtx;
2595 return x;
2598 fmt = GET_RTX_FORMAT (GET_CODE (x));
2599 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2601 if (fmt[i] == 'e')
2602 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2603 else if (fmt[i] == 'E')
2604 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2605 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2608 return x;
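/* A short usage sketch (not part of this file; the helper name is made up):
   rewrite every use of one register by another inside a pattern.  copy_rtx
   is needed because, as noted above, replace_rtx modifies X in place.  */
#if 0
static rtx
example_replace_reg (rtx pat, rtx old_reg, rtx new_reg)
{
  return replace_rtx (copy_rtx (pat), old_reg, new_reg);
}
#endif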
2611 /* Replace occurrences of the old label in *X with the new one.
2612 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2615 replace_label (rtx *x, void *data)
2617 rtx l = *x;
2618 rtx old_label = ((replace_label_data *) data)->r1;
2619 rtx new_label = ((replace_label_data *) data)->r2;
2620 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2622 if (l == NULL_RTX)
2623 return 0;
2625 if (GET_CODE (l) == SYMBOL_REF
2626 && CONSTANT_POOL_ADDRESS_P (l))
2628 rtx c = get_pool_constant (l);
2629 if (rtx_referenced_p (old_label, c))
2631 rtx new_c, new_l;
2632 replace_label_data *d = (replace_label_data *) data;
2634 /* Create a copy of constant C; replace the label inside
2635 but do not update LABEL_NUSES because uses in constant pool
2636 are not counted. */
2637 new_c = copy_rtx (c);
2638 d->update_label_nuses = false;
2639 for_each_rtx (&new_c, replace_label, data);
2640 d->update_label_nuses = update_label_nuses;
2642 /* Add the new constant NEW_C to constant pool and replace
2643 the old reference to constant by new reference. */
2644 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2645 *x = replace_rtx (l, l, new_l);
2647 return 0;
2650 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2651 field. This is not handled by for_each_rtx because it doesn't
2652 handle unprinted ('0') fields. */
2653 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2654 JUMP_LABEL (l) = new_label;
2656 if ((GET_CODE (l) == LABEL_REF
2657 || GET_CODE (l) == INSN_LIST)
2658 && XEXP (l, 0) == old_label)
2660 XEXP (l, 0) = new_label;
2661 if (update_label_nuses)
2663 ++LABEL_NUSES (new_label);
2664 --LABEL_NUSES (old_label);
2666 return 0;
2669 return 0;
2672 /* If *BODY is equal to X, or X is directly referenced by *BODY,
2673 return nonzero so that FOR_EACH_RTX stops traversing and returns nonzero
2674 too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2676 static int
2677 rtx_referenced_p_1 (rtx *body, void *x)
2679 rtx y = (rtx) x;
2681 if (*body == NULL_RTX)
2682 return y == NULL_RTX;
2684 /* Return true if a label_ref *BODY refers to label Y. */
2685 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2686 return XEXP (*body, 0) == y;
2688 /* If *BODY is a reference to a pool constant, traverse the constant. */
2689 if (GET_CODE (*body) == SYMBOL_REF
2690 && CONSTANT_POOL_ADDRESS_P (*body))
2691 return rtx_referenced_p (y, get_pool_constant (*body));
2693 /* By default, compare the RTL expressions. */
2694 return rtx_equal_p (*body, y);
2697 /* Return true if X is referenced in BODY. */
2700 rtx_referenced_p (rtx x, rtx body)
2702 return for_each_rtx (&body, rtx_referenced_p_1, x);
2705 /* If INSN is a tablejump, return true and store the label (preceding the jump
2706 table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2708 bool
2709 tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
2711 rtx label, table;
2713 if (!JUMP_P (insn))
2714 return false;
2716 label = JUMP_LABEL (insn);
2717 if (label != NULL_RTX && !ANY_RETURN_P (label)
2718 && (table = next_active_insn (label)) != NULL_RTX
2719 && JUMP_TABLE_DATA_P (table))
2721 if (labelp)
2722 *labelp = label;
2723 if (tablep)
2724 *tablep = table;
2725 return true;
2727 return false;
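/* A short usage sketch (not part of this file; the helper name is made up):
   once INSN is known to be a tablejump, walk the entries of its jump table.
   The label vector is operand 0 of an ADDR_VEC and operand 1 of an
   ADDR_DIFF_VEC, as label_is_jump_target_p below also relies on.  */
#if 0
static void
example_walk_tablejump (rtx insn)
{
  rtx label, table;

  if (tablejump_p (insn, &label, &table))
    {
      rtx body = PATTERN (table);
      rtvec vec = XVEC (body, GET_CODE (body) == ADDR_DIFF_VEC);
      int i;

      for (i = 0; i < GET_NUM_ELEM (vec); i++)
	{
	  rtx target = XEXP (RTVEC_ELT (vec, i), 0);
	  /* ... use TARGET ...  */
	  (void) target;
	}
    }
}
#endif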
2730 /* A subroutine of computed_jump_p. Return 1 if X contains a REG or MEM or
2731 constant that is not in the constant pool and not in the condition
2732 of an IF_THEN_ELSE. */
2734 static int
2735 computed_jump_p_1 (const_rtx x)
2737 const enum rtx_code code = GET_CODE (x);
2738 int i, j;
2739 const char *fmt;
2741 switch (code)
2743 case LABEL_REF:
2744 case PC:
2745 return 0;
2747 case CONST:
2748 CASE_CONST_ANY:
2749 case SYMBOL_REF:
2750 case REG:
2751 return 1;
2753 case MEM:
2754 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2755 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2757 case IF_THEN_ELSE:
2758 return (computed_jump_p_1 (XEXP (x, 1))
2759 || computed_jump_p_1 (XEXP (x, 2)));
2761 default:
2762 break;
2765 fmt = GET_RTX_FORMAT (code);
2766 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2768 if (fmt[i] == 'e'
2769 && computed_jump_p_1 (XEXP (x, i)))
2770 return 1;
2772 else if (fmt[i] == 'E')
2773 for (j = 0; j < XVECLEN (x, i); j++)
2774 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2775 return 1;
2778 return 0;
2781 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2783 Tablejumps and casesi insns are not considered indirect jumps;
2784 we can recognize them by a (use (label_ref)). */
2787 computed_jump_p (const_rtx insn)
2789 int i;
2790 if (JUMP_P (insn))
2792 rtx pat = PATTERN (insn);
2794 /* If we have a JUMP_LABEL set, we're not a computed jump. */
2795 if (JUMP_LABEL (insn) != NULL)
2796 return 0;
2798 if (GET_CODE (pat) == PARALLEL)
2800 int len = XVECLEN (pat, 0);
2801 int has_use_labelref = 0;
2803 for (i = len - 1; i >= 0; i--)
2804 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2805 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2806 == LABEL_REF))
2807 has_use_labelref = 1;
2809 if (! has_use_labelref)
2810 for (i = len - 1; i >= 0; i--)
2811 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2812 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2813 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2814 return 1;
2816 else if (GET_CODE (pat) == SET
2817 && SET_DEST (pat) == pc_rtx
2818 && computed_jump_p_1 (SET_SRC (pat)))
2819 return 1;
2821 return 0;
2824 /* Optimized inner loop of for_each_rtx, trying to avoid useless recursive
2825 calls. Processes the subexpressions of EXP, starting with operand N, and passes them to F. */
2826 static int
2827 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2829 int result, i, j;
2830 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2831 rtx *x;
2833 for (; format[n] != '\0'; n++)
2835 switch (format[n])
2837 case 'e':
2838 /* Call F on X. */
2839 x = &XEXP (exp, n);
2840 result = (*f) (x, data);
2841 if (result == -1)
2842 /* Do not traverse sub-expressions. */
2843 continue;
2844 else if (result != 0)
2845 /* Stop the traversal. */
2846 return result;
2848 if (*x == NULL_RTX)
2849 /* There are no sub-expressions. */
2850 continue;
2852 i = non_rtx_starting_operands[GET_CODE (*x)];
2853 if (i >= 0)
2855 result = for_each_rtx_1 (*x, i, f, data);
2856 if (result != 0)
2857 return result;
2859 break;
2861 case 'V':
2862 case 'E':
2863 if (XVEC (exp, n) == 0)
2864 continue;
2865 for (j = 0; j < XVECLEN (exp, n); ++j)
2867 /* Call F on X. */
2868 x = &XVECEXP (exp, n, j);
2869 result = (*f) (x, data);
2870 if (result == -1)
2871 /* Do not traverse sub-expressions. */
2872 continue;
2873 else if (result != 0)
2874 /* Stop the traversal. */
2875 return result;
2877 if (*x == NULL_RTX)
2878 /* There are no sub-expressions. */
2879 continue;
2881 i = non_rtx_starting_operands[GET_CODE (*x)];
2882 if (i >= 0)
2884 result = for_each_rtx_1 (*x, i, f, data);
2885 if (result != 0)
2886 return result;
2889 break;
2891 default:
2892 /* Nothing to do. */
2893 break;
2897 return 0;
2900 /* Traverse X via depth-first search, calling F for each
2901 sub-expression (including X itself). F is also passed the DATA.
2902 If F returns -1, do not traverse sub-expressions, but continue
2903 traversing the rest of the tree. If F ever returns any other
2904 nonzero value, stop the traversal, and return the value returned
2905 by F. Otherwise, return 0. This function does not traverse inside
2906 tree structure that contains RTX_EXPRs, or into sub-expressions
2907 whose format code is `0' since it is not known whether or not those
2908 codes are actually RTL.
2910 This routine is very general, and could (should?) be used to
2911 implement many of the other routines in this file. */
2914 for_each_rtx (rtx *x, rtx_function f, void *data)
2916 int result;
2917 int i;
2919 /* Call F on X. */
2920 result = (*f) (x, data);
2921 if (result == -1)
2922 /* Do not traverse sub-expressions. */
2923 return 0;
2924 else if (result != 0)
2925 /* Stop the traversal. */
2926 return result;
2928 if (*x == NULL_RTX)
2929 /* There are no sub-expressions. */
2930 return 0;
2932 i = non_rtx_starting_operands[GET_CODE (*x)];
2933 if (i < 0)
2934 return 0;
2936 return for_each_rtx_1 (*x, i, f, data);
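/* A minimal sketch (not part of this file; the helper names are made up) of
   the callback protocol documented above: count the outermost MEMs in an
   expression, returning -1 from the callback so the traversal does not
   descend into the address of a MEM that was already counted.  */
#if 0
static int
count_mems_1 (rtx *x, void *data)
{
  if (*x != NULL_RTX && MEM_P (*x))
    {
      ++*(int *) data;
      return -1;	/* Do not look inside this MEM.  */
    }
  return 0;		/* Keep traversing.  */
}

static int
count_outermost_mems (rtx x)
{
  int count = 0;

  for_each_rtx (&x, count_mems_1, &count);
  return count;
}
#endif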
2941 /* Data structure that holds the internal state communicated between
2942 for_each_inc_dec, for_each_inc_dec_find_mem and
2943 for_each_inc_dec_find_inc_dec. */
2945 struct for_each_inc_dec_ops {
2946 /* The function to be called for each autoinc operation found. */
2947 for_each_inc_dec_fn fn;
2948 /* The opaque argument to be passed to it. */
2949 void *arg;
2950 /* The MEM we're visiting, if any. */
2951 rtx mem;
2954 static int for_each_inc_dec_find_mem (rtx *r, void *d);
2956 /* Find PRE/POST-INC/DEC/MODIFY operations within *R, extract the
2957 operands of the equivalent add insn and pass the result to the
2958 operator specified by *D. */
2960 static int
2961 for_each_inc_dec_find_inc_dec (rtx *r, void *d)
2963 rtx x = *r;
2964 struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *)d;
2966 switch (GET_CODE (x))
2968 case PRE_INC:
2969 case POST_INC:
2971 int size = GET_MODE_SIZE (GET_MODE (data->mem));
2972 rtx r1 = XEXP (x, 0);
2973 rtx c = gen_int_mode (size, GET_MODE (r1));
2974 return data->fn (data->mem, x, r1, r1, c, data->arg);
2977 case PRE_DEC:
2978 case POST_DEC:
2980 int size = GET_MODE_SIZE (GET_MODE (data->mem));
2981 rtx r1 = XEXP (x, 0);
2982 rtx c = gen_int_mode (-size, GET_MODE (r1));
2983 return data->fn (data->mem, x, r1, r1, c, data->arg);
2986 case PRE_MODIFY:
2987 case POST_MODIFY:
2989 rtx r1 = XEXP (x, 0);
2990 rtx add = XEXP (x, 1);
2991 return data->fn (data->mem, x, r1, add, NULL, data->arg);
2994 case MEM:
2996 rtx save = data->mem;
2997 int ret = for_each_inc_dec_find_mem (r, d);
2998 data->mem = save;
2999 return ret;
3002 default:
3003 return 0;
3007 /* If *R is a MEM, find PRE/POST-INC/DEC/MODIFY operations within its
3008 address, extract the operands of the equivalent add insn and pass
3009 the result to the operator specified by *D. */
3011 static int
3012 for_each_inc_dec_find_mem (rtx *r, void *d)
3014 rtx x = *r;
3015 if (x != NULL_RTX && MEM_P (x))
3017 struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *) d;
3018 int result;
3020 data->mem = x;
3022 result = for_each_rtx (&XEXP (x, 0), for_each_inc_dec_find_inc_dec,
3023 data);
3024 if (result)
3025 return result;
3027 return -1;
3029 return 0;
3032 /* Traverse *X looking for MEMs, and for autoinc operations within
3033 them. For each such autoinc operation found, call FN, passing it
3034 the innermost enclosing MEM, the operation itself, the RTX modified
3035 by the operation, two RTXs (the second may be NULL) that, once
3036 added, represent the value to be held by the modified RTX
3037 afterwards, and ARG. FN is to return -1 to skip looking for other
3038 autoinc operations within the visited operation, 0 to continue the
3039 traversal, or any other value to have it returned to the caller of
3040 for_each_inc_dec. */
3043 for_each_inc_dec (rtx *x,
3044 for_each_inc_dec_fn fn,
3045 void *arg)
3047 struct for_each_inc_dec_ops data;
3049 data.fn = fn;
3050 data.arg = arg;
3051 data.mem = NULL;
3053 return for_each_rtx (x, for_each_inc_dec_find_mem, &data);
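/* A minimal sketch (not part of this file; the helper name is made up) of a
   for_each_inc_dec callback honoring the contract above: DEST is the
   register being auto-modified, and SRC together with SRCOFF (SRCOFF is
   NULL for PRE/POST_MODIFY, where SRC is already the full expression)
   describes its new value.  */
#if 0
static int
example_note_autoinc (rtx mem ATTRIBUTE_UNUSED, rtx op ATTRIBUTE_UNUSED,
		      rtx dest, rtx src, rtx srcoff, void *arg ATTRIBUTE_UNUSED)
{
  /* For PRE/POST_INC/DEC, SRC == DEST and SRCOFF is a signed CONST_INT
     holding plus or minus the size of the enclosing MEM.  */
  (void) dest;
  (void) src;
  (void) srcoff;
  return 0;	/* Continue looking for further autoinc operations.  */
}
#endif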
3057 /* Searches X for any reference to REGNO, returning the rtx of the
3058 reference found if any. Otherwise, returns NULL_RTX. */
3061 regno_use_in (unsigned int regno, rtx x)
3063 const char *fmt;
3064 int i, j;
3065 rtx tem;
3067 if (REG_P (x) && REGNO (x) == regno)
3068 return x;
3070 fmt = GET_RTX_FORMAT (GET_CODE (x));
3071 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3073 if (fmt[i] == 'e')
3075 if ((tem = regno_use_in (regno, XEXP (x, i))))
3076 return tem;
3078 else if (fmt[i] == 'E')
3079 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3080 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3081 return tem;
3084 return NULL_RTX;
3087 /* Return a value indicating whether OP, an operand of a commutative
3088 operation, is preferred as the first or second operand. The higher
3089 the value, the stronger the preference for being the first operand.
3090 We use negative values to indicate a preference for the first operand
3091 and positive values for the second operand. */
3094 commutative_operand_precedence (rtx op)
3096 enum rtx_code code = GET_CODE (op);
3098 /* Constants always come second. Prefer "nice" constants. */
3099 if (code == CONST_INT)
3100 return -8;
3101 if (code == CONST_DOUBLE)
3102 return -7;
3103 if (code == CONST_FIXED)
3104 return -7;
3105 op = avoid_constant_pool_reference (op);
3106 code = GET_CODE (op);
3108 switch (GET_RTX_CLASS (code))
3110 case RTX_CONST_OBJ:
3111 if (code == CONST_INT)
3112 return -6;
3113 if (code == CONST_DOUBLE)
3114 return -5;
3115 if (code == CONST_FIXED)
3116 return -5;
3117 return -4;
3119 case RTX_EXTRA:
3120 /* SUBREGs of objects should come second. */
3121 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3122 return -3;
3123 return 0;
3125 case RTX_OBJ:
3126 /* Complex expressions should come first, so decrease the priority
3127 of objects. Prefer pointer objects over non-pointer objects. */
3128 if ((REG_P (op) && REG_POINTER (op))
3129 || (MEM_P (op) && MEM_POINTER (op)))
3130 return -1;
3131 return -2;
3133 case RTX_COMM_ARITH:
3134 /* Prefer operands that are themselves commutative to be first.
3135 This helps to make things linear. In particular,
3136 (and (and (reg) (reg)) (not (reg))) is canonical. */
3137 return 4;
3139 case RTX_BIN_ARITH:
3140 /* If only one operand is a binary expression, it will be the first
3141 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3142 is canonical, although it will usually be further simplified. */
3143 return 2;
3145 case RTX_UNARY:
3146 /* Then prefer NEG and NOT. */
3147 if (code == NEG || code == NOT)
3148 return 1;
3150 default:
3151 return 0;
3155 /* Return 1 iff it is necessary to swap the operands of a commutative operation
3156 in order to canonicalize the expression. */
3158 bool
3159 swap_commutative_operands_p (rtx x, rtx y)
3161 return (commutative_operand_precedence (x)
3162 < commutative_operand_precedence (y));
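/* A short sketch (not part of this file; the helper name is made up) of the
   canonicalization these two routines drive: place the higher-precedence
   operand first, which in particular pushes constants into the second
   position, e.g. (plus (const_int 4) (reg)) becomes (plus (reg) (const_int 4)).  */
#if 0
static void
example_canonicalize_commutative (rtx *op0, rtx *op1)
{
  if (swap_commutative_operands_p (*op0, *op1))
    {
      rtx tem = *op0;
      *op0 = *op1;
      *op1 = tem;
    }
}
#endif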
3165 /* Return 1 if X is an autoincrement side effect and the register is
3166 not the stack pointer. */
3168 auto_inc_p (const_rtx x)
3170 switch (GET_CODE (x))
3172 case PRE_INC:
3173 case POST_INC:
3174 case PRE_DEC:
3175 case POST_DEC:
3176 case PRE_MODIFY:
3177 case POST_MODIFY:
3178 /* There are no REG_INC notes for SP. */
3179 if (XEXP (x, 0) != stack_pointer_rtx)
3180 return 1;
3181 default:
3182 break;
3184 return 0;
3187 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3189 loc_mentioned_in_p (rtx *loc, const_rtx in)
3191 enum rtx_code code;
3192 const char *fmt;
3193 int i, j;
3195 if (!in)
3196 return 0;
3198 code = GET_CODE (in);
3199 fmt = GET_RTX_FORMAT (code);
3200 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3202 if (fmt[i] == 'e')
3204 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3205 return 1;
3207 else if (fmt[i] == 'E')
3208 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3209 if (loc == &XVECEXP (in, i, j)
3210 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3211 return 1;
3213 return 0;
3216 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3217 and SUBREG_BYTE, return the bit offset where the subreg begins
3218 (counting from the least significant bit of the operand). */
3220 unsigned int
3221 subreg_lsb_1 (enum machine_mode outer_mode,
3222 enum machine_mode inner_mode,
3223 unsigned int subreg_byte)
3225 unsigned int bitpos;
3226 unsigned int byte;
3227 unsigned int word;
3229 /* A paradoxical subreg begins at bit position 0. */
3230 if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
3231 return 0;
3233 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3234 /* If the subreg crosses a word boundary ensure that
3235 it also begins and ends on a word boundary. */
3236 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3237 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3238 && (subreg_byte % UNITS_PER_WORD
3239 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3241 if (WORDS_BIG_ENDIAN)
3242 word = (GET_MODE_SIZE (inner_mode)
3243 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3244 else
3245 word = subreg_byte / UNITS_PER_WORD;
3246 bitpos = word * BITS_PER_WORD;
3248 if (BYTES_BIG_ENDIAN)
3249 byte = (GET_MODE_SIZE (inner_mode)
3250 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3251 else
3252 byte = subreg_byte % UNITS_PER_WORD;
3253 bitpos += byte * BITS_PER_UNIT;
3255 return bitpos;
3258 /* Given a subreg X, return the bit offset where the subreg begins
3259 (counting from the least significant bit of the reg). */
3261 unsigned int
3262 subreg_lsb (const_rtx x)
3264 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3265 SUBREG_BYTE (x));
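/* Worked example for the routine above, assuming 4-byte words and 8-bit
   units: for (subreg:SI (reg:DI R) 4), subreg_lsb_1 (SImode, DImode, 4)
   yields bit 32 when neither WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN is set
   (word 1, byte 0 -- the high-order half of R) and bit 0 when both are set
   (word 0, byte 0 -- the low-order half of R), counting from the least
   significant bit in each case.  */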
3268 /* Fill in information about a subreg of a hard register.
3269 xregno - A regno of an inner hard subreg_reg (or what will become one).
3270 xmode - The mode of xregno.
3271 offset - The byte offset.
3272 ymode - The mode of a top level SUBREG (or what may become one).
3273 info - Pointer to structure to fill in. */
3274 void
3275 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3276 unsigned int offset, enum machine_mode ymode,
3277 struct subreg_info *info)
3279 int nregs_xmode, nregs_ymode;
3280 int mode_multiple, nregs_multiple;
3281 int offset_adj, y_offset, y_offset_adj;
3282 int regsize_xmode, regsize_ymode;
3283 bool rknown;
3285 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3287 rknown = false;
3289 /* If there are holes in a non-scalar mode in registers, we expect
3290 that it is made up of its units concatenated together. */
3291 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3293 enum machine_mode xmode_unit;
3295 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3296 if (GET_MODE_INNER (xmode) == VOIDmode)
3297 xmode_unit = xmode;
3298 else
3299 xmode_unit = GET_MODE_INNER (xmode);
3300 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3301 gcc_assert (nregs_xmode
3302 == (GET_MODE_NUNITS (xmode)
3303 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3304 gcc_assert (hard_regno_nregs[xregno][xmode]
3305 == (hard_regno_nregs[xregno][xmode_unit]
3306 * GET_MODE_NUNITS (xmode)));
3308 /* You can only ask for a SUBREG of a value with holes in the middle
3309 if you don't cross the holes. (Such a SUBREG should be done by
3310 picking a different register class, or doing it in memory if
3311 necessary.) An example of a value with holes is XCmode on 32-bit
3312 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3313 3 for each part, but in memory it's two 128-bit parts.
3314 Padding is assumed to be at the end (not necessarily the 'high part')
3315 of each unit. */
3316 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3317 < GET_MODE_NUNITS (xmode))
3318 && (offset / GET_MODE_SIZE (xmode_unit)
3319 != ((offset + GET_MODE_SIZE (ymode) - 1)
3320 / GET_MODE_SIZE (xmode_unit))))
3322 info->representable_p = false;
3323 rknown = true;
3326 else
3327 nregs_xmode = hard_regno_nregs[xregno][xmode];
3329 nregs_ymode = hard_regno_nregs[xregno][ymode];
3331 /* Paradoxical subregs are otherwise valid. */
3332 if (!rknown
3333 && offset == 0
3334 && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
3336 info->representable_p = true;
3337 /* If this is a big endian paradoxical subreg, which uses more
3338 actual hard registers than the original register, we must
3339 return a negative offset so that we find the proper highpart
3340 of the register. */
3341 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3342 ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3343 info->offset = nregs_xmode - nregs_ymode;
3344 else
3345 info->offset = 0;
3346 info->nregs = nregs_ymode;
3347 return;
3350 /* If registers store different numbers of bits in the different
3351 modes, we cannot generally form this subreg. */
3352 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3353 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3354 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3355 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3357 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3358 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3359 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3361 info->representable_p = false;
3362 info->nregs
3363 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3364 info->offset = offset / regsize_xmode;
3365 return;
3367 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3369 info->representable_p = false;
3370 info->nregs
3371 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3372 info->offset = offset / regsize_xmode;
3373 return;
3377 /* Lowpart subregs are otherwise valid. */
3378 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3380 info->representable_p = true;
3381 rknown = true;
3383 if (offset == 0 || nregs_xmode == nregs_ymode)
3385 info->offset = 0;
3386 info->nregs = nregs_ymode;
3387 return;
3391 /* This should always pass, otherwise we don't know how to verify
3392 the constraint. These conditions may be relaxed but
3393 subreg_regno_offset would need to be redesigned. */
3394 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3395 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3397 if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN
3398 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD)
3400 HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode);
3401 HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode);
3402 HOST_WIDE_INT off_low = offset & (ysize - 1);
3403 HOST_WIDE_INT off_high = offset & ~(ysize - 1);
3404 offset = (xsize - ysize - off_high) | off_low;
3406 /* The XMODE value can be seen as a vector of NREGS_XMODE
3407 values. The subreg must represent a lowpart of a given field.
3408 Compute which field it is. */
3409 offset_adj = offset;
3410 offset_adj -= subreg_lowpart_offset (ymode,
3411 mode_for_size (GET_MODE_BITSIZE (xmode)
3412 / nregs_xmode,
3413 MODE_INT, 0));
3415 /* Size of ymode must not be greater than the size of xmode. */
3416 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3417 gcc_assert (mode_multiple != 0);
3419 y_offset = offset / GET_MODE_SIZE (ymode);
3420 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3421 nregs_multiple = nregs_xmode / nregs_ymode;
3423 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3424 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3426 if (!rknown)
3428 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3429 rknown = true;
3431 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3432 info->nregs = nregs_ymode;
3435 /* This function returns the regno offset of a subreg expression.
3436 xregno - A regno of an inner hard subreg_reg (or what will become one).
3437 xmode - The mode of xregno.
3438 offset - The byte offset.
3439 ymode - The mode of a top level SUBREG (or what may become one).
3440 RETURN - The regno offset which would be used. */
3441 unsigned int
3442 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3443 unsigned int offset, enum machine_mode ymode)
3445 struct subreg_info info;
3446 subreg_get_info (xregno, xmode, offset, ymode, &info);
3447 return info.offset;
3450 /* This function returns true when the offset is representable via
3451 subreg_offset in the given regno.
3452 xregno - A regno of an inner hard subreg_reg (or what will become one).
3453 xmode - The mode of xregno.
3454 offset - The byte offset.
3455 ymode - The mode of a top level SUBREG (or what may become one).
3456 RETURN - Whether the offset is representable. */
3457 bool
3458 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3459 unsigned int offset, enum machine_mode ymode)
3461 struct subreg_info info;
3462 subreg_get_info (xregno, xmode, offset, ymode, &info);
3463 return info.representable_p;
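/* A short usage sketch (not part of this file; the helper name is made up):
   the two wrappers above are typically combined when turning a
   hard-register SUBREG into a concrete register number, much as
   simplify_subreg_regno below does after its target-specific checks.  */
#if 0
static int
example_subreg_to_regno (unsigned int xregno, enum machine_mode xmode,
			 unsigned int offset, enum machine_mode ymode)
{
  if (!subreg_offset_representable_p (xregno, xmode, offset, ymode))
    return -1;
  return (int) (xregno + subreg_regno_offset (xregno, xmode, offset, ymode));
}
#endif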
3466 /* Return the number of a YMODE register to which
3468 (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3470 can be simplified. Return -1 if the subreg can't be simplified.
3472 XREGNO is a hard register number. */
3475 simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
3476 unsigned int offset, enum machine_mode ymode)
3478 struct subreg_info info;
3479 unsigned int yregno;
3481 #ifdef CANNOT_CHANGE_MODE_CLASS
3482 /* Give the backend a chance to disallow the mode change. */
3483 if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3484 && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3485 && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode)
3486 /* We can use mode change in LRA for some transformations. */
3487 && ! lra_in_progress)
3488 return -1;
3489 #endif
3491 /* We shouldn't simplify stack-related registers. */
3492 if ((!reload_completed || frame_pointer_needed)
3493 && xregno == FRAME_POINTER_REGNUM)
3494 return -1;
3496 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3497 /* We should convert the arg register in LRA after the elimination
3498 if possible. */
3499 && xregno == ARG_POINTER_REGNUM
3500 && ! lra_in_progress)
3501 return -1;
3503 if (xregno == STACK_POINTER_REGNUM
3504 /* We should convert the hard stack register in LRA if
3505 possible. */
3506 && ! lra_in_progress)
3507 return -1;
3509 /* Try to get the register offset. */
3510 subreg_get_info (xregno, xmode, offset, ymode, &info);
3511 if (!info.representable_p)
3512 return -1;
3514 /* Make sure that the offsetted register value is in range. */
3515 yregno = xregno + info.offset;
3516 if (!HARD_REGISTER_NUM_P (yregno))
3517 return -1;
3519 /* See whether (reg:YMODE YREGNO) is valid.
3521 ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3522 This is a kludge to work around how complex FP arguments are passed
3523 on IA-64 and should be fixed. See PR target/49226. */
3524 if (!HARD_REGNO_MODE_OK (yregno, ymode)
3525 && HARD_REGNO_MODE_OK (xregno, xmode))
3526 return -1;
3528 return (int) yregno;
3531 /* Return the final regno that a subreg expression refers to. */
3532 unsigned int
3533 subreg_regno (const_rtx x)
3535 unsigned int ret;
3536 rtx subreg = SUBREG_REG (x);
3537 int regno = REGNO (subreg);
3539 ret = regno + subreg_regno_offset (regno,
3540 GET_MODE (subreg),
3541 SUBREG_BYTE (x),
3542 GET_MODE (x));
3543 return ret;
3547 /* Return the number of registers that a subreg expression refers
3548 to. */
3549 unsigned int
3550 subreg_nregs (const_rtx x)
3552 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3555 /* Return the number of registers that a subreg expression with register
3556 number REGNO refers to. This is a copy of subreg_nregs above,
3557 changed so that the regno can be passed in. */
3559 unsigned int
3560 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3562 struct subreg_info info;
3563 rtx subreg = SUBREG_REG (x);
3565 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3566 &info);
3567 return info.nregs;
3571 struct parms_set_data
3573 int nregs;
3574 HARD_REG_SET regs;
3577 /* Helper function for noticing stores to parameter registers. */
3578 static void
3579 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3581 struct parms_set_data *const d = (struct parms_set_data *) data;
3582 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3583 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3585 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3586 d->nregs--;
3590 /* Look backward for the first parameter to be loaded.
3591 Note that loads of all parameters will not necessarily be
3592 found if CSE has eliminated some of them (e.g., an argument
3593 to the outer function is passed down as a parameter).
3594 Do not skip BOUNDARY. */
3596 find_first_parameter_load (rtx call_insn, rtx boundary)
3598 struct parms_set_data parm;
3599 rtx p, before, first_set;
3601 /* Since different machines initialize their parameter registers
3602 in different orders, assume nothing. Collect the set of all
3603 parameter registers. */
3604 CLEAR_HARD_REG_SET (parm.regs);
3605 parm.nregs = 0;
3606 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3607 if (GET_CODE (XEXP (p, 0)) == USE
3608 && REG_P (XEXP (XEXP (p, 0), 0)))
3610 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3612 /* We only care about registers which can hold function
3613 arguments. */
3614 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3615 continue;
3617 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3618 parm.nregs++;
3620 before = call_insn;
3621 first_set = call_insn;
3623 /* Search backward for the first set of a register in this set. */
3624 while (parm.nregs && before != boundary)
3626 before = PREV_INSN (before);
3628 /* It is possible that some loads got CSEed from one call to
3629 another. Stop in that case. */
3630 if (CALL_P (before))
3631 break;
3633 /* Our caller must either ensure that we will find all sets
3634 (in case the code has not been optimized yet), or guard against
3635 possible labels by setting BOUNDARY to the preceding
3636 CODE_LABEL. */
3637 if (LABEL_P (before))
3639 gcc_assert (before == boundary);
3640 break;
3643 if (INSN_P (before))
3645 int nregs_old = parm.nregs;
3646 note_stores (PATTERN (before), parms_set, &parm);
3647 /* If we found something that did not set a parameter reg,
3648 we're done. Do not keep going, as that might result
3649 in hoisting an insn before the setting of a pseudo
3650 that is used by the hoisted insn. */
3651 if (nregs_old != parm.nregs)
3652 first_set = before;
3653 else
3654 break;
3657 return first_set;
3660 /* Return true if we should avoid inserting code between INSN and preceding
3661 call instruction. */
3663 bool
3664 keep_with_call_p (const_rtx insn)
3666 rtx set;
3668 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3670 if (REG_P (SET_DEST (set))
3671 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3672 && fixed_regs[REGNO (SET_DEST (set))]
3673 && general_operand (SET_SRC (set), VOIDmode))
3674 return true;
3675 if (REG_P (SET_SRC (set))
3676 && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
3677 && REG_P (SET_DEST (set))
3678 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3679 return true;
3680 /* There may be a stack pop just after the call and before the store
3681 of the return register. Search for the actual store when deciding
3682 if we can break or not. */
3683 if (SET_DEST (set) == stack_pointer_rtx)
3685 /* This CONST_CAST is okay because next_nonnote_insn just
3686 returns its argument and we assign it to a const_rtx
3687 variable. */
3688 const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX(insn));
3689 if (i2 && keep_with_call_p (i2))
3690 return true;
3693 return false;
3696 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3697 to non-complex jumps. That is, direct unconditional, conditional,
3698 and tablejumps, but not computed jumps or returns. It also does
3699 not apply to the fallthru case of a conditional jump. */
3701 bool
3702 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3704 rtx tmp = JUMP_LABEL (jump_insn);
3706 if (label == tmp)
3707 return true;
3709 if (tablejump_p (jump_insn, NULL, &tmp))
3711 rtvec vec = XVEC (PATTERN (tmp),
3712 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3713 int i, veclen = GET_NUM_ELEM (vec);
3715 for (i = 0; i < veclen; ++i)
3716 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3717 return true;
3720 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3721 return true;
3723 return false;
3727 /* Return an estimate of the cost of computing rtx X.
3728 One use is in cse, to decide which expression to keep in the hash table.
3729 Another is in rtl generation, to pick the cheapest way to multiply.
3730 Other uses like the latter are expected in the future.
3732 X appears as operand OPNO in an expression with code OUTER_CODE.
3733 SPEED specifies whether costs optimized for speed or size should
3734 be returned. */
3737 rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed)
3739 int i, j;
3740 enum rtx_code code;
3741 const char *fmt;
3742 int total;
3743 int factor;
3745 if (x == 0)
3746 return 0;
3748 /* A size N times larger than UNITS_PER_WORD likely needs N times as
3749 many insns, taking N times as long. */
3750 factor = GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD;
3751 if (factor == 0)
3752 factor = 1;
3754 /* Compute the default costs of certain things.
3755 Note that targetm.rtx_costs can override the defaults. */
3757 code = GET_CODE (x);
3758 switch (code)
3760 case MULT:
3761 /* Multiplication has time-complexity O(N*N), where N is the
3762 number of units (translated from digits) when using
3763 schoolbook long multiplication. */
3764 total = factor * factor * COSTS_N_INSNS (5);
3765 break;
3766 case DIV:
3767 case UDIV:
3768 case MOD:
3769 case UMOD:
3770 /* Similarly, complexity for schoolbook long division. */
3771 total = factor * factor * COSTS_N_INSNS (7);
3772 break;
3773 case USE:
3774 /* Used in combine.c as a marker. */
3775 total = 0;
3776 break;
3777 case SET:
3778 /* A SET doesn't have a mode, so let's look at the SET_DEST to get
3779 the mode for the factor. */
3780 factor = GET_MODE_SIZE (GET_MODE (SET_DEST (x))) / UNITS_PER_WORD;
3781 if (factor == 0)
3782 factor = 1;
3783 /* Fall through. */
3784 default:
3785 total = factor * COSTS_N_INSNS (1);
3788 switch (code)
3790 case REG:
3791 return 0;
3793 case SUBREG:
3794 total = 0;
3795 /* If we can't tie these modes, make this expensive. The larger
3796 the mode, the more expensive it is. */
3797 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3798 return COSTS_N_INSNS (2 + factor);
3799 break;
3801 default:
3802 if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed))
3803 return total;
3804 break;
3807 /* Sum the costs of the sub-rtx's, plus the cost of this operation,
3808 which is already in TOTAL. */
3810 fmt = GET_RTX_FORMAT (code);
3811 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3812 if (fmt[i] == 'e')
3813 total += rtx_cost (XEXP (x, i), code, i, speed);
3814 else if (fmt[i] == 'E')
3815 for (j = 0; j < XVECLEN (x, i); j++)
3816 total += rtx_cost (XVECEXP (x, i, j), code, i, speed);
3818 return total;
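/* A minimal usage sketch (not part of this file; the helper name is made
   up): pick the cheaper of two expressions by speed or size cost.  Here X
   and Y are assumed to appear as the source operand (operand 1) of a SET,
   which is the usual convention when costing a SET_SRC.  */
#if 0
static rtx
example_cheaper_set_src (rtx x, rtx y, bool speed)
{
  return (rtx_cost (x, SET, 1, speed) <= rtx_cost (y, SET, 1, speed) ? x : y);
}
#endif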
3821 /* Fill in the structure C with information about both speed and size rtx
3822 costs for X, which is operand OPNO in an expression with code OUTER. */
3824 void
3825 get_full_rtx_cost (rtx x, enum rtx_code outer, int opno,
3826 struct full_rtx_costs *c)
3828 c->speed = rtx_cost (x, outer, opno, true);
3829 c->size = rtx_cost (x, outer, opno, false);
3833 /* Return the cost of address expression X.
3834 Expect that X is a properly formed address reference.
3836 The SPEED parameter specifies whether costs optimized for speed or size should
3837 be returned. */
3840 address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed)
3842 /* We may be asked for the cost of various unusual addresses, such as the
3843 operands of a push instruction. It is not worthwhile to complicate
3844 the target hook for such cases. */
3846 if (!memory_address_addr_space_p (mode, x, as))
3847 return 1000;
3849 return targetm.address_cost (x, mode, as, speed);
3852 /* If the target doesn't override, compute the cost as with arithmetic. */
3855 default_address_cost (rtx x, enum machine_mode, addr_space_t, bool speed)
3857 return rtx_cost (x, MEM, 0, speed);
3861 unsigned HOST_WIDE_INT
3862 nonzero_bits (const_rtx x, enum machine_mode mode)
3864 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
3867 unsigned int
3868 num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3870 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
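/* A short sketch (not part of this file; the helper name is made up) of a
   typical nonzero_bits use: prove that a value is a multiple of a power of
   two, e.g. to establish the alignment of an address register.  X is
   assumed to have a scalar integer mode and ALIGN to be a power of two.  */
#if 0
static bool
example_known_aligned_p (rtx x, unsigned HOST_WIDE_INT align)
{
  return (nonzero_bits (x, GET_MODE (x)) & (align - 1)) == 0;
}
#endif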
3873 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3874 It avoids exponential behavior in nonzero_bits1 when X has
3875 identical subexpressions on the first or the second level. */
3877 static unsigned HOST_WIDE_INT
3878 cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3879 enum machine_mode known_mode,
3880 unsigned HOST_WIDE_INT known_ret)
3882 if (x == known_x && mode == known_mode)
3883 return known_ret;
3885 /* Try to find identical subexpressions. If found call
3886 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3887 precomputed value for the subexpression as KNOWN_RET. */
3889 if (ARITHMETIC_P (x))
3891 rtx x0 = XEXP (x, 0);
3892 rtx x1 = XEXP (x, 1);
3894 /* Check the first level. */
3895 if (x0 == x1)
3896 return nonzero_bits1 (x, mode, x0, mode,
3897 cached_nonzero_bits (x0, mode, known_x,
3898 known_mode, known_ret));
3900 /* Check the second level. */
3901 if (ARITHMETIC_P (x0)
3902 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3903 return nonzero_bits1 (x, mode, x1, mode,
3904 cached_nonzero_bits (x1, mode, known_x,
3905 known_mode, known_ret));
3907 if (ARITHMETIC_P (x1)
3908 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3909 return nonzero_bits1 (x, mode, x0, mode,
3910 cached_nonzero_bits (x0, mode, known_x,
3911 known_mode, known_ret));
3914 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3917 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3918 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3919 is less useful. We can't allow both, because that results in exponential
3920 run time recursion. There is a nullstone testcase that triggered
3921 this. This macro avoids accidental uses of num_sign_bit_copies. */
3922 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
3924 /* Given an expression, X, compute which bits in X can be nonzero.
3925 We don't care about bits outside of those defined in MODE.
3927 For most X this is simply GET_MODE_MASK (MODE), but if X is
3928 an arithmetic operation, we can do better. */
3930 static unsigned HOST_WIDE_INT
3931 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
3932 enum machine_mode known_mode,
3933 unsigned HOST_WIDE_INT known_ret)
3935 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
3936 unsigned HOST_WIDE_INT inner_nz;
3937 enum rtx_code code;
3938 enum machine_mode inner_mode;
3939 unsigned int mode_width = GET_MODE_PRECISION (mode);
3941 /* For floating-point and vector values, assume all bits are needed. */
3942 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
3943 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
3944 return nonzero;
3946 /* If X is wider than MODE, use its mode instead. */
3947 if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
3949 mode = GET_MODE (x);
3950 nonzero = GET_MODE_MASK (mode);
3951 mode_width = GET_MODE_PRECISION (mode);
3954 if (mode_width > HOST_BITS_PER_WIDE_INT)
3955 /* Our only callers in this case look for single bit values. So
3956 just return the mode mask. Those tests will then be false. */
3957 return nonzero;
3959 #ifndef WORD_REGISTER_OPERATIONS
3960 /* If MODE is wider than X, but both are a single word for both the host
3961 and target machines, we can compute this from which bits of the
3962 object might be nonzero in its own mode, taking into account the fact
3963 that on many CISC machines, accessing an object in a wider mode
3964 causes the high-order bits to become undefined. So they are
3965 not known to be zero. */
3967 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
3968 && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
3969 && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
3970 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
3972 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
3973 known_x, known_mode, known_ret);
3974 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
3975 return nonzero;
3977 #endif
3979 code = GET_CODE (x);
3980 switch (code)
3982 case REG:
3983 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
3984 /* If pointers extend unsigned and this is a pointer in Pmode, say that
3985 all the bits above ptr_mode are known to be zero. */
3986 /* As we do not know which address space the pointer is referring to,
3987 we can do this only if the target does not support different pointer
3988 or address modes depending on the address space. */
3989 if (target_default_pointer_address_modes_p ()
3990 && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
3991 && REG_POINTER (x))
3992 nonzero &= GET_MODE_MASK (ptr_mode);
3993 #endif
3995 /* Include declared information about alignment of pointers. */
3996 /* ??? We don't properly preserve REG_POINTER changes across
3997 pointer-to-integer casts, so we can't trust it except for
3998 things that we know must be pointers. See execute/960116-1.c. */
3999 if ((x == stack_pointer_rtx
4000 || x == frame_pointer_rtx
4001 || x == arg_pointer_rtx)
4002 && REGNO_POINTER_ALIGN (REGNO (x)))
4004 unsigned HOST_WIDE_INT alignment
4005 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
4007 #ifdef PUSH_ROUNDING
4008 /* If PUSH_ROUNDING is defined, it is possible for the
4009 stack to be momentarily aligned only to that amount,
4010 so we pick the least alignment. */
4011 if (x == stack_pointer_rtx && PUSH_ARGS)
4012 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
4013 alignment);
4014 #endif
4016 nonzero &= ~(alignment - 1);
4020 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
4021 rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
4022 known_mode, known_ret,
4023 &nonzero_for_hook);
4025 if (new_rtx)
4026 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
4027 known_mode, known_ret);
4029 return nonzero_for_hook;
4032 case CONST_INT:
4033 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4034 /* If X is negative in MODE, sign-extend the value. */
4035 if (INTVAL (x) > 0
4036 && mode_width < BITS_PER_WORD
4037 && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
4038 != 0)
4039 return UINTVAL (x) | ((unsigned HOST_WIDE_INT) (-1) << mode_width);
4040 #endif
4042 return UINTVAL (x);
4044 case MEM:
4045 #ifdef LOAD_EXTEND_OP
4046 /* In many, if not most, RISC machines, reading a byte from memory
4047 zeros the rest of the register. Noticing that fact saves a lot
4048 of extra zero-extends. */
4049 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4050 nonzero &= GET_MODE_MASK (GET_MODE (x));
4051 #endif
4052 break;
4054 case EQ: case NE:
4055 case UNEQ: case LTGT:
4056 case GT: case GTU: case UNGT:
4057 case LT: case LTU: case UNLT:
4058 case GE: case GEU: case UNGE:
4059 case LE: case LEU: case UNLE:
4060 case UNORDERED: case ORDERED:
4061 /* If this produces an integer result, we know which bits are set.
4062 Code here used to clear bits outside the mode of X, but that is
4063 now done above. */
4064 /* Mind that MODE is the mode the caller wants to look at this
4065 operation in, and not the actual operation mode. We can wind
4066 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4067 that describes the results of a vector compare. */
4068 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
4069 && mode_width <= HOST_BITS_PER_WIDE_INT)
4070 nonzero = STORE_FLAG_VALUE;
4071 break;
4073 case NEG:
4074 #if 0
4075 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4076 and num_sign_bit_copies. */
4077 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4078 == GET_MODE_PRECISION (GET_MODE (x)))
4079 nonzero = 1;
4080 #endif
4082 if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width)
4083 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4084 break;
4086 case ABS:
4087 #if 0
4088 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4089 and num_sign_bit_copies. */
4090 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4091 == GET_MODE_PRECISION (GET_MODE (x)))
4092 nonzero = 1;
4093 #endif
4094 break;
4096 case TRUNCATE:
4097 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4098 known_x, known_mode, known_ret)
4099 & GET_MODE_MASK (mode));
4100 break;
4102 case ZERO_EXTEND:
4103 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4104 known_x, known_mode, known_ret);
4105 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4106 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4107 break;
4109 case SIGN_EXTEND:
4110 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4111 Otherwise, show that all the bits in the outer mode but not in the inner
4112 mode may be nonzero. */
4113 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4114 known_x, known_mode, known_ret);
4115 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4117 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4118 if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
4119 inner_nz |= (GET_MODE_MASK (mode)
4120 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4123 nonzero &= inner_nz;
4124 break;
4126 case AND:
4127 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4128 known_x, known_mode, known_ret)
4129 & cached_nonzero_bits (XEXP (x, 1), mode,
4130 known_x, known_mode, known_ret);
4131 break;
4133 case XOR: case IOR:
4134 case UMIN: case UMAX: case SMIN: case SMAX:
4136 unsigned HOST_WIDE_INT nonzero0
4137 = cached_nonzero_bits (XEXP (x, 0), mode,
4138 known_x, known_mode, known_ret);
4140 /* Don't call nonzero_bits a second time if it cannot change
4141 anything. */
4142 if ((nonzero & nonzero0) != nonzero)
4143 nonzero &= nonzero0
4144 | cached_nonzero_bits (XEXP (x, 1), mode,
4145 known_x, known_mode, known_ret);
4147 break;
4149 case PLUS: case MINUS:
4150 case MULT:
4151 case DIV: case UDIV:
4152 case MOD: case UMOD:
4153 /* We can apply the rules of arithmetic to compute the number of
4154 high- and low-order zero bits of these operations. We start by
4155 computing the width (position of the highest-order nonzero bit)
4156 and the number of low-order zero bits for each value. */
4158 unsigned HOST_WIDE_INT nz0
4159 = cached_nonzero_bits (XEXP (x, 0), mode,
4160 known_x, known_mode, known_ret);
4161 unsigned HOST_WIDE_INT nz1
4162 = cached_nonzero_bits (XEXP (x, 1), mode,
4163 known_x, known_mode, known_ret);
4164 int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
4165 int width0 = floor_log2 (nz0) + 1;
4166 int width1 = floor_log2 (nz1) + 1;
4167 int low0 = floor_log2 (nz0 & -nz0);
4168 int low1 = floor_log2 (nz1 & -nz1);
4169 unsigned HOST_WIDE_INT op0_maybe_minusp
4170 = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4171 unsigned HOST_WIDE_INT op1_maybe_minusp
4172 = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4173 unsigned int result_width = mode_width;
4174 int result_low = 0;
4176 switch (code)
4178 case PLUS:
4179 result_width = MAX (width0, width1) + 1;
4180 result_low = MIN (low0, low1);
4181 break;
4182 case MINUS:
4183 result_low = MIN (low0, low1);
4184 break;
4185 case MULT:
4186 result_width = width0 + width1;
4187 result_low = low0 + low1;
4188 break;
4189 case DIV:
4190 if (width1 == 0)
4191 break;
4192 if (!op0_maybe_minusp && !op1_maybe_minusp)
4193 result_width = width0;
4194 break;
4195 case UDIV:
4196 if (width1 == 0)
4197 break;
4198 result_width = width0;
4199 break;
4200 case MOD:
4201 if (width1 == 0)
4202 break;
4203 if (!op0_maybe_minusp && !op1_maybe_minusp)
4204 result_width = MIN (width0, width1);
4205 result_low = MIN (low0, low1);
4206 break;
4207 case UMOD:
4208 if (width1 == 0)
4209 break;
4210 result_width = MIN (width0, width1);
4211 result_low = MIN (low0, low1);
4212 break;
4213 default:
4214 gcc_unreachable ();
4217 if (result_width < mode_width)
4218 nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
4220 if (result_low > 0)
4221 nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
4223 break;
4225 case ZERO_EXTRACT:
4226 if (CONST_INT_P (XEXP (x, 1))
4227 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4228 nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4229 break;
4231 case SUBREG:
4232 /* If this is a SUBREG formed for a promoted variable that has
4233 been zero-extended, we know that at least the high-order bits
4234 are zero, though others might be too. */
4236 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4237 nonzero = GET_MODE_MASK (GET_MODE (x))
4238 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4239 known_x, known_mode, known_ret);
4241 inner_mode = GET_MODE (SUBREG_REG (x));
4242 /* If the inner mode is a single word for both the host and target
4243 machines, we can compute this from which bits of the inner
4244 object might be nonzero. */
4245 if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
4246 && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
4248 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4249 known_x, known_mode, known_ret);
4251 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4252 /* If this is a typical RISC machine, we only have to worry
4253 about the way loads are extended. */
4254 if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND
4255 ? val_signbit_known_set_p (inner_mode, nonzero)
4256 : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND)
4257 || !MEM_P (SUBREG_REG (x)))
4258 #endif
4260 /* On many CISC machines, accessing an object in a wider mode
4261 causes the high-order bits to become undefined. So they are
4262 not known to be zero. */
4263 if (GET_MODE_PRECISION (GET_MODE (x))
4264 > GET_MODE_PRECISION (inner_mode))
4265 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4266 & ~GET_MODE_MASK (inner_mode));
4269 break;
4271 case ASHIFTRT:
4272 case LSHIFTRT:
4273 case ASHIFT:
4274 case ROTATE:
4275 /* The nonzero bits are in two classes: any bits within MODE
4276 that aren't in GET_MODE (x) are always significant. The rest of the
4277 nonzero bits are those that are significant in the operand of
4278 the shift when shifted the appropriate number of bits. This
4279 shows that high-order bits are cleared by the right shift and
4280 low-order bits by left shifts. */
4281 if (CONST_INT_P (XEXP (x, 1))
4282 && INTVAL (XEXP (x, 1)) >= 0
4283 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4284 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4286 enum machine_mode inner_mode = GET_MODE (x);
4287 unsigned int width = GET_MODE_PRECISION (inner_mode);
4288 int count = INTVAL (XEXP (x, 1));
4289 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4290 unsigned HOST_WIDE_INT op_nonzero
4291 = cached_nonzero_bits (XEXP (x, 0), mode,
4292 known_x, known_mode, known_ret);
4293 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4294 unsigned HOST_WIDE_INT outer = 0;
4296 if (mode_width > width)
4297 outer = (op_nonzero & nonzero & ~mode_mask);
4299 if (code == LSHIFTRT)
4300 inner >>= count;
4301 else if (code == ASHIFTRT)
4303 inner >>= count;
4305 /* If the sign bit may have been nonzero before the shift, we
4306 need to mark all the places it could have been copied to
4307 by the shift as possibly nonzero. */
4308 if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count)))
4309 inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1)
4310 << (width - count);
4312 else if (code == ASHIFT)
4313 inner <<= count;
4314 else
4315 inner = ((inner << (count % width)
4316 | (inner >> (width - (count % width)))) & mode_mask);
4318 nonzero &= (outer | inner);
4320 break;
4322 case FFS:
4323 case POPCOUNT:
4324 /* This is at most the number of bits in the mode. */
4325 nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4326 break;
4328 case CLZ:
4329 /* If CLZ has a known value at zero, then the nonzero bits are
4330 that value, plus the number of bits in the mode minus one. */
4331 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4332 nonzero
4333 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4334 else
4335 nonzero = -1;
4336 break;
4338 case CTZ:
4339 /* If CTZ has a known value at zero, then the nonzero bits are
4340 that value, plus the number of bits in the mode minus one. */
4341 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4342 nonzero
4343 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4344 else
4345 nonzero = -1;
4346 break;
4348 case CLRSB:
4349 /* This is at most the number of bits in the mode minus 1. */
4350 nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4351 break;
4353 case PARITY:
4354 nonzero = 1;
4355 break;
4357 case IF_THEN_ELSE:
4359 unsigned HOST_WIDE_INT nonzero_true
4360 = cached_nonzero_bits (XEXP (x, 1), mode,
4361 known_x, known_mode, known_ret);
4363 /* Don't call nonzero_bits for the second time if it cannot change
4364 anything. */
4365 if ((nonzero & nonzero_true) != nonzero)
4366 nonzero &= nonzero_true
4367 | cached_nonzero_bits (XEXP (x, 2), mode,
4368 known_x, known_mode, known_ret);
4370 break;
4372 default:
4373 break;
4376 return nonzero;
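The PLUS/MINUS/MULT/DIV block above bounds the result's nonzero bits using each operand's width (position of the highest possibly-nonzero bit) and its count of low-order zero bits. A minimal standalone sketch of the PLUS and MULT rules on plain 64-bit masks; the helper names are illustrative stand-ins, not GCC API:

#include <stdint.h>
#include <stdio.h>

/* Position of the highest set bit plus one; 0 for an all-zero mask.  */
static int
width_of (uint64_t nz)
{
  int w = 0;
  while (nz)
    {
      nz >>= 1;
      w++;
    }
  return w;
}

/* Number of low-order zero bits; 0 for an all-zero mask.  */
static int
low_zeros (uint64_t nz)
{
  int n = 0;
  if (nz == 0)
    return 0;
  while (!(nz & 1))
    {
      nz >>= 1;
      n++;
    }
  return n;
}

/* Conservative nonzero-bits mask of OP0 + OP1 or OP0 * OP1 in a
   MODE_WIDTH-bit mode, given the operands' masks NZ0 and NZ1,
   mirroring the PLUS and MULT arms of the switch above.  */
static uint64_t
arith_nonzero (char op, uint64_t nz0, uint64_t nz1, int mode_width)
{
  int width0 = width_of (nz0), width1 = width_of (nz1);
  int low0 = low_zeros (nz0), low1 = low_zeros (nz1);
  int result_width = mode_width, result_low = 0;
  uint64_t nonzero
    = mode_width < 64 ? ((uint64_t) 1 << mode_width) - 1 : ~(uint64_t) 0;

  if (op == '+')
    {
      /* A carry can add one bit; common low-order zeros survive.  */
      result_width = (width0 > width1 ? width0 : width1) + 1;
      result_low = low0 < low1 ? low0 : low1;
    }
  else if (op == '*')
    {
      /* Widths add; trailing zero counts add.  */
      result_width = width0 + width1;
      result_low = low0 + low1;
    }

  if (result_width < mode_width)
    nonzero &= ((uint64_t) 1 << result_width) - 1;
  if (result_low > 0 && result_low < 64)
    nonzero &= ~(((uint64_t) 1 << result_low) - 1);
  return nonzero;
}

int
main (void)
{
  /* Two values known to fit in 8 bits and be multiples of 4: the product
     fits in 16 bits and is a multiple of 16, so this prints 0xfff0.  */
  printf ("%#llx\n", (unsigned long long) arith_nonzero ('*', 0xfc, 0xfc, 32));
  return 0;
}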
4379 /* See the macro definition above. */
4380 #undef cached_num_sign_bit_copies
4383 /* The function cached_num_sign_bit_copies is a wrapper around
4384 num_sign_bit_copies1. It avoids exponential behavior in
4385 num_sign_bit_copies1 when X has identical subexpressions on the
4386 first or the second level. */
4388 static unsigned int
4389 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4390 enum machine_mode known_mode,
4391 unsigned int known_ret)
4393 if (x == known_x && mode == known_mode)
4394 return known_ret;
4396 /* Try to find identical subexpressions. If found call
4397 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4398 the precomputed value for the subexpression as KNOWN_RET. */
4400 if (ARITHMETIC_P (x))
4402 rtx x0 = XEXP (x, 0);
4403 rtx x1 = XEXP (x, 1);
4405 /* Check the first level. */
4406 if (x0 == x1)
4407 return
4408 num_sign_bit_copies1 (x, mode, x0, mode,
4409 cached_num_sign_bit_copies (x0, mode, known_x,
4410 known_mode,
4411 known_ret));
4413 /* Check the second level. */
4414 if (ARITHMETIC_P (x0)
4415 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4416 return
4417 num_sign_bit_copies1 (x, mode, x1, mode,
4418 cached_num_sign_bit_copies (x1, mode, known_x,
4419 known_mode,
4420 known_ret));
4422 if (ARITHMETIC_P (x1)
4423 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4424 return
4425 num_sign_bit_copies1 (x, mode, x0, mode,
4426 cached_num_sign_bit_copies (x0, mode, known_x,
4427 known_mode,
4428 known_ret));
4431 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4434 /* Return the number of bits at the high-order end of X that are known to
4435 be equal to the sign bit. X will be used in mode MODE; if MODE is
4436 VOIDmode, X will be used in its own mode. The returned value will always
4437 be between 1 and the number of bits in MODE. */
4439 static unsigned int
4440 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4441 enum machine_mode known_mode,
4442 unsigned int known_ret)
4444 enum rtx_code code = GET_CODE (x);
4445 unsigned int bitwidth = GET_MODE_PRECISION (mode);
4446 int num0, num1, result;
4447 unsigned HOST_WIDE_INT nonzero;
4449 /* If we weren't given a mode, use the mode of X. If the mode is still
4450 VOIDmode, we don't know anything. Likewise if one of the modes is
4451 floating-point. */
4453 if (mode == VOIDmode)
4454 mode = GET_MODE (x);
4456 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4457 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4458 return 1;
4460 /* For a smaller object, just ignore the high bits. */
4461 if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
4463 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4464 known_x, known_mode, known_ret);
4465 return MAX (1,
4466 num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
4469 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
4471 #ifndef WORD_REGISTER_OPERATIONS
4472 /* If this machine does not do all register operations on the entire
4473 register and MODE is wider than the mode of X, we can say nothing
4474 at all about the high-order bits. */
4475 return 1;
4476 #else
4477 /* Likewise on machines that do, if the mode of the object is smaller
4478 than a word and loads of that size don't sign extend, we can say
4479 nothing about the high order bits. */
4480 if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
4481 #ifdef LOAD_EXTEND_OP
4482 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4483 #endif
4485 return 1;
4486 #endif
4489 switch (code)
4491 case REG:
4493 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4494 /* If pointers extend signed and this is a pointer in Pmode, say that
4495 all the bits above ptr_mode are known to be sign bit copies. */
4496 /* As we do not know which address space the pointer is referring to,
4497 we can do this only if the target does not support different pointer
4498 or address modes depending on the address space. */
4499 if (target_default_pointer_address_modes_p ()
4500 && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4501 && mode == Pmode && REG_POINTER (x))
4502 return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
4503 #endif
4506 unsigned int copies_for_hook = 1, copies = 1;
4507 rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4508 known_mode, known_ret,
4509 &copies_for_hook);
4511 if (new_rtx)
4512 copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4513 known_mode, known_ret);
4515 if (copies > 1 || copies_for_hook > 1)
4516 return MAX (copies, copies_for_hook);
4518 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4520 break;
4522 case MEM:
4523 #ifdef LOAD_EXTEND_OP
4524 /* Some RISC machines sign-extend all loads of smaller than a word. */
4525 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4526 return MAX (1, ((int) bitwidth
4527 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
4528 #endif
4529 break;
4531 case CONST_INT:
4532 /* If the constant is negative, take its 1's complement and remask.
4533 Then see how many zero bits we have. */
4534 nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
4535 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4536 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4537 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4539 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4541 case SUBREG:
4542 /* If this is a SUBREG for a promoted object that is sign-extended
4543 and we are looking at it in a wider mode, we know that at least the
4544 high-order bits are known to be sign bit copies. */
4546 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4548 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4549 known_x, known_mode, known_ret);
4550 return MAX ((int) bitwidth
4551 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
4552 num0);
4555 /* For a smaller object, just ignore the high bits. */
4556 if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
4558 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4559 known_x, known_mode, known_ret);
4560 return MAX (1, (num0
4561 - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
4562 - bitwidth)));
4565 #ifdef WORD_REGISTER_OPERATIONS
4566 #ifdef LOAD_EXTEND_OP
4567 /* For paradoxical SUBREGs on machines where all register operations
4568 affect the entire register, just look inside. Note that we are
4569 passing MODE to the recursive call, so the number of sign bit copies
4570 will remain relative to that mode, not the inner mode. */
4572 /* This works only if loads sign extend. Otherwise, if we get a
4573 reload for the inner part, it may be loaded from the stack, and
4574 then we lose all sign bit copies that existed before the store
4575 to the stack. */
4577 if (paradoxical_subreg_p (x)
4578 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4579 && MEM_P (SUBREG_REG (x)))
4580 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4581 known_x, known_mode, known_ret);
4582 #endif
4583 #endif
4584 break;
4586 case SIGN_EXTRACT:
4587 if (CONST_INT_P (XEXP (x, 1)))
4588 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4589 break;
4591 case SIGN_EXTEND:
4592 return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4593 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4594 known_x, known_mode, known_ret));
4596 case TRUNCATE:
4597 /* For a smaller object, just ignore the high bits. */
4598 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4599 known_x, known_mode, known_ret);
4600 return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4601 - bitwidth)));
4603 case NOT:
4604 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4605 known_x, known_mode, known_ret);
4607 case ROTATE: case ROTATERT:
4608 /* If we are rotating left by a number of bits less than the number
4609 of sign bit copies, we can just subtract that amount from the
4610 number. */
4611 if (CONST_INT_P (XEXP (x, 1))
4612 && INTVAL (XEXP (x, 1)) >= 0
4613 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4615 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4616 known_x, known_mode, known_ret);
4617 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4618 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4620 break;
4622 case NEG:
4623 /* In general, this subtracts one sign bit copy. But if the value
4624 is known to be positive, the number of sign bit copies is the
4625 same as that of the input. Finally, if the input has just one bit
4626 that might be nonzero, all the bits are copies of the sign bit. */
4627 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4628 known_x, known_mode, known_ret);
4629 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4630 return num0 > 1 ? num0 - 1 : 1;
4632 nonzero = nonzero_bits (XEXP (x, 0), mode);
4633 if (nonzero == 1)
4634 return bitwidth;
4636 if (num0 > 1
4637 && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4638 num0--;
4640 return num0;
4642 case IOR: case AND: case XOR:
4643 case SMIN: case SMAX: case UMIN: case UMAX:
4644 /* Logical operations will preserve the number of sign-bit copies.
4645 MIN and MAX operations always return one of the operands. */
4646 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4647 known_x, known_mode, known_ret);
4648 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4649 known_x, known_mode, known_ret);
4651 /* If num1 is clearing some of the top bits then regardless of
4652 the other term, we are guaranteed to have at least that many
4653 high-order zero bits. */
4654 if (code == AND
4655 && num1 > 1
4656 && bitwidth <= HOST_BITS_PER_WIDE_INT
4657 && CONST_INT_P (XEXP (x, 1))
4658 && (UINTVAL (XEXP (x, 1))
4659 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0)
4660 return num1;
4662 /* Similarly for IOR when setting high-order bits. */
4663 if (code == IOR
4664 && num1 > 1
4665 && bitwidth <= HOST_BITS_PER_WIDE_INT
4666 && CONST_INT_P (XEXP (x, 1))
4667 && (UINTVAL (XEXP (x, 1))
4668 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4669 return num1;
4671 return MIN (num0, num1);
4673 case PLUS: case MINUS:
4674 /* For addition and subtraction, we can have a 1-bit carry. However,
4675 if we are subtracting 1 from a positive number, there will not
4676 be such a carry. Furthermore, if the positive number is known to
4677 be 0 or 1, we know the result is either -1 or 0. */
4679 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4680 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4682 nonzero = nonzero_bits (XEXP (x, 0), mode);
4683 if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4684 return (nonzero == 1 || nonzero == 0 ? bitwidth
4685 : bitwidth - floor_log2 (nonzero) - 1);
4688 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4689 known_x, known_mode, known_ret);
4690 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4691 known_x, known_mode, known_ret);
4692 result = MAX (1, MIN (num0, num1) - 1);
4694 return result;
4696 case MULT:
4697 /* The number of bits of the product is the sum of the number of
4698 bits of both terms. However, unless one of the terms is known
4699 to be positive, we must allow for an additional bit since negating
4700 a negative number can remove one sign bit copy. */
4702 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4703 known_x, known_mode, known_ret);
4704 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4705 known_x, known_mode, known_ret);
4707 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4708 if (result > 0
4709 && (bitwidth > HOST_BITS_PER_WIDE_INT
4710 || (((nonzero_bits (XEXP (x, 0), mode)
4711 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4712 && ((nonzero_bits (XEXP (x, 1), mode)
4713 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
4714 != 0))))
4715 result--;
4717 return MAX (1, result);
4719 case UDIV:
4720 /* The result must be <= the first operand. If the first operand
4721 has the high bit set, we know nothing about the number of sign
4722 bit copies. */
4723 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4724 return 1;
4725 else if ((nonzero_bits (XEXP (x, 0), mode)
4726 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4727 return 1;
4728 else
4729 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4730 known_x, known_mode, known_ret);
4732 case UMOD:
4733 /* The result must be <= the second operand. If the second operand
4734 has (or just might have) the high bit set, we know nothing about
4735 the number of sign bit copies. */
4736 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4737 return 1;
4738 else if ((nonzero_bits (XEXP (x, 1), mode)
4739 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4740 return 1;
4741 else
4742 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4743 known_x, known_mode, known_ret);
4745 case DIV:
4746 /* Similar to unsigned division, except that we have to worry about
4747 the case where the divisor is negative, in which case we have
4748 to add 1. */
4749 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4750 known_x, known_mode, known_ret);
4751 if (result > 1
4752 && (bitwidth > HOST_BITS_PER_WIDE_INT
4753 || (nonzero_bits (XEXP (x, 1), mode)
4754 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4755 result--;
4757 return result;
4759 case MOD:
4760 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4761 known_x, known_mode, known_ret);
4762 if (result > 1
4763 && (bitwidth > HOST_BITS_PER_WIDE_INT
4764 || (nonzero_bits (XEXP (x, 1), mode)
4765 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4766 result--;
4768 return result;
4770 case ASHIFTRT:
4771 /* Shifts by a constant add to the number of bits equal to the
4772 sign bit. */
4773 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4774 known_x, known_mode, known_ret);
4775 if (CONST_INT_P (XEXP (x, 1))
4776 && INTVAL (XEXP (x, 1)) > 0
4777 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4778 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4780 return num0;
4782 case ASHIFT:
4783 /* Left shifts destroy copies. */
4784 if (!CONST_INT_P (XEXP (x, 1))
4785 || INTVAL (XEXP (x, 1)) < 0
4786 || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4787 || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
4788 return 1;
4790 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4791 known_x, known_mode, known_ret);
4792 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4794 case IF_THEN_ELSE:
4795 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4796 known_x, known_mode, known_ret);
4797 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4798 known_x, known_mode, known_ret);
4799 return MIN (num0, num1);
4801 case EQ: case NE: case GE: case GT: case LE: case LT:
4802 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4803 case GEU: case GTU: case LEU: case LTU:
4804 case UNORDERED: case ORDERED:
4805 /* If the constant is negative, take its 1's complement and remask.
4806 Then see how many zero bits we have. */
4807 nonzero = STORE_FLAG_VALUE;
4808 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4809 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4810 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4812 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4814 default:
4815 break;
4818 /* If we haven't been able to figure it out by one of the above rules,
4819 see if some of the high-order bits are known to be zero. If so,
4820 count those bits and return one less than that amount. If we can't
4821 safely compute the mask for this mode, return 1. */
4823 bitwidth = GET_MODE_PRECISION (mode);
4824 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4825 return 1;
4827 nonzero = nonzero_bits (x, mode);
4828 return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))
4829 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
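The CONST_INT case above counts sign-bit copies of a constant by complementing a negative value and then measuring how far down the highest remaining set bit is. A standalone sketch of just that rule for a 32-bit or narrower "mode", using plain integers (illustrative only):

#include <stdint.h>
#include <stdio.h>

/* floor_log2 of a value, -1 if it is zero.  */
static int
floor_log2_u64 (uint64_t x)
{
  int n = -1;
  while (x)
    {
      x >>= 1;
      n++;
    }
  return n;
}

/* Sign-bit copies of VAL viewed as a BITWIDTH-bit signed value, mirroring
   the CONST_INT rule above: complement a negative value, then count the
   bits above the highest remaining set bit.  */
static unsigned int
const_sign_bit_copies (int64_t val, unsigned int bitwidth)
{
  uint64_t mask
    = bitwidth == 64 ? ~(uint64_t) 0 : ((uint64_t) 1 << bitwidth) - 1;
  uint64_t nonzero = (uint64_t) val & mask;
  if (nonzero & ((uint64_t) 1 << (bitwidth - 1)))
    nonzero = ~nonzero & mask;
  return nonzero == 0 ? bitwidth : bitwidth - floor_log2_u64 (nonzero) - 1;
}

int
main (void)
{
  printf ("%u\n", const_sign_bit_copies (1, 32));    /* 31 */
  printf ("%u\n", const_sign_bit_copies (-1, 32));   /* 32: every bit repeats the sign */
  printf ("%u\n", const_sign_bit_copies (-200, 32)); /* 24 */
  return 0;
}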
4832 /* Calculate the rtx_cost of a single instruction. A return value of
4833 zero indicates an instruction pattern without a known cost. */
4835 int
4836 insn_rtx_cost (rtx pat, bool speed)
4838 int i, cost;
4839 rtx set;
4841 /* Extract the single set rtx from the instruction pattern.
4842 We can't use single_set since we only have the pattern. */
4843 if (GET_CODE (pat) == SET)
4844 set = pat;
4845 else if (GET_CODE (pat) == PARALLEL)
4847 set = NULL_RTX;
4848 for (i = 0; i < XVECLEN (pat, 0); i++)
4850 rtx x = XVECEXP (pat, 0, i);
4851 if (GET_CODE (x) == SET)
4853 if (set)
4854 return 0;
4855 set = x;
4858 if (!set)
4859 return 0;
4861 else
4862 return 0;
4864 cost = set_src_cost (SET_SRC (set), speed);
4865 return cost > 0 ? cost : COSTS_N_INSNS (1);
4868 /* Given an insn INSN and condition COND, return the condition in a
4869 canonical form to simplify testing by callers. Specifically:
4871 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4872 (2) Both operands will be machine operands; (cc0) will have been replaced.
4873 (3) If an operand is a constant, it will be the second operand.
4874 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4875 for GE, GEU, and LEU.
4877 If the condition cannot be understood, or is an inequality floating-point
4878 comparison which needs to be reversed, 0 will be returned.
4880 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4882 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4883 insn used in locating the condition was found. If a replacement test
4884 of the condition is desired, it should be placed in front of that
4885 insn and we will be sure that the inputs are still valid.
4887 If WANT_REG is nonzero, we wish the condition to be relative to that
4888 register, if possible. Therefore, do not canonicalize the condition
4889 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4890 to be a compare to a CC mode register.
4892 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4893 and at INSN. */
4895 rtx
4896 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4897 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4899 enum rtx_code code;
4900 rtx prev = insn;
4901 const_rtx set;
4902 rtx tem;
4903 rtx op0, op1;
4904 int reverse_code = 0;
4905 enum machine_mode mode;
4906 basic_block bb = BLOCK_FOR_INSN (insn);
4908 code = GET_CODE (cond);
4909 mode = GET_MODE (cond);
4910 op0 = XEXP (cond, 0);
4911 op1 = XEXP (cond, 1);
4913 if (reverse)
4914 code = reversed_comparison_code (cond, insn);
4915 if (code == UNKNOWN)
4916 return 0;
4918 if (earliest)
4919 *earliest = insn;
4921 /* If we are comparing a register with zero, see if the register is set
4922 in the previous insn to a COMPARE or a comparison operation. Perform
4923 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
4924 in cse.c. */
4926 while ((GET_RTX_CLASS (code) == RTX_COMPARE
4927 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
4928 && op1 == CONST0_RTX (GET_MODE (op0))
4929 && op0 != want_reg)
4931 /* Set nonzero when we find something of interest. */
4932 rtx x = 0;
4934 #ifdef HAVE_cc0
4935 /* If comparison with cc0, import actual comparison from compare
4936 insn. */
4937 if (op0 == cc0_rtx)
4939 if ((prev = prev_nonnote_insn (prev)) == 0
4940 || !NONJUMP_INSN_P (prev)
4941 || (set = single_set (prev)) == 0
4942 || SET_DEST (set) != cc0_rtx)
4943 return 0;
4945 op0 = SET_SRC (set);
4946 op1 = CONST0_RTX (GET_MODE (op0));
4947 if (earliest)
4948 *earliest = prev;
4950 #endif
4952 /* If this is a COMPARE, pick up the two things being compared. */
4953 if (GET_CODE (op0) == COMPARE)
4955 op1 = XEXP (op0, 1);
4956 op0 = XEXP (op0, 0);
4957 continue;
4959 else if (!REG_P (op0))
4960 break;
4962 /* Go back to the previous insn. Stop if it is not an INSN. We also
4963 stop if it isn't a single set or if it has a REG_INC note because
4964 we don't want to bother dealing with it. */
4966 prev = prev_nonnote_nondebug_insn (prev);
4968 if (prev == 0
4969 || !NONJUMP_INSN_P (prev)
4970 || FIND_REG_INC_NOTE (prev, NULL_RTX)
4971 /* In cfglayout mode, there do not have to be labels at the
4972 beginning of a block, or jumps at the end, so the previous
4973 conditions would not stop us when we reach bb boundary. */
4974 || BLOCK_FOR_INSN (prev) != bb)
4975 break;
4977 set = set_of (op0, prev);
4979 if (set
4980 && (GET_CODE (set) != SET
4981 || !rtx_equal_p (SET_DEST (set), op0)))
4982 break;
4984 /* If this is setting OP0, get what it sets it to if it looks
4985 relevant. */
4986 if (set)
4988 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
4989 #ifdef FLOAT_STORE_FLAG_VALUE
4990 REAL_VALUE_TYPE fsfv;
4991 #endif
4993 /* ??? We may not combine comparisons done in a CCmode with
4994 comparisons not done in a CCmode. This is to aid targets
4995 like Alpha that have an IEEE compliant EQ instruction, and
4996 a non-IEEE compliant BEQ instruction. The use of CCmode is
4997 actually artificial, simply to prevent the combination, but
4998 should not affect other platforms.
5000 However, we must allow VOIDmode comparisons to match either
5001 CCmode or non-CCmode comparison, because some ports have
5002 modeless comparisons inside branch patterns.
5004 ??? This mode check should perhaps look more like the mode check
5005 in simplify_comparison in combine. */
5007 if ((GET_CODE (SET_SRC (set)) == COMPARE
5008 || (((code == NE
5009 || (code == LT
5010 && val_signbit_known_set_p (inner_mode,
5011 STORE_FLAG_VALUE))
5012 #ifdef FLOAT_STORE_FLAG_VALUE
5013 || (code == LT
5014 && SCALAR_FLOAT_MODE_P (inner_mode)
5015 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5016 REAL_VALUE_NEGATIVE (fsfv)))
5017 #endif
5019 && COMPARISON_P (SET_SRC (set))))
5020 && (((GET_MODE_CLASS (mode) == MODE_CC)
5021 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
5022 || mode == VOIDmode || inner_mode == VOIDmode))
5023 x = SET_SRC (set);
5024 else if (((code == EQ
5025 || (code == GE
5026 && val_signbit_known_set_p (inner_mode,
5027 STORE_FLAG_VALUE))
5028 #ifdef FLOAT_STORE_FLAG_VALUE
5029 || (code == GE
5030 && SCALAR_FLOAT_MODE_P (inner_mode)
5031 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5032 REAL_VALUE_NEGATIVE (fsfv)))
5033 #endif
5035 && COMPARISON_P (SET_SRC (set))
5036 && (((GET_MODE_CLASS (mode) == MODE_CC)
5037 == (GET_MODE_CLASS (inner_mode) == MODE_CC))
5038 || mode == VOIDmode || inner_mode == VOIDmode))
5041 reverse_code = 1;
5042 x = SET_SRC (set);
5044 else
5045 break;
5048 else if (reg_set_p (op0, prev))
5049 /* If this sets OP0, but not directly, we have to give up. */
5050 break;
5052 if (x)
5054 /* If the caller is expecting the condition to be valid at INSN,
5055 make sure X doesn't change before INSN. */
5056 if (valid_at_insn_p)
5057 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
5058 break;
5059 if (COMPARISON_P (x))
5060 code = GET_CODE (x);
5061 if (reverse_code)
5063 code = reversed_comparison_code (x, prev);
5064 if (code == UNKNOWN)
5065 return 0;
5066 reverse_code = 0;
5069 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5070 if (earliest)
5071 *earliest = prev;
5075 /* If constant is first, put it last. */
5076 if (CONSTANT_P (op0))
5077 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
5079 /* If OP0 is the result of a comparison, we weren't able to find what
5080 was really being compared, so fail. */
5081 if (!allow_cc_mode
5082 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5083 return 0;
5085 /* Canonicalize any ordered comparison with integers involving equality
5086 if we can do computations in the relevant mode and we do not
5087 overflow. */
5089 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
5090 && CONST_INT_P (op1)
5091 && GET_MODE (op0) != VOIDmode
5092 && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
5094 HOST_WIDE_INT const_val = INTVAL (op1);
5095 unsigned HOST_WIDE_INT uconst_val = const_val;
5096 unsigned HOST_WIDE_INT max_val
5097 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
5099 switch (code)
5101 case LE:
5102 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
5103 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
5104 break;
5106 /* When cross-compiling, const_val might be sign-extended from
5107 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT. */
5108 case GE:
5109 if ((const_val & max_val)
5110 != ((unsigned HOST_WIDE_INT) 1
5111 << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
5112 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
5113 break;
5115 case LEU:
5116 if (uconst_val < max_val)
5117 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
5118 break;
5120 case GEU:
5121 if (uconst_val != 0)
5122 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
5123 break;
5125 default:
5126 break;
5130 /* Never return CC0; return zero instead. */
5131 if (CC0_P (op0))
5132 return 0;
5134 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
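The switch near the end of canonicalize_condition turns non-strict comparisons against a constant into strict ones, guarding against overflow of the adjusted constant in the operand's mode. A standalone sketch of those four rewrites for a 32-bit mode; the enum and names are illustrative, not GCC rtx codes:

#include <stdint.h>
#include <stdio.h>

enum cmp { LT_, GT_, LE_, GE_, LTU_, GTU_, LEU_, GEU_ };

/* Rewrite "x <= c" style comparisons into strict ones when c+1 or c-1
   cannot overflow a 32-bit operand mode, mirroring the switch above.
   Returns nonzero when a rewrite happened.  */
static int
canonicalize_const_cmp (enum cmp *code, int64_t *c)
{
  uint64_t max_val = 0xffffffffu;	/* GET_MODE_MASK for a 32-bit mode */
  uint64_t uconst = (uint64_t) *c & max_val;

  switch (*code)
    {
    case LE_:				/* x <= c  ->  x < c+1 */
      if ((uint64_t) *c != max_val >> 1)
	{ *code = LT_; *c = *c + 1; return 1; }
      break;
    case GE_:				/* x >= c  ->  x > c-1 */
      if (((uint64_t) *c & max_val) != ((uint64_t) 1 << 31))
	{ *code = GT_; *c = *c - 1; return 1; }
      break;
    case LEU_:				/* unsigned x <= c  ->  x < c+1 */
      if (uconst < max_val)
	{ *code = LTU_; *c = (int64_t) (uconst + 1); return 1; }
      break;
    case GEU_:				/* unsigned x >= c  ->  x > c-1 */
      if (uconst != 0)
	{ *code = GTU_; *c = (int64_t) (uconst - 1); return 1; }
      break;
    default:
      break;
    }
  return 0;
}

int
main (void)
{
  enum cmp code = LE_;
  int64_t c = 41;
  canonicalize_const_cmp (&code, &c);
  /* Prints "code=0 const=42", i.e. the LT_ form with the constant bumped.  */
  printf ("code=%d const=%lld\n", (int) code, (long long) c);
  return 0;
}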
5137 /* Given a jump insn JUMP, return the condition that will cause it to branch
5138 to its JUMP_LABEL. If the condition cannot be understood, or is an
5139 inequality floating-point comparison which needs to be reversed, 0 will
5140 be returned.
5142 If EARLIEST is nonzero, it is a pointer to a place where the earliest
5143 insn used in locating the condition was found. If a replacement test
5144 of the condition is desired, it should be placed in front of that
5145 insn and we will be sure that the inputs are still valid. If EARLIEST
5146 is null, the returned condition will be valid at INSN.
5148 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
5149 compare to a CC mode register.
5151 VALID_AT_INSN_P is the same as for canonicalize_condition. */
5153 rtx
5154 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
5156 rtx cond;
5157 int reverse;
5158 rtx set;
5160 /* If this is not a standard conditional jump, we can't parse it. */
5161 if (!JUMP_P (jump)
5162 || ! any_condjump_p (jump))
5163 return 0;
5164 set = pc_set (jump);
5166 cond = XEXP (SET_SRC (set), 0);
5168 /* If this branches to JUMP_LABEL when the condition is false, reverse
5169 the condition. */
5170 reverse
5171 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
5172 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
5174 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
5175 allow_cc_mode, valid_at_insn_p);
5178 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
5179 TARGET_MODE_REP_EXTENDED.
5181 Note that we assume that the property of
5182 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
5183 narrower than mode B. I.e., if A is a mode narrower than B then in
5184 order to be able to operate on it in mode B, mode A needs to
5185 satisfy the requirements set by the representation of mode B. */
5187 static void
5188 init_num_sign_bit_copies_in_rep (void)
5190 enum machine_mode mode, in_mode;
5192 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
5193 in_mode = GET_MODE_WIDER_MODE (mode))
5194 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
5195 mode = GET_MODE_WIDER_MODE (mode))
5197 enum machine_mode i;
5199 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
5200 extends to the next widest mode. */
5201 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
5202 || GET_MODE_WIDER_MODE (mode) == in_mode);
5204 /* We are in in_mode. Count how many bits outside of mode
5205 have to be copies of the sign-bit. */
5206 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
5208 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
5210 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
5211 /* We can only check sign-bit copies starting from the
5212 top-bit. In order to be able to check the bits we
5213 have already seen we pretend that subsequent bits
5214 have to be sign-bit copies too. */
5215 || num_sign_bit_copies_in_rep [in_mode][mode])
5216 num_sign_bit_copies_in_rep [in_mode][mode]
5217 += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
5222 /* Suppose that truncation from the machine mode of X to MODE is not a
5223 no-op. See if there is anything special about X so that we can
5224 assume it already contains a truncated value of MODE. */
5226 bool
5227 truncated_to_mode (enum machine_mode mode, const_rtx x)
5229 /* This register has already been used in MODE without explicit
5230 truncation. */
5231 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5232 return true;
5234 /* See if we already satisfy the requirements of MODE. If yes we
5235 can just switch to MODE. */
5236 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5237 && (num_sign_bit_copies (x, GET_MODE (x))
5238 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5239 return true;
5241 return false;
5244 /* Initialize non_rtx_starting_operands, which is used to speed up
5245 for_each_rtx. */
5246 void
5247 init_rtlanal (void)
5249 int i;
5250 for (i = 0; i < NUM_RTX_CODE; i++)
5252 const char *format = GET_RTX_FORMAT (i);
5253 const char *first = strpbrk (format, "eEV");
5254 non_rtx_starting_operands[i] = first ? first - format : -1;
5257 init_num_sign_bit_copies_in_rep ();
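init_rtlanal precomputes, for every rtx code, the offset of the first 'e', 'E' or 'V' operand in its format string so that for_each_rtx can skip the leading scalar operands. A tiny standalone illustration of that strpbrk computation; the format strings below are made up for the example, not taken from rtl.def:

#include <stdio.h>
#include <string.h>

/* Offset of the first rtx-valued operand ('e', 'E' or 'V') in FORMAT,
   or -1 if there is none, as init_rtlanal records it.  */
static int
first_rtx_operand (const char *format)
{
  const char *first = strpbrk (format, "eEV");
  return first ? (int) (first - format) : -1;
}

int
main (void)
{
  printf ("%d\n", first_rtx_operand ("ee"));        /* 0 */
  printf ("%d\n", first_rtx_operand ("iuuBeiie"));  /* 4 */
  printf ("%d\n", first_rtx_operand ("w"));         /* -1 */
  return 0;
}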
5260 /* Check whether this is a constant pool constant. */
5261 bool
5262 constant_pool_constant_p (rtx x)
5264 x = avoid_constant_pool_reference (x);
5265 return CONST_DOUBLE_P (x);
5268 /* If M is a bitmask that selects a field of low-order bits within an item but
5269 not the entire word, return the length of the field. Return -1 otherwise.
5270 M is used in machine mode MODE. */
5272 int
5273 low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
5275 if (mode != VOIDmode)
5277 if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
5278 return -1;
5279 m &= GET_MODE_MASK (mode);
5282 return exact_log2 (m + 1);
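low_bitmask_len reduces the question "is M a mask of N low-order bits?" to a power-of-two test: M selects a low-order field exactly when M + 1 is a power of two, in which case N is log2 (M + 1). A standalone sketch of that test on plain 64-bit values (no machine mode; illustrative only):

#include <stdint.h>
#include <stdio.h>

/* exact_log2: log2 of X if X is a power of two, else -1.  */
static int
exact_log2_u64 (uint64_t x)
{
  int n = 0;
  if (x == 0 || (x & (x - 1)) != 0)
    return -1;
  while (x != 1)
    {
      x >>= 1;
      n++;
    }
  return n;
}

/* Length of a low-order bitmask M, or -1: M + 1 must be a power of two,
   which is the test low_bitmask_len performs after masking M to the mode.  */
static int
low_bitmask_len_u64 (uint64_t m)
{
  return exact_log2_u64 (m + 1);
}

int
main (void)
{
  printf ("%d\n", low_bitmask_len_u64 (0x3f));  /* 6 */
  printf ("%d\n", low_bitmask_len_u64 (0xf0));  /* -1: not a low-order field */
  return 0;
}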
5285 /* Return the mode of MEM's address. */
5287 enum machine_mode
5288 get_address_mode (rtx mem)
5290 enum machine_mode mode;
5292 gcc_assert (MEM_P (mem));
5293 mode = GET_MODE (XEXP (mem, 0));
5294 if (mode != VOIDmode)
5295 return mode;
5296 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
5299 /* Split up a CONST_DOUBLE or integer constant rtx
5300 into two rtx's for single words,
5301 storing in *FIRST the word that comes first in memory in the target
5302 and in *SECOND the other. */
5304 void
5305 split_double (rtx value, rtx *first, rtx *second)
5307 if (CONST_INT_P (value))
5309 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
5311 /* In this case the CONST_INT holds both target words.
5312 Extract the bits from it into two word-sized pieces.
5313 Sign extend each half to HOST_WIDE_INT. */
5314 unsigned HOST_WIDE_INT low, high;
5315 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
5316 unsigned bits_per_word = BITS_PER_WORD;
5318 /* Set sign_bit to the most significant bit of a word. */
5319 sign_bit = 1;
5320 sign_bit <<= bits_per_word - 1;
5322 /* Set mask so that all bits of the word are set. We could
5323 have used 1 << BITS_PER_WORD instead of basing the
5324 calculation on sign_bit. However, on machines where
5325 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
5326 compiler warning, even though the code would never be
5327 executed. */
5328 mask = sign_bit << 1;
5329 mask--;
5331 /* Set sign_extend as any remaining bits. */
5332 sign_extend = ~mask;
5334 /* Pick the lower word and sign-extend it. */
5335 low = INTVAL (value);
5336 low &= mask;
5337 if (low & sign_bit)
5338 low |= sign_extend;
5340 /* Pick the higher word, shifted to the least significant
5341 bits, and sign-extend it. */
5342 high = INTVAL (value);
5343 high >>= bits_per_word - 1;
5344 high >>= 1;
5345 high &= mask;
5346 if (high & sign_bit)
5347 high |= sign_extend;
5349 /* Store the words in the target machine order. */
5350 if (WORDS_BIG_ENDIAN)
5352 *first = GEN_INT (high);
5353 *second = GEN_INT (low);
5355 else
5357 *first = GEN_INT (low);
5358 *second = GEN_INT (high);
5361 else
5363 /* The rule for using CONST_INT for a wider mode
5364 is that we regard the value as signed.
5365 So sign-extend it. */
5366 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
5367 if (WORDS_BIG_ENDIAN)
5369 *first = high;
5370 *second = value;
5372 else
5374 *first = value;
5375 *second = high;
5379 else if (!CONST_DOUBLE_P (value))
5381 if (WORDS_BIG_ENDIAN)
5383 *first = const0_rtx;
5384 *second = value;
5386 else
5388 *first = value;
5389 *second = const0_rtx;
5392 else if (GET_MODE (value) == VOIDmode
5393 /* This is the old way we did CONST_DOUBLE integers. */
5394 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
5396 /* In an integer, the words are defined as most and least significant.
5397 So order them by the target's convention. */
5398 if (WORDS_BIG_ENDIAN)
5400 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
5401 *second = GEN_INT (CONST_DOUBLE_LOW (value));
5403 else
5405 *first = GEN_INT (CONST_DOUBLE_LOW (value));
5406 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
5409 else
5411 REAL_VALUE_TYPE r;
5412 long l[2];
5413 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
5415 /* Note, this converts the REAL_VALUE_TYPE to the target's
5416 format, splits up the floating point double and outputs
5417 exactly 32 bits of it into each of l[0] and l[1] --
5418 not necessarily BITS_PER_WORD bits. */
5419 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
5421 /* If 32 bits is an entire word for the target, but not for the host,
5422 then sign-extend on the host so that the number will look the same
5423 way on the host that it would on the target. See for instance
5424 simplify_unary_operation. The #if is needed to avoid compiler
5425 warnings. */
5427 #if HOST_BITS_PER_LONG > 32
5428 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
5430 if (l[0] & ((long) 1 << 31))
5431 l[0] |= ((long) (-1) << 32);
5432 if (l[1] & ((long) 1 << 31))
5433 l[1] |= ((long) (-1) << 32);
5435 #endif
5437 *first = GEN_INT (l[0]);
5438 *second = GEN_INT (l[1]);
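When a single HOST_WIDE_INT holds both target words, split_double extracts each word, sign-extends it to the host width, and orders the pair by WORDS_BIG_ENDIAN. A standalone sketch of that first branch, assuming a 32-bit BITS_PER_WORD, a 64-bit host and little-endian word order; the names are illustrative:

#include <stdint.h>
#include <stdio.h>

/* Split VALUE into two 32-bit target words, low word first, sign-extending
   each half to the host width as split_double does when one host integer
   holds both target words.  */
static void
split_double_32 (int64_t value, int64_t *first, int64_t *second)
{
  uint64_t sign_bit = (uint64_t) 1 << 31;
  uint64_t mask = (sign_bit << 1) - 1;		/* 0xffffffff */
  uint64_t sign_extend = ~mask;

  uint64_t low = (uint64_t) value & mask;
  if (low & sign_bit)
    low |= sign_extend;

  /* Two shifts, as in the source, to sidestep a full-width shift.  */
  uint64_t high = ((uint64_t) value >> 31) >> 1;
  high &= mask;
  if (high & sign_bit)
    high |= sign_extend;

  *first = (int64_t) low;
  *second = (int64_t) high;
}

int
main (void)
{
  int64_t lo, hi;
  split_double_32 (0x00000001ffffffffLL, &lo, &hi);
  /* Prints "low=-1 high=1": the low word is all ones, hence -1 when
     sign-extended; the high word is 1.  */
  printf ("low=%lld high=%lld\n", (long long) lo, (long long) hi);
  return 0;
}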
5442 /* Strip outer address "mutations" from LOC and return a pointer to the
5443 inner value. If OUTER_CODE is nonnull, store the code of the innermost
5444 stripped expression there.
5446 "Mutations" either convert between modes or apply some kind of
5447 alignment. */
5449 rtx *
5450 strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
5452 for (;;)
5454 enum rtx_code code = GET_CODE (*loc);
5455 if (GET_RTX_CLASS (code) == RTX_UNARY)
5456 /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
5457 used to convert between pointer sizes. */
5458 loc = &XEXP (*loc, 0);
5459 else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
5460 /* (and ... (const_int -X)) is used to align to X bytes. */
5461 loc = &XEXP (*loc, 0);
5462 else if (code == SUBREG
5463 && !OBJECT_P (SUBREG_REG (*loc))
5464 && subreg_lowpart_p (*loc))
5465 /* (subreg (operator ...) ...) inside ANDs is used for mode
5466 conversion too. */
5467 loc = &SUBREG_REG (*loc);
5468 else
5469 return loc;
5470 if (outer_code)
5471 *outer_code = code;
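strip_address_mutations peels conversions and alignment ANDs off an address until it reaches the term the rest of the decomposition cares about. A toy standalone sketch of the same loop over a made-up expression node, purely to show the shape of the traversal (none of these types exist in GCC):

#include <stdio.h>

enum toy_code { TOY_REG, TOY_ZERO_EXTEND, TOY_AND_ALIGN };

/* A toy expression node: CODE plus a single inner operand, if any.  */
struct toy_rtx
{
  enum toy_code code;
  struct toy_rtx *op0;
};

/* Strip "mutations" (conversions and alignment ANDs) until the inner
   value is reached, like the loop in strip_address_mutations.  */
static struct toy_rtx *
toy_strip_mutations (struct toy_rtx *x)
{
  for (;;)
    switch (x->code)
      {
      case TOY_ZERO_EXTEND:	/* mode conversion: look through it */
      case TOY_AND_ALIGN:	/* (and ... (const_int -X)): alignment */
	x = x->op0;
	break;
      default:
	return x;		/* reached the inner value */
      }
}

int
main (void)
{
  struct toy_rtx reg = { TOY_REG, 0 };
  struct toy_rtx align = { TOY_AND_ALIGN, &reg };
  struct toy_rtx ext = { TOY_ZERO_EXTEND, &align };
  /* Prints 0, i.e. TOY_REG: both outer wrappers were stripped.  */
  printf ("%d\n", (int) toy_strip_mutations (&ext)->code);
  return 0;
}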
5475 /* Return true if X must be a base rather than an index. */
5477 static bool
5478 must_be_base_p (rtx x)
5480 return GET_CODE (x) == LO_SUM;
5483 /* Return true if X must be an index rather than a base. */
5485 static bool
5486 must_be_index_p (rtx x)
5488 return GET_CODE (x) == MULT || GET_CODE (x) == ASHIFT;
5491 /* Set the segment part of address INFO to LOC, given that INNER is the
5492 unmutated value. */
5494 static void
5495 set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
5497 gcc_checking_assert (GET_CODE (*inner) == UNSPEC);
5499 gcc_assert (!info->segment);
5500 info->segment = loc;
5501 info->segment_term = inner;
5504 /* Set the base part of address INFO to LOC, given that INNER is the
5505 unmutated value. */
5507 static void
5508 set_address_base (struct address_info *info, rtx *loc, rtx *inner)
5510 if (GET_CODE (*inner) == LO_SUM)
5511 inner = strip_address_mutations (&XEXP (*inner, 0));
5512 gcc_checking_assert (REG_P (*inner)
5513 || MEM_P (*inner)
5514 || GET_CODE (*inner) == SUBREG);
5516 gcc_assert (!info->base);
5517 info->base = loc;
5518 info->base_term = inner;
5521 /* Set the index part of address INFO to LOC, given that INNER is the
5522 unmutated value. */
5524 static void
5525 set_address_index (struct address_info *info, rtx *loc, rtx *inner)
5527 if ((GET_CODE (*inner) == MULT || GET_CODE (*inner) == ASHIFT)
5528 && CONSTANT_P (XEXP (*inner, 1)))
5529 inner = strip_address_mutations (&XEXP (*inner, 0));
5530 gcc_checking_assert (REG_P (*inner)
5531 || MEM_P (*inner)
5532 || GET_CODE (*inner) == SUBREG);
5534 gcc_assert (!info->index);
5535 info->index = loc;
5536 info->index_term = inner;
5539 /* Set the displacement part of address INFO to LOC, given that INNER
5540 is the constant term. */
5542 static void
5543 set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
5545 gcc_checking_assert (CONSTANT_P (*inner));
5547 gcc_assert (!info->disp);
5548 info->disp = loc;
5549 info->disp_term = inner;
5552 /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address. Set up the
5553 rest of INFO accordingly. */
5555 static void
5556 decompose_incdec_address (struct address_info *info)
5558 info->autoinc_p = true;
5560 rtx *base = &XEXP (*info->inner, 0);
5561 set_address_base (info, base, base);
5562 gcc_checking_assert (info->base == info->base_term);
5564 /* These addresses are only valid when the size of the addressed
5565 value is known. */
5566 gcc_checking_assert (info->mode != VOIDmode);
5569 /* INFO->INNER describes a {PRE,POST}_MODIFY address. Set up the rest
5570 of INFO accordingly. */
5572 static void
5573 decompose_automod_address (struct address_info *info)
5575 info->autoinc_p = true;
5577 rtx *base = &XEXP (*info->inner, 0);
5578 set_address_base (info, base, base);
5579 gcc_checking_assert (info->base == info->base_term);
5581 rtx plus = XEXP (*info->inner, 1);
5582 gcc_assert (GET_CODE (plus) == PLUS);
5584 info->base_term2 = &XEXP (plus, 0);
5585 gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
5587 rtx *step = &XEXP (plus, 1);
5588 rtx *inner_step = strip_address_mutations (step);
5589 if (CONSTANT_P (*inner_step))
5590 set_address_disp (info, step, inner_step);
5591 else
5592 set_address_index (info, step, inner_step);
5595 /* Treat *LOC as a tree of PLUS operands and store pointers to the summed
5596 values in [PTR, END). Return a pointer to the end of the used array. */
5598 static rtx **
5599 extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
5601 rtx x = *loc;
5602 if (GET_CODE (x) == PLUS)
5604 ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
5605 ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
5607 else
5609 gcc_assert (ptr != end);
5610 *ptr++ = loc;
5612 return ptr;
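extract_plus_operands flattens a tree of PLUS nodes into a small fixed-size array of operand pointers, asserting that no address has more terms than expected. A toy standalone version of the same bounded recursion over a made-up sum node (illustrative types only):

#include <assert.h>
#include <stddef.h>
#include <stdio.h>

/* A made-up sum node; L and R are null for a leaf operand.  */
struct sum
{
  struct sum *l, *r;
  int value;
};

/* Collect pointers to the leaf operands of X into [PTR, END), returning
   the new end of the used array, as extract_plus_operands does for
   nested PLUS rtxes.  */
static struct sum **
collect_terms (struct sum *x, struct sum **ptr, struct sum **end)
{
  if (x->l)
    {
      ptr = collect_terms (x->l, ptr, end);
      ptr = collect_terms (x->r, ptr, end);
    }
  else
    {
      assert (ptr != end);	/* gcc_assert in the original */
      *ptr++ = x;
    }
  return ptr;
}

int
main (void)
{
  struct sum a = { 0, 0, 1 }, b = { 0, 0, 2 }, c = { 0, 0, 3 };
  struct sum ab = { &a, &b, 0 }, abc = { &ab, &c, 0 };
  struct sum *ops[4];
  size_t n = collect_terms (&abc, ops, ops + 4) - ops;
  /* Prints "3 terms, first=1, last=3".  */
  printf ("%zu terms, first=%d, last=%d\n", n, ops[0]->value, ops[n - 1]->value);
  return 0;
}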
5615 /* Evaluate the likelihood of X being a base or index value, returning
5616 positive if it is likely to be a base, negative if it is likely to be
5617 an index, and 0 if we can't tell. Make the magnitude of the return
5618 value reflect the amount of confidence we have in the answer.
5620 MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1. */
5622 static int
5623 baseness (rtx x, enum machine_mode mode, addr_space_t as,
5624 enum rtx_code outer_code, enum rtx_code index_code)
5626 /* See whether we can be certain. */
5627 if (must_be_base_p (x))
5628 return 3;
5629 if (must_be_index_p (x))
5630 return -3;
5632 /* Believe *_POINTER unless the address shape requires otherwise. */
5633 if (REG_P (x) && REG_POINTER (x))
5634 return 2;
5635 if (MEM_P (x) && MEM_POINTER (x))
5636 return 2;
5638 if (REG_P (x) && HARD_REGISTER_P (x))
5640 /* X is a hard register. If it only fits one of the base
5641 or index classes, choose that interpretation. */
5642 int regno = REGNO (x);
5643 bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
5644 bool index_p = REGNO_OK_FOR_INDEX_P (regno);
5645 if (base_p != index_p)
5646 return base_p ? 1 : -1;
5648 return 0;
5651 /* INFO->INNER describes a normal, non-automodified address.
5652 Fill in the rest of INFO accordingly. */
5654 static void
5655 decompose_normal_address (struct address_info *info)
5657 /* Treat the address as the sum of up to four values. */
5658 rtx *ops[4];
5659 size_t n_ops = extract_plus_operands (info->inner, ops,
5660 ops + ARRAY_SIZE (ops)) - ops;
5662 /* If there is more than one component, any base component is in a PLUS. */
5663 if (n_ops > 1)
5664 info->base_outer_code = PLUS;
5666 /* Separate the parts that contain a REG or MEM from those that don't.
5667 Record the latter in INFO and leave the former in OPS. */
5668 rtx *inner_ops[4];
5669 size_t out = 0;
5670 for (size_t in = 0; in < n_ops; ++in)
5672 rtx *loc = ops[in];
5673 rtx *inner = strip_address_mutations (loc);
5674 if (CONSTANT_P (*inner))
5675 set_address_disp (info, loc, inner);
5676 else if (GET_CODE (*inner) == UNSPEC)
5677 set_address_segment (info, loc, inner);
5678 else
5680 ops[out] = loc;
5681 inner_ops[out] = inner;
5682 ++out;
5686 /* Classify the remaining OPS members as bases and indexes. */
5687 if (out == 1)
5689 /* Assume that the remaining value is a base unless the shape
5690 requires otherwise. */
5691 if (!must_be_index_p (*inner_ops[0]))
5692 set_address_base (info, ops[0], inner_ops[0]);
5693 else
5694 set_address_index (info, ops[0], inner_ops[0]);
5696 else if (out == 2)
5698 /* In the event of a tie, assume the base comes first. */
5699 if (baseness (*inner_ops[0], info->mode, info->as, PLUS,
5700 GET_CODE (*ops[1]))
5701 >= baseness (*inner_ops[1], info->mode, info->as, PLUS,
5702 GET_CODE (*ops[0])))
5704 set_address_base (info, ops[0], inner_ops[0]);
5705 set_address_index (info, ops[1], inner_ops[1]);
5707 else
5709 set_address_base (info, ops[1], inner_ops[1]);
5710 set_address_index (info, ops[0], inner_ops[0]);
5713 else
5714 gcc_assert (out == 0);
5717 /* Describe address *LOC in *INFO. MODE is the mode of the addressed value,
5718 or VOIDmode if not known. AS is the address space associated with LOC.
5719 OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise. */
5721 void
5722 decompose_address (struct address_info *info, rtx *loc, enum machine_mode mode,
5723 addr_space_t as, enum rtx_code outer_code)
5725 memset (info, 0, sizeof (*info));
5726 info->mode = mode;
5727 info->as = as;
5728 info->addr_outer_code = outer_code;
5729 info->outer = loc;
5730 info->inner = strip_address_mutations (loc, &outer_code);
5731 info->base_outer_code = outer_code;
5732 switch (GET_CODE (*info->inner))
5734 case PRE_DEC:
5735 case PRE_INC:
5736 case POST_DEC:
5737 case POST_INC:
5738 decompose_incdec_address (info);
5739 break;
5741 case PRE_MODIFY:
5742 case POST_MODIFY:
5743 decompose_automod_address (info);
5744 break;
5746 default:
5747 decompose_normal_address (info);
5748 break;
5752 /* Describe address operand LOC in INFO. */
5754 void
5755 decompose_lea_address (struct address_info *info, rtx *loc)
5757 decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
5760 /* Describe the address of MEM X in INFO. */
5762 void
5763 decompose_mem_address (struct address_info *info, rtx x)
5765 gcc_assert (MEM_P (x));
5766 decompose_address (info, &XEXP (x, 0), GET_MODE (x),
5767 MEM_ADDR_SPACE (x), MEM);
5770 /* Update INFO after a change to the address it describes. */
5772 void
5773 update_address (struct address_info *info)
5775 decompose_address (info, info->outer, info->mode, info->as,
5776 info->addr_outer_code);
5779 /* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
5780 more complicated than that. */
5782 HOST_WIDE_INT
5783 get_index_scale (const struct address_info *info)
5785 rtx index = *info->index;
5786 if (GET_CODE (index) == MULT
5787 && CONST_INT_P (XEXP (index, 1))
5788 && info->index_term == &XEXP (index, 0))
5789 return INTVAL (XEXP (index, 1));
5791 if (GET_CODE (index) == ASHIFT
5792 && CONST_INT_P (XEXP (index, 1))
5793 && info->index_term == &XEXP (index, 0))
5794 return (HOST_WIDE_INT) 1 << INTVAL (XEXP (index, 1));
5796 if (info->index == info->index_term)
5797 return 1;
5799 return 0;
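The three shapes get_index_scale recognizes translate into scales as follows: (mult x (const_int N)) scales by N, (ashift x (const_int N)) scales by 1 << N, and a bare index term scales by 1; anything else yields 0. A small numeric sketch of that mapping (the enum is illustrative, not a GCC type):

#include <stdint.h>
#include <stdio.h>

enum index_kind { INDEX_MULT, INDEX_ASHIFT, INDEX_BARE };

/* Scale applied by an index term of shape KIND with constant N,
   mirroring get_index_scale.  */
static int64_t
index_scale (enum index_kind kind, int64_t n)
{
  switch (kind)
    {
    case INDEX_MULT:
      return n;
    case INDEX_ASHIFT:
      return (int64_t) 1 << n;
    case INDEX_BARE:
      return 1;
    }
  return 0;
}

int
main (void)
{
  /* (ashift x 2) scales the index by 4.  */
  printf ("%lld\n", (long long) index_scale (INDEX_ASHIFT, 2));
  return 0;
}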
5802 /* Return the "index code" of INFO, in the form required by
5803 ok_for_base_p_1. */
5805 enum rtx_code
5806 get_index_code (const struct address_info *info)
5808 if (info->index)
5809 return GET_CODE (*info->index);
5811 if (info->disp)
5812 return GET_CODE (*info->disp);
5814 return SCRATCH;