/* Analyze RTL for GNU compiler.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "rtl.h"
#include "insn-config.h"
#include "recog.h"
#include "target.h"
#include "output.h"
#include "tm_p.h"
#include "flags.h"
#include "regs.h"
#include "function.h"
#include "df.h"
#include "tree.h"
#include "emit-rtl.h"  /* FIXME: Can go away once crtl is moved to rtl.h.  */
#include "addresses.h"
/* Forward declarations */
static void set_of_1 (rtx, const_rtx, void *);
static bool covers_regno_p (const_rtx, unsigned int);
static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
static int rtx_referenced_p_1 (rtx *, void *);
static int computed_jump_p_1 (const_rtx);
static void parms_set (rtx, const_rtx, void *);

static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, enum machine_mode,
                                                   const_rtx, enum machine_mode,
                                                   unsigned HOST_WIDE_INT);
static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, enum machine_mode,
                                             const_rtx, enum machine_mode,
                                             unsigned HOST_WIDE_INT);
static unsigned int cached_num_sign_bit_copies (const_rtx, enum machine_mode, const_rtx,
                                                enum machine_mode,
                                                unsigned int);
static unsigned int num_sign_bit_copies1 (const_rtx, enum machine_mode, const_rtx,
                                           enum machine_mode, unsigned int);
/* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
   -1 if a code has no such operand.  */
static int non_rtx_starting_operands[NUM_RTX_CODE];

/* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
   If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
   SIGN_EXTEND then while narrowing we also have to enforce the
   representation and sign-extend the value to mode DESTINATION_REP.

   If the value is already sign-extended to DESTINATION_REP mode we
   can just switch to DESTINATION mode on it.  For each pair of
   integral modes SOURCE and DESTINATION, when truncating from SOURCE
   to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
   contains the number of high-order bits in SOURCE that have to be
   copies of the sign-bit so that we can do this mode-switch to
   DESTINATION.  */

static unsigned int
num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
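
/* Illustrative reading of the table above (added commentary, not from the
   original sources): if TARGET_MODE_REP_EXTENDED (SImode, DImode) were
   SIGN_EXTEND on some target, then num_sign_bit_copies_in_rep[DImode][SImode]
   would hold the number of high-order DImode bits that must already be
   copies of the sign bit before a truncation to SImode can be treated as a
   simple mode switch.  */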
/* Return 1 if the value of X is unstable
   (would be different at a different point in the program).
   The frame pointer, arg pointer, etc. are considered stable
   (within one function) and so is anything marked `unchanging'.  */

int
rtx_unstable_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  int i;
  const char *fmt;

  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* ??? When call-clobbered, the value is stable modulo the restore
         that must happen after a call.  This currently screws up local-alloc
         into believing that the restore is not needed.  */
      if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
        return 0;
      return 1;

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_unstable_p (XEXP (x, i)))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_unstable_p (XVECEXP (x, i, j)))
            return 1;
      }

  return 0;
}
/* Return 1 if X has a value that can vary even between two
   executions of the program.  0 means X can be compared reliably
   against certain constants or near-constants.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.
   The frame pointer and the arg pointer are considered constant.  */

bool
rtx_varies_p (const_rtx x, bool for_alias)
{
  RTX_CODE code;
  int i;
  const char *fmt;

  if (!x)
    return 0;

  code = GET_CODE (x);
  switch (code)
    {
    case MEM:
      return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);

    case CONST:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case REG:
      /* Note that we have to test for the actual rtx used for the frame
         and arg pointers and not just the register number in case we have
         eliminated the frame and/or arg pointer and are using it
         for pseudos.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      if (x == pic_offset_table_rtx
          /* ??? When call-clobbered, the value is stable modulo the restore
             that must happen after a call.  This currently screws up
             local-alloc into believing that the restore is not needed, so we
             must return 0 only if we are called from alias analysis.  */
          && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
        return 0;
      return 1;

    case LO_SUM:
      /* The operand 0 of a LO_SUM is considered constant
         (in fact it is related specifically to operand 1)
         during alias analysis.  */
      return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
             || rtx_varies_p (XEXP (x, 1), for_alias);

    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

      /* Fall through.  */

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }

  return 0;
}
/* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE
   bytes can cause a trap.  MODE is the mode of the MEM (not that of X) and
   UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
   references on strict alignment machines.  */

static int
rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
                       enum machine_mode mode, bool unaligned_mems)
{
  enum rtx_code code = GET_CODE (x);

  /* The offset must be a multiple of the mode size if we are considering
     unaligned memory references on strict alignment machines.  */
  if (STRICT_ALIGNMENT && unaligned_mems && GET_MODE_SIZE (mode) != 0)
    {
      HOST_WIDE_INT actual_offset = offset;

#ifdef SPARC_STACK_BOUNDARY_HACK
      /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
         the real alignment of %sp.  However, when it does this, the
         alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY.  */
      if (SPARC_STACK_BOUNDARY_HACK
          && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
        actual_offset -= STACK_POINTER_OFFSET;
#endif

      if (actual_offset % GET_MODE_SIZE (mode) != 0)
        return 1;
    }

  switch (code)
    {
    case SYMBOL_REF:
      if (SYMBOL_REF_WEAK (x))
        return 1;
      if (!CONSTANT_POOL_ADDRESS_P (x))
        {
          tree decl;
          HOST_WIDE_INT decl_size;

          if (offset < 0)
            return 1;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (size == 0)
            return offset != 0;

          /* If the size of the access or of the symbol is unknown,
             assume the worst.  */
          decl = SYMBOL_REF_DECL (x);

          /* Else check that the access is in bounds.  TODO: restructure
             expr_size/tree_expr_size/int_expr_size and just use the latter.  */
          if (!decl)
            decl_size = -1;
          else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
            decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl))
                         ? tree_to_shwi (DECL_SIZE_UNIT (decl))
                         : -1);
          else if (TREE_CODE (decl) == STRING_CST)
            decl_size = TREE_STRING_LENGTH (decl);
          else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
            decl_size = int_size_in_bytes (TREE_TYPE (decl));
          else
            decl_size = -1;

          return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
        }

      return 0;

    case LABEL_REF:
      return 0;

    case REG:
      /* Stack references are assumed not to trap, but we need to deal with
         nonsensical offsets.  */
      if (x == frame_pointer_rtx)
        {
          HOST_WIDE_INT adj_offset = offset - STARTING_FRAME_OFFSET;
          if (size == 0)
            size = GET_MODE_SIZE (mode);
          if (FRAME_GROWS_DOWNWARD)
            {
              if (adj_offset < frame_offset || adj_offset + size - 1 >= 0)
                return 1;
            }
          else
            {
              if (adj_offset < 0 || adj_offset + size - 1 >= frame_offset)
                return 1;
            }
          return 0;
        }
      /* ??? Need to add a similar guard for nonsensical offsets.  */
      if (x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          /* The arg pointer varies if it is not a fixed register.  */
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return 0;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return 0;
      return 1;

    case CONST:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    case PLUS:
      /* An address is assumed not to trap if:
         - it is the pic register plus a constant.  */
      if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
        return 0;

      /* - or it is an address that can't trap plus a constant integer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
                                     size, mode, unaligned_mems))
        return 0;

      return 1;

    case LO_SUM:
    case PRE_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
                                    mode, unaligned_mems);

    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
                                    mode, unaligned_mems);

    default:
      break;
    }

  /* If it isn't one of the cases above, it can cause a trap.  */
  return 1;
}
/* Return nonzero if the use of X as an address in a MEM can cause a trap.  */

int
rtx_addr_can_trap_p (const_rtx x)
{
  return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
}
/* Return true if X is an address that is known to not be zero.  */

bool
nonzero_address_p (const_rtx x)
{
  const enum rtx_code code = GET_CODE (x);

  switch (code)
    {
    case SYMBOL_REF:
      return !SYMBOL_REF_WEAK (x);

    case LABEL_REF:
      return true;

    case REG:
      /* As in rtx_varies_p, we have to use the actual rtx, not reg number.  */
      if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
          || x == stack_pointer_rtx
          || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
        return true;
      /* All of the virtual frame registers are stack references.  */
      if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
          && REGNO (x) <= LAST_VIRTUAL_REGISTER)
        return true;
      return false;

    case CONST:
      return nonzero_address_p (XEXP (x, 0));

    case PLUS:
      /* Handle PIC references.  */
      if (XEXP (x, 0) == pic_offset_table_rtx
          && CONSTANT_P (XEXP (x, 1)))
        return true;
      return false;

    case PRE_MODIFY:
      /* Similar to the above; allow positive offsets.  Further, since
         auto-inc is only allowed in memories, the register must be a
         pointer.  */
      if (CONST_INT_P (XEXP (x, 1))
          && INTVAL (XEXP (x, 1)) > 0)
        return true;
      return nonzero_address_p (XEXP (x, 0));

    case PRE_INC:
      /* Similarly.  Further, the offset is always positive.  */
      return true;

    case PRE_DEC:
    case POST_DEC:
    case POST_INC:
    case POST_MODIFY:
      return nonzero_address_p (XEXP (x, 0));

    case LO_SUM:
      return nonzero_address_p (XEXP (x, 1));

    default:
      break;
    }

  /* If it isn't one of the cases above, it might be zero.  */
  return false;
}
/* Return 1 if X refers to a memory location whose address
   cannot be compared reliably with constant addresses,
   or if X refers to a BLKmode memory object.
   FOR_ALIAS is nonzero if we are called from alias analysis; if it is
   zero, we are slightly more conservative.  */

bool
rtx_addr_varies_p (const_rtx x, bool for_alias)
{
  enum rtx_code code;
  int i;
  const char *fmt;

  if (x == 0)
    return 0;

  code = GET_CODE (x);
  if (code == MEM)
    return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    if (fmt[i] == 'e')
      {
        if (rtx_addr_varies_p (XEXP (x, i), for_alias))
          return 1;
      }
    else if (fmt[i] == 'E')
      {
        int j;
        for (j = 0; j < XVECLEN (x, i); j++)
          if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
            return 1;
      }
  return 0;
}
/* Return the CALL in X if there is one.  */

rtx
get_call_rtx_from (rtx x)
{
  if (INSN_P (x))
    x = PATTERN (x);
  if (GET_CODE (x) == PARALLEL)
    x = XVECEXP (x, 0, 0);
  if (GET_CODE (x) == SET)
    x = SET_SRC (x);
  if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
    return x;
  return NULL_RTX;
}
/* Return the value of the integer term in X, if one is apparent;
   otherwise return 0.
   Only obvious integer terms are detected.
   This is used in cse.c with the `related_value' field.  */

HOST_WIDE_INT
get_integer_term (const_rtx x)
{
  if (GET_CODE (x) == CONST)
    x = XEXP (x, 0);

  if (GET_CODE (x) == MINUS
      && CONST_INT_P (XEXP (x, 1)))
    return - INTVAL (XEXP (x, 1));
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return INTVAL (XEXP (x, 1));
  return 0;
}
/* If X is a constant, return the value sans apparent integer term;
   otherwise return 0.
   Only obvious integer terms are detected.  */

rtx
get_related_value (const_rtx x)
{
  if (GET_CODE (x) != CONST)
    return 0;
  x = XEXP (x, 0);
  if (GET_CODE (x) == PLUS
      && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  else if (GET_CODE (x) == MINUS
           && CONST_INT_P (XEXP (x, 1)))
    return XEXP (x, 0);
  return 0;
}
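
/* Illustrative example (added commentary, not from the original sources):
   for X = (const (plus (symbol_ref "foo") (const_int 8))), get_integer_term
   returns 8 and get_related_value returns the (symbol_ref "foo") operand;
   for the MINUS form the term is returned negated.  */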
/* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
   to somewhere in the same object or object_block as SYMBOL.  */

bool
offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
{
  tree decl;

  if (GET_CODE (symbol) != SYMBOL_REF)
    return false;

  if (offset == 0)
    return true;

  if (offset > 0)
    {
      if (CONSTANT_POOL_ADDRESS_P (symbol)
          && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
        return true;

      decl = SYMBOL_REF_DECL (symbol);
      if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
        return true;
    }

  if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
      && SYMBOL_REF_BLOCK (symbol)
      && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
      && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
          < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
    return true;

  return false;
}
/* Split X into a base and a constant offset, storing them in *BASE_OUT
   and *OFFSET_OUT respectively.  */

void
split_const (rtx x, rtx *base_out, rtx *offset_out)
{
  if (GET_CODE (x) == CONST)
    {
      x = XEXP (x, 0);
      if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
        {
          *base_out = XEXP (x, 0);
          *offset_out = XEXP (x, 1);
          return;
        }
    }
  *base_out = x;
  *offset_out = const0_rtx;
}
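
/* Illustrative example (added commentary, not from the original sources):
   for X = (const (plus (symbol_ref "bar") (const_int -4))), split_const
   stores (symbol_ref "bar") in *BASE_OUT and (const_int -4) in *OFFSET_OUT;
   any other X is stored unchanged with a const0_rtx offset.  */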
/* Return the number of places FIND appears within X.  If COUNT_DEST is
   zero, we do not count occurrences inside the destination of a SET.  */

int
count_occurrences (const_rtx x, const_rtx find, int count_dest)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int count;

  if (x == find)
    return 1;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return 0;

    case EXPR_LIST:
      count = count_occurrences (XEXP (x, 0), find, count_dest);
      if (XEXP (x, 1))
        count += count_occurrences (XEXP (x, 1), find, count_dest);
      return count;

    case MEM:
      if (MEM_P (find) && rtx_equal_p (x, find))
        return 1;
      break;

    case SET:
      if (SET_DEST (x) == find && ! count_dest)
        return count_occurrences (SET_SRC (x), find, count_dest);
      break;

    default:
      break;
    }

  format_ptr = GET_RTX_FORMAT (code);
  count = 0;

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          count += count_occurrences (XEXP (x, i), find, count_dest);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
          break;
        }
    }
  return count;
}
/* Return TRUE if OP is a register or subreg of a register that
   holds an unsigned quantity.  Otherwise, return FALSE.  */

bool
unsigned_reg_p (rtx op)
{
  if (REG_P (op)
      && REG_EXPR (op)
      && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
    return true;

  if (GET_CODE (op) == SUBREG
      && SUBREG_PROMOTED_UNSIGNED_P (op))
    return true;

  return false;
}
/* Nonzero if register REG appears somewhere within IN.
   Also works if REG is not a register; in this case it checks
   for a subexpression of IN that is Lisp "equal" to REG.  */

int
reg_mentioned_p (const_rtx reg, const_rtx in)
{
  const char *fmt;
  int i;
  enum rtx_code code;

  if (in == 0)
    return 0;

  if (reg == in)
    return 1;

  if (GET_CODE (in) == LABEL_REF)
    return reg == XEXP (in, 0);

  code = GET_CODE (in);

  switch (code)
    {
      /* Compare registers by number.  */
    case REG:
      return REG_P (reg) && REGNO (in) == REGNO (reg);

      /* These codes have no constituent expressions
         and are unique.  */
    case SCRATCH:
    case CC0:
    case PC:
      return 0;

    CASE_CONST_ANY:
      /* These are kept unique for a given value.  */
      return 0;

    default:
      break;
    }

  if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
    return 1;

  fmt = GET_RTX_FORMAT (code);

  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (in, i) - 1; j >= 0; j--)
            if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
              return 1;
        }
      else if (fmt[i] == 'e'
               && reg_mentioned_p (reg, XEXP (in, i)))
        return 1;
    }

  return 0;
}
/* Return 1 if in between BEG and END, exclusive of BEG and END, there is
   no CODE_LABEL insn.  */

int
no_labels_between_p (const_rtx beg, const_rtx end)
{
  rtx p;
  if (beg == end)
    return 0;
  for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
    if (LABEL_P (p))
      return 0;
  return 1;
}
/* Nonzero if register REG is used in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_used_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
{
  rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (NONDEBUG_INSN_P (insn)
        && (reg_overlap_mentioned_p (reg, PATTERN (insn))
            || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
      return 1;
  return 0;
}
/* Nonzero if the old value of X, a register, is referenced in BODY.  If X
   is entirely replaced by a new value and the only use is as a SET_DEST,
   we do not consider it a reference.  */

int
reg_referenced_p (const_rtx x, const_rtx body)
{
  int i;

  switch (GET_CODE (body))
    {
    case SET:
      if (reg_overlap_mentioned_p (x, SET_SRC (body)))
        return 1;

      /* If the destination is anything other than CC0, PC, a REG or a SUBREG
         of a REG that occupies all of the REG, the insn references X if
         it is mentioned in the destination.  */
      if (GET_CODE (SET_DEST (body)) != CC0
          && GET_CODE (SET_DEST (body)) != PC
          && !REG_P (SET_DEST (body))
          && ! (GET_CODE (SET_DEST (body)) == SUBREG
                && REG_P (SUBREG_REG (SET_DEST (body)))
                && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
                      + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
                    == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
                         + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
          && reg_overlap_mentioned_p (x, SET_DEST (body)))
        return 1;
      return 0;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
          return 1;
      return 0;

    case CALL:
    case USE:
    case IF_THEN_ELSE:
      return reg_overlap_mentioned_p (x, body);

    case TRAP_IF:
      return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));

    case PREFETCH:
      return reg_overlap_mentioned_p (x, XEXP (body, 0));

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        if (reg_referenced_p (x, XVECEXP (body, 0, i)))
          return 1;
      return 0;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
          return 1;
      return 0;

    case COND_EXEC:
      if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
        return 1;
      return reg_referenced_p (x, COND_EXEC_CODE (body));

    default:
      return 0;
    }
}
/* Nonzero if register REG is set or clobbered in an insn between
   FROM_INSN and TO_INSN (exclusive of those two).  */

int
reg_set_between_p (const_rtx reg, const_rtx from_insn, const_rtx to_insn)
{
  const_rtx insn;

  if (from_insn == to_insn)
    return 0;

  for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && reg_set_p (reg, insn))
      return 1;
  return 0;
}
/* Internals of reg_set_between_p.  */
int
reg_set_p (const_rtx reg, const_rtx insn)
{
  /* After delay slot handling, call and branch insns might be in a
     sequence.  Check all the elements there.  */
  if (INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      for (int i = 0; i < XVECLEN (PATTERN (insn), 0); ++i)
        if (reg_set_p (reg, XVECEXP (PATTERN (insn), 0, i)))
          return true;

      return false;
    }

  /* We can be passed an insn or part of one.  If we are passed an insn,
     check if a side-effect of the insn clobbers REG.  */
  if (INSN_P (insn)
      && (FIND_REG_INC_NOTE (insn, reg)
          || (CALL_P (insn)
              && ((REG_P (reg)
                   && REGNO (reg) < FIRST_PSEUDO_REGISTER
                   && overlaps_hard_reg_set_p (regs_invalidated_by_call,
                                               GET_MODE (reg), REGNO (reg)))
                  || MEM_P (reg)
                  || find_reg_fusage (insn, CLOBBER, reg)))))
    return true;

  return set_of (reg, insn) != NULL_RTX;
}
/* Similar to reg_set_between_p, but check all registers in X.  Return 0
   only if none of them are modified between START and END.  Return 1 if
   X contains a MEM; this routine does use memory aliasing.  */

int
modified_between_p (const_rtx x, const_rtx start, const_rtx end)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;
  rtx insn;

  if (start == end)
    return 0;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_between_p (XEXP (x, 0), start, end))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
        if (memory_modified_in_insn_p (x, insn))
          return 1;
      return 0;
      break;

    case REG:
      return reg_set_between_p (x, start, end);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_between_p (XVECEXP (x, i, j), start, end))
            return 1;
    }

  return 0;
}
/* Similar to reg_set_p, but check all registers in X.  Return 0 only if none
   of them are modified in INSN.  Return 1 if X contains a MEM; this routine
   does use memory aliasing.  */

int
modified_in_p (const_rtx x, const_rtx insn)
{
  const enum rtx_code code = GET_CODE (x);
  const char *fmt;
  int i, j;

  switch (code)
    {
    CASE_CONST_ANY:
    case CONST:
    case SYMBOL_REF:
    case LABEL_REF:
      return 0;

    case PC:
    case CC0:
      return 1;

    case MEM:
      if (modified_in_p (XEXP (x, 0), insn))
        return 1;
      if (MEM_READONLY_P (x))
        return 0;
      if (memory_modified_in_insn_p (x, insn))
        return 1;
      return 0;
      break;

    case REG:
      return reg_set_p (x, insn);

    default:
      break;
    }

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
        return 1;

      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          if (modified_in_p (XVECEXP (x, i, j), insn))
            return 1;
    }

  return 0;
}
/* Helper function for set_of.  */
struct set_of_data
  {
    const_rtx found;
    const_rtx pat;
  };

static void
set_of_1 (rtx x, const_rtx pat, void *data1)
{
  struct set_of_data *const data = (struct set_of_data *) (data1);
  if (rtx_equal_p (x, data->pat)
      || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
    data->found = pat;
}

/* Given an INSN, return a SET or CLOBBER expression that does modify PAT
   (either directly or via STRICT_LOW_PART and similar modifiers).  */
const_rtx
set_of (const_rtx pat, const_rtx insn)
{
  struct set_of_data data;
  data.found = NULL_RTX;
  data.pat = pat;
  note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
  return data.found;
}
/* This function, called through note_stores, collects sets and
   clobbers of hard registers in a HARD_REG_SET, which is pointed to
   by DATA.  */
void
record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
{
  HARD_REG_SET *pset = (HARD_REG_SET *) data;
  if (REG_P (x) && HARD_REGISTER_P (x))
    add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
}

/* Examine INSN, and compute the set of hard registers written by it.
   Store it in *PSET.  Should only be called after reload.  */
void
find_all_hard_reg_sets (const_rtx insn, HARD_REG_SET *pset)
{
  rtx link;

  CLEAR_HARD_REG_SET (*pset);
  note_stores (PATTERN (insn), record_hard_reg_sets, pset);
  if (CALL_P (insn))
    IOR_HARD_REG_SET (*pset, call_used_reg_set);
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_INC)
      record_hard_reg_sets (XEXP (link, 0), NULL, pset);
}

/* A for_each_rtx subroutine of record_hard_reg_uses.  */
static int
record_hard_reg_uses_1 (rtx *px, void *data)
{
  rtx x = *px;
  HARD_REG_SET *pused = (HARD_REG_SET *) data;

  if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
    {
      int nregs = hard_regno_nregs[REGNO (x)][GET_MODE (x)];
      while (nregs-- > 0)
        SET_HARD_REG_BIT (*pused, REGNO (x) + nregs);
    }
  return 0;
}

/* Like record_hard_reg_sets, but called through note_uses.  */
void
record_hard_reg_uses (rtx *px, void *data)
{
  for_each_rtx (px, record_hard_reg_uses_1, data);
}
/* Given an INSN, return a SET expression if this insn has only a single SET.
   It may also have CLOBBERs, USEs, or SETs whose output
   will not be used, which we ignore.  */

rtx
single_set_2 (const_rtx insn, const_rtx pat)
{
  rtx set = NULL;
  int set_verified = 1;
  int i;

  if (GET_CODE (pat) == PARALLEL)
    {
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx sub = XVECEXP (pat, 0, i);
          switch (GET_CODE (sub))
            {
            case USE:
            case CLOBBER:
              break;

            case SET:
              /* We can consider insns having multiple sets, where all
                 but one are dead, as single set insns.  In the common case
                 only a single set is present in the pattern, so we want
                 to avoid checking for REG_UNUSED notes unless necessary.

                 When we reach a set the first time, we just expect it to be
                 the single set we are looking for; only when more
                 sets are found in the insn do we check them.  */
              if (!set_verified)
                {
                  if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
                      && !side_effects_p (set))
                    set = NULL;
                  else
                    set_verified = 1;
                }
              if (!set)
                set = sub, set_verified = 0;
              else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
                       || side_effects_p (sub))
                return NULL_RTX;
              break;

            default:
              return NULL_RTX;
            }
        }
    }
  return set;
}
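
/* Note (added commentary, not from the original sources): callers normally
   use the single_set inline from rtl.h, which returns a lone SET pattern
   directly and defers to single_set_2 only for more complex patterns such
   as PARALLELs.  */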
/* Given an INSN, return nonzero if it has more than one SET, else return
   zero.  */

int
multiple_sets (const_rtx insn)
{
  int found;
  int i;

  /* INSN must be an insn.  */
  if (! INSN_P (insn))
    return 0;

  /* Only a PARALLEL can have multiple SETs.  */
  if (GET_CODE (PATTERN (insn)) == PARALLEL)
    {
      for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
        if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
          {
            /* If we have already found a SET, then return now.  */
            if (found)
              return 1;
            else
              found = 1;
          }
    }

  /* Either zero or one SET.  */
  return 0;
}
/* Return nonzero if the destination of SET equals the source
   and there are no side effects.  */

int
set_noop_p (const_rtx set)
{
  rtx src = SET_SRC (set);
  rtx dst = SET_DEST (set);

  if (dst == pc_rtx && src == pc_rtx)
    return 1;

  if (MEM_P (dst) && MEM_P (src))
    return rtx_equal_p (dst, src) && !side_effects_p (dst);

  if (GET_CODE (dst) == ZERO_EXTRACT)
    return rtx_equal_p (XEXP (dst, 0), src)
           && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
           && !side_effects_p (src);

  if (GET_CODE (dst) == STRICT_LOW_PART)
    dst = XEXP (dst, 0);

  if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
    {
      if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
        return 0;
      src = SUBREG_REG (src);
      dst = SUBREG_REG (dst);
    }

  /* It is a NOOP if destination overlaps with selected src vector
     elements.  */
  if (GET_CODE (src) == VEC_SELECT
      && REG_P (XEXP (src, 0)) && REG_P (dst)
      && HARD_REGISTER_P (XEXP (src, 0))
      && HARD_REGISTER_P (dst))
    {
      int i;
      rtx par = XEXP (src, 1);
      rtx src0 = XEXP (src, 0);
      int c0 = INTVAL (XVECEXP (par, 0, 0));
      HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;

      for (i = 1; i < XVECLEN (par, 0); i++)
        if (INTVAL (XVECEXP (par, 0, i)) != c0 + i)
          return 0;
      return
        simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
                               offset, GET_MODE (dst)) == (int) REGNO (dst);
    }

  return (REG_P (src) && REG_P (dst)
          && REGNO (src) == REGNO (dst));
}
/* Return nonzero if an insn consists only of SETs, each of which only sets a
   value to itself.  */

int
noop_move_p (const_rtx insn)
{
  rtx pat = PATTERN (insn);

  if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
    return 1;

  /* Insns carrying these notes are useful later on.  */
  if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
    return 0;

  /* Check the code to be executed for COND_EXEC.  */
  if (GET_CODE (pat) == COND_EXEC)
    pat = COND_EXEC_CODE (pat);

  if (GET_CODE (pat) == SET && set_noop_p (pat))
    return 1;

  if (GET_CODE (pat) == PARALLEL)
    {
      int i;
      /* If nothing but SETs of registers to themselves,
         this insn can also be deleted.  */
      for (i = 0; i < XVECLEN (pat, 0); i++)
        {
          rtx tem = XVECEXP (pat, 0, i);

          if (GET_CODE (tem) == USE
              || GET_CODE (tem) == CLOBBER)
            continue;

          if (GET_CODE (tem) != SET || ! set_noop_p (tem))
            return 0;
        }

      return 1;
    }
  return 0;
}
/* Return the last thing that X was assigned from before *PINSN.  If VALID_TO
   is not NULL_RTX then verify that the object is not modified up to VALID_TO.
   If the object was modified, if we hit a partial assignment to X, or hit a
   CODE_LABEL first, return X.  If we found an assignment, update *PINSN to
   point to it.  ALLOW_HWREG is set to 1 if hardware registers are allowed to
   be the src.  */

rtx
find_last_value (rtx x, rtx *pinsn, rtx valid_to, int allow_hwreg)
{
  rtx p;

  for (p = PREV_INSN (*pinsn); p && !LABEL_P (p);
       p = PREV_INSN (p))
    if (INSN_P (p))
      {
        rtx set = single_set (p);
        rtx note = find_reg_note (p, REG_EQUAL, NULL_RTX);

        if (set && rtx_equal_p (x, SET_DEST (set)))
          {
            rtx src = SET_SRC (set);

            if (note && GET_CODE (XEXP (note, 0)) != EXPR_LIST)
              src = XEXP (note, 0);

            if ((valid_to == NULL_RTX
                 || ! modified_between_p (src, PREV_INSN (p), valid_to))
                /* Reject hard registers because we don't usually want
                   to use them; we'd rather use a pseudo.  */
                && (! (REG_P (src)
                       && REGNO (src) < FIRST_PSEUDO_REGISTER) || allow_hwreg))
              {
                *pinsn = p;
                return src;
              }
          }

        /* If set in non-simple way, we don't have a value.  */
        if (reg_set_p (x, p))
          break;
      }

  return x;
}
/* Return nonzero if register in range [REGNO, ENDREGNO)
   appears either explicitly or implicitly in X
   other than being stored into.

   References contained within the substructure at LOC do not count.
   LOC may be zero, meaning don't ignore anything.  */

int
refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
                   rtx *loc)
{
  int i;
  unsigned int x_regno;
  RTX_CODE code;
  const char *fmt;

 repeat:
  /* The contents of a REG_NONNEG note is always zero, so we must come here
     upon repeat in case the last REG_NOTE is a REG_NONNEG note.  */
  if (x == 0)
    return 0;

  code = GET_CODE (x);

  switch (code)
    {
    case REG:
      x_regno = REGNO (x);

      /* If we are modifying the stack, frame, or argument pointer, it will
         clobber a virtual register.  In fact, we could be more precise,
         but it isn't worth it.  */
      if ((x_regno == STACK_POINTER_REGNUM
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
           || x_regno == ARG_POINTER_REGNUM
#endif
           || x_regno == FRAME_POINTER_REGNUM)
          && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
        return 1;

      return endregno > x_regno && regno < END_REGNO (x);

    case SUBREG:
      /* If this is a SUBREG of a hard reg, we can see exactly which
         registers are being modified.  Otherwise, handle normally.  */
      if (REG_P (SUBREG_REG (x))
          && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
        {
          unsigned int inner_regno = subreg_regno (x);
          unsigned int inner_endregno
            = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
                             ? subreg_nregs (x) : 1);

          return endregno > inner_regno && regno < inner_endregno;
        }
      break;

    case CLOBBER:
    case SET:
      if (&SET_DEST (x) != loc
          /* Note setting a SUBREG counts as referring to the REG it is in for
             a pseudo but not for hard registers since we can
             treat each word individually.  */
          && ((GET_CODE (SET_DEST (x)) == SUBREG
               && loc != &SUBREG_REG (SET_DEST (x))
               && REG_P (SUBREG_REG (SET_DEST (x)))
               && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
               && refers_to_regno_p (regno, endregno,
                                     SUBREG_REG (SET_DEST (x)), loc))
              || (!REG_P (SET_DEST (x))
                  && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
        return 1;

      if (code == CLOBBER || loc == &SET_SRC (x))
        return 0;
      x = SET_SRC (x);
      goto repeat;

    default:
      break;
    }

  /* X does not match, so try its subexpressions.  */

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e' && loc != &XEXP (x, i))
        {
          if (i == 0)
            {
              x = XEXP (x, 0);
              goto repeat;
            }
          else
            if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
              return 1;
        }
      else if (fmt[i] == 'E')
        {
          int j;
          for (j = XVECLEN (x, i) - 1; j >= 0; j--)
            if (loc != &XVECEXP (x, i, j)
                && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
              return 1;
        }
    }
  return 0;
}
/* Nonzero if modifying X will affect IN.  If X is a register or a SUBREG,
   we check if any register number in X conflicts with the relevant register
   numbers.  If X is a constant, return 0.  If X is a MEM, return 1 iff IN
   contains a MEM (we don't bother checking for memory addresses that can't
   conflict because we expect this to be a rare case).  */

int
reg_overlap_mentioned_p (const_rtx x, const_rtx in)
{
  unsigned int regno, endregno;

  /* If either argument is a constant, then modifying X can not
     affect IN.  Here we look at IN, we can profitably combine
     CONSTANT_P (x) with the switch statement below.  */
  if (CONSTANT_P (in))
    return 0;

 recurse:
  switch (GET_CODE (x))
    {
    case STRICT_LOW_PART:
    case ZERO_EXTRACT:
    case SIGN_EXTRACT:
      /* Overly conservative.  */
      x = XEXP (x, 0);
      goto recurse;

    case SUBREG:
      regno = REGNO (SUBREG_REG (x));
      if (regno < FIRST_PSEUDO_REGISTER)
        regno = subreg_regno (x);
      endregno = regno + (regno < FIRST_PSEUDO_REGISTER
                          ? subreg_nregs (x) : 1);
      goto do_reg;

    case REG:
      regno = REGNO (x);
      endregno = END_REGNO (x);
    do_reg:
      return refers_to_regno_p (regno, endregno, in, (rtx*) 0);

    case MEM:
      {
        const char *fmt;
        int i;

        if (MEM_P (in))
          return 1;

        fmt = GET_RTX_FORMAT (GET_CODE (in));
        for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
          if (fmt[i] == 'e')
            {
              if (reg_overlap_mentioned_p (x, XEXP (in, i)))
                return 1;
            }
          else if (fmt[i] == 'E')
            {
              int j;
              for (j = XVECLEN (in, i) - 1; j >= 0; --j)
                if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
                  return 1;
            }

        return 0;
      }

    case SCRATCH:
    case PC:
    case CC0:
      return reg_mentioned_p (x, in);

    case PARALLEL:
      {
        int i;

        /* If any register in here refers to it we return true.  */
        for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
          if (XEXP (XVECEXP (x, 0, i), 0) != 0
              && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
            return 1;
        return 0;
      }

    default:
      gcc_assert (CONSTANT_P (x));
      return 0;
    }
}
/* Call FUN on each register or MEM that is stored into or clobbered by X.
   (X would be the pattern of an insn).  DATA is an arbitrary pointer,
   ignored by note_stores, but passed to FUN.

   FUN receives three arguments:
   1. the REG, MEM, CC0 or PC being stored in or clobbered,
   2. the SET or CLOBBER rtx that does the store,
   3. the pointer DATA provided to note_stores.

   If the item being stored in or clobbered is a SUBREG of a hard register,
   the SUBREG will be passed.  */

void
note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
{
  int i;

  if (GET_CODE (x) == COND_EXEC)
    x = COND_EXEC_CODE (x);

  if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
    {
      rtx dest = SET_DEST (x);

      while ((GET_CODE (dest) == SUBREG
              && (!REG_P (SUBREG_REG (dest))
                  || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
             || GET_CODE (dest) == ZERO_EXTRACT
             || GET_CODE (dest) == STRICT_LOW_PART)
        dest = XEXP (dest, 0);

      /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
         each of whose first operand is a register.  */
      if (GET_CODE (dest) == PARALLEL)
        {
          for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
            if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
              (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
        }
      else
        (*fun) (dest, x, data);
    }

  else if (GET_CODE (x) == PARALLEL)
    for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
      note_stores (XVECEXP (x, 0, i), fun, data);
}
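
/* Illustrative usage (added commentary, not from the original sources),
   mirroring find_all_hard_reg_sets above:

     HARD_REG_SET set;
     CLEAR_HARD_REG_SET (set);
     note_stores (PATTERN (insn), record_hard_reg_sets, &set);

   After the call, SET contains every hard register written by the
   pattern of INSN.  */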
/* Like note_stores, but call FUN for each expression that is being
   referenced in PBODY, a pointer to the PATTERN of an insn.  We only call
   FUN for each expression, not any interior subexpressions.  FUN receives a
   pointer to the expression and the DATA passed to this function.

   Note that this is not quite the same test as that done in reg_referenced_p
   since that considers something as being referenced if it is being
   partially set, while we do not.  */

void
note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
{
  rtx body = *pbody;
  int i;

  switch (GET_CODE (body))
    {
    case COND_EXEC:
      (*fun) (&COND_EXEC_TEST (body), data);
      note_uses (&COND_EXEC_CODE (body), fun, data);
      return;

    case PARALLEL:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&XVECEXP (body, 0, i), fun, data);
      return;

    case SEQUENCE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
      return;

    case USE:
      (*fun) (&XEXP (body, 0), data);
      return;

    case ASM_OPERANDS:
      for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
        (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
      return;

    case TRAP_IF:
      (*fun) (&TRAP_CONDITION (body), data);
      return;

    case PREFETCH:
      (*fun) (&XEXP (body, 0), data);
      return;

    case UNSPEC:
    case UNSPEC_VOLATILE:
      for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
        (*fun) (&XVECEXP (body, 0, i), data);
      return;

    case CLOBBER:
      if (MEM_P (XEXP (body, 0)))
        (*fun) (&XEXP (XEXP (body, 0), 0), data);
      return;

    case SET:
      {
        rtx dest = SET_DEST (body);

        /* For sets we replace everything in source plus registers in memory
           expression in store and operands of a ZERO_EXTRACT.  */
        (*fun) (&SET_SRC (body), data);

        if (GET_CODE (dest) == ZERO_EXTRACT)
          {
            (*fun) (&XEXP (dest, 1), data);
            (*fun) (&XEXP (dest, 2), data);
          }

        while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
          dest = XEXP (dest, 0);

        if (MEM_P (dest))
          (*fun) (&XEXP (dest, 0), data);
      }
      return;

    default:
      /* All the other possibilities never store.  */
      (*fun) (pbody, data);
      return;
    }
}
/* Return nonzero if X's old contents don't survive after INSN.
   This will be true if X is (cc0) or if X is a register and
   X dies in INSN or because INSN entirely sets X.

   "Entirely set" means set directly and not through a SUBREG, or
   ZERO_EXTRACT, so no trace of the old contents remains.
   Likewise, REG_INC does not count.

   REG may be a hard or pseudo reg.  Renumbering is not taken into account,
   but for this use that makes no difference, since regs don't overlap
   during their lifetimes.  Therefore, this function may be used
   at any time after deaths have been computed.

   If REG is a hard reg that occupies multiple machine registers, this
   function will only return 1 if each of those registers will be replaced
   by INSN.  */

int
dead_or_set_p (const_rtx insn, const_rtx x)
{
  unsigned int regno, end_regno;
  unsigned int i;

  /* Can't use cc0_rtx below since this file is used by genattrtab.c.  */
  if (GET_CODE (x) == CC0)
    return 1;

  gcc_assert (REG_P (x));

  regno = REGNO (x);
  end_regno = END_REGNO (x);
  for (i = regno; i < end_regno; i++)
    if (! dead_or_set_regno_p (insn, i))
      return 0;

  return 1;
}
/* Return TRUE iff DEST is a register or subreg of a register and
   doesn't change the number of words of the inner register, and any
   part of the register is TEST_REGNO.  */

static bool
covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
{
  unsigned int regno, endregno;

  if (GET_CODE (dest) == SUBREG
      && (((GET_MODE_SIZE (GET_MODE (dest))
            + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
          == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
               + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
    dest = SUBREG_REG (dest);

  if (!REG_P (dest))
    return false;

  regno = REGNO (dest);
  endregno = END_REGNO (dest);
  return (test_regno >= regno && test_regno < endregno);
}

/* Like covers_regno_no_parallel_p, but also handles PARALLELs where
   any member matches the covers_regno_no_parallel_p criteria.  */

static bool
covers_regno_p (const_rtx dest, unsigned int test_regno)
{
  if (GET_CODE (dest) == PARALLEL)
    {
      /* Some targets place small structures in registers for return
         values of functions, and those registers are wrapped in
         PARALLELs that we may see as the destination of a SET.  */
      int i;

      for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
        {
          rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
          if (inner != NULL_RTX
              && covers_regno_no_parallel_p (inner, test_regno))
            return true;
        }

      return false;
    }
  else
    return covers_regno_no_parallel_p (dest, test_regno);
}
/* Utility function for dead_or_set_p to check an individual register.  */

int
dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
{
  const_rtx pattern;

  /* See if there is a death note for something that includes TEST_REGNO.  */
  if (find_regno_note (insn, REG_DEAD, test_regno))
    return 1;

  if (CALL_P (insn)
      && find_regno_fusage (insn, CLOBBER, test_regno))
    return 1;

  pattern = PATTERN (insn);

  /* If a COND_EXEC is not executed, the value survives.  */
  if (GET_CODE (pattern) == COND_EXEC)
    return 0;

  if (GET_CODE (pattern) == SET)
    return covers_regno_p (SET_DEST (pattern), test_regno);
  else if (GET_CODE (pattern) == PARALLEL)
    {
      int i;

      for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
        {
          rtx body = XVECEXP (pattern, 0, i);

          if (GET_CODE (body) == COND_EXEC)
            body = COND_EXEC_CODE (body);

          if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
              && covers_regno_p (SET_DEST (body), test_regno))
            return 1;
        }
    }

  return 0;
}
/* Return the reg-note of kind KIND in insn INSN, if there is one.
   If DATUM is nonzero, look for one whose datum is DATUM.  */

rtx
find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
{
  rtx link;

  gcc_checking_assert (insn);

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;
  if (datum == 0)
    {
      for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
        if (REG_NOTE_KIND (link) == kind)
          return link;
      return 0;
    }

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
      return link;
  return 0;
}
/* Return the reg-note of kind KIND in insn INSN which applies to register
   number REGNO, if any.  Return 0 if there is no such reg-note.  Note that
   the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
   it might be the case that the note overlaps REGNO.  */

rtx
find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
{
  rtx link;

  /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN.  */
  if (! INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == kind
        /* Verify that it is a register, so that scratch and MEM won't cause a
           problem here.  */
        && REG_P (XEXP (link, 0))
        && REGNO (XEXP (link, 0)) <= regno
        && END_REGNO (XEXP (link, 0)) > regno)
      return link;
  return 0;
}
/* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
   has such a note.  */

rtx
find_reg_equal_equiv_note (const_rtx insn)
{
  rtx link;

  if (!INSN_P (insn))
    return 0;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) == REG_EQUAL
        || REG_NOTE_KIND (link) == REG_EQUIV)
      {
        /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
           insns that have multiple sets.  Checking single_set to
           make sure of this is not the proper check, as explained
           in the comment in set_unique_reg_note.

           This should be changed into an assert.  */
        if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
          return 0;
        return link;
      }
  return NULL;
}
/* Check whether INSN is a single_set whose source is known to be
   equivalent to a constant.  Return that constant if so, otherwise
   return null.  */

rtx
find_constant_src (const_rtx insn)
{
  rtx note, set, x;

  set = single_set (insn);
  if (set)
    {
      x = avoid_constant_pool_reference (SET_SRC (set));
      if (CONSTANT_P (x))
        return x;
    }

  note = find_reg_equal_equiv_note (insn);
  if (note && CONSTANT_P (XEXP (note, 0)))
    return XEXP (note, 0);

  return NULL_RTX;
}
/* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
{
  /* If it's not a CALL_INSN, it can't possibly have a
     CALL_INSN_FUNCTION_USAGE field, so don't bother checking.  */
  if (!CALL_P (insn))
    return 0;

  gcc_assert (datum);

  if (!REG_P (datum))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (insn);
           link;
           link = XEXP (link, 1))
        if (GET_CODE (XEXP (link, 0)) == code
            && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
          return 1;
    }
  else
    {
      unsigned int regno = REGNO (datum);

      /* CALL_INSN_FUNCTION_USAGE information cannot contain references
         to pseudo registers, so don't bother checking.  */

      if (regno < FIRST_PSEUDO_REGISTER)
        {
          unsigned int end_regno = END_HARD_REGNO (datum);
          unsigned int i;

          for (i = regno; i < end_regno; i++)
            if (find_regno_fusage (insn, code, i))
              return 1;
        }
    }

  return 0;
}
/* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
   in the CALL_INSN_FUNCTION_USAGE information of INSN.  */

int
find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
{
  rtx link;

  /* CALL_INSN_FUNCTION_USAGE information cannot contain references
     to pseudo registers, so don't bother checking.  */

  if (regno >= FIRST_PSEUDO_REGISTER
      || !CALL_P (insn))
    return 0;

  for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
    {
      rtx op, reg;

      if (GET_CODE (op = XEXP (link, 0)) == code
          && REG_P (reg = XEXP (op, 0))
          && REGNO (reg) <= regno
          && END_HARD_REGNO (reg) > regno)
        return 1;
    }

  return 0;
}
/* Return true if KIND is an integer REG_NOTE.  */

static bool
int_reg_note_p (enum reg_note kind)
{
  return kind == REG_BR_PROB;
}

/* Allocate a register note with kind KIND and datum DATUM.  LIST is
   stored as the pointer to the next register note.  */

rtx
alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
{
  rtx note;

  gcc_checking_assert (!int_reg_note_p (kind));
  switch (kind)
    {
    case REG_CC_SETTER:
    case REG_CC_USER:
    case REG_LABEL_TARGET:
    case REG_LABEL_OPERAND:
    case REG_TM:
      /* These types of register notes use an INSN_LIST rather than an
         EXPR_LIST, so that copying is done right and dumps look
         better.  */
      note = alloc_INSN_LIST (datum, list);
      PUT_REG_NOTE_KIND (note, kind);
      break;

    default:
      note = alloc_EXPR_LIST (kind, datum, list);
      break;
    }

  return note;
}

/* Add register note with kind KIND and datum DATUM to INSN.  */

void
add_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
}

/* Add an integer register note with kind KIND and datum DATUM to INSN.  */

void
add_int_reg_note (rtx insn, enum reg_note kind, int datum)
{
  gcc_checking_assert (int_reg_note_p (kind));
  REG_NOTES (insn) = gen_rtx_INT_LIST ((enum machine_mode) kind,
                                       datum, REG_NOTES (insn));
}

/* Add a register note like NOTE to INSN.  */

void
add_shallow_copy_of_reg_note (rtx insn, rtx note)
{
  if (GET_CODE (note) == INT_LIST)
    add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
  else
    add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
}
/* Remove register note NOTE from the REG_NOTES of INSN.  */

void
remove_note (rtx insn, const_rtx note)
{
  rtx link;

  if (note == NULL_RTX)
    return;

  if (REG_NOTES (insn) == note)
    REG_NOTES (insn) = XEXP (note, 1);
  else
    for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
      if (XEXP (link, 1) == note)
        {
          XEXP (link, 1) = XEXP (note, 1);
          break;
        }

  switch (REG_NOTE_KIND (note))
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }
}
/* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes.  */

void
remove_reg_equal_equiv_notes (rtx insn)
{
  rtx *loc;

  loc = &REG_NOTES (insn);
  while (*loc)
    {
      enum reg_note kind = REG_NOTE_KIND (*loc);
      if (kind == REG_EQUAL || kind == REG_EQUIV)
        *loc = XEXP (*loc, 1);
      else
        loc = &XEXP (*loc, 1);
    }
}
/* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO.  */

void
remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
{
  df_ref eq_use;

  if (!df)
    return;

  /* This loop is a little tricky.  We cannot just go down the chain because
     it is being modified by some actions in the loop.  So we just iterate
     over the head.  We plan to drain the list anyway.  */
  while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
    {
      rtx insn = DF_REF_INSN (eq_use);
      rtx note = find_reg_equal_equiv_note (insn);

      /* This assert is generally triggered when someone deletes a REG_EQUAL
         or REG_EQUIV note by hacking the list manually rather than calling
         remove_note.  */
      gcc_assert (note);

      remove_note (insn, note);
    }
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   return 1 if it is found.  A simple equality test is used to determine if
   NODE matches.  */

int
in_expr_list_p (const_rtx listp, const_rtx node)
{
  const_rtx x;

  for (x = listp; x; x = XEXP (x, 1))
    if (node == XEXP (x, 0))
      return 1;

  return 0;
}
/* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
   remove that entry from the list if it is found.

   A simple equality test is used to determine if NODE matches.  */

void
remove_node_from_expr_list (const_rtx node, rtx *listp)
{
  rtx temp = *listp;
  rtx prev = NULL_RTX;

  while (temp)
    {
      if (node == XEXP (temp, 0))
        {
          /* Splice the node out of the list.  */
          if (prev)
            XEXP (prev, 1) = XEXP (temp, 1);
          else
            *listp = XEXP (temp, 1);

          return;
        }

      prev = temp;
      temp = XEXP (temp, 1);
    }
}
/* Nonzero if X contains any volatile instructions.  These are instructions
   which may cause unpredictable machine state, and thus no
   instructions or register uses should be moved or combined across them.
   This includes only volatile asms and UNSPEC_VOLATILE instructions.  */

int
volatile_insn_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
    case CALL:
    case MEM:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *const fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_insn_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_insn_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
/* Nonzero if X contains any volatile memory references,
   UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions.  */

int
volatile_refs_p (const_rtx x)
{
  const RTX_CODE code = GET_CODE (x);
  switch (code)
    {
    case LABEL_REF:
    case SYMBOL_REF:
    case CONST:
    CASE_CONST_ANY:
    case CC0:
    case PC:
    case REG:
    case SCRATCH:
    case CLOBBER:
    case ADDR_VEC:
    case ADDR_DIFF_VEC:
      return 0;

    case UNSPEC_VOLATILE:
      return 1;

    case MEM:
    case ASM_INPUT:
    case ASM_OPERANDS:
      if (MEM_VOLATILE_P (x))
        return 1;

    default:
      break;
    }

  /* Recursively scan the operands of this expression.  */

  {
    const char *const fmt = GET_RTX_FORMAT (code);
    int i;

    for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
      {
        if (fmt[i] == 'e')
          {
            if (volatile_refs_p (XEXP (x, i)))
              return 1;
          }
        else if (fmt[i] == 'E')
          {
            int j;
            for (j = 0; j < XVECLEN (x, i); j++)
              if (volatile_refs_p (XVECEXP (x, i, j)))
                return 1;
          }
      }
  }
  return 0;
}
2288 /* Similar to above, except that it also rejects register pre- and post-
2289 incrementing. */
2292 side_effects_p (const_rtx x)
2294 const RTX_CODE code = GET_CODE (x);
2295 switch (code)
2297 case LABEL_REF:
2298 case SYMBOL_REF:
2299 case CONST:
2300 CASE_CONST_ANY:
2301 case CC0:
2302 case PC:
2303 case REG:
2304 case SCRATCH:
2305 case ADDR_VEC:
2306 case ADDR_DIFF_VEC:
2307 case VAR_LOCATION:
2308 return 0;
2310 case CLOBBER:
2311 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2312 when some combination can't be done. If we see one, don't think
2313 that we can simplify the expression. */
2314 return (GET_MODE (x) != VOIDmode);
2316 case PRE_INC:
2317 case PRE_DEC:
2318 case POST_INC:
2319 case POST_DEC:
2320 case PRE_MODIFY:
2321 case POST_MODIFY:
2322 case CALL:
2323 case UNSPEC_VOLATILE:
2324 return 1;
2326 case MEM:
2327 case ASM_INPUT:
2328 case ASM_OPERANDS:
2329 if (MEM_VOLATILE_P (x))
2330 return 1;
2332 default:
2333 break;
2336 /* Recursively scan the operands of this expression. */
2339 const char *fmt = GET_RTX_FORMAT (code);
2340 int i;
2342 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2344 if (fmt[i] == 'e')
2346 if (side_effects_p (XEXP (x, i)))
2347 return 1;
2349 else if (fmt[i] == 'E')
2351 int j;
2352 for (j = 0; j < XVECLEN (x, i); j++)
2353 if (side_effects_p (XVECEXP (x, i, j)))
2354 return 1;
2358 return 0;
2361 /* Return nonzero if evaluating rtx X might cause a trap.
2362 FLAGS controls how to consider MEMs. A nonzero value means the context
2363 of the access may have changed from the original, such that the
2364 address may have become invalid. */
2367 may_trap_p_1 (const_rtx x, unsigned flags)
2369 int i;
2370 enum rtx_code code;
2371 const char *fmt;
2373 /* We make no distinction currently, but this function is part of
2374 the internal target-hooks ABI so we keep the parameter as
2375 "unsigned flags". */
2376 bool code_changed = flags != 0;
2378 if (x == 0)
2379 return 0;
2380 code = GET_CODE (x);
2381 switch (code)
2383 /* Handle these cases quickly. */
2384 CASE_CONST_ANY:
2385 case SYMBOL_REF:
2386 case LABEL_REF:
2387 case CONST:
2388 case PC:
2389 case CC0:
2390 case REG:
2391 case SCRATCH:
2392 return 0;
2394 case UNSPEC:
2395 return targetm.unspec_may_trap_p (x, flags);
2397 case UNSPEC_VOLATILE:
2398 case ASM_INPUT:
2399 case TRAP_IF:
2400 return 1;
2402 case ASM_OPERANDS:
2403 return MEM_VOLATILE_P (x);
2405 /* Memory ref can trap unless it's a static var or a stack slot. */
2406 case MEM:
2407 /* Recognize specific pattern of stack checking probes. */
2408 if (flag_stack_check
2409 && MEM_VOLATILE_P (x)
2410 && XEXP (x, 0) == stack_pointer_rtx)
2411 return 1;
2412 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2413 reference; moving it out of context such as when moving code
2414 when optimizing, might cause its address to become invalid. */
2415 code_changed
2416 || !MEM_NOTRAP_P (x))
2418 HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
2419 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2420 GET_MODE (x), code_changed);
2423 return 0;
2425 /* Division by a non-constant might trap. */
2426 case DIV:
2427 case MOD:
2428 case UDIV:
2429 case UMOD:
2430 if (HONOR_SNANS (GET_MODE (x)))
2431 return 1;
2432 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2433 return flag_trapping_math;
2434 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2435 return 1;
2436 break;
2438 case EXPR_LIST:
2439 /* An EXPR_LIST is used to represent a function call. This
2440 certainly may trap. */
2441 return 1;
2443 case GE:
2444 case GT:
2445 case LE:
2446 case LT:
2447 case LTGT:
2448 case COMPARE:
2449 /* Some floating point comparisons may trap. */
2450 if (!flag_trapping_math)
2451 break;
2452 /* ??? There is no machine independent way to check for tests that trap
2453 when COMPARE is used, though many targets do make this distinction.
2454 For instance, sparc uses CCFPE for compares which generate exceptions
2455 and CCFP for compares which do not generate exceptions. */
2456 if (HONOR_NANS (GET_MODE (x)))
2457 return 1;
2458 /* But often the compare has some CC mode, so check operand
2459 modes as well. */
2460 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2461 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2462 return 1;
2463 break;
2465 case EQ:
2466 case NE:
2467 if (HONOR_SNANS (GET_MODE (x)))
2468 return 1;
2469 /* Often comparison is CC mode, so check operand modes. */
2470 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2471 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2472 return 1;
2473 break;
2475 case FIX:
2476 /* Conversion of floating point might trap. */
2477 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2478 return 1;
2479 break;
2481 case NEG:
2482 case ABS:
2483 case SUBREG:
2484 /* These operations don't trap even with floating point. */
2485 break;
2487 default:
2488 /* Any floating arithmetic may trap. */
2489 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
2490 return 1;
2493 fmt = GET_RTX_FORMAT (code);
2494 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2496 if (fmt[i] == 'e')
2498 if (may_trap_p_1 (XEXP (x, i), flags))
2499 return 1;
2501 else if (fmt[i] == 'E')
2503 int j;
2504 for (j = 0; j < XVECLEN (x, i); j++)
2505 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2506 return 1;
2509 return 0;
2512 /* Return nonzero if evaluating rtx X might cause a trap. */
2515 may_trap_p (const_rtx x)
2517 return may_trap_p_1 (x, 0);
2520 /* Same as above, but additionally return nonzero if evaluating rtx X might
2521 cause a fault. We define a fault for the purpose of this function as an
2522 erroneous execution condition that cannot be encountered during the normal
2523 execution of a valid program; the typical example is an unaligned memory
2524 access on a strict alignment machine. The compiler guarantees that it
2525 doesn't generate code that will fault from a valid program, but this
2526 guarantee doesn't mean anything for individual instructions. Consider
2527 the following example:
2529 struct S { int d; union { char *cp; int *ip; }; };
2531 int foo(struct S *s)
2533 if (s->d == 1)
2534 return *s->ip;
2535 else
2536 return *s->cp;
2539 on a strict alignment machine. In a valid program, foo will never be
2540 invoked on a structure for which d is equal to 1 and the underlying
2541 unique field of the union not aligned on a 4-byte boundary, but the
2542 expression *s->ip might cause a fault if considered individually.
2544 At the RTL level, potentially problematic expressions will almost always
2545 satisfy may_trap_p; for example, the above dereference can be emitted as
2546 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2547 However, suppose that foo is inlined in a caller that causes s->cp to
2548 point to a local character variable and guarantees that s->d is not set
2549 to 1; foo may have been effectively translated into pseudo-RTL as:
2551 if ((reg:SI) == 1)
2552 (set (reg:SI) (mem:SI (%fp - 7)))
2553 else
2554 (set (reg:QI) (mem:QI (%fp - 7)))
2556 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2557 memory reference to a stack slot, but it will certainly cause a fault
2558 on a strict alignment machine. */
2561 may_trap_or_fault_p (const_rtx x)
2563 return may_trap_p_1 (x, 1);
2566 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2567 i.e., an inequality. */
2570 inequality_comparisons_p (const_rtx x)
2572 const char *fmt;
2573 int len, i;
2574 const enum rtx_code code = GET_CODE (x);
2576 switch (code)
2578 case REG:
2579 case SCRATCH:
2580 case PC:
2581 case CC0:
2582 CASE_CONST_ANY:
2583 case CONST:
2584 case LABEL_REF:
2585 case SYMBOL_REF:
2586 return 0;
2588 case LT:
2589 case LTU:
2590 case GT:
2591 case GTU:
2592 case LE:
2593 case LEU:
2594 case GE:
2595 case GEU:
2596 return 1;
2598 default:
2599 break;
2602 len = GET_RTX_LENGTH (code);
2603 fmt = GET_RTX_FORMAT (code);
2605 for (i = 0; i < len; i++)
2607 if (fmt[i] == 'e')
2609 if (inequality_comparisons_p (XEXP (x, i)))
2610 return 1;
2612 else if (fmt[i] == 'E')
2614 int j;
2615 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2616 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2617 return 1;
2621 return 0;
2624 /* Replace any occurrence of FROM in X with TO. The function does
2625 not recurse into CONST_DOUBLE when doing the replacement.
2627 Note that copying is not done so X must not be shared unless all copies
2628 are to be modified. */
2631 replace_rtx (rtx x, rtx from, rtx to)
2633 int i, j;
2634 const char *fmt;
2636 if (x == from)
2637 return to;
2639 /* Allow this function to make replacements in EXPR_LISTs. */
2640 if (x == 0)
2641 return 0;
2643 if (GET_CODE (x) == SUBREG)
2645 rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
2647 if (CONST_INT_P (new_rtx))
2649 x = simplify_subreg (GET_MODE (x), new_rtx,
2650 GET_MODE (SUBREG_REG (x)),
2651 SUBREG_BYTE (x));
2652 gcc_assert (x);
2654 else
2655 SUBREG_REG (x) = new_rtx;
2657 return x;
2659 else if (GET_CODE (x) == ZERO_EXTEND)
2661 rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
2663 if (CONST_INT_P (new_rtx))
2665 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2666 new_rtx, GET_MODE (XEXP (x, 0)));
2667 gcc_assert (x);
2669 else
2670 XEXP (x, 0) = new_rtx;
2672 return x;
2675 fmt = GET_RTX_FORMAT (GET_CODE (x));
2676 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2678 if (fmt[i] == 'e')
2679 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2680 else if (fmt[i] == 'E')
2681 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2682 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2685 return x;
2688 /* Replace occurrences of the old label in *X with the new one.
2689 DATA is a REPLACE_LABEL_DATA containing the old and new labels. */
2692 replace_label (rtx *x, void *data)
2694 rtx l = *x;
2695 rtx old_label = ((replace_label_data *) data)->r1;
2696 rtx new_label = ((replace_label_data *) data)->r2;
2697 bool update_label_nuses = ((replace_label_data *) data)->update_label_nuses;
2699 if (l == NULL_RTX)
2700 return 0;
2702 if (GET_CODE (l) == SYMBOL_REF
2703 && CONSTANT_POOL_ADDRESS_P (l))
2705 rtx c = get_pool_constant (l);
2706 if (rtx_referenced_p (old_label, c))
2708 rtx new_c, new_l;
2709 replace_label_data *d = (replace_label_data *) data;
2711 /* Create a copy of constant C; replace the label inside
2712 but do not update LABEL_NUSES because uses in constant pool
2713 are not counted. */
2714 new_c = copy_rtx (c);
2715 d->update_label_nuses = false;
2716 for_each_rtx (&new_c, replace_label, data);
2717 d->update_label_nuses = update_label_nuses;
2719 /* Add the new constant NEW_C to constant pool and replace
2720 the old reference to constant by new reference. */
2721 new_l = XEXP (force_const_mem (get_pool_mode (l), new_c), 0);
2722 *x = replace_rtx (l, l, new_l);
2724 return 0;
2727 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2728 field. This is not handled by for_each_rtx because it doesn't
2729 handle unprinted ('0') fields. */
2730 if (JUMP_P (l) && JUMP_LABEL (l) == old_label)
2731 JUMP_LABEL (l) = new_label;
2733 if ((GET_CODE (l) == LABEL_REF
2734 || GET_CODE (l) == INSN_LIST)
2735 && XEXP (l, 0) == old_label)
2737 XEXP (l, 0) = new_label;
2738 if (update_label_nuses)
2740 ++LABEL_NUSES (new_label);
2741 --LABEL_NUSES (old_label);
2743 return 0;
2746 return 0;
2749 /* When *BODY is equal to X or X is directly referenced by *BODY,
2750 return nonzero, so that FOR_EACH_RTX stops traversing and returns
2751 nonzero too; otherwise FOR_EACH_RTX continues traversing *BODY. */
2753 static int
2754 rtx_referenced_p_1 (rtx *body, void *x)
2756 rtx y = (rtx) x;
2758 if (*body == NULL_RTX)
2759 return y == NULL_RTX;
2761 /* Return true if a label_ref *BODY refers to label Y. */
2762 if (GET_CODE (*body) == LABEL_REF && LABEL_P (y))
2763 return XEXP (*body, 0) == y;
2765 /* If *BODY is a reference to pool constant traverse the constant. */
2766 if (GET_CODE (*body) == SYMBOL_REF
2767 && CONSTANT_POOL_ADDRESS_P (*body))
2768 return rtx_referenced_p (y, get_pool_constant (*body));
2770 /* By default, compare the RTL expressions. */
2771 return rtx_equal_p (*body, y);
2774 /* Return true if X is referenced in BODY. */
2777 rtx_referenced_p (rtx x, rtx body)
2779 return for_each_rtx (&body, rtx_referenced_p_1, x);
2782 /* If INSN is a tablejump, return true and store the label (which precedes
2783 the jump table) in *LABELP and the jump table in *TABLEP. LABELP and TABLEP may be NULL. */
2785 bool
2786 tablejump_p (const_rtx insn, rtx *labelp, rtx *tablep)
2788 rtx label, table;
2790 if (!JUMP_P (insn))
2791 return false;
2793 label = JUMP_LABEL (insn);
2794 if (label != NULL_RTX && !ANY_RETURN_P (label)
2795 && (table = NEXT_INSN (label)) != NULL_RTX
2796 && JUMP_TABLE_DATA_P (table))
2798 if (labelp)
2799 *labelp = label;
2800 if (tablep)
2801 *tablep = table;
2802 return true;
2804 return false;
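/* For illustration (not part of the original sources), a caller that
   wants to walk the dispatch table behind a jump might do:

     rtx label, table;
     if (tablejump_p (insn, &label, &table))
       ... examine PATTERN (table), an ADDR_VEC or ADDR_DIFF_VEC ...

   TABLE is the JUMP_TABLE_DATA insn that immediately follows LABEL.  */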
2807 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2808 constant that is not in the constant pool and not in the condition
2809 of an IF_THEN_ELSE. */
2811 static int
2812 computed_jump_p_1 (const_rtx x)
2814 const enum rtx_code code = GET_CODE (x);
2815 int i, j;
2816 const char *fmt;
2818 switch (code)
2820 case LABEL_REF:
2821 case PC:
2822 return 0;
2824 case CONST:
2825 CASE_CONST_ANY:
2826 case SYMBOL_REF:
2827 case REG:
2828 return 1;
2830 case MEM:
2831 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2832 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2834 case IF_THEN_ELSE:
2835 return (computed_jump_p_1 (XEXP (x, 1))
2836 || computed_jump_p_1 (XEXP (x, 2)));
2838 default:
2839 break;
2842 fmt = GET_RTX_FORMAT (code);
2843 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2845 if (fmt[i] == 'e'
2846 && computed_jump_p_1 (XEXP (x, i)))
2847 return 1;
2849 else if (fmt[i] == 'E')
2850 for (j = 0; j < XVECLEN (x, i); j++)
2851 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2852 return 1;
2855 return 0;
2858 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2860 Tablejumps and casesi insns are not considered indirect jumps;
2861 we can recognize them by a (use (label_ref)). */
2864 computed_jump_p (const_rtx insn)
2866 int i;
2867 if (JUMP_P (insn))
2869 rtx pat = PATTERN (insn);
2871 /* If we have a JUMP_LABEL set, we're not a computed jump. */
2872 if (JUMP_LABEL (insn) != NULL)
2873 return 0;
2875 if (GET_CODE (pat) == PARALLEL)
2877 int len = XVECLEN (pat, 0);
2878 int has_use_labelref = 0;
2880 for (i = len - 1; i >= 0; i--)
2881 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
2882 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
2883 == LABEL_REF))
2885 has_use_labelref = 1;
2886 break;
2889 if (! has_use_labelref)
2890 for (i = len - 1; i >= 0; i--)
2891 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
2892 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
2893 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
2894 return 1;
2896 else if (GET_CODE (pat) == SET
2897 && SET_DEST (pat) == pc_rtx
2898 && computed_jump_p_1 (SET_SRC (pat)))
2899 return 1;
2901 return 0;
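/* For instance (illustrative only): a plain (set (pc) (reg:SI 100)) with a
   null JUMP_LABEL is a computed jump, whereas a tablejump expressed as
   (parallel [(set (pc) ...) (use (label_ref ...))]) is not, because the
   (use (label_ref)) identifies it as a dispatch through a jump table.  */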
2904 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
2905 calls. Processes the subexpressions of EXP and passes them to F. */
2906 static int
2907 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
2909 int result, i, j;
2910 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
2911 rtx *x;
2913 for (; format[n] != '\0'; n++)
2915 switch (format[n])
2917 case 'e':
2918 /* Call F on X. */
2919 x = &XEXP (exp, n);
2920 result = (*f) (x, data);
2921 if (result == -1)
2922 /* Do not traverse sub-expressions. */
2923 continue;
2924 else if (result != 0)
2925 /* Stop the traversal. */
2926 return result;
2928 if (*x == NULL_RTX)
2929 /* There are no sub-expressions. */
2930 continue;
2932 i = non_rtx_starting_operands[GET_CODE (*x)];
2933 if (i >= 0)
2935 result = for_each_rtx_1 (*x, i, f, data);
2936 if (result != 0)
2937 return result;
2939 break;
2941 case 'V':
2942 case 'E':
2943 if (XVEC (exp, n) == 0)
2944 continue;
2945 for (j = 0; j < XVECLEN (exp, n); ++j)
2947 /* Call F on X. */
2948 x = &XVECEXP (exp, n, j);
2949 result = (*f) (x, data);
2950 if (result == -1)
2951 /* Do not traverse sub-expressions. */
2952 continue;
2953 else if (result != 0)
2954 /* Stop the traversal. */
2955 return result;
2957 if (*x == NULL_RTX)
2958 /* There are no sub-expressions. */
2959 continue;
2961 i = non_rtx_starting_operands[GET_CODE (*x)];
2962 if (i >= 0)
2964 result = for_each_rtx_1 (*x, i, f, data);
2965 if (result != 0)
2966 return result;
2969 break;
2971 default:
2972 /* Nothing to do. */
2973 break;
2977 return 0;
2980 /* Traverse X via depth-first search, calling F for each
2981 sub-expression (including X itself). F is also passed the DATA.
2982 If F returns -1, do not traverse sub-expressions, but continue
2983 traversing the rest of the tree. If F ever returns any other
2984 nonzero value, stop the traversal, and return the value returned
2985 by F. Otherwise, return 0. This function does not traverse inside
2986 tree structure that contains RTX_EXPRs, or into sub-expressions
2987 whose format code is `0' since it is not known whether or not those
2988 codes are actually RTL.
2990 This routine is very general, and could (should?) be used to
2991 implement many of the other routines in this file. */
2994 for_each_rtx (rtx *x, rtx_function f, void *data)
2996 int result;
2997 int i;
2999 /* Call F on X. */
3000 result = (*f) (x, data);
3001 if (result == -1)
3002 /* Do not traverse sub-expressions. */
3003 return 0;
3004 else if (result != 0)
3005 /* Stop the traversal. */
3006 return result;
3008 if (*x == NULL_RTX)
3009 /* There are no sub-expressions. */
3010 return 0;
3012 i = non_rtx_starting_operands[GET_CODE (*x)];
3013 if (i < 0)
3014 return 0;
3016 return for_each_rtx_1 (*x, i, f, data);
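/* A minimal sketch of a callback usable with for_each_rtx (hypothetical,
   shown only to illustrate the protocol described above): it counts MEMs,
   returning -1 so that their sub-expressions are not visited and 0 so
   that the traversal otherwise continues.

     static int
     count_mems_1 (rtx *x, void *data)
     {
       if (*x != NULL_RTX && MEM_P (*x))
         {
           ++*(int *) data;
           return -1;
         }
       return 0;
     }

   invoked as:  int n = 0;  for_each_rtx (&PATTERN (insn), count_mems_1, &n);  */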
3021 /* Data structure that holds the internal state communicated between
3022 for_each_inc_dec, for_each_inc_dec_find_mem and
3023 for_each_inc_dec_find_inc_dec. */
3025 struct for_each_inc_dec_ops {
3026 /* The function to be called for each autoinc operation found. */
3027 for_each_inc_dec_fn fn;
3028 /* The opaque argument to be passed to it. */
3029 void *arg;
3030 /* The MEM we're visiting, if any. */
3031 rtx mem;
3034 static int for_each_inc_dec_find_mem (rtx *r, void *d);
3036 /* Find PRE/POST-INC/DEC/MODIFY operations within *R, extract the
3037 operands of the equivalent add insn and pass the result to the
3038 operator specified by *D. */
3040 static int
3041 for_each_inc_dec_find_inc_dec (rtx *r, void *d)
3043 rtx x = *r;
3044 struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *)d;
3046 switch (GET_CODE (x))
3048 case PRE_INC:
3049 case POST_INC:
3051 int size = GET_MODE_SIZE (GET_MODE (data->mem));
3052 rtx r1 = XEXP (x, 0);
3053 rtx c = gen_int_mode (size, GET_MODE (r1));
3054 return data->fn (data->mem, x, r1, r1, c, data->arg);
3057 case PRE_DEC:
3058 case POST_DEC:
3060 int size = GET_MODE_SIZE (GET_MODE (data->mem));
3061 rtx r1 = XEXP (x, 0);
3062 rtx c = gen_int_mode (-size, GET_MODE (r1));
3063 return data->fn (data->mem, x, r1, r1, c, data->arg);
3066 case PRE_MODIFY:
3067 case POST_MODIFY:
3069 rtx r1 = XEXP (x, 0);
3070 rtx add = XEXP (x, 1);
3071 return data->fn (data->mem, x, r1, add, NULL, data->arg);
3074 case MEM:
3076 rtx save = data->mem;
3077 int ret = for_each_inc_dec_find_mem (r, d);
3078 data->mem = save;
3079 return ret;
3082 default:
3083 return 0;
3087 /* If *R is a MEM, find PRE/POST-INC/DEC/MODIFY operations within its
3088 address, extract the operands of the equivalent add insn and pass
3089 the result to the operator specified by *D. */
3091 static int
3092 for_each_inc_dec_find_mem (rtx *r, void *d)
3094 rtx x = *r;
3095 if (x != NULL_RTX && MEM_P (x))
3097 struct for_each_inc_dec_ops *data = (struct for_each_inc_dec_ops *) d;
3098 int result;
3100 data->mem = x;
3102 result = for_each_rtx (&XEXP (x, 0), for_each_inc_dec_find_inc_dec,
3103 data);
3104 if (result)
3105 return result;
3107 return -1;
3109 return 0;
3112 /* Traverse *X looking for MEMs, and for autoinc operations within
3113 them. For each such autoinc operation found, call FN, passing it
3114 the innermost enclosing MEM, the operation itself, the RTX modified
3115 by the operation, two RTXs (the second may be NULL) that, once
3116 added, represent the value to be held by the modified RTX
3117 afterwards, and ARG. FN is to return -1 to skip looking for other
3118 autoinc operations within the visited operation, 0 to continue the
3119 traversal, or any other value to have it returned to the caller of
3120 for_each_inc_dec. */
3123 for_each_inc_dec (rtx *x,
3124 for_each_inc_dec_fn fn,
3125 void *arg)
3127 struct for_each_inc_dec_ops data;
3129 data.fn = fn;
3130 data.arg = arg;
3131 data.mem = NULL;
3133 return for_each_rtx (x, for_each_inc_dec_find_mem, &data);
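/* As an illustration of the callback protocol (the names below are made
   up, not part of the sources): FN receives the enclosing MEM, the autoinc
   rtx itself, the rtx being modified, two rtxs whose sum is the new value
   (the second is NULL for PRE_MODIFY/POST_MODIFY), and ARG.

     static int
     note_autoinc (rtx mem, rtx op, rtx dest, rtx src, rtx srcoff, void *arg)
     {
       ((struct my_state *) arg)->count++;
       return 0;
     }

     for_each_inc_dec (&PATTERN (insn), note_autoinc, &state);  */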
3137 /* Searches X for any reference to REGNO, returning the rtx of the
3138 reference found if any. Otherwise, returns NULL_RTX. */
3141 regno_use_in (unsigned int regno, rtx x)
3143 const char *fmt;
3144 int i, j;
3145 rtx tem;
3147 if (REG_P (x) && REGNO (x) == regno)
3148 return x;
3150 fmt = GET_RTX_FORMAT (GET_CODE (x));
3151 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3153 if (fmt[i] == 'e')
3155 if ((tem = regno_use_in (regno, XEXP (x, i))))
3156 return tem;
3158 else if (fmt[i] == 'E')
3159 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3160 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3161 return tem;
3164 return NULL_RTX;
3167 /* Return a value indicating whether OP, an operand of a commutative
3168 operation, is preferred as the first or second operand. The higher
3169 the value, the stronger the preference for being the first operand.
3170 We use negative values to indicate a preference for the first operand
3171 and positive values for the second operand. */
3174 commutative_operand_precedence (rtx op)
3176 enum rtx_code code = GET_CODE (op);
3178 /* Constants always come second. Prefer "nice" constants. */
3179 if (code == CONST_INT)
3180 return -8;
3181 if (code == CONST_DOUBLE)
3182 return -7;
3183 if (code == CONST_FIXED)
3184 return -7;
3185 op = avoid_constant_pool_reference (op);
3186 code = GET_CODE (op);
3188 switch (GET_RTX_CLASS (code))
3190 case RTX_CONST_OBJ:
3191 if (code == CONST_INT)
3192 return -6;
3193 if (code == CONST_DOUBLE)
3194 return -5;
3195 if (code == CONST_FIXED)
3196 return -5;
3197 return -4;
3199 case RTX_EXTRA:
3200 /* SUBREGs of objects should come second. */
3201 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3202 return -3;
3203 return 0;
3205 case RTX_OBJ:
3206 /* Complex expressions should come first, so decrease the priority
3207 of objects. Prefer pointer objects over non-pointer objects. */
3208 if ((REG_P (op) && REG_POINTER (op))
3209 || (MEM_P (op) && MEM_POINTER (op)))
3210 return -1;
3211 return -2;
3213 case RTX_COMM_ARITH:
3214 /* Prefer operands that are themselves commutative to be first.
3215 This helps to make things linear. In particular,
3216 (and (and (reg) (reg)) (not (reg))) is canonical. */
3217 return 4;
3219 case RTX_BIN_ARITH:
3220 /* If only one operand is a binary expression, it will be the first
3221 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3222 is canonical, although it will usually be further simplified. */
3223 return 2;
3225 case RTX_UNARY:
3226 /* Then prefer NEG and NOT. */
3227 if (code == NEG || code == NOT)
3228 return 1;
3230 default:
3231 return 0;
3235 /* Return 1 iff it is necessary to swap operands of commutative operation
3236 in order to canonicalize expression. */
3238 bool
3239 swap_commutative_operands_p (rtx x, rtx y)
3241 return (commutative_operand_precedence (x)
3242 < commutative_operand_precedence (y));
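/* A typical use (sketch, not taken from this file) is to canonicalize a
   commutative operation before building it:

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   which puts the operand with the higher precedence first, e.g. moves a
   CONST_INT into the second position.  */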
3245 /* Return 1 if X is an autoincrement side effect and the register is
3246 not the stack pointer. */
3248 auto_inc_p (const_rtx x)
3250 switch (GET_CODE (x))
3252 case PRE_INC:
3253 case POST_INC:
3254 case PRE_DEC:
3255 case POST_DEC:
3256 case PRE_MODIFY:
3257 case POST_MODIFY:
3258 /* There are no REG_INC notes for SP. */
3259 if (XEXP (x, 0) != stack_pointer_rtx)
3260 return 1;
3261 default:
3262 break;
3264 return 0;
3267 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3269 loc_mentioned_in_p (rtx *loc, const_rtx in)
3271 enum rtx_code code;
3272 const char *fmt;
3273 int i, j;
3275 if (!in)
3276 return 0;
3278 code = GET_CODE (in);
3279 fmt = GET_RTX_FORMAT (code);
3280 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3282 if (fmt[i] == 'e')
3284 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3285 return 1;
3287 else if (fmt[i] == 'E')
3288 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3289 if (loc == &XVECEXP (in, i, j)
3290 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3291 return 1;
3293 return 0;
3296 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3297 and SUBREG_BYTE, return the bit offset where the subreg begins
3298 (counting from the least significant bit of the operand). */
3300 unsigned int
3301 subreg_lsb_1 (enum machine_mode outer_mode,
3302 enum machine_mode inner_mode,
3303 unsigned int subreg_byte)
3305 unsigned int bitpos;
3306 unsigned int byte;
3307 unsigned int word;
3309 /* A paradoxical subreg begins at bit position 0. */
3310 if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
3311 return 0;
3313 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3314 /* If the subreg crosses a word boundary ensure that
3315 it also begins and ends on a word boundary. */
3316 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3317 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3318 && (subreg_byte % UNITS_PER_WORD
3319 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3321 if (WORDS_BIG_ENDIAN)
3322 word = (GET_MODE_SIZE (inner_mode)
3323 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3324 else
3325 word = subreg_byte / UNITS_PER_WORD;
3326 bitpos = word * BITS_PER_WORD;
3328 if (BYTES_BIG_ENDIAN)
3329 byte = (GET_MODE_SIZE (inner_mode)
3330 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3331 else
3332 byte = subreg_byte % UNITS_PER_WORD;
3333 bitpos += byte * BITS_PER_UNIT;
3335 return bitpos;
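/* A worked example (assuming 8-bit units and a target that is neither
   BYTES_BIG_ENDIAN nor WORDS_BIG_ENDIAN): subreg_lsb_1 (SImode, DImode, 4)
   is 32, i.e. an SImode subreg at byte offset 4 of a DImode value starts
   at bit 32, counting from the least significant bit.  */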
3338 /* Given a subreg X, return the bit offset where the subreg begins
3339 (counting from the least significant bit of the reg). */
3341 unsigned int
3342 subreg_lsb (const_rtx x)
3344 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3345 SUBREG_BYTE (x));
3348 /* Fill in information about a subreg of a hard register.
3349 xregno - A regno of an inner hard subreg_reg (or what will become one).
3350 xmode - The mode of xregno.
3351 offset - The byte offset.
3352 ymode - The mode of a top level SUBREG (or what may become one).
3353 info - Pointer to structure to fill in. */
3354 void
3355 subreg_get_info (unsigned int xregno, enum machine_mode xmode,
3356 unsigned int offset, enum machine_mode ymode,
3357 struct subreg_info *info)
3359 int nregs_xmode, nregs_ymode;
3360 int mode_multiple, nregs_multiple;
3361 int offset_adj, y_offset, y_offset_adj;
3362 int regsize_xmode, regsize_ymode;
3363 bool rknown;
3365 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3367 rknown = false;
3369 /* If there are holes in a non-scalar mode in registers, we expect
3370 that it is made up of its units concatenated together. */
3371 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3373 enum machine_mode xmode_unit;
3375 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3376 if (GET_MODE_INNER (xmode) == VOIDmode)
3377 xmode_unit = xmode;
3378 else
3379 xmode_unit = GET_MODE_INNER (xmode);
3380 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3381 gcc_assert (nregs_xmode
3382 == (GET_MODE_NUNITS (xmode)
3383 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3384 gcc_assert (hard_regno_nregs[xregno][xmode]
3385 == (hard_regno_nregs[xregno][xmode_unit]
3386 * GET_MODE_NUNITS (xmode)));
3388 /* You can only ask for a SUBREG of a value with holes in the middle
3389 if you don't cross the holes. (Such a SUBREG should be done by
3390 picking a different register class, or doing it in memory if
3391 necessary.) An example of a value with holes is XCmode on 32-bit
3392 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3393 3 for each part, but in memory it's two 128-bit parts.
3394 Padding is assumed to be at the end (not necessarily the 'high part')
3395 of each unit. */
3396 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3397 < GET_MODE_NUNITS (xmode))
3398 && (offset / GET_MODE_SIZE (xmode_unit)
3399 != ((offset + GET_MODE_SIZE (ymode) - 1)
3400 / GET_MODE_SIZE (xmode_unit))))
3402 info->representable_p = false;
3403 rknown = true;
3406 else
3407 nregs_xmode = hard_regno_nregs[xregno][xmode];
3409 nregs_ymode = hard_regno_nregs[xregno][ymode];
3411 /* Paradoxical subregs are otherwise valid. */
3412 if (!rknown
3413 && offset == 0
3414 && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
3416 info->representable_p = true;
3417 /* If this is a big endian paradoxical subreg, which uses more
3418 actual hard registers than the original register, we must
3419 return a negative offset so that we find the proper highpart
3420 of the register. */
3421 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3422 ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3423 info->offset = nregs_xmode - nregs_ymode;
3424 else
3425 info->offset = 0;
3426 info->nregs = nregs_ymode;
3427 return;
3430 /* If registers store different numbers of bits in the different
3431 modes, we cannot generally form this subreg. */
3432 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3433 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3434 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3435 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3437 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3438 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3439 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3441 info->representable_p = false;
3442 info->nregs
3443 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3444 info->offset = offset / regsize_xmode;
3445 return;
3447 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3449 info->representable_p = false;
3450 info->nregs
3451 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3452 info->offset = offset / regsize_xmode;
3453 return;
3457 /* Lowpart subregs are otherwise valid. */
3458 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3460 info->representable_p = true;
3461 rknown = true;
3463 if (offset == 0 || nregs_xmode == nregs_ymode)
3465 info->offset = 0;
3466 info->nregs = nregs_ymode;
3467 return;
3471 /* This should always pass, otherwise we don't know how to verify
3472 the constraint. These conditions may be relaxed but
3473 subreg_regno_offset would need to be redesigned. */
3474 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3475 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3477 if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN
3478 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD)
3480 HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode);
3481 HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode);
3482 HOST_WIDE_INT off_low = offset & (ysize - 1);
3483 HOST_WIDE_INT off_high = offset & ~(ysize - 1);
3484 offset = (xsize - ysize - off_high) | off_low;
3486 /* The XMODE value can be seen as a vector of NREGS_XMODE
3487 values. The subreg must represent a lowpart of given field.
3488 Compute what field it is. */
3489 offset_adj = offset;
3490 offset_adj -= subreg_lowpart_offset (ymode,
3491 mode_for_size (GET_MODE_BITSIZE (xmode)
3492 / nregs_xmode,
3493 MODE_INT, 0));
3495 /* Size of ymode must not be greater than the size of xmode. */
3496 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3497 gcc_assert (mode_multiple != 0);
3499 y_offset = offset / GET_MODE_SIZE (ymode);
3500 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3501 nregs_multiple = nregs_xmode / nregs_ymode;
3503 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3504 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3506 if (!rknown)
3508 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3509 rknown = true;
3511 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3512 info->nregs = nregs_ymode;
3515 /* This function returns the regno offset of a subreg expression.
3516 xregno - A regno of an inner hard subreg_reg (or what will become one).
3517 xmode - The mode of xregno.
3518 offset - The byte offset.
3519 ymode - The mode of a top level SUBREG (or what may become one).
3520 RETURN - The regno offset which would be used. */
3521 unsigned int
3522 subreg_regno_offset (unsigned int xregno, enum machine_mode xmode,
3523 unsigned int offset, enum machine_mode ymode)
3525 struct subreg_info info;
3526 subreg_get_info (xregno, xmode, offset, ymode, &info);
3527 return info.offset;
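/* As an illustration (assuming a little-endian target where DImode
   occupies two hard registers and SImode one):
   subreg_regno_offset (R, DImode, 4, SImode) is 1, so
   (subreg:SI (reg:DI R) 4) refers to hard register R + 1.  */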
3530 /* This function returns true when the offset is representable via
3531 subreg_offset in the given regno.
3532 xregno - A regno of an inner hard subreg_reg (or what will become one).
3533 xmode - The mode of xregno.
3534 offset - The byte offset.
3535 ymode - The mode of a top level SUBREG (or what may become one).
3536 RETURN - Whether the offset is representable. */
3537 bool
3538 subreg_offset_representable_p (unsigned int xregno, enum machine_mode xmode,
3539 unsigned int offset, enum machine_mode ymode)
3541 struct subreg_info info;
3542 subreg_get_info (xregno, xmode, offset, ymode, &info);
3543 return info.representable_p;
3546 /* Return the number of a YMODE register to which
3548 (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3550 can be simplified. Return -1 if the subreg can't be simplified.
3552 XREGNO is a hard register number. */
3555 simplify_subreg_regno (unsigned int xregno, enum machine_mode xmode,
3556 unsigned int offset, enum machine_mode ymode)
3558 struct subreg_info info;
3559 unsigned int yregno;
3561 #ifdef CANNOT_CHANGE_MODE_CLASS
3562 /* Give the backend a chance to disallow the mode change. */
3563 if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3564 && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3565 && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode)
3566 /* We can use mode change in LRA for some transformations. */
3567 && ! lra_in_progress)
3568 return -1;
3569 #endif
3571 /* We shouldn't simplify stack-related registers. */
3572 if ((!reload_completed || frame_pointer_needed)
3573 && xregno == FRAME_POINTER_REGNUM)
3574 return -1;
3576 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3577 && xregno == ARG_POINTER_REGNUM)
3578 return -1;
3580 if (xregno == STACK_POINTER_REGNUM
3581 /* We should convert hard stack register in LRA if it is
3582 possible. */
3583 && ! lra_in_progress)
3584 return -1;
3586 /* Try to get the register offset. */
3587 subreg_get_info (xregno, xmode, offset, ymode, &info);
3588 if (!info.representable_p)
3589 return -1;
3591 /* Make sure that the offsetted register value is in range. */
3592 yregno = xregno + info.offset;
3593 if (!HARD_REGISTER_NUM_P (yregno))
3594 return -1;
3596 /* See whether (reg:YMODE YREGNO) is valid.
3598 ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3599 This is a kludge to work around how complex FP arguments are passed
3600 on IA-64 and should be fixed. See PR target/49226. */
3601 if (!HARD_REGNO_MODE_OK (yregno, ymode)
3602 && HARD_REGNO_MODE_OK (xregno, xmode))
3603 return -1;
3605 return (int) yregno;
3608 /* Return the final regno that a subreg expression refers to. */
3609 unsigned int
3610 subreg_regno (const_rtx x)
3612 unsigned int ret;
3613 rtx subreg = SUBREG_REG (x);
3614 int regno = REGNO (subreg);
3616 ret = regno + subreg_regno_offset (regno,
3617 GET_MODE (subreg),
3618 SUBREG_BYTE (x),
3619 GET_MODE (x));
3620 return ret;
3624 /* Return the number of registers that a subreg expression refers
3625 to. */
3626 unsigned int
3627 subreg_nregs (const_rtx x)
3629 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3632 /* Return the number of registers that a subreg expression X refers to,
3633 using REGNO as the inner register number. This is a copy of
3634 rtlanal.c:subreg_nregs, changed so that the regno can be passed in. */
3636 unsigned int
3637 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3639 struct subreg_info info;
3640 rtx subreg = SUBREG_REG (x);
3642 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3643 &info);
3644 return info.nregs;
3648 struct parms_set_data
3650 int nregs;
3651 HARD_REG_SET regs;
3654 /* Helper function for noticing stores to parameter registers. */
3655 static void
3656 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3658 struct parms_set_data *const d = (struct parms_set_data *) data;
3659 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3660 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3662 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3663 d->nregs--;
3667 /* Look backward for the first parameter to be loaded.
3668 Note that loads of all parameters will not necessarily be
3669 found if CSE has eliminated some of them (e.g., an argument
3670 to the outer function is passed down as a parameter).
3671 Do not skip BOUNDARY. */
3673 find_first_parameter_load (rtx call_insn, rtx boundary)
3675 struct parms_set_data parm;
3676 rtx p, before, first_set;
3678 /* Since different machines initialize their parameter registers
3679 in different orders, assume nothing. Collect the set of all
3680 parameter registers. */
3681 CLEAR_HARD_REG_SET (parm.regs);
3682 parm.nregs = 0;
3683 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3684 if (GET_CODE (XEXP (p, 0)) == USE
3685 && REG_P (XEXP (XEXP (p, 0), 0)))
3687 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3689 /* We only care about registers which can hold function
3690 arguments. */
3691 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3692 continue;
3694 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3695 parm.nregs++;
3697 before = call_insn;
3698 first_set = call_insn;
3700 /* Search backward for the first set of a register in this set. */
3701 while (parm.nregs && before != boundary)
3703 before = PREV_INSN (before);
3705 /* It is possible that some loads got CSEed from one call to
3706 another. Stop in that case. */
3707 if (CALL_P (before))
3708 break;
3710 /* Our caller must either ensure that we will find all sets
3711 (in case the code has not been optimized yet), or take care
3712 of possible labels by setting BOUNDARY to the preceding
3713 CODE_LABEL. */
3714 if (LABEL_P (before))
3716 gcc_assert (before == boundary);
3717 break;
3720 if (INSN_P (before))
3722 int nregs_old = parm.nregs;
3723 note_stores (PATTERN (before), parms_set, &parm);
3724 /* If we found something that did not set a parameter reg,
3725 we're done. Do not keep going, as that might result
3726 in hoisting an insn before the setting of a pseudo
3727 that is used by the hoisted insn. */
3728 if (nregs_old != parm.nregs)
3729 first_set = before;
3730 else
3731 break;
3734 return first_set;
3737 /* Return true if we should avoid inserting code between INSN and preceding
3738 call instruction. */
3740 bool
3741 keep_with_call_p (const_rtx insn)
3743 rtx set;
3745 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3747 if (REG_P (SET_DEST (set))
3748 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3749 && fixed_regs[REGNO (SET_DEST (set))]
3750 && general_operand (SET_SRC (set), VOIDmode))
3751 return true;
3752 if (REG_P (SET_SRC (set))
3753 && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
3754 && REG_P (SET_DEST (set))
3755 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3756 return true;
3757 /* There may be a stack pop just after the call and before the store
3758 of the return register. Search for the actual store when deciding
3759 if we can break or not. */
3760 if (SET_DEST (set) == stack_pointer_rtx)
3762 /* This CONST_CAST is okay because next_nonnote_insn just
3763 returns its argument and we assign it to a const_rtx
3764 variable. */
3765 const_rtx i2 = next_nonnote_insn (CONST_CAST_RTX (insn));
3766 if (i2 && keep_with_call_p (i2))
3767 return true;
3770 return false;
3773 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3774 to non-complex jumps. That is, direct unconditional, conditional,
3775 and tablejumps, but not computed jumps or returns. It also does
3776 not apply to the fallthru case of a conditional jump. */
3778 bool
3779 label_is_jump_target_p (const_rtx label, const_rtx jump_insn)
3781 rtx tmp = JUMP_LABEL (jump_insn);
3783 if (label == tmp)
3784 return true;
3786 if (tablejump_p (jump_insn, NULL, &tmp))
3788 rtvec vec = XVEC (PATTERN (tmp),
3789 GET_CODE (PATTERN (tmp)) == ADDR_DIFF_VEC);
3790 int i, veclen = GET_NUM_ELEM (vec);
3792 for (i = 0; i < veclen; ++i)
3793 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3794 return true;
3797 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3798 return true;
3800 return false;
3804 /* Return an estimate of the cost of computing rtx X.
3805 One use is in cse, to decide which expression to keep in the hash table.
3806 Another is in rtl generation, to pick the cheapest way to multiply.
3807 Other uses like the latter are expected in the future.
3809 X appears as operand OPNO in an expression with code OUTER_CODE.
3810 SPEED specifies whether costs optimized for speed or size should
3811 be returned. */
3814 rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed)
3816 int i, j;
3817 enum rtx_code code;
3818 const char *fmt;
3819 int total;
3820 int factor;
3822 if (x == 0)
3823 return 0;
3825 /* A size N times larger than UNITS_PER_WORD likely needs N times as
3826 many insns, taking N times as long. */
3827 factor = GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD;
3828 if (factor == 0)
3829 factor = 1;
3831 /* Compute the default costs of certain things.
3832 Note that targetm.rtx_costs can override the defaults. */
3834 code = GET_CODE (x);
3835 switch (code)
3837 case MULT:
3838 /* Multiplication has time-complexity O(N*N), where N is the
3839 number of units (translated from digits) when using
3840 schoolbook long multiplication. */
3841 total = factor * factor * COSTS_N_INSNS (5);
3842 break;
3843 case DIV:
3844 case UDIV:
3845 case MOD:
3846 case UMOD:
3847 /* Similarly, complexity for schoolbook long division. */
3848 total = factor * factor * COSTS_N_INSNS (7);
3849 break;
3850 case USE:
3851 /* Used in combine.c as a marker. */
3852 total = 0;
3853 break;
3854 case SET:
3855 /* A SET doesn't have a mode, so let's look at the SET_DEST to get
3856 the mode for the factor. */
3857 factor = GET_MODE_SIZE (GET_MODE (SET_DEST (x))) / UNITS_PER_WORD;
3858 if (factor == 0)
3859 factor = 1;
3860 /* Fall through. */
3861 default:
3862 total = factor * COSTS_N_INSNS (1);
3865 switch (code)
3867 case REG:
3868 return 0;
3870 case SUBREG:
3871 total = 0;
3872 /* If we can't tie these modes, make this expensive. The larger
3873 the mode, the more expensive it is. */
3874 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3875 return COSTS_N_INSNS (2 + factor);
3876 break;
3878 default:
3879 if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed))
3880 return total;
3881 break;
3884 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3885 which is already in total. */
3887 fmt = GET_RTX_FORMAT (code);
3888 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3889 if (fmt[i] == 'e')
3890 total += rtx_cost (XEXP (x, i), code, i, speed);
3891 else if (fmt[i] == 'E')
3892 for (j = 0; j < XVECLEN (x, i); j++)
3893 total += rtx_cost (XVECEXP (x, i, j), code, i, speed);
3895 return total;
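/* A worked example of the defaults above, assuming the target hook does
   not override them: on a 32-bit target a DImode MULT gets factor = 2,
   so its base cost is 2 * 2 * COSTS_N_INSNS (5) = COSTS_N_INSNS (20),
   to which the costs of its two operands are then added.  */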
3898 /* Fill in the structure C with information about both speed and size rtx
3899 costs for X, which is operand OPNO in an expression with code OUTER. */
3901 void
3902 get_full_rtx_cost (rtx x, enum rtx_code outer, int opno,
3903 struct full_rtx_costs *c)
3905 c->speed = rtx_cost (x, outer, opno, true);
3906 c->size = rtx_cost (x, outer, opno, false);
3910 /* Return cost of address expression X.
3911 Expect that X is properly formed address reference.
3913 The SPEED parameter specifies whether costs optimized for speed or for size
3914 should be returned.
3917 address_cost (rtx x, enum machine_mode mode, addr_space_t as, bool speed)
3919 /* We may be asked for the cost of various unusual addresses, such as the
3920 operands of a push instruction. It is not worthwhile to complicate the
3921 target hook to handle such cases. */
3923 if (!memory_address_addr_space_p (mode, x, as))
3924 return 1000;
3926 return targetm.address_cost (x, mode, as, speed);
3929 /* If the target doesn't override, compute the cost as with arithmetic. */
3932 default_address_cost (rtx x, enum machine_mode, addr_space_t, bool speed)
3934 return rtx_cost (x, MEM, 0, speed);
3938 unsigned HOST_WIDE_INT
3939 nonzero_bits (const_rtx x, enum machine_mode mode)
3941 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
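/* For instance, nonzero_bits on (and:SI (reg:SI r) (const_int 255)) in
   SImode is 255 when nothing special is known about the register: the
   AND case below intersects the nonzero bits of both operands, and a
   CONST_INT contributes exactly its own value.  (Illustrative.)  */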
3944 unsigned int
3945 num_sign_bit_copies (const_rtx x, enum machine_mode mode)
3947 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
3950 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
3951 It avoids exponential behavior in nonzero_bits1 when X has
3952 identical subexpressions on the first or the second level. */
3954 static unsigned HOST_WIDE_INT
3955 cached_nonzero_bits (const_rtx x, enum machine_mode mode, const_rtx known_x,
3956 enum machine_mode known_mode,
3957 unsigned HOST_WIDE_INT known_ret)
3959 if (x == known_x && mode == known_mode)
3960 return known_ret;
3962 /* Try to find identical subexpressions. If found call
3963 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
3964 precomputed value for the subexpression as KNOWN_RET. */
3966 if (ARITHMETIC_P (x))
3968 rtx x0 = XEXP (x, 0);
3969 rtx x1 = XEXP (x, 1);
3971 /* Check the first level. */
3972 if (x0 == x1)
3973 return nonzero_bits1 (x, mode, x0, mode,
3974 cached_nonzero_bits (x0, mode, known_x,
3975 known_mode, known_ret));
3977 /* Check the second level. */
3978 if (ARITHMETIC_P (x0)
3979 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
3980 return nonzero_bits1 (x, mode, x1, mode,
3981 cached_nonzero_bits (x1, mode, known_x,
3982 known_mode, known_ret));
3984 if (ARITHMETIC_P (x1)
3985 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
3986 return nonzero_bits1 (x, mode, x0, mode,
3987 cached_nonzero_bits (x0, mode, known_x,
3988 known_mode, known_ret));
3991 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
3994 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
3995 We don't let nonzero_bits recur into num_sign_bit_copies, because that
3996 is less useful. We can't allow both, because that results in exponential
3997 run time recursion. There is a nullstone testcase that triggered
3998 this. This macro avoids accidental uses of num_sign_bit_copies. */
3999 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
4001 /* Given an expression, X, compute which bits in X can be nonzero.
4002 We don't care about bits outside of those defined in MODE.
4004 For most X this is simply GET_MODE_MASK (GET_MODE (MODE)), but if X is
4005 an arithmetic operation, we can do better. */
4007 static unsigned HOST_WIDE_INT
4008 nonzero_bits1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4009 enum machine_mode known_mode,
4010 unsigned HOST_WIDE_INT known_ret)
4012 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
4013 unsigned HOST_WIDE_INT inner_nz;
4014 enum rtx_code code;
4015 enum machine_mode inner_mode;
4016 unsigned int mode_width = GET_MODE_PRECISION (mode);
4018 /* For floating-point and vector values, assume all bits are needed. */
4019 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
4020 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4021 return nonzero;
4023 /* If X is wider than MODE, use its mode instead. */
4024 if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
4026 mode = GET_MODE (x);
4027 nonzero = GET_MODE_MASK (mode);
4028 mode_width = GET_MODE_PRECISION (mode);
4031 if (mode_width > HOST_BITS_PER_WIDE_INT)
4032 /* Our only callers in this case look for single bit values. So
4033 just return the mode mask. Those tests will then be false. */
4034 return nonzero;
4036 #ifndef WORD_REGISTER_OPERATIONS
4037 /* If MODE is wider than X, but both are a single word for both the host
4038 and target machines, we can compute this from which bits of the
4039 object might be nonzero in its own mode, taking into account the fact
4040 that on many CISC machines, accessing an object in a wider mode
4041 causes the high-order bits to become undefined. So they are
4042 not known to be zero. */
4044 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
4045 && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
4046 && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
4047 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
4049 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
4050 known_x, known_mode, known_ret);
4051 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
4052 return nonzero;
4054 #endif
4056 code = GET_CODE (x);
4057 switch (code)
4059 case REG:
4060 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4061 /* If pointers extend unsigned and this is a pointer in Pmode, say that
4062 all the bits above ptr_mode are known to be zero. */
4063 /* As we do not know which address space the pointer is referring to,
4064 we can do this only if the target does not support different pointer
4065 or address modes depending on the address space. */
4066 if (target_default_pointer_address_modes_p ()
4067 && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4068 && REG_POINTER (x))
4069 nonzero &= GET_MODE_MASK (ptr_mode);
4070 #endif
4072 /* Include declared information about alignment of pointers. */
4073 /* ??? We don't properly preserve REG_POINTER changes across
4074 pointer-to-integer casts, so we can't trust it except for
4075 things that we know must be pointers. See execute/960116-1.c. */
4076 if ((x == stack_pointer_rtx
4077 || x == frame_pointer_rtx
4078 || x == arg_pointer_rtx)
4079 && REGNO_POINTER_ALIGN (REGNO (x)))
4081 unsigned HOST_WIDE_INT alignment
4082 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
4084 #ifdef PUSH_ROUNDING
4085 /* If PUSH_ROUNDING is defined, it is possible for the
4086 stack to be momentarily aligned only to that amount,
4087 so we pick the least alignment. */
4088 if (x == stack_pointer_rtx && PUSH_ARGS)
4089 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
4090 alignment);
4091 #endif
4093 nonzero &= ~(alignment - 1);
4097 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
4098 rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
4099 known_mode, known_ret,
4100 &nonzero_for_hook);
4102 if (new_rtx)
4103 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
4104 known_mode, known_ret);
4106 return nonzero_for_hook;
4109 case CONST_INT:
4110 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4111 /* If X is negative in MODE, sign-extend the value. */
4112 if (INTVAL (x) > 0
4113 && mode_width < BITS_PER_WORD
4114 && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
4115 != 0)
4116 return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);
4117 #endif
4119 return UINTVAL (x);
4121 case MEM:
4122 #ifdef LOAD_EXTEND_OP
4123 /* In many, if not most, RISC machines, reading a byte from memory
4124 zeros the rest of the register. Noticing that fact saves a lot
4125 of extra zero-extends. */
4126 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4127 nonzero &= GET_MODE_MASK (GET_MODE (x));
4128 #endif
4129 break;
4131 case EQ: case NE:
4132 case UNEQ: case LTGT:
4133 case GT: case GTU: case UNGT:
4134 case LT: case LTU: case UNLT:
4135 case GE: case GEU: case UNGE:
4136 case LE: case LEU: case UNLE:
4137 case UNORDERED: case ORDERED:
4138 /* If this produces an integer result, we know which bits are set.
4139 Code here used to clear bits outside the mode of X, but that is
4140 now done above. */
4141 /* Mind that MODE is the mode the caller wants to look at this
4142 operation in, and not the actual operation mode. We can wind
4143 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4144 that describes the results of a vector compare. */
4145 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
4146 && mode_width <= HOST_BITS_PER_WIDE_INT)
4147 nonzero = STORE_FLAG_VALUE;
4148 break;
4150 case NEG:
4151 #if 0
4152 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4153 and num_sign_bit_copies. */
4154 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4155 == GET_MODE_PRECISION (GET_MODE (x)))
4156 nonzero = 1;
4157 #endif
4159 if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width)
4160 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4161 break;
4163 case ABS:
4164 #if 0
4165 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4166 and num_sign_bit_copies. */
4167 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4168 == GET_MODE_PRECISION (GET_MODE (x)))
4169 nonzero = 1;
4170 #endif
4171 break;
4173 case TRUNCATE:
4174 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4175 known_x, known_mode, known_ret)
4176 & GET_MODE_MASK (mode));
4177 break;
4179 case ZERO_EXTEND:
4180 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4181 known_x, known_mode, known_ret);
4182 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4183 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4184 break;
4186 case SIGN_EXTEND:
4187 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4188 Otherwise, show that all the bits that are in the outer mode but not
4189 in the inner mode may be nonzero. */
4190 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4191 known_x, known_mode, known_ret);
4192 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4194 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4195 if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
4196 inner_nz |= (GET_MODE_MASK (mode)
4197 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4200 nonzero &= inner_nz;
4201 break;
4203 case AND:
4204 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4205 known_x, known_mode, known_ret)
4206 & cached_nonzero_bits (XEXP (x, 1), mode,
4207 known_x, known_mode, known_ret);
4208 break;
4210 case XOR: case IOR:
4211 case UMIN: case UMAX: case SMIN: case SMAX:
4213 unsigned HOST_WIDE_INT nonzero0
4214 = cached_nonzero_bits (XEXP (x, 0), mode,
4215 known_x, known_mode, known_ret);
4217 /* Don't call nonzero_bits for the second time if it cannot change
4218 anything. */
4219 if ((nonzero & nonzero0) != nonzero)
4220 nonzero &= nonzero0
4221 | cached_nonzero_bits (XEXP (x, 1), mode,
4222 known_x, known_mode, known_ret);
4224 break;
4226 case PLUS: case MINUS:
4227 case MULT:
4228 case DIV: case UDIV:
4229 case MOD: case UMOD:
4230 /* We can apply the rules of arithmetic to compute the number of
4231 high- and low-order zero bits of these operations. We start by
4232 computing the width (position of the highest-order nonzero bit)
4233 and the number of low-order zero bits for each value. */
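/* A small worked example (numbers are illustrative only): if the first
   operand can only have bits within 0x06 (width 3, one low-order zero
   bit) and the second only within 0x14 (width 5, two low-order zero
   bits), then for PLUS the result needs at most MAX (3, 5) + 1 = 6 bits
   and keeps one low-order zero bit, so NONZERO is narrowed to 0x3e.  */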
4235 unsigned HOST_WIDE_INT nz0
4236 = cached_nonzero_bits (XEXP (x, 0), mode,
4237 known_x, known_mode, known_ret);
4238 unsigned HOST_WIDE_INT nz1
4239 = cached_nonzero_bits (XEXP (x, 1), mode,
4240 known_x, known_mode, known_ret);
4241 int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
4242 int width0 = floor_log2 (nz0) + 1;
4243 int width1 = floor_log2 (nz1) + 1;
4244 int low0 = floor_log2 (nz0 & -nz0);
4245 int low1 = floor_log2 (nz1 & -nz1);
4246 unsigned HOST_WIDE_INT op0_maybe_minusp
4247 = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4248 unsigned HOST_WIDE_INT op1_maybe_minusp
4249 = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4250 unsigned int result_width = mode_width;
4251 int result_low = 0;
4253 switch (code)
4255 case PLUS:
4256 result_width = MAX (width0, width1) + 1;
4257 result_low = MIN (low0, low1);
4258 break;
4259 case MINUS:
4260 result_low = MIN (low0, low1);
4261 break;
4262 case MULT:
4263 result_width = width0 + width1;
4264 result_low = low0 + low1;
4265 break;
4266 case DIV:
4267 if (width1 == 0)
4268 break;
4269 if (!op0_maybe_minusp && !op1_maybe_minusp)
4270 result_width = width0;
4271 break;
4272 case UDIV:
4273 if (width1 == 0)
4274 break;
4275 result_width = width0;
4276 break;
4277 case MOD:
4278 if (width1 == 0)
4279 break;
4280 if (!op0_maybe_minusp && !op1_maybe_minusp)
4281 result_width = MIN (width0, width1);
4282 result_low = MIN (low0, low1);
4283 break;
4284 case UMOD:
4285 if (width1 == 0)
4286 break;
4287 result_width = MIN (width0, width1);
4288 result_low = MIN (low0, low1);
4289 break;
4290 default:
4291 gcc_unreachable ();
4294 if (result_width < mode_width)
4295 nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
4297 if (result_low > 0)
4298 nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
4300 break;
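/* Worked example for the arithmetic cases above (an illustrative sketch
   only): for PLUS with nz0 == 0x0f (width0 == 4, low0 == 0) and
   nz1 == 0x06 (width1 == 3, low1 == 1), result_width == 5 and
   result_low == 0, so nonzero is narrowed to 0x1f.  For MULT with
   nz0 == 0x0c (width0 == 4, low0 == 2) and nz1 == 0x06 (width1 == 3,
   low1 == 1), result_width == 7 and result_low == 3: the product fits
   in 7 bits and is a multiple of 8.  */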
4302 case ZERO_EXTRACT:
4303 if (CONST_INT_P (XEXP (x, 1))
4304 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4305 nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4306 break;
4308 case SUBREG:
4309 /* If this is a SUBREG formed for a promoted variable that has
4310 been zero-extended, we know that at least the high-order bits
4311 are zero, though others might be too. */
4313 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x) > 0)
4314 nonzero = GET_MODE_MASK (GET_MODE (x))
4315 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4316 known_x, known_mode, known_ret);
4318 inner_mode = GET_MODE (SUBREG_REG (x));
4319 /* If the inner mode is a single word for both the host and target
4320 machines, we can compute this from which bits of the inner
4321 object might be nonzero. */
4322 if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
4323 && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
4325 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4326 known_x, known_mode, known_ret);
4328 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4329 /* If this is a typical RISC machine, we only have to worry
4330 about the way loads are extended. */
4331 if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND
4332 ? val_signbit_known_set_p (inner_mode, nonzero)
4333 : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND)
4334 || !MEM_P (SUBREG_REG (x)))
4335 #endif
4337 /* On many CISC machines, accessing an object in a wider mode
4338 causes the high-order bits to become undefined. So they are
4339 not known to be zero. */
4340 if (GET_MODE_PRECISION (GET_MODE (x))
4341 > GET_MODE_PRECISION (inner_mode))
4342 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4343 & ~GET_MODE_MASK (inner_mode));
4346 break;
4348 case ASHIFTRT:
4349 case LSHIFTRT:
4350 case ASHIFT:
4351 case ROTATE:
4352 /* The nonzero bits are in two classes: any bits within MODE
4353 that aren't in GET_MODE (x) are always significant. The rest of the
4354 nonzero bits are those that are significant in the operand of
4355 the shift when shifted the appropriate number of bits. This
4356 shows that high-order bits are cleared by the right shift and
4357 low-order bits by left shifts. */
4358 if (CONST_INT_P (XEXP (x, 1))
4359 && INTVAL (XEXP (x, 1)) >= 0
4360 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4361 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4363 enum machine_mode inner_mode = GET_MODE (x);
4364 unsigned int width = GET_MODE_PRECISION (inner_mode);
4365 int count = INTVAL (XEXP (x, 1));
4366 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4367 unsigned HOST_WIDE_INT op_nonzero
4368 = cached_nonzero_bits (XEXP (x, 0), mode,
4369 known_x, known_mode, known_ret);
4370 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4371 unsigned HOST_WIDE_INT outer = 0;
4373 if (mode_width > width)
4374 outer = (op_nonzero & nonzero & ~mode_mask);
4376 if (code == LSHIFTRT)
4377 inner >>= count;
4378 else if (code == ASHIFTRT)
4380 inner >>= count;
4382 /* If the sign bit may have been nonzero before the shift, we
4383 need to mark all the places it could have been copied to
4384 by the shift as possibly nonzero. */
4385 if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count)))
4386 inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1)
4387 << (width - count);
4389 else if (code == ASHIFT)
4390 inner <<= count;
4391 else
4392 inner = ((inner << (count % width)
4393 | (inner >> (width - (count % width)))) & mode_mask);
4395 nonzero &= (outer | inner);
4397 break;
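/* Worked example for the shift cases above (an illustrative sketch
   only): in SImode with op_nonzero == 0x000000ff, (lshiftrt x 3)
   leaves inner == 0x1f, while (ashift x 4) leaves inner == 0xff0;
   high-order bits are cleared by the right shift and low-order bits
   by the left shift, as the comment above states.  */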
4399 case FFS:
4400 case POPCOUNT:
4401 /* This is at most the number of bits in the mode. */
4402 nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4403 break;
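/* For example (illustrative only), with mode_width == 32 the mask
   computed above is (2 << 5) - 1 == 63, which covers every possible
   FFS or POPCOUNT result in the range 0..32.  */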
4405 case CLZ:
4406 /* If CLZ has a known value at zero, then the nonzero bits are
4407 that value, plus the number of bits in the mode minus one. */
4408 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4409 nonzero
4410 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4411 else
4412 nonzero = -1;
4413 break;
4415 case CTZ:
4416 /* If CTZ has a known value at zero, then the nonzero bits are
4417 that value, plus the number of bits in the mode minus one. */
4418 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4419 nonzero
4420 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4421 else
4422 nonzero = -1;
4423 break;
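/* For example (illustrative only), if CLZ_DEFINED_VALUE_AT_ZERO or
   CTZ_DEFINED_VALUE_AT_ZERO yields 32 for a 32-bit mode, nonzero
   becomes 32 | 31 == 63; if the value at zero is undefined, nonzero
   is set to -1 and nothing is known.  */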
4425 case CLRSB:
4426 /* This is at most the number of bits in the mode minus 1. */
4427 nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4428 break;
4430 case PARITY:
4431 nonzero = 1;
4432 break;
4434 case IF_THEN_ELSE:
4436 unsigned HOST_WIDE_INT nonzero_true
4437 = cached_nonzero_bits (XEXP (x, 1), mode,
4438 known_x, known_mode, known_ret);
4440 /* Don't call nonzero_bits for the second time if it cannot change
4441 anything. */
4442 if ((nonzero & nonzero_true) != nonzero)
4443 nonzero &= nonzero_true
4444 | cached_nonzero_bits (XEXP (x, 2), mode,
4445 known_x, known_mode, known_ret);
4447 break;
4449 default:
4450 break;
4453 return nonzero;
4456 /* See the macro definition above. */
4457 #undef cached_num_sign_bit_copies
4460 /* The function cached_num_sign_bit_copies is a wrapper around
4461 num_sign_bit_copies1. It avoids exponential behavior in
4462 num_sign_bit_copies1 when X has identical subexpressions on the
4463 first or the second level. */
4465 static unsigned int
4466 cached_num_sign_bit_copies (const_rtx x, enum machine_mode mode, const_rtx known_x,
4467 enum machine_mode known_mode,
4468 unsigned int known_ret)
4470 if (x == known_x && mode == known_mode)
4471 return known_ret;
4473 /* Try to find identical subexpressions. If found, call
4474 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4475 the precomputed value for the subexpression as KNOWN_RET. */
4477 if (ARITHMETIC_P (x))
4479 rtx x0 = XEXP (x, 0);
4480 rtx x1 = XEXP (x, 1);
4482 /* Check the first level. */
4483 if (x0 == x1)
4484 return
4485 num_sign_bit_copies1 (x, mode, x0, mode,
4486 cached_num_sign_bit_copies (x0, mode, known_x,
4487 known_mode,
4488 known_ret));
4490 /* Check the second level. */
4491 if (ARITHMETIC_P (x0)
4492 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4493 return
4494 num_sign_bit_copies1 (x, mode, x1, mode,
4495 cached_num_sign_bit_copies (x1, mode, known_x,
4496 known_mode,
4497 known_ret));
4499 if (ARITHMETIC_P (x1)
4500 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4501 return
4502 num_sign_bit_copies1 (x, mode, x0, mode,
4503 cached_num_sign_bit_copies (x0, mode, known_x,
4504 known_mode,
4505 known_ret));
4508 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
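/* For example (an illustrative sketch only), given (plus (reg A) (reg A))
   the first-level check above fires: the copies for (reg A) are computed
   once and passed to num_sign_bit_copies1 as KNOWN_X/KNOWN_RET, so the
   second occurrence of (reg A) is not recomputed.  */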
4511 /* Return the number of bits at the high-order end of X that are known to
4512 be equal to the sign bit. X will be used in mode MODE; if MODE is
4513 VOIDmode, X will be used in its own mode. The returned value will always
4514 be between 1 and the number of bits in MODE. */
4516 static unsigned int
4517 num_sign_bit_copies1 (const_rtx x, enum machine_mode mode, const_rtx known_x,
4518 enum machine_mode known_mode,
4519 unsigned int known_ret)
4521 enum rtx_code code = GET_CODE (x);
4522 unsigned int bitwidth = GET_MODE_PRECISION (mode);
4523 int num0, num1, result;
4524 unsigned HOST_WIDE_INT nonzero;
4526 /* If we weren't given a mode, use the mode of X. If the mode is still
4527 VOIDmode, we don't know anything. Likewise if one of the modes is
4528 floating-point. */
4530 if (mode == VOIDmode)
4531 mode = GET_MODE (x);
4533 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4534 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4535 return 1;
4537 /* For a smaller object, just ignore the high bits. */
4538 if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
4540 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4541 known_x, known_mode, known_ret);
4542 return MAX (1,
4543 num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
4546 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
4548 #ifndef WORD_REGISTER_OPERATIONS
4549 /* If this machine does not do all register operations on the entire
4550 register and MODE is wider than the mode of X, we can say nothing
4551 at all about the high-order bits. */
4552 return 1;
4553 #else
4554 /* Likewise on machines that do, if the mode of the object is smaller
4555 than a word and loads of that size don't sign extend, we can say
4556 nothing about the high order bits. */
4557 if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
4558 #ifdef LOAD_EXTEND_OP
4559 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4560 #endif
4562 return 1;
4563 #endif
4566 switch (code)
4568 case REG:
4570 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4571 /* If pointers extend signed and this is a pointer in Pmode, say that
4572 all the bits above ptr_mode are known to be sign bit copies. */
4573 /* As we do not know which address space the pointer is referring to,
4574 we can do this only if the target does not support different pointer
4575 or address modes depending on the address space. */
4576 if (target_default_pointer_address_modes_p ()
4577 && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4578 && mode == Pmode && REG_POINTER (x))
4579 return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
4580 #endif
4583 unsigned int copies_for_hook = 1, copies = 1;
4584 rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4585 known_mode, known_ret,
4586 &copies_for_hook);
4588 if (new_rtx)
4589 copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4590 known_mode, known_ret);
4592 if (copies > 1 || copies_for_hook > 1)
4593 return MAX (copies, copies_for_hook);
4595 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4597 break;
4599 case MEM:
4600 #ifdef LOAD_EXTEND_OP
4601 /* Some RISC machines sign-extend all loads of smaller than a word. */
4602 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4603 return MAX (1, ((int) bitwidth
4604 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
4605 #endif
4606 break;
4608 case CONST_INT:
4609 /* If the constant is negative, take its 1's complement and remask.
4610 Then see how many zero bits we have. */
4611 nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
4612 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4613 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4614 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4616 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
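/* Worked example (illustrative only): for (const_int -4) in SImode,
   nonzero starts as 0xfffffffc, is complemented and remasked to 0x3,
   and 32 - floor_log2 (3) - 1 == 30 is returned; indeed the top 30
   bits of -4 are copies of the sign bit.  */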
4618 case SUBREG:
4619 /* If this is a SUBREG for a promoted object that is sign-extended
4620 and we are looking at it in a wider mode, we know that at least the
4621 high-order bits are sign bit copies. */
4623 if (SUBREG_PROMOTED_VAR_P (x) && ! SUBREG_PROMOTED_UNSIGNED_P (x))
4625 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4626 known_x, known_mode, known_ret);
4627 return MAX ((int) bitwidth
4628 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
4629 num0);
4632 /* For a smaller object, just ignore the high bits. */
4633 if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
4635 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4636 known_x, known_mode, known_ret);
4637 return MAX (1, (num0
4638 - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
4639 - bitwidth)));
4642 #ifdef WORD_REGISTER_OPERATIONS
4643 #ifdef LOAD_EXTEND_OP
4644 /* For paradoxical SUBREGs on machines where all register operations
4645 affect the entire register, just look inside. Note that we are
4646 passing MODE to the recursive call, so the number of sign bit copies
4647 will remain relative to that mode, not the inner mode. */
4649 /* This works only if loads sign extend. Otherwise, if we get a
4650 reload for the inner part, it may be loaded from the stack, and
4651 then we lose all sign bit copies that existed before the store
4652 to the stack. */
4654 if (paradoxical_subreg_p (x)
4655 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4656 && MEM_P (SUBREG_REG (x)))
4657 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4658 known_x, known_mode, known_ret);
4659 #endif
4660 #endif
4661 break;
4663 case SIGN_EXTRACT:
4664 if (CONST_INT_P (XEXP (x, 1)))
4665 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4666 break;
4668 case SIGN_EXTEND:
4669 return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4670 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4671 known_x, known_mode, known_ret));
4673 case TRUNCATE:
4674 /* For a smaller object, just ignore the high bits. */
4675 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4676 known_x, known_mode, known_ret);
4677 return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4678 - bitwidth)));
4680 case NOT:
4681 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4682 known_x, known_mode, known_ret);
4684 case ROTATE: case ROTATERT:
4685 /* If we are rotating left by a number of bits less than the number
4686 of sign bit copies, we can just subtract that amount from the
4687 number. */
4688 if (CONST_INT_P (XEXP (x, 1))
4689 && INTVAL (XEXP (x, 1)) >= 0
4690 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4692 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4693 known_x, known_mode, known_ret);
4694 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4695 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4697 break;
4699 case NEG:
4700 /* In general, this subtracts one sign bit copy. But if the value
4701 is known to be positive, the number of sign bit copies is the
4702 same as that of the input. Finally, if the input has just one bit
4703 that might be nonzero, all the bits are copies of the sign bit. */
4704 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4705 known_x, known_mode, known_ret);
4706 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4707 return num0 > 1 ? num0 - 1 : 1;
4709 nonzero = nonzero_bits (XEXP (x, 0), mode);
4710 if (nonzero == 1)
4711 return bitwidth;
4713 if (num0 > 1
4714 && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4715 num0--;
4717 return num0;
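/* For example (illustrative only), if nonzero_bits of the operand is 1
   (the value is 0 or 1), the negation is 0 or -1 and every bit is a
   copy of the sign bit, so bitwidth is returned; otherwise at most one
   copy is lost, and only when the sign bit of the operand might be
   set.  */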
4719 case IOR: case AND: case XOR:
4720 case SMIN: case SMAX: case UMIN: case UMAX:
4721 /* Logical operations will preserve the number of sign-bit copies.
4722 MIN and MAX operations always return one of the operands. */
4723 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4724 known_x, known_mode, known_ret);
4725 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4726 known_x, known_mode, known_ret);
4728 /* If num1 is clearing some of the top bits then regardless of
4729 the other term, we are guaranteed to have at least that many
4730 high-order zero bits. */
4731 if (code == AND
4732 && num1 > 1
4733 && bitwidth <= HOST_BITS_PER_WIDE_INT
4734 && CONST_INT_P (XEXP (x, 1))
4735 && (UINTVAL (XEXP (x, 1))
4736 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0)
4737 return num1;
4739 /* Similarly for IOR when setting high-order bits. */
4740 if (code == IOR
4741 && num1 > 1
4742 && bitwidth <= HOST_BITS_PER_WIDE_INT
4743 && CONST_INT_P (XEXP (x, 1))
4744 && (UINTVAL (XEXP (x, 1))
4745 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4746 return num1;
4748 return MIN (num0, num1);
4750 case PLUS: case MINUS:
4751 /* For addition and subtraction, we can have a 1-bit carry. However,
4752 if we are subtracting 1 from a positive number, there will not
4753 be such a carry. Furthermore, if the positive number is known to
4754 be 0 or 1, we know the result is either -1 or 0. */
4756 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4757 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4759 nonzero = nonzero_bits (XEXP (x, 0), mode);
4760 if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4761 return (nonzero == 1 || nonzero == 0 ? bitwidth
4762 : bitwidth - floor_log2 (nonzero) - 1);
4765 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4766 known_x, known_mode, known_ret);
4767 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4768 known_x, known_mode, known_ret);
4769 result = MAX (1, MIN (num0, num1) - 1);
4771 return result;
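/* Worked example (illustrative only): (plus x (const_int -1)) with
   nonzero_bits (x) == 1 means x is 0 or 1, so the sum is -1 or 0 and
   bitwidth is returned; in the general case the possible 1-bit carry
   costs one copy, e.g. num0 == num1 == 8 gives MAX (1, 8 - 1) == 7.  */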
4773 case MULT:
4774 /* The number of bits of the product is the sum of the number of
4775 bits of both terms. However, unless one of the terms is known
4776 to be positive, we must allow for an additional bit since negating
4777 a negative number can remove one sign bit copy. */
4779 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4780 known_x, known_mode, known_ret);
4781 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4782 known_x, known_mode, known_ret);
4784 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4785 if (result > 0
4786 && (bitwidth > HOST_BITS_PER_WIDE_INT
4787 || (((nonzero_bits (XEXP (x, 0), mode)
4788 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4789 && ((nonzero_bits (XEXP (x, 1), mode)
4790 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
4791 != 0))))
4792 result--;
4794 return MAX (1, result);
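/* Worked example (illustrative only): with bitwidth == 32 and
   num0 == num1 == 20, result == 20 + 20 - 32 == 8; if the sign bit
   might be set in both operands, one more copy is subtracted, and the
   final value is never allowed to drop below 1.  */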
4796 case UDIV:
4797 /* The result must be <= the first operand. If the first operand
4798 has the high bit set, we know nothing about the number of sign
4799 bit copies. */
4800 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4801 return 1;
4802 else if ((nonzero_bits (XEXP (x, 0), mode)
4803 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4804 return 1;
4805 else
4806 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4807 known_x, known_mode, known_ret);
4809 case UMOD:
4810 /* The result must be <= the second operand. If the second operand
4811 has (or just might have) the high bit set, we know nothing about
4812 the number of sign bit copies. */
4813 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4814 return 1;
4815 else if ((nonzero_bits (XEXP (x, 1), mode)
4816 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4817 return 1;
4818 else
4819 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4820 known_x, known_mode, known_ret);
4822 case DIV:
4823 /* Similar to unsigned division, except that we have to worry about
4824 the case where the divisor is negative, in which case we have
4825 to add 1. */
4826 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4827 known_x, known_mode, known_ret);
4828 if (result > 1
4829 && (bitwidth > HOST_BITS_PER_WIDE_INT
4830 || (nonzero_bits (XEXP (x, 1), mode)
4831 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4832 result--;
4834 return result;
4836 case MOD:
4837 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4838 known_x, known_mode, known_ret);
4839 if (result > 1
4840 && (bitwidth > HOST_BITS_PER_WIDE_INT
4841 || (nonzero_bits (XEXP (x, 1), mode)
4842 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4843 result--;
4845 return result;
4847 case ASHIFTRT:
4848 /* Shifts by a constant add to the number of bits equal to the
4849 sign bit. */
4850 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4851 known_x, known_mode, known_ret);
4852 if (CONST_INT_P (XEXP (x, 1))
4853 && INTVAL (XEXP (x, 1)) > 0
4854 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4855 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4857 return num0;
4859 case ASHIFT:
4860 /* Left shifts destroy copies. */
4861 if (!CONST_INT_P (XEXP (x, 1))
4862 || INTVAL (XEXP (x, 1)) < 0
4863 || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4864 || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
4865 return 1;
4867 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4868 known_x, known_mode, known_ret);
4869 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4871 case IF_THEN_ELSE:
4872 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4873 known_x, known_mode, known_ret);
4874 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4875 known_x, known_mode, known_ret);
4876 return MIN (num0, num1);
4878 case EQ: case NE: case GE: case GT: case LE: case LT:
4879 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4880 case GEU: case GTU: case LEU: case LTU:
4881 case UNORDERED: case ORDERED:
4882 /* The result is STORE_FLAG_VALUE. If it is negative, take its 1's
4883 complement and remask. Then see how many zero bits we have. */
4884 nonzero = STORE_FLAG_VALUE;
4885 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4886 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4887 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4889 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4891 default:
4892 break;
4895 /* If we haven't been able to figure it out by one of the above rules,
4896 see if some of the high-order bits are known to be zero. If so,
4897 count those bits and return one less than that amount. If we can't
4898 safely compute the mask for this mode, always return BITWIDTH. */
4900 bitwidth = GET_MODE_PRECISION (mode);
4901 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4902 return 1;
4904 nonzero = nonzero_bits (x, mode);
4905 return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))
4906 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
4909 /* Calculate the rtx_cost of a single instruction. A return value of
4910 zero indicates an instruction pattern without a known cost. */
4912 int
4913 insn_rtx_cost (rtx pat, bool speed)
4915 int i, cost;
4916 rtx set;
4918 /* Extract the single set rtx from the instruction pattern.
4919 We can't use single_set since we only have the pattern. */
4920 if (GET_CODE (pat) == SET)
4921 set = pat;
4922 else if (GET_CODE (pat) == PARALLEL)
4924 set = NULL_RTX;
4925 for (i = 0; i < XVECLEN (pat, 0); i++)
4927 rtx x = XVECEXP (pat, 0, i);
4928 if (GET_CODE (x) == SET)
4930 if (set)
4931 return 0;
4932 set = x;
4935 if (!set)
4936 return 0;
4938 else
4939 return 0;
4941 cost = set_src_cost (SET_SRC (set), speed);
4942 return cost > 0 ? cost : COSTS_N_INSNS (1);
4945 /* Given an insn INSN and condition COND, return the condition in a
4946 canonical form to simplify testing by callers. Specifically:
4948 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
4949 (2) Both operands will be machine operands; (cc0) will have been replaced.
4950 (3) If an operand is a constant, it will be the second operand.
4951 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
4952 for GE, GEU, and LEU.
4954 If the condition cannot be understood, or is an inequality floating-point
4955 comparison which needs to be reversed, 0 will be returned.
4957 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
4959 If EARLIEST is nonzero, it is a pointer to a place where the earliest
4960 insn used in locating the condition was found. If a replacement test
4961 of the condition is desired, it should be placed in front of that
4962 insn and we will be sure that the inputs are still valid.
4964 If WANT_REG is nonzero, we wish the condition to be relative to that
4965 register, if possible. Therefore, do not canonicalize the condition
4966 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
4967 to be a compare to a CC mode register.
4969 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
4970 and at INSN. */
4972 rtx
4973 canonicalize_condition (rtx insn, rtx cond, int reverse, rtx *earliest,
4974 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
4976 enum rtx_code code;
4977 rtx prev = insn;
4978 const_rtx set;
4979 rtx tem;
4980 rtx op0, op1;
4981 int reverse_code = 0;
4982 enum machine_mode mode;
4983 basic_block bb = BLOCK_FOR_INSN (insn);
4985 code = GET_CODE (cond);
4986 mode = GET_MODE (cond);
4987 op0 = XEXP (cond, 0);
4988 op1 = XEXP (cond, 1);
4990 if (reverse)
4991 code = reversed_comparison_code (cond, insn);
4992 if (code == UNKNOWN)
4993 return 0;
4995 if (earliest)
4996 *earliest = insn;
4998 /* If we are comparing a register with zero, see if the register is set
4999 in the previous insn to a COMPARE or a comparison operation. Perform
5000 the same tests, as a function of STORE_FLAG_VALUE, that find_comparison_args
5001 in cse.c does. */
5003 while ((GET_RTX_CLASS (code) == RTX_COMPARE
5004 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
5005 && op1 == CONST0_RTX (GET_MODE (op0))
5006 && op0 != want_reg)
5008 /* Set nonzero when we find something of interest. */
5009 rtx x = 0;
5011 #ifdef HAVE_cc0
5012 /* If comparison with cc0, import actual comparison from compare
5013 insn. */
5014 if (op0 == cc0_rtx)
5016 if ((prev = prev_nonnote_insn (prev)) == 0
5017 || !NONJUMP_INSN_P (prev)
5018 || (set = single_set (prev)) == 0
5019 || SET_DEST (set) != cc0_rtx)
5020 return 0;
5022 op0 = SET_SRC (set);
5023 op1 = CONST0_RTX (GET_MODE (op0));
5024 if (earliest)
5025 *earliest = prev;
5027 #endif
5029 /* If this is a COMPARE, pick up the two things being compared. */
5030 if (GET_CODE (op0) == COMPARE)
5032 op1 = XEXP (op0, 1);
5033 op0 = XEXP (op0, 0);
5034 continue;
5036 else if (!REG_P (op0))
5037 break;
5039 /* Go back to the previous insn. Stop if it is not an INSN. We also
5040 stop if it isn't a single set or if it has a REG_INC note because
5041 we don't want to bother dealing with it. */
5043 prev = prev_nonnote_nondebug_insn (prev);
5045 if (prev == 0
5046 || !NONJUMP_INSN_P (prev)
5047 || FIND_REG_INC_NOTE (prev, NULL_RTX)
5048 /* In cfglayout mode, there do not have to be labels at the
5049 beginning of a block, or jumps at the end, so the previous
5050 conditions would not stop us when we reach the bb boundary. */
5051 || BLOCK_FOR_INSN (prev) != bb)
5052 break;
5054 set = set_of (op0, prev);
5056 if (set
5057 && (GET_CODE (set) != SET
5058 || !rtx_equal_p (SET_DEST (set), op0)))
5059 break;
5061 /* If this is setting OP0, get what it sets it to if it looks
5062 relevant. */
5063 if (set)
5065 enum machine_mode inner_mode = GET_MODE (SET_DEST (set));
5066 #ifdef FLOAT_STORE_FLAG_VALUE
5067 REAL_VALUE_TYPE fsfv;
5068 #endif
5070 /* ??? We may not combine comparisons done in a CCmode with
5071 comparisons not done in a CCmode. This is to aid targets
5072 like Alpha that have an IEEE compliant EQ instruction, and
5073 a non-IEEE compliant BEQ instruction. The use of CCmode is
5074 actually artificial, simply to prevent the combination, but
5075 should not affect other platforms.
5077 However, we must allow VOIDmode comparisons to match either
5078 CCmode or non-CCmode comparison, because some ports have
5079 modeless comparisons inside branch patterns.
5081 ??? This mode check should perhaps look more like the mode check
5082 in simplify_comparison in combine. */
5083 if (((GET_MODE_CLASS (mode) == MODE_CC)
5084 != (GET_MODE_CLASS (inner_mode) == MODE_CC))
5085 && mode != VOIDmode
5086 && inner_mode != VOIDmode)
5087 break;
5088 if (GET_CODE (SET_SRC (set)) == COMPARE
5089 || (((code == NE
5090 || (code == LT
5091 && val_signbit_known_set_p (inner_mode,
5092 STORE_FLAG_VALUE))
5093 #ifdef FLOAT_STORE_FLAG_VALUE
5094 || (code == LT
5095 && SCALAR_FLOAT_MODE_P (inner_mode)
5096 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5097 REAL_VALUE_NEGATIVE (fsfv)))
5098 #endif
5100 && COMPARISON_P (SET_SRC (set))))
5101 x = SET_SRC (set);
5102 else if (((code == EQ
5103 || (code == GE
5104 && val_signbit_known_set_p (inner_mode,
5105 STORE_FLAG_VALUE))
5106 #ifdef FLOAT_STORE_FLAG_VALUE
5107 || (code == GE
5108 && SCALAR_FLOAT_MODE_P (inner_mode)
5109 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5110 REAL_VALUE_NEGATIVE (fsfv)))
5111 #endif
5113 && COMPARISON_P (SET_SRC (set)))
5115 reverse_code = 1;
5116 x = SET_SRC (set);
5118 else if ((code == EQ || code == NE)
5119 && GET_CODE (SET_SRC (set)) == XOR)
5120 /* Handle sequences like:
5122 (set op0 (xor X Y))
5123 ...(eq|ne op0 (const_int 0))...
5125 in which case:
5127 (eq op0 (const_int 0)) reduces to (eq X Y)
5128 (ne op0 (const_int 0)) reduces to (ne X Y)
5130 This is the form used by MIPS16, for example. */
5131 x = SET_SRC (set);
5132 else
5133 break;
5136 else if (reg_set_p (op0, prev))
5137 /* If this sets OP0, but not directly, we have to give up. */
5138 break;
5140 if (x)
5142 /* If the caller is expecting the condition to be valid at INSN,
5143 make sure X doesn't change before INSN. */
5144 if (valid_at_insn_p)
5145 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
5146 break;
5147 if (COMPARISON_P (x))
5148 code = GET_CODE (x);
5149 if (reverse_code)
5151 code = reversed_comparison_code (x, prev);
5152 if (code == UNKNOWN)
5153 return 0;
5154 reverse_code = 0;
5157 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5158 if (earliest)
5159 *earliest = prev;
5163 /* If constant is first, put it last. */
5164 if (CONSTANT_P (op0))
5165 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
5167 /* If OP0 is the result of a comparison, we weren't able to find what
5168 was really being compared, so fail. */
5169 if (!allow_cc_mode
5170 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5171 return 0;
5173 /* Canonicalize any ordered comparison with integers involving equality
5174 if we can do computations in the relevant mode and we do not
5175 overflow. */
5177 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
5178 && CONST_INT_P (op1)
5179 && GET_MODE (op0) != VOIDmode
5180 && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
5182 HOST_WIDE_INT const_val = INTVAL (op1);
5183 unsigned HOST_WIDE_INT uconst_val = const_val;
5184 unsigned HOST_WIDE_INT max_val
5185 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
5187 switch (code)
5189 case LE:
5190 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
5191 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
5192 break;
5194 /* When cross-compiling, const_val might be sign-extended from
5195 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
5196 case GE:
5197 if ((const_val & max_val)
5198 != ((unsigned HOST_WIDE_INT) 1
5199 << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
5200 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
5201 break;
5203 case LEU:
5204 if (uconst_val < max_val)
5205 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
5206 break;
5208 case GEU:
5209 if (uconst_val != 0)
5210 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
5211 break;
5213 default:
5214 break;
5218 /* Never return CC0; return zero instead. */
5219 if (CC0_P (op0))
5220 return 0;
5222 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
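/* Worked example for the canonicalization above (an illustrative sketch
   only): with a SImode register R, (le R (const_int 4)) is returned as
   (lt R (const_int 5)) and (geu R (const_int 7)) as
   (gtu R (const_int 6)), while constants at the boundary values are
   left untouched.  */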
5225 /* Given a jump insn JUMP, return the condition that will cause it to branch
5226 to its JUMP_LABEL. If the condition cannot be understood, or is an
5227 inequality floating-point comparison which needs to be reversed, 0 will
5228 be returned.
5230 If EARLIEST is nonzero, it is a pointer to a place where the earliest
5231 insn used in locating the condition was found. If a replacement test
5232 of the condition is desired, it should be placed in front of that
5233 insn and we will be sure that the inputs are still valid. If EARLIEST
5234 is null, the returned condition will be valid at INSN.
5236 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
5237 compare CC mode register.
5239 VALID_AT_INSN_P is the same as for canonicalize_condition. */
5241 rtx
5242 get_condition (rtx jump, rtx *earliest, int allow_cc_mode, int valid_at_insn_p)
5244 rtx cond;
5245 int reverse;
5246 rtx set;
5248 /* If this is not a standard conditional jump, we can't parse it. */
5249 if (!JUMP_P (jump)
5250 || ! any_condjump_p (jump))
5251 return 0;
5252 set = pc_set (jump);
5254 cond = XEXP (SET_SRC (set), 0);
5256 /* If this branches to JUMP_LABEL when the condition is false, reverse
5257 the condition. */
5258 reverse
5259 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
5260 && XEXP (XEXP (SET_SRC (set), 2), 0) == JUMP_LABEL (jump);
5262 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
5263 allow_cc_mode, valid_at_insn_p);
5266 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
5267 TARGET_MODE_REP_EXTENDED.
5269 Note that we assume that the property of
5270 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
5271 narrower than mode B. I.e., if A is a mode narrower than B then in
5272 order to be able to operate on it in mode B, mode A needs to
5273 satisfy the requirements set by the representation of mode B. */
5275 static void
5276 init_num_sign_bit_copies_in_rep (void)
5278 enum machine_mode mode, in_mode;
5280 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
5281 in_mode = GET_MODE_WIDER_MODE (mode))
5282 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
5283 mode = GET_MODE_WIDER_MODE (mode))
5285 enum machine_mode i;
5287 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
5288 extends to the next widest mode. */
5289 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
5290 || GET_MODE_WIDER_MODE (mode) == in_mode);
5292 /* We are in in_mode. Count how many bits outside of mode
5293 have to be copies of the sign-bit. */
5294 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
5296 enum machine_mode wider = GET_MODE_WIDER_MODE (i);
5298 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
5299 /* We can only check sign-bit copies starting from the
5300 top-bit. In order to be able to check the bits we
5301 have already seen we pretend that subsequent bits
5302 have to be sign-bit copies too. */
5303 || num_sign_bit_copies_in_rep [in_mode][mode])
5304 num_sign_bit_copies_in_rep [in_mode][mode]
5305 += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
5310 /* Suppose that truncation from the machine mode of X to MODE is not a
5311 no-op. See if there is anything special about X so that we can
5312 assume it already contains a truncated value of MODE. */
5314 bool
5315 truncated_to_mode (enum machine_mode mode, const_rtx x)
5317 /* This register has already been used in MODE without explicit
5318 truncation. */
5319 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5320 return true;
5322 /* See if we already satisfy the requirements of MODE. If yes we
5323 can just switch to MODE. */
5324 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5325 && (num_sign_bit_copies (x, GET_MODE (x))
5326 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5327 return true;
5329 return false;
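/* A sketch of the check above (illustrative only, assuming the target
   reports SIGN_EXTEND for every step between QImode and SImode): then
   num_sign_bit_copies_in_rep[SImode][QImode] == 24, so an SImode value
   is already truncated to QImode when it has at least 25 sign-bit
   copies, i.e. bits 31..7 all equal the sign bit.  */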
5332 /* Initialize non_rtx_starting_operands, which is used to speed up
5333 for_each_rtx. */
5334 void
5335 init_rtlanal (void)
5337 int i;
5338 for (i = 0; i < NUM_RTX_CODE; i++)
5340 const char *format = GET_RTX_FORMAT (i);
5341 const char *first = strpbrk (format, "eEV");
5342 non_rtx_starting_operands[i] = first ? first - format : -1;
5345 init_num_sign_bit_copies_in_rep ();
5348 /* Check whether this is a constant pool constant. */
5349 bool
5350 constant_pool_constant_p (rtx x)
5352 x = avoid_constant_pool_reference (x);
5353 return CONST_DOUBLE_P (x);
5356 /* If M is a bitmask that selects a field of low-order bits within an item but
5357 not the entire word, return the length of the field. Return -1 otherwise.
5358 M is used in machine mode MODE. */
5360 int
5361 low_bitmask_len (enum machine_mode mode, unsigned HOST_WIDE_INT m)
5363 if (mode != VOIDmode)
5365 if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
5366 return -1;
5367 m &= GET_MODE_MASK (mode);
5370 return exact_log2 (m + 1);
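/* For example (illustrative only), in SImode m == 0x000000ff gives
   exact_log2 (0x100) == 8, while m == 0x0c is not a field of low-order
   bits (0x0d is not a power of two) and -1 is returned.  */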
5373 /* Return the mode of MEM's address. */
5375 enum machine_mode
5376 get_address_mode (rtx mem)
5378 enum machine_mode mode;
5380 gcc_assert (MEM_P (mem));
5381 mode = GET_MODE (XEXP (mem, 0));
5382 if (mode != VOIDmode)
5383 return mode;
5384 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
5387 /* Split up a CONST_DOUBLE or integer constant rtx
5388 into two rtx's for single words,
5389 storing in *FIRST the word that comes first in memory in the target
5390 and in *SECOND the other. */
5392 void
5393 split_double (rtx value, rtx *first, rtx *second)
5395 if (CONST_INT_P (value))
5397 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
5399 /* In this case the CONST_INT holds both target words.
5400 Extract the bits from it into two word-sized pieces.
5401 Sign extend each half to HOST_WIDE_INT. */
5402 unsigned HOST_WIDE_INT low, high;
5403 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
5404 unsigned bits_per_word = BITS_PER_WORD;
5406 /* Set sign_bit to the most significant bit of a word. */
5407 sign_bit = 1;
5408 sign_bit <<= bits_per_word - 1;
5410 /* Set mask so that all bits of the word are set. We could
5411 have used 1 << BITS_PER_WORD instead of basing the
5412 calculation on sign_bit. However, on machines where
5413 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
5414 compiler warning, even though the code would never be
5415 executed. */
5416 mask = sign_bit << 1;
5417 mask--;
5419 /* Set sign_extend as any remaining bits. */
5420 sign_extend = ~mask;
5422 /* Pick the lower word and sign-extend it. */
5423 low = INTVAL (value);
5424 low &= mask;
5425 if (low & sign_bit)
5426 low |= sign_extend;
5428 /* Pick the higher word, shifted to the least significant
5429 bits, and sign-extend it. */
5430 high = INTVAL (value);
5431 high >>= bits_per_word - 1;
5432 high >>= 1;
5433 high &= mask;
5434 if (high & sign_bit)
5435 high |= sign_extend;
5437 /* Store the words in the target machine order. */
5438 if (WORDS_BIG_ENDIAN)
5440 *first = GEN_INT (high);
5441 *second = GEN_INT (low);
5443 else
5445 *first = GEN_INT (low);
5446 *second = GEN_INT (high);
5449 else
5451 /* The rule for using CONST_INT for a wider mode
5452 is that we regard the value as signed.
5453 So sign-extend it. */
5454 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
5455 if (WORDS_BIG_ENDIAN)
5457 *first = high;
5458 *second = value;
5460 else
5462 *first = value;
5463 *second = high;
5467 else if (!CONST_DOUBLE_P (value))
5469 if (WORDS_BIG_ENDIAN)
5471 *first = const0_rtx;
5472 *second = value;
5474 else
5476 *first = value;
5477 *second = const0_rtx;
5480 else if (GET_MODE (value) == VOIDmode
5481 /* This is the old way we did CONST_DOUBLE integers. */
5482 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
5484 /* In an integer, the words are defined as most and least significant.
5485 So order them by the target's convention. */
5486 if (WORDS_BIG_ENDIAN)
5488 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
5489 *second = GEN_INT (CONST_DOUBLE_LOW (value));
5491 else
5493 *first = GEN_INT (CONST_DOUBLE_LOW (value));
5494 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
5497 else
5499 REAL_VALUE_TYPE r;
5500 long l[2];
5501 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
5503 /* Note, this converts the REAL_VALUE_TYPE to the target's
5504 format, splits up the floating point double and outputs
5505 exactly 32 bits of it into each of l[0] and l[1] --
5506 not necessarily BITS_PER_WORD bits. */
5507 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
5509 /* If 32 bits is an entire word for the target, but not for the host,
5510 then sign-extend on the host so that the number will look the same
5511 way on the host that it would on the target. See for instance
5512 simplify_unary_operation. The #if is needed to avoid compiler
5513 warnings. */
5515 #if HOST_BITS_PER_LONG > 32
5516 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
5518 if (l[0] & ((long) 1 << 31))
5519 l[0] |= ((long) (-1) << 32);
5520 if (l[1] & ((long) 1 << 31))
5521 l[1] |= ((long) (-1) << 32);
5523 #endif
5525 *first = GEN_INT (l[0]);
5526 *second = GEN_INT (l[1]);
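/* Worked example (an illustrative sketch, assuming 32-bit words and a
   64-bit HOST_WIDE_INT): (const_int 0x123456789abcdef0) is split into
   a low word 0x9abcdef0 (sign-extended to a negative HOST_WIDE_INT on
   the host) and a high word 0x12345678, stored in *FIRST and *SECOND
   according to WORDS_BIG_ENDIAN.  */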
5530 /* Return true if X is a sign_extract or zero_extract from the least
5531 significant bit. */
5533 static bool
5534 lsb_bitfield_op_p (rtx x)
5536 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
5538 enum machine_mode mode = GET_MODE (XEXP (x, 0));
5539 HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
5540 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
5542 return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0));
5544 return false;
5547 /* Strip outer address "mutations" from LOC and return a pointer to the
5548 inner value. If OUTER_CODE is nonnull, store the code of the innermost
5549 stripped expression there.
5551 "Mutations" either convert between modes or apply some kind of
5552 extension, truncation or alignment. */
5554 rtx *
5555 strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
5557 for (;;)
5559 enum rtx_code code = GET_CODE (*loc);
5560 if (GET_RTX_CLASS (code) == RTX_UNARY)
5561 /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
5562 used to convert between pointer sizes. */
5563 loc = &XEXP (*loc, 0);
5564 else if (lsb_bitfield_op_p (*loc))
5565 /* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
5566 acts as a combined truncation and extension. */
5567 loc = &XEXP (*loc, 0);
5568 else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
5569 /* (and ... (const_int -X)) is used to align to X bytes. */
5570 loc = &XEXP (*loc, 0);
5571 else if (code == SUBREG
5572 && !OBJECT_P (SUBREG_REG (*loc))
5573 && subreg_lowpart_p (*loc))
5574 /* (subreg (operator ...) ...) inside an AND is used for mode
5575 conversion too. */
5576 loc = &SUBREG_REG (*loc);
5577 else
5578 return loc;
5579 if (outer_code)
5580 *outer_code = code;
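/* A sketch of the stripping above (illustrative only, assuming a
   little-endian target): for
   (and:SI (subreg:SI (plus:DI (reg:DI R) (reg:DI S)) 0) (const_int -4))
   the alignment AND and the lowpart SUBREG are stripped, a pointer to
   the PLUS is returned, and *OUTER_CODE (if given) is set to SUBREG,
   the innermost stripped code.  */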
5584 /* Return true if CODE applies some kind of scale. The scaled value is
5585 the first operand and the scale is the second. */
5587 static bool
5588 binary_scale_code_p (enum rtx_code code)
5590 return (code == MULT
5591 || code == ASHIFT
5592 /* Needed by ARM targets. */
5593 || code == ASHIFTRT
5594 || code == LSHIFTRT
5595 || code == ROTATE
5596 || code == ROTATERT);
5599 /* If *INNER can be interpreted as a base, return a pointer to the inner term
5600 (see address_info). Return null otherwise. */
5602 static rtx *
5603 get_base_term (rtx *inner)
5605 if (GET_CODE (*inner) == LO_SUM)
5606 inner = strip_address_mutations (&XEXP (*inner, 0));
5607 if (REG_P (*inner)
5608 || MEM_P (*inner)
5609 || GET_CODE (*inner) == SUBREG)
5610 return inner;
5611 return 0;
5614 /* If *INNER can be interpreted as an index, return a pointer to the inner term
5615 (see address_info). Return null otherwise. */
5617 static rtx *
5618 get_index_term (rtx *inner)
5620 /* At present, only constant scales are allowed. */
5621 if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
5622 inner = strip_address_mutations (&XEXP (*inner, 0));
5623 if (REG_P (*inner)
5624 || MEM_P (*inner)
5625 || GET_CODE (*inner) == SUBREG)
5626 return inner;
5627 return 0;
5630 /* Set the segment part of address INFO to LOC, given that INNER is the
5631 unmutated value. */
5633 static void
5634 set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
5636 gcc_assert (!info->segment);
5637 info->segment = loc;
5638 info->segment_term = inner;
5641 /* Set the base part of address INFO to LOC, given that INNER is the
5642 unmutated value. */
5644 static void
5645 set_address_base (struct address_info *info, rtx *loc, rtx *inner)
5647 gcc_assert (!info->base);
5648 info->base = loc;
5649 info->base_term = inner;
5652 /* Set the index part of address INFO to LOC, given that INNER is the
5653 unmutated value. */
5655 static void
5656 set_address_index (struct address_info *info, rtx *loc, rtx *inner)
5658 gcc_assert (!info->index);
5659 info->index = loc;
5660 info->index_term = inner;
5663 /* Set the displacement part of address INFO to LOC, given that INNER
5664 is the constant term. */
5666 static void
5667 set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
5669 gcc_assert (!info->disp);
5670 info->disp = loc;
5671 info->disp_term = inner;
5674 /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address. Set up the
5675 rest of INFO accordingly. */
5677 static void
5678 decompose_incdec_address (struct address_info *info)
5680 info->autoinc_p = true;
5682 rtx *base = &XEXP (*info->inner, 0);
5683 set_address_base (info, base, base);
5684 gcc_checking_assert (info->base == info->base_term);
5686 /* These addresses are only valid when the size of the addressed
5687 value is known. */
5688 gcc_checking_assert (info->mode != VOIDmode);
5691 /* INFO->INNER describes a {PRE,POST}_MODIFY address. Set up the rest
5692 of INFO accordingly. */
5694 static void
5695 decompose_automod_address (struct address_info *info)
5697 info->autoinc_p = true;
5699 rtx *base = &XEXP (*info->inner, 0);
5700 set_address_base (info, base, base);
5701 gcc_checking_assert (info->base == info->base_term);
5703 rtx plus = XEXP (*info->inner, 1);
5704 gcc_assert (GET_CODE (plus) == PLUS);
5706 info->base_term2 = &XEXP (plus, 0);
5707 gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
5709 rtx *step = &XEXP (plus, 1);
5710 rtx *inner_step = strip_address_mutations (step);
5711 if (CONSTANT_P (*inner_step))
5712 set_address_disp (info, step, inner_step);
5713 else
5714 set_address_index (info, step, inner_step);
5717 /* Treat *LOC as a tree of PLUS operands and store pointers to the summed
5718 values in [PTR, END). Return a pointer to the end of the used array. */
5720 static rtx **
5721 extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
5723 rtx x = *loc;
5724 if (GET_CODE (x) == PLUS)
5726 ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
5727 ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
5729 else
5731 gcc_assert (ptr != end);
5732 *ptr++ = loc;
5734 return ptr;
5737 /* Evaluate the likelihood of X being a base or index value, returning
5738 positive if it is likely to be a base, negative if it is likely to be
5739 an index, and 0 if we can't tell. Make the magnitude of the return
5740 value reflect the amount of confidence we have in the answer.
5742 MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1. */
5744 static int
5745 baseness (rtx x, enum machine_mode mode, addr_space_t as,
5746 enum rtx_code outer_code, enum rtx_code index_code)
5748 /* Believe *_POINTER unless the address shape requires otherwise. */
5749 if (REG_P (x) && REG_POINTER (x))
5750 return 2;
5751 if (MEM_P (x) && MEM_POINTER (x))
5752 return 2;
5754 if (REG_P (x) && HARD_REGISTER_P (x))
5756 /* X is a hard register. If it only fits one of the base
5757 or index classes, choose that interpretation. */
5758 int regno = REGNO (x);
5759 bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
5760 bool index_p = REGNO_OK_FOR_INDEX_P (regno);
5761 if (base_p != index_p)
5762 return base_p ? 1 : -1;
5764 return 0;
5767 /* INFO->INNER describes a normal, non-automodified address.
5768 Fill in the rest of INFO accordingly. */
5770 static void
5771 decompose_normal_address (struct address_info *info)
5773 /* Treat the address as the sum of up to four values. */
5774 rtx *ops[4];
5775 size_t n_ops = extract_plus_operands (info->inner, ops,
5776 ops + ARRAY_SIZE (ops)) - ops;
5778 /* If there is more than one component, any base component is in a PLUS. */
5779 if (n_ops > 1)
5780 info->base_outer_code = PLUS;
5782 /* Try to classify each sum operand now. Leave those that could be
5783 either a base or an index in OPS. */
5784 rtx *inner_ops[4];
5785 size_t out = 0;
5786 for (size_t in = 0; in < n_ops; ++in)
5788 rtx *loc = ops[in];
5789 rtx *inner = strip_address_mutations (loc);
5790 if (CONSTANT_P (*inner))
5791 set_address_disp (info, loc, inner);
5792 else if (GET_CODE (*inner) == UNSPEC)
5793 set_address_segment (info, loc, inner);
5794 else
5796 /* The only other possibilities are a base or an index. */
5797 rtx *base_term = get_base_term (inner);
5798 rtx *index_term = get_index_term (inner);
5799 gcc_assert (base_term || index_term);
5800 if (!base_term)
5801 set_address_index (info, loc, index_term);
5802 else if (!index_term)
5803 set_address_base (info, loc, base_term);
5804 else
5806 gcc_assert (base_term == index_term);
5807 ops[out] = loc;
5808 inner_ops[out] = base_term;
5809 ++out;
5814 /* Classify the remaining OPS members as bases and indexes. */
5815 if (out == 1)
5817 /* If we haven't seen a base or an index yet, assume that this is
5818 the base. If we were confident that another term was the base
5819 or index, treat the remaining operand as the other kind. */
5820 if (!info->base)
5821 set_address_base (info, ops[0], inner_ops[0]);
5822 else
5823 set_address_index (info, ops[0], inner_ops[0]);
5825 else if (out == 2)
5827 /* In the event of a tie, assume the base comes first. */
5828 if (baseness (*inner_ops[0], info->mode, info->as, PLUS,
5829 GET_CODE (*ops[1]))
5830 >= baseness (*inner_ops[1], info->mode, info->as, PLUS,
5831 GET_CODE (*ops[0])))
5833 set_address_base (info, ops[0], inner_ops[0]);
5834 set_address_index (info, ops[1], inner_ops[1]);
5836 else
5838 set_address_base (info, ops[1], inner_ops[1]);
5839 set_address_index (info, ops[0], inner_ops[0]);
5842 else
5843 gcc_assert (out == 0);
5846 /* Describe address *LOC in *INFO. MODE is the mode of the addressed value,
5847 or VOIDmode if not known. AS is the address space associated with LOC.
5848 OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise. */
5850 void
5851 decompose_address (struct address_info *info, rtx *loc, enum machine_mode mode,
5852 addr_space_t as, enum rtx_code outer_code)
5854 memset (info, 0, sizeof (*info));
5855 info->mode = mode;
5856 info->as = as;
5857 info->addr_outer_code = outer_code;
5858 info->outer = loc;
5859 info->inner = strip_address_mutations (loc, &outer_code);
5860 info->base_outer_code = outer_code;
5861 switch (GET_CODE (*info->inner))
5863 case PRE_DEC:
5864 case PRE_INC:
5865 case POST_DEC:
5866 case POST_INC:
5867 decompose_incdec_address (info);
5868 break;
5870 case PRE_MODIFY:
5871 case POST_MODIFY:
5872 decompose_automod_address (info);
5873 break;
5875 default:
5876 decompose_normal_address (info);
5877 break;
5881 /* Describe address operand LOC in INFO. */
5883 void
5884 decompose_lea_address (struct address_info *info, rtx *loc)
5886 decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
5889 /* Describe the address of MEM X in INFO. */
5891 void
5892 decompose_mem_address (struct address_info *info, rtx x)
5894 gcc_assert (MEM_P (x));
5895 decompose_address (info, &XEXP (x, 0), GET_MODE (x),
5896 MEM_ADDR_SPACE (x), MEM);
5899 /* Update INFO after a change to the address it describes. */
5901 void
5902 update_address (struct address_info *info)
5904 decompose_address (info, info->outer, info->mode, info->as,
5905 info->addr_outer_code);
5908 /* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
5909 more complicated than that. */
5911 HOST_WIDE_INT
5912 get_index_scale (const struct address_info *info)
5914 rtx index = *info->index;
5915 if (GET_CODE (index) == MULT
5916 && CONST_INT_P (XEXP (index, 1))
5917 && info->index_term == &XEXP (index, 0))
5918 return INTVAL (XEXP (index, 1));
5920 if (GET_CODE (index) == ASHIFT
5921 && CONST_INT_P (XEXP (index, 1))
5922 && info->index_term == &XEXP (index, 0))
5923 return (HOST_WIDE_INT) 1 << INTVAL (XEXP (index, 1));
5925 if (info->index == info->index_term)
5926 return 1;
5928 return 0;
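/* For example (illustrative only): (mult (reg R) (const_int 4)) gives a
   scale of 4, (ashift (reg R) (const_int 2)) gives 1 << 2 == 4, a bare
   (reg R) used directly as the index gives 1, and anything more
   complicated gives 0.  */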
5931 /* Return the "index code" of INFO, in the form required by
5932 ok_for_base_p_1. */
5934 enum rtx_code
5935 get_index_code (const struct address_info *info)
5937 if (info->index)
5938 return GET_CODE (*info->index);
5940 if (info->disp)
5941 return GET_CODE (*info->disp);
5943 return SCRATCH;