gcc/rtlanal.c
1 /* Analyze RTL for GNU compiler.
2 Copyright (C) 1987-2014 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "hard-reg-set.h"
27 #include "rtl.h"
28 #include "insn-config.h"
29 #include "recog.h"
30 #include "target.h"
31 #include "output.h"
32 #include "tm_p.h"
33 #include "flags.h"
34 #include "regs.h"
35 #include "hashtab.h"
36 #include "hash-set.h"
37 #include "vec.h"
38 #include "machmode.h"
39 #include "input.h"
40 #include "function.h"
41 #include "predict.h"
42 #include "basic-block.h"
43 #include "df.h"
44 #include "tree.h"
45 #include "emit-rtl.h" /* FIXME: Can go away once crtl is moved to rtl.h. */
46 #include "addresses.h"
47 #include "rtl-iter.h"
49 /* Forward declarations */
50 static void set_of_1 (rtx, const_rtx, void *);
51 static bool covers_regno_p (const_rtx, unsigned int);
52 static bool covers_regno_no_parallel_p (const_rtx, unsigned int);
53 static int computed_jump_p_1 (const_rtx);
54 static void parms_set (rtx, const_rtx, void *);
56 static unsigned HOST_WIDE_INT cached_nonzero_bits (const_rtx, machine_mode,
57 const_rtx, machine_mode,
58 unsigned HOST_WIDE_INT);
59 static unsigned HOST_WIDE_INT nonzero_bits1 (const_rtx, machine_mode,
60 const_rtx, machine_mode,
61 unsigned HOST_WIDE_INT);
62 static unsigned int cached_num_sign_bit_copies (const_rtx, machine_mode, const_rtx,
63 machine_mode,
64 unsigned int);
65 static unsigned int num_sign_bit_copies1 (const_rtx, machine_mode, const_rtx,
66 machine_mode, unsigned int);
68 /* Offset of the first 'e', 'E' or 'V' operand for each rtx code, or
69 -1 if a code has no such operand. */
70 static int non_rtx_starting_operands[NUM_RTX_CODE];
72 rtx_subrtx_bound_info rtx_all_subrtx_bounds[NUM_RTX_CODE];
73 rtx_subrtx_bound_info rtx_nonconst_subrtx_bounds[NUM_RTX_CODE];
75 /* Truncation narrows the mode from SOURCE mode to DESTINATION mode.
76 If TARGET_MODE_REP_EXTENDED (DESTINATION, DESTINATION_REP) is
77 SIGN_EXTEND then while narrowing we also have to enforce the
78 representation and sign-extend the value to mode DESTINATION_REP.
80 If the value is already sign-extended to DESTINATION_REP mode we
81 can just switch to DESTINATION mode on it. For each pair of
82 integral modes SOURCE and DESTINATION, when truncating from SOURCE
83 to DESTINATION, NUM_SIGN_BIT_COPIES_IN_REP[SOURCE][DESTINATION]
84 contains the number of high-order bits in SOURCE that have to be
85 copies of the sign-bit so that we can do this mode-switch to
86 DESTINATION. */
88 static unsigned int
89 num_sign_bit_copies_in_rep[MAX_MODE_INT + 1][MAX_MODE_INT + 1];
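/* For illustration only (not part of the original source): a consumer of
   this table, such as truncated_to_mode later in this file, performs a
   check of roughly this shape when deciding whether a SImode value X can
   be narrowed to QImode by a plain mode switch:

       if (num_sign_bit_copies (x, SImode)
           >= num_sign_bit_copies_in_rep[SImode][QImode])
         ... the truncation needs no explicit sign-extension ...

   assuming SImode and QImode are both scalar integer modes supported by
   the target.  */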
91 /* Store X into index I of ARRAY. ARRAY is known to have at least I
92 elements. Return the new base of ARRAY. */
94 template <typename T>
95 typename T::value_type *
96 generic_subrtx_iterator <T>::add_single_to_queue (array_type &array,
97 value_type *base,
98 size_t i, value_type x)
100 if (base == array.stack)
102 if (i < LOCAL_ELEMS)
104 base[i] = x;
105 return base;
107 gcc_checking_assert (i == LOCAL_ELEMS);
108 vec_safe_grow (array.heap, i + 1);
109 base = array.heap->address ();
110 memcpy (base, array.stack, sizeof (array.stack));
111 base[LOCAL_ELEMS] = x;
112 return base;
114 unsigned int length = array.heap->length ();
115 if (length > i)
117 gcc_checking_assert (base == array.heap->address ());
118 base[i] = x;
119 return base;
121 else
123 gcc_checking_assert (i == length);
124 vec_safe_push (array.heap, x);
125 return array.heap->address ();
129 /* Add the subrtxes of X to worklist ARRAY, starting at END. Return the
130 number of elements added to the worklist. */
132 template <typename T>
133 size_t
134 generic_subrtx_iterator <T>::add_subrtxes_to_queue (array_type &array,
135 value_type *base,
136 size_t end, rtx_type x)
138 enum rtx_code code = GET_CODE (x);
139 const char *format = GET_RTX_FORMAT (code);
140 size_t orig_end = end;
141 if (__builtin_expect (INSN_P (x), false))
143 /* Put the pattern at the top of the queue, since that's what
144 we're likely to want most. It also allows for the SEQUENCE
145 code below. */
146 for (int i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; --i)
147 if (format[i] == 'e')
149 value_type subx = T::get_value (x->u.fld[i].rt_rtx);
150 if (__builtin_expect (end < LOCAL_ELEMS, true))
151 base[end++] = subx;
152 else
153 base = add_single_to_queue (array, base, end++, subx);
156 else
157 for (int i = 0; format[i]; ++i)
158 if (format[i] == 'e')
160 value_type subx = T::get_value (x->u.fld[i].rt_rtx);
161 if (__builtin_expect (end < LOCAL_ELEMS, true))
162 base[end++] = subx;
163 else
164 base = add_single_to_queue (array, base, end++, subx);
166 else if (format[i] == 'E')
168 unsigned int length = GET_NUM_ELEM (x->u.fld[i].rt_rtvec);
169 rtx *vec = x->u.fld[i].rt_rtvec->elem;
170 if (__builtin_expect (end + length <= LOCAL_ELEMS, true))
171 for (unsigned int j = 0; j < length; j++)
172 base[end++] = T::get_value (vec[j]);
173 else
174 for (unsigned int j = 0; j < length; j++)
175 base = add_single_to_queue (array, base, end++,
176 T::get_value (vec[j]));
177 if (code == SEQUENCE && end == length)
178 /* If the subrtxes of the sequence fill the entire array then
179 we know that no other parts of a containing insn are queued.
180 The caller is therefore iterating over the sequence as a
181 PATTERN (...), so we also want the patterns of the
182 subinstructions. */
183 for (unsigned int j = 0; j < length; j++)
185 typename T::rtx_type x = T::get_rtx (base[j]);
186 if (INSN_P (x))
187 base[j] = T::get_value (PATTERN (x));
190 return end - orig_end;
193 template <typename T>
194 void
195 generic_subrtx_iterator <T>::free_array (array_type &array)
197 vec_free (array.heap);
200 template <typename T>
201 const size_t generic_subrtx_iterator <T>::LOCAL_ELEMS;
203 template class generic_subrtx_iterator <const_rtx_accessor>;
204 template class generic_subrtx_iterator <rtx_var_accessor>;
205 template class generic_subrtx_iterator <rtx_ptr_accessor>;
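/* For illustration only: a typical walk over the sub-rtxes of some rtx X
   using this machinery looks like

       subrtx_iterator::array_type array;
       FOR_EACH_SUBRTX (iter, array, x, NONCONST)
         if (MEM_P (*iter))
           ... handle the MEM ...

   where X is assumed to come from the surrounding pass; find_all_hard_regs
   below is a real in-file example of the same pattern.  */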
207 /* Return 1 if the value of X is unstable
208 (would be different at a different point in the program).
209 The frame pointer, arg pointer, etc. are considered stable
210 (within one function) and so is anything marked `unchanging'. */
212 int
213 rtx_unstable_p (const_rtx x)
215 const RTX_CODE code = GET_CODE (x);
216 int i;
217 const char *fmt;
219 switch (code)
221 case MEM:
222 return !MEM_READONLY_P (x) || rtx_unstable_p (XEXP (x, 0));
224 case CONST:
225 CASE_CONST_ANY:
226 case SYMBOL_REF:
227 case LABEL_REF:
228 return 0;
230 case REG:
231 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
232 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
233 /* The arg pointer varies if it is not a fixed register. */
234 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
235 return 0;
236 /* ??? When call-clobbered, the value is stable modulo the restore
237 that must happen after a call. This currently screws up local-alloc
238 into believing that the restore is not needed. */
239 if (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED && x == pic_offset_table_rtx)
240 return 0;
241 return 1;
243 case ASM_OPERANDS:
244 if (MEM_VOLATILE_P (x))
245 return 1;
247 /* Fall through. */
249 default:
250 break;
253 fmt = GET_RTX_FORMAT (code);
254 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
255 if (fmt[i] == 'e')
257 if (rtx_unstable_p (XEXP (x, i)))
258 return 1;
260 else if (fmt[i] == 'E')
262 int j;
263 for (j = 0; j < XVECLEN (x, i); j++)
264 if (rtx_unstable_p (XVECEXP (x, i, j)))
265 return 1;
268 return 0;
271 /* Return 1 if X has a value that can vary even between two
272 executions of the program. 0 means X can be compared reliably
273 against certain constants or near-constants.
274 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
275 zero, we are slightly more conservative.
276 The frame pointer and the arg pointer are considered constant. */
278 bool
279 rtx_varies_p (const_rtx x, bool for_alias)
281 RTX_CODE code;
282 int i;
283 const char *fmt;
285 if (!x)
286 return 0;
288 code = GET_CODE (x);
289 switch (code)
291 case MEM:
292 return !MEM_READONLY_P (x) || rtx_varies_p (XEXP (x, 0), for_alias);
294 case CONST:
295 CASE_CONST_ANY:
296 case SYMBOL_REF:
297 case LABEL_REF:
298 return 0;
300 case REG:
301 /* Note that we have to test for the actual rtx used for the frame
302 and arg pointers and not just the register number in case we have
303 eliminated the frame and/or arg pointer and are using it
304 for pseudos. */
305 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
306 /* The arg pointer varies if it is not a fixed register. */
307 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
308 return 0;
309 if (x == pic_offset_table_rtx
310 /* ??? When call-clobbered, the value is stable modulo the restore
311 that must happen after a call. This currently screws up
312 local-alloc into believing that the restore is not needed, so we
313 must return 0 only if we are called from alias analysis. */
314 && (!PIC_OFFSET_TABLE_REG_CALL_CLOBBERED || for_alias))
315 return 0;
316 return 1;
318 case LO_SUM:
319 /* The operand 0 of a LO_SUM is considered constant
320 (in fact it is related specifically to operand 1)
321 during alias analysis. */
322 return (! for_alias && rtx_varies_p (XEXP (x, 0), for_alias))
323 || rtx_varies_p (XEXP (x, 1), for_alias);
325 case ASM_OPERANDS:
326 if (MEM_VOLATILE_P (x))
327 return 1;
329 /* Fall through. */
331 default:
332 break;
335 fmt = GET_RTX_FORMAT (code);
336 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
337 if (fmt[i] == 'e')
339 if (rtx_varies_p (XEXP (x, i), for_alias))
340 return 1;
342 else if (fmt[i] == 'E')
344 int j;
345 for (j = 0; j < XVECLEN (x, i); j++)
346 if (rtx_varies_p (XVECEXP (x, i, j), for_alias))
347 return 1;
350 return 0;
353 /* Return nonzero if the use of X+OFFSET as an address in a MEM with SIZE
354 bytes can cause a trap. MODE is the mode of the MEM (not that of X) and
355 UNALIGNED_MEMS controls whether nonzero is returned for unaligned memory
356 references on strict alignment machines. */
358 static int
359 rtx_addr_can_trap_p_1 (const_rtx x, HOST_WIDE_INT offset, HOST_WIDE_INT size,
360 machine_mode mode, bool unaligned_mems)
362 enum rtx_code code = GET_CODE (x);
364 /* The offset must be a multiple of the mode size if we are considering
365 unaligned memory references on strict alignment machines. */
366 if (STRICT_ALIGNMENT && unaligned_mems && GET_MODE_SIZE (mode) != 0)
368 HOST_WIDE_INT actual_offset = offset;
370 #ifdef SPARC_STACK_BOUNDARY_HACK
371 /* ??? The SPARC port may claim a STACK_BOUNDARY higher than
372 the real alignment of %sp. However, when it does this, the
373 alignment of %sp+STACK_POINTER_OFFSET is STACK_BOUNDARY. */
374 if (SPARC_STACK_BOUNDARY_HACK
375 && (x == stack_pointer_rtx || x == hard_frame_pointer_rtx))
376 actual_offset -= STACK_POINTER_OFFSET;
377 #endif
379 if (actual_offset % GET_MODE_SIZE (mode) != 0)
380 return 1;
383 switch (code)
385 case SYMBOL_REF:
386 if (SYMBOL_REF_WEAK (x))
387 return 1;
388 if (!CONSTANT_POOL_ADDRESS_P (x))
390 tree decl;
391 HOST_WIDE_INT decl_size;
393 if (offset < 0)
394 return 1;
395 if (size == 0)
396 size = GET_MODE_SIZE (mode);
397 if (size == 0)
398 return offset != 0;
400 /* If the size of the access or of the symbol is unknown,
401 assume the worst. */
402 decl = SYMBOL_REF_DECL (x);
404 /* Else check that the access is in bounds. TODO: restructure
405 expr_size/tree_expr_size/int_expr_size and just use the latter. */
406 if (!decl)
407 decl_size = -1;
408 else if (DECL_P (decl) && DECL_SIZE_UNIT (decl))
409 decl_size = (tree_fits_shwi_p (DECL_SIZE_UNIT (decl))
410 ? tree_to_shwi (DECL_SIZE_UNIT (decl))
411 : -1);
412 else if (TREE_CODE (decl) == STRING_CST)
413 decl_size = TREE_STRING_LENGTH (decl);
414 else if (TYPE_SIZE_UNIT (TREE_TYPE (decl)))
415 decl_size = int_size_in_bytes (TREE_TYPE (decl));
416 else
417 decl_size = -1;
419 return (decl_size <= 0 ? offset != 0 : offset + size > decl_size);
422 return 0;
424 case LABEL_REF:
425 return 0;
427 case REG:
428 /* Stack references are assumed not to trap, but we need to deal with
429 nonsensical offsets. */
430 if (x == frame_pointer_rtx)
432 HOST_WIDE_INT adj_offset = offset - STARTING_FRAME_OFFSET;
433 if (size == 0)
434 size = GET_MODE_SIZE (mode);
435 if (FRAME_GROWS_DOWNWARD)
437 if (adj_offset < frame_offset || adj_offset + size - 1 >= 0)
438 return 1;
440 else
442 if (adj_offset < 0 || adj_offset + size - 1 >= frame_offset)
443 return 1;
445 return 0;
447 /* ??? Need to add a similar guard for nonsensical offsets. */
448 if (x == hard_frame_pointer_rtx
449 || x == stack_pointer_rtx
450 /* The arg pointer varies if it is not a fixed register. */
451 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
452 return 0;
453 /* All of the virtual frame registers are stack references. */
454 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
455 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
456 return 0;
457 return 1;
459 case CONST:
460 return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
461 mode, unaligned_mems);
463 case PLUS:
464 /* An address is assumed not to trap if:
465 - it is the pic register plus a constant. */
466 if (XEXP (x, 0) == pic_offset_table_rtx && CONSTANT_P (XEXP (x, 1)))
467 return 0;
469 /* - or it is an address that can't trap plus a constant integer. */
470 if (CONST_INT_P (XEXP (x, 1))
471 && !rtx_addr_can_trap_p_1 (XEXP (x, 0), offset + INTVAL (XEXP (x, 1)),
472 size, mode, unaligned_mems))
473 return 0;
475 return 1;
477 case LO_SUM:
478 case PRE_MODIFY:
479 return rtx_addr_can_trap_p_1 (XEXP (x, 1), offset, size,
480 mode, unaligned_mems);
482 case PRE_DEC:
483 case PRE_INC:
484 case POST_DEC:
485 case POST_INC:
486 case POST_MODIFY:
487 return rtx_addr_can_trap_p_1 (XEXP (x, 0), offset, size,
488 mode, unaligned_mems);
490 default:
491 break;
494 /* If it isn't one of the cases above, it can cause a trap. */
495 return 1;
498 /* Return nonzero if the use of X as an address in a MEM can cause a trap. */
500 int
501 rtx_addr_can_trap_p (const_rtx x)
503 return rtx_addr_can_trap_p_1 (x, 0, 0, VOIDmode, false);
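/* For illustration only: a caller that wants to know whether reading a
   MEM rtx M might fault would typically ask

       if (rtx_addr_can_trap_p (XEXP (m, 0)))
         ... assume the load may trap ...

   where M is assumed to be a MEM built elsewhere; operand 0 of a MEM is
   its address.  */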
506 /* Return true if X is an address that is known to not be zero. */
508 bool
509 nonzero_address_p (const_rtx x)
511 const enum rtx_code code = GET_CODE (x);
513 switch (code)
515 case SYMBOL_REF:
516 return !SYMBOL_REF_WEAK (x);
518 case LABEL_REF:
519 return true;
521 case REG:
522 /* As in rtx_varies_p, we have to use the actual rtx, not reg number. */
523 if (x == frame_pointer_rtx || x == hard_frame_pointer_rtx
524 || x == stack_pointer_rtx
525 || (x == arg_pointer_rtx && fixed_regs[ARG_POINTER_REGNUM]))
526 return true;
527 /* All of the virtual frame registers are stack references. */
528 if (REGNO (x) >= FIRST_VIRTUAL_REGISTER
529 && REGNO (x) <= LAST_VIRTUAL_REGISTER)
530 return true;
531 return false;
533 case CONST:
534 return nonzero_address_p (XEXP (x, 0));
536 case PLUS:
537 /* Handle PIC references. */
538 if (XEXP (x, 0) == pic_offset_table_rtx
539 && CONSTANT_P (XEXP (x, 1)))
540 return true;
541 return false;
543 case PRE_MODIFY:
544 /* Similar to the above; allow positive offsets. Further, since
545 auto-inc is only allowed in memories, the register must be a
546 pointer. */
547 if (CONST_INT_P (XEXP (x, 1))
548 && INTVAL (XEXP (x, 1)) > 0)
549 return true;
550 return nonzero_address_p (XEXP (x, 0));
552 case PRE_INC:
553 /* Similarly. Further, the offset is always positive. */
554 return true;
556 case PRE_DEC:
557 case POST_DEC:
558 case POST_INC:
559 case POST_MODIFY:
560 return nonzero_address_p (XEXP (x, 0));
562 case LO_SUM:
563 return nonzero_address_p (XEXP (x, 1));
565 default:
566 break;
569 /* If it isn't one of the cases above, it might be zero. */
570 return false;
573 /* Return 1 if X refers to a memory location whose address
574 cannot be compared reliably with constant addresses,
575 or if X refers to a BLKmode memory object.
576 FOR_ALIAS is nonzero if we are called from alias analysis; if it is
577 zero, we are slightly more conservative. */
579 bool
580 rtx_addr_varies_p (const_rtx x, bool for_alias)
582 enum rtx_code code;
583 int i;
584 const char *fmt;
586 if (x == 0)
587 return 0;
589 code = GET_CODE (x);
590 if (code == MEM)
591 return GET_MODE (x) == BLKmode || rtx_varies_p (XEXP (x, 0), for_alias);
593 fmt = GET_RTX_FORMAT (code);
594 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
595 if (fmt[i] == 'e')
597 if (rtx_addr_varies_p (XEXP (x, i), for_alias))
598 return 1;
600 else if (fmt[i] == 'E')
602 int j;
603 for (j = 0; j < XVECLEN (x, i); j++)
604 if (rtx_addr_varies_p (XVECEXP (x, i, j), for_alias))
605 return 1;
607 return 0;
610 /* Return the CALL in X if there is one. */
612 rtx
613 get_call_rtx_from (rtx x)
615 if (INSN_P (x))
616 x = PATTERN (x);
617 if (GET_CODE (x) == PARALLEL)
618 x = XVECEXP (x, 0, 0);
619 if (GET_CODE (x) == SET)
620 x = SET_SRC (x);
621 if (GET_CODE (x) == CALL && MEM_P (XEXP (x, 0)))
622 return x;
623 return NULL_RTX;
626 /* Return the value of the integer term in X, if one is apparent;
627 otherwise return 0.
628 Only obvious integer terms are detected.
629 This is used in cse.c with the `related_value' field. */
631 HOST_WIDE_INT
632 get_integer_term (const_rtx x)
634 if (GET_CODE (x) == CONST)
635 x = XEXP (x, 0);
637 if (GET_CODE (x) == MINUS
638 && CONST_INT_P (XEXP (x, 1)))
639 return - INTVAL (XEXP (x, 1));
640 if (GET_CODE (x) == PLUS
641 && CONST_INT_P (XEXP (x, 1)))
642 return INTVAL (XEXP (x, 1));
643 return 0;
646 /* If X is a constant, return the value sans apparent integer term;
647 otherwise return 0.
648 Only obvious integer terms are detected. */
650 rtx
651 get_related_value (const_rtx x)
653 if (GET_CODE (x) != CONST)
654 return 0;
655 x = XEXP (x, 0);
656 if (GET_CODE (x) == PLUS
657 && CONST_INT_P (XEXP (x, 1)))
658 return XEXP (x, 0);
659 else if (GET_CODE (x) == MINUS
660 && CONST_INT_P (XEXP (x, 1)))
661 return XEXP (x, 0);
662 return 0;
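/* For illustration only, following the code above: given
   x = (const (plus (symbol_ref "s") (const_int 8))), get_integer_term (x)
   returns 8 and get_related_value (x) returns the (symbol_ref "s")
   operand; for a bare (symbol_ref "s") both functions return 0.  */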
665 /* Return true if SYMBOL is a SYMBOL_REF and OFFSET + SYMBOL points
666 to somewhere in the same object or object_block as SYMBOL. */
668 bool
669 offset_within_block_p (const_rtx symbol, HOST_WIDE_INT offset)
671 tree decl;
673 if (GET_CODE (symbol) != SYMBOL_REF)
674 return false;
676 if (offset == 0)
677 return true;
679 if (offset > 0)
681 if (CONSTANT_POOL_ADDRESS_P (symbol)
682 && offset < (int) GET_MODE_SIZE (get_pool_mode (symbol)))
683 return true;
685 decl = SYMBOL_REF_DECL (symbol);
686 if (decl && offset < int_size_in_bytes (TREE_TYPE (decl)))
687 return true;
690 if (SYMBOL_REF_HAS_BLOCK_INFO_P (symbol)
691 && SYMBOL_REF_BLOCK (symbol)
692 && SYMBOL_REF_BLOCK_OFFSET (symbol) >= 0
693 && ((unsigned HOST_WIDE_INT) offset + SYMBOL_REF_BLOCK_OFFSET (symbol)
694 < (unsigned HOST_WIDE_INT) SYMBOL_REF_BLOCK (symbol)->size))
695 return true;
697 return false;
700 /* Split X into a base and a constant offset, storing them in *BASE_OUT
701 and *OFFSET_OUT respectively. */
703 void
704 split_const (rtx x, rtx *base_out, rtx *offset_out)
706 if (GET_CODE (x) == CONST)
708 x = XEXP (x, 0);
709 if (GET_CODE (x) == PLUS && CONST_INT_P (XEXP (x, 1)))
711 *base_out = XEXP (x, 0);
712 *offset_out = XEXP (x, 1);
713 return;
716 *base_out = x;
717 *offset_out = const0_rtx;
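/* For illustration only, a typical use is

       rtx base, offset;
       split_const (addr, &base, &offset);

   after which OFFSET is always a CONST_INT (const0_rtx when ADDR has no
   constant term) and BASE is the remaining part; ADDR is assumed to be an
   address rtx built elsewhere.  */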
720 /* Return the number of places FIND appears within X. If COUNT_DEST is
721 zero, we do not count occurrences inside the destination of a SET. */
723 int
724 count_occurrences (const_rtx x, const_rtx find, int count_dest)
726 int i, j;
727 enum rtx_code code;
728 const char *format_ptr;
729 int count;
731 if (x == find)
732 return 1;
734 code = GET_CODE (x);
736 switch (code)
738 case REG:
739 CASE_CONST_ANY:
740 case SYMBOL_REF:
741 case CODE_LABEL:
742 case PC:
743 case CC0:
744 return 0;
746 case EXPR_LIST:
747 count = count_occurrences (XEXP (x, 0), find, count_dest);
748 if (XEXP (x, 1))
749 count += count_occurrences (XEXP (x, 1), find, count_dest);
750 return count;
752 case MEM:
753 if (MEM_P (find) && rtx_equal_p (x, find))
754 return 1;
755 break;
757 case SET:
758 if (SET_DEST (x) == find && ! count_dest)
759 return count_occurrences (SET_SRC (x), find, count_dest);
760 break;
762 default:
763 break;
766 format_ptr = GET_RTX_FORMAT (code);
767 count = 0;
769 for (i = 0; i < GET_RTX_LENGTH (code); i++)
771 switch (*format_ptr++)
773 case 'e':
774 count += count_occurrences (XEXP (x, i), find, count_dest);
775 break;
777 case 'E':
778 for (j = 0; j < XVECLEN (x, i); j++)
779 count += count_occurrences (XVECEXP (x, i, j), find, count_dest);
780 break;
783 return count;
787 /* Return TRUE if OP is a register or subreg of a register that
788 holds an unsigned quantity. Otherwise, return FALSE. */
790 bool
791 unsigned_reg_p (rtx op)
793 if (REG_P (op)
794 && REG_EXPR (op)
795 && TYPE_UNSIGNED (TREE_TYPE (REG_EXPR (op))))
796 return true;
798 if (GET_CODE (op) == SUBREG
799 && SUBREG_PROMOTED_SIGN (op))
800 return true;
802 return false;
806 /* Nonzero if register REG appears somewhere within IN.
807 Also works if REG is not a register; in this case it checks
808 for a subexpression of IN that is Lisp "equal" to REG. */
810 int
811 reg_mentioned_p (const_rtx reg, const_rtx in)
813 const char *fmt;
814 int i;
815 enum rtx_code code;
817 if (in == 0)
818 return 0;
820 if (reg == in)
821 return 1;
823 if (GET_CODE (in) == LABEL_REF)
824 return reg == LABEL_REF_LABEL (in);
826 code = GET_CODE (in);
828 switch (code)
830 /* Compare registers by number. */
831 case REG:
832 return REG_P (reg) && REGNO (in) == REGNO (reg);
834 /* These codes have no constituent expressions
835 and are unique. */
836 case SCRATCH:
837 case CC0:
838 case PC:
839 return 0;
841 CASE_CONST_ANY:
842 /* These are kept unique for a given value. */
843 return 0;
845 default:
846 break;
849 if (GET_CODE (reg) == code && rtx_equal_p (reg, in))
850 return 1;
852 fmt = GET_RTX_FORMAT (code);
854 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
856 if (fmt[i] == 'E')
858 int j;
859 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
860 if (reg_mentioned_p (reg, XVECEXP (in, i, j)))
861 return 1;
863 else if (fmt[i] == 'e'
864 && reg_mentioned_p (reg, XEXP (in, i)))
865 return 1;
867 return 0;
870 /* Return 1 if in between BEG and END, exclusive of BEG and END, there is
871 no CODE_LABEL insn. */
873 int
874 no_labels_between_p (const rtx_insn *beg, const rtx_insn *end)
876 rtx_insn *p;
877 if (beg == end)
878 return 0;
879 for (p = NEXT_INSN (beg); p != end; p = NEXT_INSN (p))
880 if (LABEL_P (p))
881 return 0;
882 return 1;
885 /* Nonzero if register REG is used in an insn between
886 FROM_INSN and TO_INSN (exclusive of those two). */
888 int
889 reg_used_between_p (const_rtx reg, const rtx_insn *from_insn,
890 const rtx_insn *to_insn)
892 rtx_insn *insn;
894 if (from_insn == to_insn)
895 return 0;
897 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
898 if (NONDEBUG_INSN_P (insn)
899 && (reg_overlap_mentioned_p (reg, PATTERN (insn))
900 || (CALL_P (insn) && find_reg_fusage (insn, USE, reg))))
901 return 1;
902 return 0;
905 /* Nonzero if the old value of X, a register, is referenced in BODY. If X
906 is entirely replaced by a new value and the only use is as a SET_DEST,
907 we do not consider it a reference. */
909 int
910 reg_referenced_p (const_rtx x, const_rtx body)
912 int i;
914 switch (GET_CODE (body))
916 case SET:
917 if (reg_overlap_mentioned_p (x, SET_SRC (body)))
918 return 1;
920 /* If the destination is anything other than CC0, PC, a REG or a SUBREG
921 of a REG that occupies all of the REG, the insn references X if
922 it is mentioned in the destination. */
923 if (GET_CODE (SET_DEST (body)) != CC0
924 && GET_CODE (SET_DEST (body)) != PC
925 && !REG_P (SET_DEST (body))
926 && ! (GET_CODE (SET_DEST (body)) == SUBREG
927 && REG_P (SUBREG_REG (SET_DEST (body)))
928 && (((GET_MODE_SIZE (GET_MODE (SUBREG_REG (SET_DEST (body))))
929 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)
930 == ((GET_MODE_SIZE (GET_MODE (SET_DEST (body)))
931 + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD)))
932 && reg_overlap_mentioned_p (x, SET_DEST (body)))
933 return 1;
934 return 0;
936 case ASM_OPERANDS:
937 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
938 if (reg_overlap_mentioned_p (x, ASM_OPERANDS_INPUT (body, i)))
939 return 1;
940 return 0;
942 case CALL:
943 case USE:
944 case IF_THEN_ELSE:
945 return reg_overlap_mentioned_p (x, body);
947 case TRAP_IF:
948 return reg_overlap_mentioned_p (x, TRAP_CONDITION (body));
950 case PREFETCH:
951 return reg_overlap_mentioned_p (x, XEXP (body, 0));
953 case UNSPEC:
954 case UNSPEC_VOLATILE:
955 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
956 if (reg_overlap_mentioned_p (x, XVECEXP (body, 0, i)))
957 return 1;
958 return 0;
960 case PARALLEL:
961 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
962 if (reg_referenced_p (x, XVECEXP (body, 0, i)))
963 return 1;
964 return 0;
966 case CLOBBER:
967 if (MEM_P (XEXP (body, 0)))
968 if (reg_overlap_mentioned_p (x, XEXP (XEXP (body, 0), 0)))
969 return 1;
970 return 0;
972 case COND_EXEC:
973 if (reg_overlap_mentioned_p (x, COND_EXEC_TEST (body)))
974 return 1;
975 return reg_referenced_p (x, COND_EXEC_CODE (body));
977 default:
978 return 0;
982 /* Nonzero if register REG is set or clobbered in an insn between
983 FROM_INSN and TO_INSN (exclusive of those two). */
985 int
986 reg_set_between_p (const_rtx reg, const rtx_insn *from_insn,
987 const rtx_insn *to_insn)
989 const rtx_insn *insn;
991 if (from_insn == to_insn)
992 return 0;
994 for (insn = NEXT_INSN (from_insn); insn != to_insn; insn = NEXT_INSN (insn))
995 if (INSN_P (insn) && reg_set_p (reg, insn))
996 return 1;
997 return 0;
1000 /* Internals of reg_set_between_p. */
1001 int
1002 reg_set_p (const_rtx reg, const_rtx insn)
1004 /* We can be passed an insn or part of one. If we are passed an insn,
1005 check if a side-effect of the insn clobbers REG. */
1006 if (INSN_P (insn)
1007 && (FIND_REG_INC_NOTE (insn, reg)
1008 || (CALL_P (insn)
1009 && ((REG_P (reg)
1010 && REGNO (reg) < FIRST_PSEUDO_REGISTER
1011 && overlaps_hard_reg_set_p (regs_invalidated_by_call,
1012 GET_MODE (reg), REGNO (reg)))
1013 || MEM_P (reg)
1014 || find_reg_fusage (insn, CLOBBER, reg)))))
1015 return 1;
1017 return set_of (reg, insn) != NULL_RTX;
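/* For illustration only: a caller that wants to know whether REG still
   holds the same value at insn TO as it did just after insn FROM can ask

       if (!reg_set_between_p (reg, from, to))
         ... no insn strictly between FROM and TO modifies REG ...

   where REG, FROM and TO are assumed to come from the surrounding pass.  */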
1020 /* Similar to reg_set_between_p, but check all registers in X. Return 0
1021 only if none of them are modified between START and END. Return 1 if
1022 X contains a MEM; this routine does use memory aliasing. */
1024 int
1025 modified_between_p (const_rtx x, const rtx_insn *start, const rtx_insn *end)
1027 const enum rtx_code code = GET_CODE (x);
1028 const char *fmt;
1029 int i, j;
1030 rtx_insn *insn;
1032 if (start == end)
1033 return 0;
1035 switch (code)
1037 CASE_CONST_ANY:
1038 case CONST:
1039 case SYMBOL_REF:
1040 case LABEL_REF:
1041 return 0;
1043 case PC:
1044 case CC0:
1045 return 1;
1047 case MEM:
1048 if (modified_between_p (XEXP (x, 0), start, end))
1049 return 1;
1050 if (MEM_READONLY_P (x))
1051 return 0;
1052 for (insn = NEXT_INSN (start); insn != end; insn = NEXT_INSN (insn))
1053 if (memory_modified_in_insn_p (x, insn))
1054 return 1;
1055 return 0;
1056 break;
1058 case REG:
1059 return reg_set_between_p (x, start, end);
1061 default:
1062 break;
1065 fmt = GET_RTX_FORMAT (code);
1066 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1068 if (fmt[i] == 'e' && modified_between_p (XEXP (x, i), start, end))
1069 return 1;
1071 else if (fmt[i] == 'E')
1072 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1073 if (modified_between_p (XVECEXP (x, i, j), start, end))
1074 return 1;
1077 return 0;
1080 /* Similar to reg_set_p, but check all registers in X. Return 0 only if none
1081 of them are modified in INSN. Return 1 if X contains a MEM; this routine
1082 does use memory aliasing. */
1084 int
1085 modified_in_p (const_rtx x, const_rtx insn)
1087 const enum rtx_code code = GET_CODE (x);
1088 const char *fmt;
1089 int i, j;
1091 switch (code)
1093 CASE_CONST_ANY:
1094 case CONST:
1095 case SYMBOL_REF:
1096 case LABEL_REF:
1097 return 0;
1099 case PC:
1100 case CC0:
1101 return 1;
1103 case MEM:
1104 if (modified_in_p (XEXP (x, 0), insn))
1105 return 1;
1106 if (MEM_READONLY_P (x))
1107 return 0;
1108 if (memory_modified_in_insn_p (x, insn))
1109 return 1;
1110 return 0;
1111 break;
1113 case REG:
1114 return reg_set_p (x, insn);
1116 default:
1117 break;
1120 fmt = GET_RTX_FORMAT (code);
1121 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1123 if (fmt[i] == 'e' && modified_in_p (XEXP (x, i), insn))
1124 return 1;
1126 else if (fmt[i] == 'E')
1127 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1128 if (modified_in_p (XVECEXP (x, i, j), insn))
1129 return 1;
1132 return 0;
1135 /* Helper function for set_of. */
1136 struct set_of_data
1138 const_rtx found;
1139 const_rtx pat;
1142 static void
1143 set_of_1 (rtx x, const_rtx pat, void *data1)
1145 struct set_of_data *const data = (struct set_of_data *) (data1);
1146 if (rtx_equal_p (x, data->pat)
1147 || (!MEM_P (x) && reg_overlap_mentioned_p (data->pat, x)))
1148 data->found = pat;
1151 /* Given an INSN, return a SET or CLOBBER expression that does modify PAT
1152 (either directly or via STRICT_LOW_PART and similar modifiers). */
1153 const_rtx
1154 set_of (const_rtx pat, const_rtx insn)
1156 struct set_of_data data;
1157 data.found = NULL_RTX;
1158 data.pat = pat;
1159 note_stores (INSN_P (insn) ? PATTERN (insn) : insn, set_of_1, &data);
1160 return data.found;
1163 /* Add all hard registers in X to *PSET. */
1164 void
1165 find_all_hard_regs (const_rtx x, HARD_REG_SET *pset)
1167 subrtx_iterator::array_type array;
1168 FOR_EACH_SUBRTX (iter, array, x, NONCONST)
1170 const_rtx x = *iter;
1171 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER)
1172 add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
1176 /* This function, called through note_stores, collects sets and
1177 clobbers of hard registers in a HARD_REG_SET, which is pointed to
1178 by DATA. */
1179 void
1180 record_hard_reg_sets (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
1182 HARD_REG_SET *pset = (HARD_REG_SET *)data;
1183 if (REG_P (x) && HARD_REGISTER_P (x))
1184 add_to_hard_reg_set (pset, GET_MODE (x), REGNO (x));
1187 /* Examine INSN, and compute the set of hard registers written by it.
1188 Store it in *PSET. Should only be called after reload. */
1189 void
1190 find_all_hard_reg_sets (const_rtx insn, HARD_REG_SET *pset, bool implicit)
1192 rtx link;
1194 CLEAR_HARD_REG_SET (*pset);
1195 note_stores (PATTERN (insn), record_hard_reg_sets, pset);
1196 if (CALL_P (insn))
1198 if (implicit)
1199 IOR_HARD_REG_SET (*pset, call_used_reg_set);
1201 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
1202 record_hard_reg_sets (XEXP (link, 0), NULL, pset);
1204 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1205 if (REG_NOTE_KIND (link) == REG_INC)
1206 record_hard_reg_sets (XEXP (link, 0), NULL, pset);
1209 /* Like record_hard_reg_sets, but called through note_uses. */
1210 void
1211 record_hard_reg_uses (rtx *px, void *data)
1213 find_all_hard_regs (*px, (HARD_REG_SET *) data);
1216 /* Given an INSN, return a SET expression if this insn has only a single SET.
1217 It may also have CLOBBERs, USEs, or SETs whose outputs
1218 will not be used, which we ignore. */
1220 rtx
1221 single_set_2 (const rtx_insn *insn, const_rtx pat)
1223 rtx set = NULL;
1224 int set_verified = 1;
1225 int i;
1227 if (GET_CODE (pat) == PARALLEL)
1229 for (i = 0; i < XVECLEN (pat, 0); i++)
1231 rtx sub = XVECEXP (pat, 0, i);
1232 switch (GET_CODE (sub))
1234 case USE:
1235 case CLOBBER:
1236 break;
1238 case SET:
1239 /* We can consider insns having multiple sets, where all
1240 but one are dead, as single set insns.  In the common case
1241 only a single set is present in the pattern, so we want
1242 to avoid checking for REG_UNUSED notes unless necessary.
1244 When we reach a set for the first time, we just assume it is
1245 the single set we are looking for; only when more
1246 sets are found in the insn do we check them. */
1247 if (!set_verified)
1249 if (find_reg_note (insn, REG_UNUSED, SET_DEST (set))
1250 && !side_effects_p (set))
1251 set = NULL;
1252 else
1253 set_verified = 1;
1255 if (!set)
1256 set = sub, set_verified = 0;
1257 else if (!find_reg_note (insn, REG_UNUSED, SET_DEST (sub))
1258 || side_effects_p (sub))
1259 return NULL_RTX;
1260 break;
1262 default:
1263 return NULL_RTX;
1267 return set;
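/* For illustration only: most passes reach this through the single_set
   wrapper (an inline in rtl.h) rather than calling single_set_2 directly:

       rtx set = single_set (insn);
       if (set && REG_P (SET_DEST (set)))
         ... INSN writes exactly one live register ...

   where INSN is assumed to be some rtx_insn of interest.  */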
1270 /* Given an INSN, return nonzero if it has more than one SET, else return
1271 zero. */
1273 int
1274 multiple_sets (const_rtx insn)
1276 int found;
1277 int i;
1279 /* INSN must be an insn. */
1280 if (! INSN_P (insn))
1281 return 0;
1283 /* Only a PARALLEL can have multiple SETs. */
1284 if (GET_CODE (PATTERN (insn)) == PARALLEL)
1286 for (i = 0, found = 0; i < XVECLEN (PATTERN (insn), 0); i++)
1287 if (GET_CODE (XVECEXP (PATTERN (insn), 0, i)) == SET)
1289 /* If we have already found a SET, then return now. */
1290 if (found)
1291 return 1;
1292 else
1293 found = 1;
1297 /* Either zero or one SET. */
1298 return 0;
1301 /* Return nonzero if the destination of SET equals the source
1302 and there are no side effects. */
1304 int
1305 set_noop_p (const_rtx set)
1307 rtx src = SET_SRC (set);
1308 rtx dst = SET_DEST (set);
1310 if (dst == pc_rtx && src == pc_rtx)
1311 return 1;
1313 if (MEM_P (dst) && MEM_P (src))
1314 return rtx_equal_p (dst, src) && !side_effects_p (dst);
1316 if (GET_CODE (dst) == ZERO_EXTRACT)
1317 return rtx_equal_p (XEXP (dst, 0), src)
1318 && ! BYTES_BIG_ENDIAN && XEXP (dst, 2) == const0_rtx
1319 && !side_effects_p (src);
1321 if (GET_CODE (dst) == STRICT_LOW_PART)
1322 dst = XEXP (dst, 0);
1324 if (GET_CODE (src) == SUBREG && GET_CODE (dst) == SUBREG)
1326 if (SUBREG_BYTE (src) != SUBREG_BYTE (dst))
1327 return 0;
1328 src = SUBREG_REG (src);
1329 dst = SUBREG_REG (dst);
1332 /* It is a NOOP if destination overlaps with selected src vector
1333 elements. */
1334 if (GET_CODE (src) == VEC_SELECT
1335 && REG_P (XEXP (src, 0)) && REG_P (dst)
1336 && HARD_REGISTER_P (XEXP (src, 0))
1337 && HARD_REGISTER_P (dst))
1339 int i;
1340 rtx par = XEXP (src, 1);
1341 rtx src0 = XEXP (src, 0);
1342 int c0 = INTVAL (XVECEXP (par, 0, 0));
1343 HOST_WIDE_INT offset = GET_MODE_UNIT_SIZE (GET_MODE (src0)) * c0;
1345 for (i = 1; i < XVECLEN (par, 0); i++)
1346 if (INTVAL (XVECEXP (par, 0, i)) != c0 + i)
1347 return 0;
1348 return
1349 simplify_subreg_regno (REGNO (src0), GET_MODE (src0),
1350 offset, GET_MODE (dst)) == (int) REGNO (dst);
1353 return (REG_P (src) && REG_P (dst)
1354 && REGNO (src) == REGNO (dst));
1357 /* Return nonzero if an insn consists only of SETs, each of which only sets a
1358 value to itself. */
1360 int
1361 noop_move_p (const_rtx insn)
1363 rtx pat = PATTERN (insn);
1365 if (INSN_CODE (insn) == NOOP_MOVE_INSN_CODE)
1366 return 1;
1368 /* Insns carrying these notes are useful later on. */
1369 if (find_reg_note (insn, REG_EQUAL, NULL_RTX))
1370 return 0;
1372 /* Check the code to be executed for COND_EXEC. */
1373 if (GET_CODE (pat) == COND_EXEC)
1374 pat = COND_EXEC_CODE (pat);
1376 if (GET_CODE (pat) == SET && set_noop_p (pat))
1377 return 1;
1379 if (GET_CODE (pat) == PARALLEL)
1381 int i;
1382 /* If nothing but SETs of registers to themselves,
1383 this insn can also be deleted. */
1384 for (i = 0; i < XVECLEN (pat, 0); i++)
1386 rtx tem = XVECEXP (pat, 0, i);
1388 if (GET_CODE (tem) == USE
1389 || GET_CODE (tem) == CLOBBER)
1390 continue;
1392 if (GET_CODE (tem) != SET || ! set_noop_p (tem))
1393 return 0;
1396 return 1;
1398 return 0;
1402 /* Return nonzero if register in range [REGNO, ENDREGNO)
1403 appears either explicitly or implicitly in X
1404 other than being stored into.
1406 References contained within the substructure at LOC do not count.
1407 LOC may be zero, meaning don't ignore anything. */
1409 int
1410 refers_to_regno_p (unsigned int regno, unsigned int endregno, const_rtx x,
1411 rtx *loc)
1413 int i;
1414 unsigned int x_regno;
1415 RTX_CODE code;
1416 const char *fmt;
1418 repeat:
1419 /* The contents of a REG_NONNEG note are always zero, so we must come here
1420 upon repeat in case the last REG_NOTE is a REG_NONNEG note. */
1421 if (x == 0)
1422 return 0;
1424 code = GET_CODE (x);
1426 switch (code)
1428 case REG:
1429 x_regno = REGNO (x);
1431 /* If we are modifying the stack, frame, or argument pointer, it will
1432 clobber a virtual register. In fact, we could be more precise,
1433 but it isn't worth it. */
1434 if ((x_regno == STACK_POINTER_REGNUM
1435 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
1436 || x_regno == ARG_POINTER_REGNUM
1437 #endif
1438 || x_regno == FRAME_POINTER_REGNUM)
1439 && regno >= FIRST_VIRTUAL_REGISTER && regno <= LAST_VIRTUAL_REGISTER)
1440 return 1;
1442 return endregno > x_regno && regno < END_REGNO (x);
1444 case SUBREG:
1445 /* If this is a SUBREG of a hard reg, we can see exactly which
1446 registers are being modified. Otherwise, handle normally. */
1447 if (REG_P (SUBREG_REG (x))
1448 && REGNO (SUBREG_REG (x)) < FIRST_PSEUDO_REGISTER)
1450 unsigned int inner_regno = subreg_regno (x);
1451 unsigned int inner_endregno
1452 = inner_regno + (inner_regno < FIRST_PSEUDO_REGISTER
1453 ? subreg_nregs (x) : 1);
1455 return endregno > inner_regno && regno < inner_endregno;
1457 break;
1459 case CLOBBER:
1460 case SET:
1461 if (&SET_DEST (x) != loc
1462 /* Note setting a SUBREG counts as referring to the REG it is in for
1463 a pseudo but not for hard registers since we can
1464 treat each word individually. */
1465 && ((GET_CODE (SET_DEST (x)) == SUBREG
1466 && loc != &SUBREG_REG (SET_DEST (x))
1467 && REG_P (SUBREG_REG (SET_DEST (x)))
1468 && REGNO (SUBREG_REG (SET_DEST (x))) >= FIRST_PSEUDO_REGISTER
1469 && refers_to_regno_p (regno, endregno,
1470 SUBREG_REG (SET_DEST (x)), loc))
1471 || (!REG_P (SET_DEST (x))
1472 && refers_to_regno_p (regno, endregno, SET_DEST (x), loc))))
1473 return 1;
1475 if (code == CLOBBER || loc == &SET_SRC (x))
1476 return 0;
1477 x = SET_SRC (x);
1478 goto repeat;
1480 default:
1481 break;
1484 /* X does not match, so try its subexpressions. */
1486 fmt = GET_RTX_FORMAT (code);
1487 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
1489 if (fmt[i] == 'e' && loc != &XEXP (x, i))
1491 if (i == 0)
1493 x = XEXP (x, 0);
1494 goto repeat;
1496 else
1497 if (refers_to_regno_p (regno, endregno, XEXP (x, i), loc))
1498 return 1;
1500 else if (fmt[i] == 'E')
1502 int j;
1503 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
1504 if (loc != &XVECEXP (x, i, j)
1505 && refers_to_regno_p (regno, endregno, XVECEXP (x, i, j), loc))
1506 return 1;
1509 return 0;
1512 /* Nonzero if modifying X will affect IN. If X is a register or a SUBREG,
1513 we check if any register number in X conflicts with the relevant register
1514 numbers. If X is a constant, return 0. If X is a MEM, return 1 iff IN
1515 contains a MEM (we don't bother checking for memory addresses that can't
1516 conflict because we expect this to be a rare case). */
1518 int
1519 reg_overlap_mentioned_p (const_rtx x, const_rtx in)
1521 unsigned int regno, endregno;
1523 /* If either argument is a constant, then modifying X can not
1524 affect IN. Here we look at IN, we can profitably combine
1525 CONSTANT_P (x) with the switch statement below. */
1526 if (CONSTANT_P (in))
1527 return 0;
1529 recurse:
1530 switch (GET_CODE (x))
1532 case STRICT_LOW_PART:
1533 case ZERO_EXTRACT:
1534 case SIGN_EXTRACT:
1535 /* Overly conservative. */
1536 x = XEXP (x, 0);
1537 goto recurse;
1539 case SUBREG:
1540 regno = REGNO (SUBREG_REG (x));
1541 if (regno < FIRST_PSEUDO_REGISTER)
1542 regno = subreg_regno (x);
1543 endregno = regno + (regno < FIRST_PSEUDO_REGISTER
1544 ? subreg_nregs (x) : 1);
1545 goto do_reg;
1547 case REG:
1548 regno = REGNO (x);
1549 endregno = END_REGNO (x);
1550 do_reg:
1551 return refers_to_regno_p (regno, endregno, in, (rtx*) 0);
1553 case MEM:
1555 const char *fmt;
1556 int i;
1558 if (MEM_P (in))
1559 return 1;
1561 fmt = GET_RTX_FORMAT (GET_CODE (in));
1562 for (i = GET_RTX_LENGTH (GET_CODE (in)) - 1; i >= 0; i--)
1563 if (fmt[i] == 'e')
1565 if (reg_overlap_mentioned_p (x, XEXP (in, i)))
1566 return 1;
1568 else if (fmt[i] == 'E')
1570 int j;
1571 for (j = XVECLEN (in, i) - 1; j >= 0; --j)
1572 if (reg_overlap_mentioned_p (x, XVECEXP (in, i, j)))
1573 return 1;
1576 return 0;
1579 case SCRATCH:
1580 case PC:
1581 case CC0:
1582 return reg_mentioned_p (x, in);
1584 case PARALLEL:
1586 int i;
1588 /* If any register in here refers to it we return true. */
1589 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1590 if (XEXP (XVECEXP (x, 0, i), 0) != 0
1591 && reg_overlap_mentioned_p (XEXP (XVECEXP (x, 0, i), 0), in))
1592 return 1;
1593 return 0;
1596 default:
1597 gcc_assert (CONSTANT_P (x));
1598 return 0;
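/* For illustration only: a caller that wants to know whether any part of
   register REG appears in the pattern of insn I can ask

       if (reg_overlap_mentioned_p (reg, PATTERN (i)))
         ... I mentions some register overlapping REG ...

   where REG and I are assumed to come from the surrounding pass.  */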
1602 /* Call FUN on each register or MEM that is stored into or clobbered by X.
1603 (X would be the pattern of an insn). DATA is an arbitrary pointer,
1604 ignored by note_stores, but passed to FUN.
1606 FUN receives three arguments:
1607 1. the REG, MEM, CC0 or PC being stored in or clobbered,
1608 2. the SET or CLOBBER rtx that does the store,
1609 3. the pointer DATA provided to note_stores.
1611 If the item being stored in or clobbered is a SUBREG of a hard register,
1612 the SUBREG will be passed. */
1614 void
1615 note_stores (const_rtx x, void (*fun) (rtx, const_rtx, void *), void *data)
1617 int i;
1619 if (GET_CODE (x) == COND_EXEC)
1620 x = COND_EXEC_CODE (x);
1622 if (GET_CODE (x) == SET || GET_CODE (x) == CLOBBER)
1624 rtx dest = SET_DEST (x);
1626 while ((GET_CODE (dest) == SUBREG
1627 && (!REG_P (SUBREG_REG (dest))
1628 || REGNO (SUBREG_REG (dest)) >= FIRST_PSEUDO_REGISTER))
1629 || GET_CODE (dest) == ZERO_EXTRACT
1630 || GET_CODE (dest) == STRICT_LOW_PART)
1631 dest = XEXP (dest, 0);
1633 /* If we have a PARALLEL, SET_DEST is a list of EXPR_LIST expressions,
1634 each of whose first operand is a register. */
1635 if (GET_CODE (dest) == PARALLEL)
1637 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1638 if (XEXP (XVECEXP (dest, 0, i), 0) != 0)
1639 (*fun) (XEXP (XVECEXP (dest, 0, i), 0), x, data);
1641 else
1642 (*fun) (dest, x, data);
1645 else if (GET_CODE (x) == PARALLEL)
1646 for (i = XVECLEN (x, 0) - 1; i >= 0; i--)
1647 note_stores (XVECEXP (x, 0, i), fun, data);
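/* For illustration only: a minimal note_stores callback that records
   whether an insn pattern stores anything at all might look like

       static void
       saw_store (rtx x ATTRIBUTE_UNUSED, const_rtx pat ATTRIBUTE_UNUSED,
                  void *data)
       {
         *(bool *) data = true;
       }

       bool stored = false;
       note_stores (PATTERN (insn), saw_store, &stored);

   record_hard_reg_sets above is a real in-file callback of the same shape;
   INSN here is assumed to be an arbitrary insn.  */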
1650 /* Like note_stores, but call FUN for each expression that is being
1651 referenced in PBODY, a pointer to the PATTERN of an insn. We only call
1652 FUN for each expression, not any interior subexpressions. FUN receives a
1653 pointer to the expression and the DATA passed to this function.
1655 Note that this is not quite the same test as that done in reg_referenced_p
1656 since that considers something as being referenced if it is being
1657 partially set, while we do not. */
1659 void
1660 note_uses (rtx *pbody, void (*fun) (rtx *, void *), void *data)
1662 rtx body = *pbody;
1663 int i;
1665 switch (GET_CODE (body))
1667 case COND_EXEC:
1668 (*fun) (&COND_EXEC_TEST (body), data);
1669 note_uses (&COND_EXEC_CODE (body), fun, data);
1670 return;
1672 case PARALLEL:
1673 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1674 note_uses (&XVECEXP (body, 0, i), fun, data);
1675 return;
1677 case SEQUENCE:
1678 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1679 note_uses (&PATTERN (XVECEXP (body, 0, i)), fun, data);
1680 return;
1682 case USE:
1683 (*fun) (&XEXP (body, 0), data);
1684 return;
1686 case ASM_OPERANDS:
1687 for (i = ASM_OPERANDS_INPUT_LENGTH (body) - 1; i >= 0; i--)
1688 (*fun) (&ASM_OPERANDS_INPUT (body, i), data);
1689 return;
1691 case TRAP_IF:
1692 (*fun) (&TRAP_CONDITION (body), data);
1693 return;
1695 case PREFETCH:
1696 (*fun) (&XEXP (body, 0), data);
1697 return;
1699 case UNSPEC:
1700 case UNSPEC_VOLATILE:
1701 for (i = XVECLEN (body, 0) - 1; i >= 0; i--)
1702 (*fun) (&XVECEXP (body, 0, i), data);
1703 return;
1705 case CLOBBER:
1706 if (MEM_P (XEXP (body, 0)))
1707 (*fun) (&XEXP (XEXP (body, 0), 0), data);
1708 return;
1710 case SET:
1712 rtx dest = SET_DEST (body);
1714 /* For sets we call FUN on everything in the source, plus registers in the
1715 memory expression of the store and the operands of a ZERO_EXTRACT. */
1716 (*fun) (&SET_SRC (body), data);
1718 if (GET_CODE (dest) == ZERO_EXTRACT)
1720 (*fun) (&XEXP (dest, 1), data);
1721 (*fun) (&XEXP (dest, 2), data);
1724 while (GET_CODE (dest) == SUBREG || GET_CODE (dest) == STRICT_LOW_PART)
1725 dest = XEXP (dest, 0);
1727 if (MEM_P (dest))
1728 (*fun) (&XEXP (dest, 0), data);
1730 return;
1732 default:
1733 /* All the other possibilities never store. */
1734 (*fun) (pbody, data);
1735 return;
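/* For illustration only: the matching traversal for uses passes a pointer,
   so a callback may also replace the expression in place:

       static void
       count_use (rtx *px ATTRIBUTE_UNUSED, void *data)
       {
         ++*(int *) data;
       }

       int n_uses = 0;
       note_uses (&PATTERN (insn), count_use, &n_uses);

   where INSN is assumed to be an arbitrary insn.  */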
1739 /* Return nonzero if X's old contents don't survive after INSN.
1740 This will be true if X is (cc0), or if X is a register and
1741 X dies in INSN or INSN entirely sets X.
1743 "Entirely set" means set directly and not through a SUBREG, or
1744 ZERO_EXTRACT, so no trace of the old contents remains.
1745 Likewise, REG_INC does not count.
1747 REG may be a hard or pseudo reg. Renumbering is not taken into account,
1748 but for this use that makes no difference, since regs don't overlap
1749 during their lifetimes. Therefore, this function may be used
1750 at any time after deaths have been computed.
1752 If REG is a hard reg that occupies multiple machine registers, this
1753 function will only return 1 if each of those registers will be replaced
1754 by INSN. */
1756 int
1757 dead_or_set_p (const_rtx insn, const_rtx x)
1759 unsigned int regno, end_regno;
1760 unsigned int i;
1762 /* Can't use cc0_rtx below since this file is used by genattrtab.c. */
1763 if (GET_CODE (x) == CC0)
1764 return 1;
1766 gcc_assert (REG_P (x));
1768 regno = REGNO (x);
1769 end_regno = END_REGNO (x);
1770 for (i = regno; i < end_regno; i++)
1771 if (! dead_or_set_regno_p (insn, i))
1772 return 0;
1774 return 1;
1777 /* Return TRUE iff DEST is a register or subreg of a register and
1778 doesn't change the number of words of the inner register, and any
1779 part of the register is TEST_REGNO. */
1781 static bool
1782 covers_regno_no_parallel_p (const_rtx dest, unsigned int test_regno)
1784 unsigned int regno, endregno;
1786 if (GET_CODE (dest) == SUBREG
1787 && (((GET_MODE_SIZE (GET_MODE (dest))
1788 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)
1789 == ((GET_MODE_SIZE (GET_MODE (SUBREG_REG (dest)))
1790 + UNITS_PER_WORD - 1) / UNITS_PER_WORD)))
1791 dest = SUBREG_REG (dest);
1793 if (!REG_P (dest))
1794 return false;
1796 regno = REGNO (dest);
1797 endregno = END_REGNO (dest);
1798 return (test_regno >= regno && test_regno < endregno);
1801 /* Like covers_regno_no_parallel_p, but also handles PARALLELs where
1802 any member matches the covers_regno_no_parallel_p criteria. */
1804 static bool
1805 covers_regno_p (const_rtx dest, unsigned int test_regno)
1807 if (GET_CODE (dest) == PARALLEL)
1809 /* Some targets place small structures in registers for return
1810 values of functions, and those registers are wrapped in
1811 PARALLELs that we may see as the destination of a SET. */
1812 int i;
1814 for (i = XVECLEN (dest, 0) - 1; i >= 0; i--)
1816 rtx inner = XEXP (XVECEXP (dest, 0, i), 0);
1817 if (inner != NULL_RTX
1818 && covers_regno_no_parallel_p (inner, test_regno))
1819 return true;
1822 return false;
1824 else
1825 return covers_regno_no_parallel_p (dest, test_regno);
1828 /* Utility function for dead_or_set_p to check an individual register. */
1830 int
1831 dead_or_set_regno_p (const_rtx insn, unsigned int test_regno)
1833 const_rtx pattern;
1835 /* See if there is a death note for something that includes TEST_REGNO. */
1836 if (find_regno_note (insn, REG_DEAD, test_regno))
1837 return 1;
1839 if (CALL_P (insn)
1840 && find_regno_fusage (insn, CLOBBER, test_regno))
1841 return 1;
1843 pattern = PATTERN (insn);
1845 /* If a COND_EXEC is not executed, the value survives. */
1846 if (GET_CODE (pattern) == COND_EXEC)
1847 return 0;
1849 if (GET_CODE (pattern) == SET)
1850 return covers_regno_p (SET_DEST (pattern), test_regno);
1851 else if (GET_CODE (pattern) == PARALLEL)
1853 int i;
1855 for (i = XVECLEN (pattern, 0) - 1; i >= 0; i--)
1857 rtx body = XVECEXP (pattern, 0, i);
1859 if (GET_CODE (body) == COND_EXEC)
1860 body = COND_EXEC_CODE (body);
1862 if ((GET_CODE (body) == SET || GET_CODE (body) == CLOBBER)
1863 && covers_regno_p (SET_DEST (body), test_regno))
1864 return 1;
1868 return 0;
1871 /* Return the reg-note of kind KIND in insn INSN, if there is one.
1872 If DATUM is nonzero, look for one whose datum is DATUM. */
1874 rtx
1875 find_reg_note (const_rtx insn, enum reg_note kind, const_rtx datum)
1877 rtx link;
1879 gcc_checking_assert (insn);
1881 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1882 if (! INSN_P (insn))
1883 return 0;
1884 if (datum == 0)
1886 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1887 if (REG_NOTE_KIND (link) == kind)
1888 return link;
1889 return 0;
1892 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1893 if (REG_NOTE_KIND (link) == kind && datum == XEXP (link, 0))
1894 return link;
1895 return 0;
1898 /* Return the reg-note of kind KIND in insn INSN which applies to register
1899 number REGNO, if any. Return 0 if there is no such reg-note. Note that
1900 the REGNO of this NOTE need not be REGNO if REGNO is a hard register;
1901 it might be the case that the note overlaps REGNO. */
1903 rtx
1904 find_regno_note (const_rtx insn, enum reg_note kind, unsigned int regno)
1906 rtx link;
1908 /* Ignore anything that is not an INSN, JUMP_INSN or CALL_INSN. */
1909 if (! INSN_P (insn))
1910 return 0;
1912 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1913 if (REG_NOTE_KIND (link) == kind
1914 /* Verify that it is a register, so that scratch and MEM won't cause a
1915 problem here. */
1916 && REG_P (XEXP (link, 0))
1917 && REGNO (XEXP (link, 0)) <= regno
1918 && END_REGNO (XEXP (link, 0)) > regno)
1919 return link;
1920 return 0;
1923 /* Return a REG_EQUIV or REG_EQUAL note if insn has only a single set and
1924 has such a note. */
1926 rtx
1927 find_reg_equal_equiv_note (const_rtx insn)
1929 rtx link;
1931 if (!INSN_P (insn))
1932 return 0;
1934 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
1935 if (REG_NOTE_KIND (link) == REG_EQUAL
1936 || REG_NOTE_KIND (link) == REG_EQUIV)
1938 /* FIXME: We should never have REG_EQUAL/REG_EQUIV notes on
1939 insns that have multiple sets. Checking single_set to
1940 make sure of this is not the proper check, as explained
1941 in the comment in set_unique_reg_note.
1943 This should be changed into an assert. */
1944 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
1945 return 0;
1946 return link;
1948 return NULL;
1951 /* Check whether INSN is a single_set whose source is known to be
1952 equivalent to a constant. Return that constant if so, otherwise
1953 return null. */
1955 rtx
1956 find_constant_src (const rtx_insn *insn)
1958 rtx note, set, x;
1960 set = single_set (insn);
1961 if (set)
1963 x = avoid_constant_pool_reference (SET_SRC (set));
1964 if (CONSTANT_P (x))
1965 return x;
1968 note = find_reg_equal_equiv_note (insn);
1969 if (note && CONSTANT_P (XEXP (note, 0)))
1970 return XEXP (note, 0);
1972 return NULL_RTX;
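/* For illustration only, combining the helpers above: a pass that wants to
   propagate a known constant from a defining insn might do

       rtx set = single_set (def_insn);
       rtx cst = find_constant_src (def_insn);
       if (set && cst && REG_P (SET_DEST (set)))
         ... SET_DEST (set) is known to equal CST after DEF_INSN ...

   where DEF_INSN is assumed to be a defining insn found elsewhere.  */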
1975 /* Return true if DATUM, or any overlap of DATUM, of kind CODE is found
1976 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
1978 int
1979 find_reg_fusage (const_rtx insn, enum rtx_code code, const_rtx datum)
1981 /* If it's not a CALL_INSN, it can't possibly have a
1982 CALL_INSN_FUNCTION_USAGE field, so don't bother checking. */
1983 if (!CALL_P (insn))
1984 return 0;
1986 gcc_assert (datum);
1988 if (!REG_P (datum))
1990 rtx link;
1992 for (link = CALL_INSN_FUNCTION_USAGE (insn);
1993 link;
1994 link = XEXP (link, 1))
1995 if (GET_CODE (XEXP (link, 0)) == code
1996 && rtx_equal_p (datum, XEXP (XEXP (link, 0), 0)))
1997 return 1;
1999 else
2001 unsigned int regno = REGNO (datum);
2003 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2004 to pseudo registers, so don't bother checking. */
2006 if (regno < FIRST_PSEUDO_REGISTER)
2008 unsigned int end_regno = END_HARD_REGNO (datum);
2009 unsigned int i;
2011 for (i = regno; i < end_regno; i++)
2012 if (find_regno_fusage (insn, code, i))
2013 return 1;
2017 return 0;
2020 /* Return true if REGNO, or any overlap of REGNO, of kind CODE is found
2021 in the CALL_INSN_FUNCTION_USAGE information of INSN. */
2023 int
2024 find_regno_fusage (const_rtx insn, enum rtx_code code, unsigned int regno)
2026 rtx link;
2028 /* CALL_INSN_FUNCTION_USAGE information cannot contain references
2029 to pseudo registers, so don't bother checking. */
2031 if (regno >= FIRST_PSEUDO_REGISTER
2032 || !CALL_P (insn) )
2033 return 0;
2035 for (link = CALL_INSN_FUNCTION_USAGE (insn); link; link = XEXP (link, 1))
2037 rtx op, reg;
2039 if (GET_CODE (op = XEXP (link, 0)) == code
2040 && REG_P (reg = XEXP (op, 0))
2041 && REGNO (reg) <= regno
2042 && END_HARD_REGNO (reg) > regno)
2043 return 1;
2046 return 0;
2050 /* Return true if KIND is an integer REG_NOTE. */
2052 static bool
2053 int_reg_note_p (enum reg_note kind)
2055 return kind == REG_BR_PROB;
2058 /* Allocate a register note with kind KIND and datum DATUM. LIST is
2059 stored as the pointer to the next register note. */
2061 rtx
2062 alloc_reg_note (enum reg_note kind, rtx datum, rtx list)
2064 rtx note;
2066 gcc_checking_assert (!int_reg_note_p (kind));
2067 switch (kind)
2069 case REG_CC_SETTER:
2070 case REG_CC_USER:
2071 case REG_LABEL_TARGET:
2072 case REG_LABEL_OPERAND:
2073 case REG_TM:
2074 /* These types of register notes use an INSN_LIST rather than an
2075 EXPR_LIST, so that copying is done right and dumps look
2076 better. */
2077 note = alloc_INSN_LIST (datum, list);
2078 PUT_REG_NOTE_KIND (note, kind);
2079 break;
2081 default:
2082 note = alloc_EXPR_LIST (kind, datum, list);
2083 break;
2086 return note;
2089 /* Add register note with kind KIND and datum DATUM to INSN. */
2091 void
2092 add_reg_note (rtx insn, enum reg_note kind, rtx datum)
2094 REG_NOTES (insn) = alloc_reg_note (kind, datum, REG_NOTES (insn));
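/* For illustration only: recording that the destination of a single-set
   insn is known to equal some simplified expression EXPR is typically done
   with

       add_reg_note (insn, REG_EQUAL, expr);

   although most passes prefer set_unique_reg_note (see emit-rtl.c), which
   also replaces an existing REG_EQUAL note; INSN and EXPR are assumed to
   come from the caller.  */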
2097 /* Add an integer register note with kind KIND and datum DATUM to INSN. */
2099 void
2100 add_int_reg_note (rtx insn, enum reg_note kind, int datum)
2102 gcc_checking_assert (int_reg_note_p (kind));
2103 REG_NOTES (insn) = gen_rtx_INT_LIST ((machine_mode) kind,
2104 datum, REG_NOTES (insn));
2107 /* Add a register note like NOTE to INSN. */
2109 void
2110 add_shallow_copy_of_reg_note (rtx insn, rtx note)
2112 if (GET_CODE (note) == INT_LIST)
2113 add_int_reg_note (insn, REG_NOTE_KIND (note), XINT (note, 0));
2114 else
2115 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
2118 /* Remove register note NOTE from the REG_NOTES of INSN. */
2120 void
2121 remove_note (rtx insn, const_rtx note)
2123 rtx link;
2125 if (note == NULL_RTX)
2126 return;
2128 if (REG_NOTES (insn) == note)
2129 REG_NOTES (insn) = XEXP (note, 1);
2130 else
2131 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
2132 if (XEXP (link, 1) == note)
2134 XEXP (link, 1) = XEXP (note, 1);
2135 break;
2138 switch (REG_NOTE_KIND (note))
2140 case REG_EQUAL:
2141 case REG_EQUIV:
2142 df_notes_rescan (as_a <rtx_insn *> (insn));
2143 break;
2144 default:
2145 break;
2149 /* Remove REG_EQUAL and/or REG_EQUIV notes if INSN has such notes. */
2151 void
2152 remove_reg_equal_equiv_notes (rtx insn)
2154 rtx *loc;
2156 loc = &REG_NOTES (insn);
2157 while (*loc)
2159 enum reg_note kind = REG_NOTE_KIND (*loc);
2160 if (kind == REG_EQUAL || kind == REG_EQUIV)
2161 *loc = XEXP (*loc, 1);
2162 else
2163 loc = &XEXP (*loc, 1);
2167 /* Remove all REG_EQUAL and REG_EQUIV notes referring to REGNO. */
2169 void
2170 remove_reg_equal_equiv_notes_for_regno (unsigned int regno)
2172 df_ref eq_use;
2174 if (!df)
2175 return;
2177 /* This loop is a little tricky. We cannot just go down the chain because
2178 it is being modified by some actions in the loop. So we just iterate
2179 over the head. We plan to drain the list anyway. */
2180 while ((eq_use = DF_REG_EQ_USE_CHAIN (regno)) != NULL)
2182 rtx_insn *insn = DF_REF_INSN (eq_use);
2183 rtx note = find_reg_equal_equiv_note (insn);
2185 /* This assert is generally triggered when someone deletes a REG_EQUAL
2186 or REG_EQUIV note by hacking the list manually rather than calling
2187 remove_note. */
2188 gcc_assert (note);
2190 remove_note (insn, note);
2194 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2195 return 1 if it is found. A simple equality test is used to determine if
2196 NODE matches. */
2199 in_expr_list_p (const_rtx listp, const_rtx node)
2201 const_rtx x;
2203 for (x = listp; x; x = XEXP (x, 1))
2204 if (node == XEXP (x, 0))
2205 return 1;
2207 return 0;
2210 /* Search LISTP (an EXPR_LIST) for an entry whose first operand is NODE and
2211 remove that entry from the list if it is found.
2213 A simple equality test is used to determine if NODE matches. */
2215 void
2216 remove_node_from_expr_list (const_rtx node, rtx_expr_list **listp)
2218 rtx_expr_list *temp = *listp;
2219 rtx prev = NULL_RTX;
2221 while (temp)
2223 if (node == temp->element ())
2225 /* Splice the node out of the list. */
2226 if (prev)
2227 XEXP (prev, 1) = temp->next ();
2228 else
2229 *listp = temp->next ();
2231 return;
2234 prev = temp;
2235 temp = temp->next ();
2239 /* Search LISTP (an INSN_LIST) for an entry whose first operand is NODE and
2240 remove that entry from the list if it is found.
2242 A simple equality test is used to determine if NODE matches. */
2244 void
2245 remove_node_from_insn_list (const rtx_insn *node, rtx_insn_list **listp)
2247 rtx_insn_list *temp = *listp;
2248 rtx prev = NULL;
2250 while (temp)
2252 if (node == temp->insn ())
2254 /* Splice the node out of the list. */
2255 if (prev)
2256 XEXP (prev, 1) = temp->next ();
2257 else
2258 *listp = temp->next ();
2260 return;
2263 prev = temp;
2264 temp = temp->next ();
2268 /* Nonzero if X contains any volatile instructions. These are instructions
2269 which may put the machine into an unpredictable state, and thus no
2270 instructions or register uses should be moved or combined across them.
2271 This includes only volatile asms and UNSPEC_VOLATILE instructions. */
2274 volatile_insn_p (const_rtx x)
2276 const RTX_CODE code = GET_CODE (x);
2277 switch (code)
2279 case LABEL_REF:
2280 case SYMBOL_REF:
2281 case CONST:
2282 CASE_CONST_ANY:
2283 case CC0:
2284 case PC:
2285 case REG:
2286 case SCRATCH:
2287 case CLOBBER:
2288 case ADDR_VEC:
2289 case ADDR_DIFF_VEC:
2290 case CALL:
2291 case MEM:
2292 return 0;
2294 case UNSPEC_VOLATILE:
2295 return 1;
2297 case ASM_INPUT:
2298 case ASM_OPERANDS:
2299 if (MEM_VOLATILE_P (x))
2300 return 1;
2302 default:
2303 break;
2306 /* Recursively scan the operands of this expression. */
2309 const char *const fmt = GET_RTX_FORMAT (code);
2310 int i;
2312 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2314 if (fmt[i] == 'e')
2316 if (volatile_insn_p (XEXP (x, i)))
2317 return 1;
2319 else if (fmt[i] == 'E')
2321 int j;
2322 for (j = 0; j < XVECLEN (x, i); j++)
2323 if (volatile_insn_p (XVECEXP (x, i, j)))
2324 return 1;
2328 return 0;
2331 /* Nonzero if X contains any volatile memory references,
2332 UNSPEC_VOLATILE operations, or volatile ASM_OPERANDS expressions. */
2335 volatile_refs_p (const_rtx x)
2337 const RTX_CODE code = GET_CODE (x);
2338 switch (code)
2340 case LABEL_REF:
2341 case SYMBOL_REF:
2342 case CONST:
2343 CASE_CONST_ANY:
2344 case CC0:
2345 case PC:
2346 case REG:
2347 case SCRATCH:
2348 case CLOBBER:
2349 case ADDR_VEC:
2350 case ADDR_DIFF_VEC:
2351 return 0;
2353 case UNSPEC_VOLATILE:
2354 return 1;
2356 case MEM:
2357 case ASM_INPUT:
2358 case ASM_OPERANDS:
2359 if (MEM_VOLATILE_P (x))
2360 return 1;
2362 default:
2363 break;
2366 /* Recursively scan the operands of this expression. */
2369 const char *const fmt = GET_RTX_FORMAT (code);
2370 int i;
2372 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2374 if (fmt[i] == 'e')
2376 if (volatile_refs_p (XEXP (x, i)))
2377 return 1;
2379 else if (fmt[i] == 'E')
2381 int j;
2382 for (j = 0; j < XVECLEN (x, i); j++)
2383 if (volatile_refs_p (XVECEXP (x, i, j)))
2384 return 1;
2388 return 0;
2391 /* Similar to above, except that it also rejects register pre- and post-
2392 incrementing. */
2395 side_effects_p (const_rtx x)
2397 const RTX_CODE code = GET_CODE (x);
2398 switch (code)
2400 case LABEL_REF:
2401 case SYMBOL_REF:
2402 case CONST:
2403 CASE_CONST_ANY:
2404 case CC0:
2405 case PC:
2406 case REG:
2407 case SCRATCH:
2408 case ADDR_VEC:
2409 case ADDR_DIFF_VEC:
2410 case VAR_LOCATION:
2411 return 0;
2413 case CLOBBER:
2414 /* Reject CLOBBER with a non-VOID mode. These are made by combine.c
2415 when some combination can't be done. If we see one, don't think
2416 that we can simplify the expression. */
2417 return (GET_MODE (x) != VOIDmode);
2419 case PRE_INC:
2420 case PRE_DEC:
2421 case POST_INC:
2422 case POST_DEC:
2423 case PRE_MODIFY:
2424 case POST_MODIFY:
2425 case CALL:
2426 case UNSPEC_VOLATILE:
2427 return 1;
2429 case MEM:
2430 case ASM_INPUT:
2431 case ASM_OPERANDS:
2432 if (MEM_VOLATILE_P (x))
2433 return 1;
2435 default:
2436 break;
2439 /* Recursively scan the operands of this expression. */
2442 const char *fmt = GET_RTX_FORMAT (code);
2443 int i;
2445 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2447 if (fmt[i] == 'e')
2449 if (side_effects_p (XEXP (x, i)))
2450 return 1;
2452 else if (fmt[i] == 'E')
2454 int j;
2455 for (j = 0; j < XVECLEN (x, i); j++)
2456 if (side_effects_p (XVECEXP (x, i, j)))
2457 return 1;
2461 return 0;
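/* Illustrative comparison (not part of the original file) of the three
   predicates above, read directly off their switch statements:

     volatile_insn_p ((mem/v:SI (reg)))    == 0   (MEM returns 0 here)
     volatile_refs_p ((mem/v:SI (reg)))    == 1   (volatile MEM counts)
     side_effects_p  ((mem/v:SI (reg)))    == 1
     side_effects_p  ((post_inc:SI (reg))) == 1   (autoincrement rejected)
     volatile_refs_p ((post_inc:SI (reg))) == 0

   So volatile_insn_p is the narrowest test (volatile asms and UNSPEC_VOLATILE
   only), volatile_refs_p adds volatile memory references, and side_effects_p
   additionally rejects autoincrement, CALL and mode-carrying CLOBBER.  */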
2464 /* Return nonzero if evaluating rtx X might cause a trap.
2465 FLAGS controls how to consider MEMs. A nonzero value means the context
2466 of the access may have changed from the original, such that the
2467 address may have become invalid. */
2470 may_trap_p_1 (const_rtx x, unsigned flags)
2472 int i;
2473 enum rtx_code code;
2474 const char *fmt;
2476 /* We make no distinction currently, but this function is part of
2477 the internal target-hooks ABI so we keep the parameter as
2478 "unsigned flags". */
2479 bool code_changed = flags != 0;
2481 if (x == 0)
2482 return 0;
2483 code = GET_CODE (x);
2484 switch (code)
2486 /* Handle these cases quickly. */
2487 CASE_CONST_ANY:
2488 case SYMBOL_REF:
2489 case LABEL_REF:
2490 case CONST:
2491 case PC:
2492 case CC0:
2493 case REG:
2494 case SCRATCH:
2495 return 0;
2497 case UNSPEC:
2498 return targetm.unspec_may_trap_p (x, flags);
2500 case UNSPEC_VOLATILE:
2501 case ASM_INPUT:
2502 case TRAP_IF:
2503 return 1;
2505 case ASM_OPERANDS:
2506 return MEM_VOLATILE_P (x);
2508 /* Memory ref can trap unless it's a static var or a stack slot. */
2509 case MEM:
2510 /* Recognize specific pattern of stack checking probes. */
2511 if (flag_stack_check
2512 && MEM_VOLATILE_P (x)
2513 && XEXP (x, 0) == stack_pointer_rtx)
2514 return 1;
2515 if (/* MEM_NOTRAP_P only relates to the actual position of the memory
2516 reference; moving it out of its original context, such as when code
2517 is moved during optimization, might cause its address to become invalid. */
2518 code_changed
2519 || !MEM_NOTRAP_P (x))
2521 HOST_WIDE_INT size = MEM_SIZE_KNOWN_P (x) ? MEM_SIZE (x) : 0;
2522 return rtx_addr_can_trap_p_1 (XEXP (x, 0), 0, size,
2523 GET_MODE (x), code_changed);
2526 return 0;
2528 /* Division by a non-constant might trap. */
2529 case DIV:
2530 case MOD:
2531 case UDIV:
2532 case UMOD:
2533 if (HONOR_SNANS (GET_MODE (x)))
2534 return 1;
2535 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)))
2536 return flag_trapping_math;
2537 if (!CONSTANT_P (XEXP (x, 1)) || (XEXP (x, 1) == const0_rtx))
2538 return 1;
2539 break;
2541 case EXPR_LIST:
2542 /* An EXPR_LIST is used to represent a function call. This
2543 certainly may trap. */
2544 return 1;
2546 case GE:
2547 case GT:
2548 case LE:
2549 case LT:
2550 case LTGT:
2551 case COMPARE:
2552 /* Some floating point comparisons may trap. */
2553 if (!flag_trapping_math)
2554 break;
2555 /* ??? There is no machine independent way to check for tests that trap
2556 when COMPARE is used, though many targets do make this distinction.
2557 For instance, sparc uses CCFPE for compares which generate exceptions
2558 and CCFP for compares which do not generate exceptions. */
2559 if (HONOR_NANS (GET_MODE (x)))
2560 return 1;
2561 /* But often the compare has some CC mode, so check operand
2562 modes as well. */
2563 if (HONOR_NANS (GET_MODE (XEXP (x, 0)))
2564 || HONOR_NANS (GET_MODE (XEXP (x, 1))))
2565 return 1;
2566 break;
2568 case EQ:
2569 case NE:
2570 if (HONOR_SNANS (GET_MODE (x)))
2571 return 1;
2572 /* Often comparison is CC mode, so check operand modes. */
2573 if (HONOR_SNANS (GET_MODE (XEXP (x, 0)))
2574 || HONOR_SNANS (GET_MODE (XEXP (x, 1))))
2575 return 1;
2576 break;
2578 case FIX:
2579 /* Conversion of floating point might trap. */
2580 if (flag_trapping_math && HONOR_NANS (GET_MODE (XEXP (x, 0))))
2581 return 1;
2582 break;
2584 case NEG:
2585 case ABS:
2586 case SUBREG:
2587 /* These operations don't trap even with floating point. */
2588 break;
2590 default:
2591 /* Any floating arithmetic may trap. */
2592 if (SCALAR_FLOAT_MODE_P (GET_MODE (x)) && flag_trapping_math)
2593 return 1;
2596 fmt = GET_RTX_FORMAT (code);
2597 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2599 if (fmt[i] == 'e')
2601 if (may_trap_p_1 (XEXP (x, i), flags))
2602 return 1;
2604 else if (fmt[i] == 'E')
2606 int j;
2607 for (j = 0; j < XVECLEN (x, i); j++)
2608 if (may_trap_p_1 (XVECEXP (x, i, j), flags))
2609 return 1;
2612 return 0;
2615 /* Return nonzero if evaluating rtx X might cause a trap. */
2618 may_trap_p (const_rtx x)
2620 return may_trap_p_1 (x, 0);
2623 /* Same as above, but additionally return nonzero if evaluating rtx X might
2624 cause a fault. We define a fault for the purpose of this function as an
2625 erroneous execution condition that cannot be encountered during the normal
2626 execution of a valid program; the typical example is an unaligned memory
2627 access on a strict alignment machine. The compiler guarantees that it
2628 doesn't generate code that will fault from a valid program, but this
2629 guarantee doesn't mean anything for individual instructions. Consider
2630 the following example:
2632 struct S { int d; union { char *cp; int *ip; }; };
2634 int foo(struct S *s)
2636 if (s->d == 1)
2637 return *s->ip;
2638 else
2639 return *s->cp;
2642 on a strict alignment machine. In a valid program, foo will never be
2643 invoked on a structure for which d is equal to 1 and the underlying
2644 unique field of the union not aligned on a 4-byte boundary, but the
2645 expression *s->ip might cause a fault if considered individually.
2647 At the RTL level, potentially problematic expressions will almost always
2648 verify may_trap_p; for example, the above dereference can be emitted as
2649 (mem:SI (reg:P)) and this expression is may_trap_p for a generic register.
2650 However, suppose that foo is inlined in a caller that causes s->cp to
2651 point to a local character variable and guarantees that s->d is not set
2652 to 1; foo may have been effectively translated into pseudo-RTL as:
2654 if ((reg:SI) == 1)
2655 (set (reg:SI) (mem:SI (%fp - 7)))
2656 else
2657 (set (reg:QI) (mem:QI (%fp - 7)))
2659 Now (mem:SI (%fp - 7)) is considered as not may_trap_p since it is a
2660 memory reference to a stack slot, but it will certainly cause a fault
2661 on a strict alignment machine. */
2664 may_trap_or_fault_p (const_rtx x)
2666 return may_trap_p_1 (x, 1);
2669 /* Return nonzero if X contains a comparison that is not either EQ or NE,
2670 i.e., an inequality. */
2673 inequality_comparisons_p (const_rtx x)
2675 const char *fmt;
2676 int len, i;
2677 const enum rtx_code code = GET_CODE (x);
2679 switch (code)
2681 case REG:
2682 case SCRATCH:
2683 case PC:
2684 case CC0:
2685 CASE_CONST_ANY:
2686 case CONST:
2687 case LABEL_REF:
2688 case SYMBOL_REF:
2689 return 0;
2691 case LT:
2692 case LTU:
2693 case GT:
2694 case GTU:
2695 case LE:
2696 case LEU:
2697 case GE:
2698 case GEU:
2699 return 1;
2701 default:
2702 break;
2705 len = GET_RTX_LENGTH (code);
2706 fmt = GET_RTX_FORMAT (code);
2708 for (i = 0; i < len; i++)
2710 if (fmt[i] == 'e')
2712 if (inequality_comparisons_p (XEXP (x, i)))
2713 return 1;
2715 else if (fmt[i] == 'E')
2717 int j;
2718 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2719 if (inequality_comparisons_p (XVECEXP (x, i, j)))
2720 return 1;
2724 return 0;
2727 /* Replace any occurrence of FROM in X with TO. The function does
2728 not descend into CONST_DOUBLE when replacing.
2730 Note that copying is not done so X must not be shared unless all copies
2731 are to be modified. */
2734 replace_rtx (rtx x, rtx from, rtx to)
2736 int i, j;
2737 const char *fmt;
2739 if (x == from)
2740 return to;
2742 /* Allow this function to make replacements in EXPR_LISTs. */
2743 if (x == 0)
2744 return 0;
2746 if (GET_CODE (x) == SUBREG)
2748 rtx new_rtx = replace_rtx (SUBREG_REG (x), from, to);
2750 if (CONST_INT_P (new_rtx))
2752 x = simplify_subreg (GET_MODE (x), new_rtx,
2753 GET_MODE (SUBREG_REG (x)),
2754 SUBREG_BYTE (x));
2755 gcc_assert (x);
2757 else
2758 SUBREG_REG (x) = new_rtx;
2760 return x;
2762 else if (GET_CODE (x) == ZERO_EXTEND)
2764 rtx new_rtx = replace_rtx (XEXP (x, 0), from, to);
2766 if (CONST_INT_P (new_rtx))
2768 x = simplify_unary_operation (ZERO_EXTEND, GET_MODE (x),
2769 new_rtx, GET_MODE (XEXP (x, 0)));
2770 gcc_assert (x);
2772 else
2773 XEXP (x, 0) = new_rtx;
2775 return x;
2778 fmt = GET_RTX_FORMAT (GET_CODE (x));
2779 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
2781 if (fmt[i] == 'e')
2782 XEXP (x, i) = replace_rtx (XEXP (x, i), from, to);
2783 else if (fmt[i] == 'E')
2784 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
2785 XVECEXP (x, i, j) = replace_rtx (XVECEXP (x, i, j), from, to);
2788 return x;
2791 /* Replace occurrences of the OLD_LABEL in *LOC with NEW_LABEL. Also track
2792 the change in LABEL_NUSES if UPDATE_LABEL_NUSES. */
2794 void
2795 replace_label (rtx *loc, rtx old_label, rtx new_label, bool update_label_nuses)
2797 /* Handle jump tables specially, since ADDR_{DIFF_,}VECs can be long. */
2798 rtx x = *loc;
2799 if (JUMP_TABLE_DATA_P (x))
2801 x = PATTERN (x);
2802 rtvec vec = XVEC (x, GET_CODE (x) == ADDR_DIFF_VEC);
2803 int len = GET_NUM_ELEM (vec);
2804 for (int i = 0; i < len; ++i)
2806 rtx ref = RTVEC_ELT (vec, i);
2807 if (XEXP (ref, 0) == old_label)
2809 XEXP (ref, 0) = new_label;
2810 if (update_label_nuses)
2812 ++LABEL_NUSES (new_label);
2813 --LABEL_NUSES (old_label);
2817 return;
2820 /* If this is a JUMP_INSN, then we also need to fix the JUMP_LABEL
2821 field. This is not handled by the iterator because it doesn't
2822 handle unprinted ('0') fields. */
2823 if (JUMP_P (x) && JUMP_LABEL (x) == old_label)
2824 JUMP_LABEL (x) = new_label;
2826 subrtx_ptr_iterator::array_type array;
2827 FOR_EACH_SUBRTX_PTR (iter, array, loc, ALL)
2829 rtx *loc = *iter;
2830 if (rtx x = *loc)
2832 if (GET_CODE (x) == SYMBOL_REF
2833 && CONSTANT_POOL_ADDRESS_P (x))
2835 rtx c = get_pool_constant (x);
2836 if (rtx_referenced_p (old_label, c))
2838 /* Create a copy of constant C; replace the label inside
2839 but do not update LABEL_NUSES because uses in constant pool
2840 are not counted. */
2841 rtx new_c = copy_rtx (c);
2842 replace_label (&new_c, old_label, new_label, false);
2844 /* Add the new constant NEW_C to constant pool and replace
2845 the old reference to constant by new reference. */
2846 rtx new_mem = force_const_mem (get_pool_mode (x), new_c);
2847 *loc = replace_rtx (x, x, XEXP (new_mem, 0));
2851 if ((GET_CODE (x) == LABEL_REF
2852 || GET_CODE (x) == INSN_LIST)
2853 && XEXP (x, 0) == old_label)
2855 XEXP (x, 0) = new_label;
2856 if (update_label_nuses)
2858 ++LABEL_NUSES (new_label);
2859 --LABEL_NUSES (old_label);
2866 void
2867 replace_label_in_insn (rtx_insn *insn, rtx old_label, rtx new_label,
2868 bool update_label_nuses)
2870 rtx insn_as_rtx = insn;
2871 replace_label (&insn_as_rtx, old_label, new_label, update_label_nuses);
2872 gcc_checking_assert (insn_as_rtx == insn);
2875 /* Return true if X is referenced in BODY. */
2877 bool
2878 rtx_referenced_p (const_rtx x, const_rtx body)
2880 subrtx_iterator::array_type array;
2881 FOR_EACH_SUBRTX (iter, array, body, ALL)
2882 if (const_rtx y = *iter)
2884 /* Check if a label_ref Y refers to label X. */
2885 if (GET_CODE (y) == LABEL_REF
2886 && LABEL_P (x)
2887 && LABEL_REF_LABEL (y) == x)
2888 return true;
2890 if (rtx_equal_p (x, y))
2891 return true;
2893 /* If Y is a reference to pool constant traverse the constant. */
2894 if (GET_CODE (y) == SYMBOL_REF
2895 && CONSTANT_POOL_ADDRESS_P (y))
2896 iter.substitute (get_pool_constant (y));
2898 return false;
2901 /* If INSN is a tablejump return true and store the label (before jump table) to
2902 *LABELP and the jump table to *TABLEP. LABELP and TABLEP may be NULL. */
2904 bool
2905 tablejump_p (const rtx_insn *insn, rtx *labelp, rtx_jump_table_data **tablep)
2907 rtx label, table;
2909 if (!JUMP_P (insn))
2910 return false;
2912 label = JUMP_LABEL (insn);
2913 if (label != NULL_RTX && !ANY_RETURN_P (label)
2914 && (table = NEXT_INSN (as_a <rtx_insn *> (label))) != NULL_RTX
2915 && JUMP_TABLE_DATA_P (table))
2917 if (labelp)
2918 *labelp = label;
2919 if (tablep)
2920 *tablep = as_a <rtx_jump_table_data *> (table);
2921 return true;
2923 return false;
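/* Illustrative sketch (not part of the original file): a pass that wants to
   walk the case labels of a dispatch might use tablejump_p roughly like this,
   assuming INSN is a candidate jump:

     rtx label;
     rtx_jump_table_data *table;
     if (tablejump_p (insn, &label, &table))
       {
         rtvec vec = table->get_labels ();
         for (int i = 0; i < GET_NUM_ELEM (vec); i++)
           note_label (XEXP (RTVEC_ELT (vec, i), 0));
       }

   where note_label stands in for whatever the caller does with each target
   label; label_is_jump_target_p later in this file walks the same vector.  */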
2926 /* A subroutine of computed_jump_p, return 1 if X contains a REG or MEM or
2927 constant that is not in the constant pool and not in the condition
2928 of an IF_THEN_ELSE. */
2930 static int
2931 computed_jump_p_1 (const_rtx x)
2933 const enum rtx_code code = GET_CODE (x);
2934 int i, j;
2935 const char *fmt;
2937 switch (code)
2939 case LABEL_REF:
2940 case PC:
2941 return 0;
2943 case CONST:
2944 CASE_CONST_ANY:
2945 case SYMBOL_REF:
2946 case REG:
2947 return 1;
2949 case MEM:
2950 return ! (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
2951 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)));
2953 case IF_THEN_ELSE:
2954 return (computed_jump_p_1 (XEXP (x, 1))
2955 || computed_jump_p_1 (XEXP (x, 2)));
2957 default:
2958 break;
2961 fmt = GET_RTX_FORMAT (code);
2962 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
2964 if (fmt[i] == 'e'
2965 && computed_jump_p_1 (XEXP (x, i)))
2966 return 1;
2968 else if (fmt[i] == 'E')
2969 for (j = 0; j < XVECLEN (x, i); j++)
2970 if (computed_jump_p_1 (XVECEXP (x, i, j)))
2971 return 1;
2974 return 0;
2977 /* Return nonzero if INSN is an indirect jump (aka computed jump).
2979 Tablejumps and casesi insns are not considered indirect jumps;
2980 we can recognize them by a (use (label_ref)). */
2983 computed_jump_p (const_rtx insn)
2985 int i;
2986 if (JUMP_P (insn))
2988 rtx pat = PATTERN (insn);
2990 /* If we have a JUMP_LABEL set, we're not a computed jump. */
2991 if (JUMP_LABEL (insn) != NULL)
2992 return 0;
2994 if (GET_CODE (pat) == PARALLEL)
2996 int len = XVECLEN (pat, 0);
2997 int has_use_labelref = 0;
2999 for (i = len - 1; i >= 0; i--)
3000 if (GET_CODE (XVECEXP (pat, 0, i)) == USE
3001 && (GET_CODE (XEXP (XVECEXP (pat, 0, i), 0))
3002 == LABEL_REF))
3004 has_use_labelref = 1;
3005 break;
3008 if (! has_use_labelref)
3009 for (i = len - 1; i >= 0; i--)
3010 if (GET_CODE (XVECEXP (pat, 0, i)) == SET
3011 && SET_DEST (XVECEXP (pat, 0, i)) == pc_rtx
3012 && computed_jump_p_1 (SET_SRC (XVECEXP (pat, 0, i))))
3013 return 1;
3015 else if (GET_CODE (pat) == SET
3016 && SET_DEST (pat) == pc_rtx
3017 && computed_jump_p_1 (SET_SRC (pat)))
3018 return 1;
3020 return 0;
3023 /* Optimized loop of for_each_rtx, trying to avoid useless recursive
3024 calls. Processes the subexpressions of EXP and passes them to F. */
3025 static int
3026 for_each_rtx_1 (rtx exp, int n, rtx_function f, void *data)
3028 int result, i, j;
3029 const char *format = GET_RTX_FORMAT (GET_CODE (exp));
3030 rtx *x;
3032 for (; format[n] != '\0'; n++)
3034 switch (format[n])
3036 case 'e':
3037 /* Call F on X. */
3038 x = &XEXP (exp, n);
3039 result = (*f) (x, data);
3040 if (result == -1)
3041 /* Do not traverse sub-expressions. */
3042 continue;
3043 else if (result != 0)
3044 /* Stop the traversal. */
3045 return result;
3047 if (*x == NULL_RTX)
3048 /* There are no sub-expressions. */
3049 continue;
3051 i = non_rtx_starting_operands[GET_CODE (*x)];
3052 if (i >= 0)
3054 result = for_each_rtx_1 (*x, i, f, data);
3055 if (result != 0)
3056 return result;
3058 break;
3060 case 'V':
3061 case 'E':
3062 if (XVEC (exp, n) == 0)
3063 continue;
3064 for (j = 0; j < XVECLEN (exp, n); ++j)
3066 /* Call F on X. */
3067 x = &XVECEXP (exp, n, j);
3068 result = (*f) (x, data);
3069 if (result == -1)
3070 /* Do not traverse sub-expressions. */
3071 continue;
3072 else if (result != 0)
3073 /* Stop the traversal. */
3074 return result;
3076 if (*x == NULL_RTX)
3077 /* There are no sub-expressions. */
3078 continue;
3080 i = non_rtx_starting_operands[GET_CODE (*x)];
3081 if (i >= 0)
3083 result = for_each_rtx_1 (*x, i, f, data);
3084 if (result != 0)
3085 return result;
3088 break;
3090 default:
3091 /* Nothing to do. */
3092 break;
3096 return 0;
3099 /* Traverse X via depth-first search, calling F for each
3100 sub-expression (including X itself). F is also passed the DATA.
3101 If F returns -1, do not traverse sub-expressions, but continue
3102 traversing the rest of the tree. If F ever returns any other
3103 nonzero value, stop the traversal, and return the value returned
3104 by F. Otherwise, return 0. This function does not traverse inside
3105 tree structure that contains RTX_EXPRs, or into sub-expressions
3106 whose format code is `0' since it is not known whether or not those
3107 codes are actually RTL.
3109 This routine is very general, and could (should?) be used to
3110 implement many of the other routines in this file. */
3113 for_each_rtx (rtx *x, rtx_function f, void *data)
3115 int result;
3116 int i;
3118 /* Call F on X. */
3119 result = (*f) (x, data);
3120 if (result == -1)
3121 /* Do not traverse sub-expressions. */
3122 return 0;
3123 else if (result != 0)
3124 /* Stop the traversal. */
3125 return result;
3127 if (*x == NULL_RTX)
3128 /* There are no sub-expressions. */
3129 return 0;
3131 i = non_rtx_starting_operands[GET_CODE (*x)];
3132 if (i < 0)
3133 return 0;
3135 return for_each_rtx_1 (*x, i, f, data);
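/* Illustrative sketch (not part of the original file): a for_each_rtx
   callback returns 0 to keep walking, -1 to skip the sub-expressions of the
   current rtx, or any other value to abort the walk.  Assuming INSN is the
   insn being examined and that the rtx_function typedef takes an rtx * and a
   void *, a callback that counts MEMs might look roughly like:

     static int
     count_mems_1 (rtx *x, void *data)
     {
       if (*x && MEM_P (*x))
         (*(int *) data)++;
       return 0;
     }

     int n = 0;
     for_each_rtx (&PATTERN (insn), count_mems_1, &n);  */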
3138 /* Like "for_each_rtx", but for calling on an rtx_insn **. */
3141 for_each_rtx_in_insn (rtx_insn **insn, rtx_function f, void *data)
3143 rtx insn_as_rtx = *insn;
3144 int result;
3146 result = for_each_rtx (&insn_as_rtx, f, data);
3148 if (insn_as_rtx != *insn)
3149 *insn = safe_as_a <rtx_insn *> (insn_as_rtx);
3151 return result;
3156 /* MEM has a PRE/POST-INC/DEC/MODIFY address X. Extract the operands of
3157 the equivalent add insn and pass the result to FN, using DATA as the
3158 final argument. */
3160 static int
3161 for_each_inc_dec_find_inc_dec (rtx mem, for_each_inc_dec_fn fn, void *data)
3163 rtx x = XEXP (mem, 0);
3164 switch (GET_CODE (x))
3166 case PRE_INC:
3167 case POST_INC:
3169 int size = GET_MODE_SIZE (GET_MODE (mem));
3170 rtx r1 = XEXP (x, 0);
3171 rtx c = gen_int_mode (size, GET_MODE (r1));
3172 return fn (mem, x, r1, r1, c, data);
3175 case PRE_DEC:
3176 case POST_DEC:
3178 int size = GET_MODE_SIZE (GET_MODE (mem));
3179 rtx r1 = XEXP (x, 0);
3180 rtx c = gen_int_mode (-size, GET_MODE (r1));
3181 return fn (mem, x, r1, r1, c, data);
3184 case PRE_MODIFY:
3185 case POST_MODIFY:
3187 rtx r1 = XEXP (x, 0);
3188 rtx add = XEXP (x, 1);
3189 return fn (mem, x, r1, add, NULL, data);
3192 default:
3193 gcc_unreachable ();
3197 /* Traverse *LOC looking for MEMs that have autoinc addresses.
3198 For each such autoinc operation found, call FN, passing it
3199 the innermost enclosing MEM, the operation itself, the RTX modified
3200 by the operation, two RTXs (the second may be NULL) that, once
3201 added, represent the value to be held by the modified RTX
3202 afterwards, and DATA. FN is to return 0 to continue the
3203 traversal or any other value to have it returned to the caller of
3204 for_each_inc_dec. */
3207 for_each_inc_dec (rtx x,
3208 for_each_inc_dec_fn fn,
3209 void *data)
3211 subrtx_var_iterator::array_type array;
3212 FOR_EACH_SUBRTX_VAR (iter, array, x, NONCONST)
3214 rtx mem = *iter;
3215 if (mem
3216 && MEM_P (mem)
3217 && GET_RTX_CLASS (GET_CODE (XEXP (mem, 0))) == RTX_AUTOINC)
3219 int res = for_each_inc_dec_find_inc_dec (mem, fn, data);
3220 if (res != 0)
3221 return res;
3222 iter.skip_subrtxes ();
3225 return 0;
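/* Illustrative sketch (not part of the original file): a for_each_inc_dec
   callback receives the enclosing MEM, the autoinc expression, the register
   being modified, the two addends described above and the caller's DATA, and
   returns 0 to continue.  Assuming the for_each_inc_dec_fn typedef matches
   that argument order, counting the autoinc operations in a pattern might
   look like:

     static int
     count_autoinc (rtx, rtx, rtx, rtx, rtx, void *data)
     {
       (*(int *) data)++;
       return 0;
     }

     int n = 0;
     for_each_inc_dec (PATTERN (insn), count_autoinc, &n);  */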
3229 /* Searches X for any reference to REGNO, returning the rtx of the
3230 reference found if any. Otherwise, returns NULL_RTX. */
3233 regno_use_in (unsigned int regno, rtx x)
3235 const char *fmt;
3236 int i, j;
3237 rtx tem;
3239 if (REG_P (x) && REGNO (x) == regno)
3240 return x;
3242 fmt = GET_RTX_FORMAT (GET_CODE (x));
3243 for (i = GET_RTX_LENGTH (GET_CODE (x)) - 1; i >= 0; i--)
3245 if (fmt[i] == 'e')
3247 if ((tem = regno_use_in (regno, XEXP (x, i))))
3248 return tem;
3250 else if (fmt[i] == 'E')
3251 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3252 if ((tem = regno_use_in (regno , XVECEXP (x, i, j))))
3253 return tem;
3256 return NULL_RTX;
3259 /* Return a value indicating whether OP, an operand of a commutative
3260 operation, is preferred as the first or second operand. The higher
3261 the value, the stronger the preference for being the first operand.
3262 We use negative values to indicate a preference for being the second
3263 operand and positive values for being the first operand. */
3266 commutative_operand_precedence (rtx op)
3268 enum rtx_code code = GET_CODE (op);
3270 /* Constants always become the second operand. Prefer "nice" constants. */
3271 if (code == CONST_INT)
3272 return -8;
3273 if (code == CONST_WIDE_INT)
3274 return -8;
3275 if (code == CONST_DOUBLE)
3276 return -7;
3277 if (code == CONST_FIXED)
3278 return -7;
3279 op = avoid_constant_pool_reference (op);
3280 code = GET_CODE (op);
3282 switch (GET_RTX_CLASS (code))
3284 case RTX_CONST_OBJ:
3285 if (code == CONST_INT)
3286 return -6;
3287 if (code == CONST_WIDE_INT)
3288 return -6;
3289 if (code == CONST_DOUBLE)
3290 return -5;
3291 if (code == CONST_FIXED)
3292 return -5;
3293 return -4;
3295 case RTX_EXTRA:
3296 /* SUBREGs of objects should come second. */
3297 if (code == SUBREG && OBJECT_P (SUBREG_REG (op)))
3298 return -3;
3299 return 0;
3301 case RTX_OBJ:
3302 /* Complex expressions should come first, so decrease the priority
3303 of objects. Prefer pointer objects over non-pointer objects. */
3304 if ((REG_P (op) && REG_POINTER (op))
3305 || (MEM_P (op) && MEM_POINTER (op)))
3306 return -1;
3307 return -2;
3309 case RTX_COMM_ARITH:
3310 /* Prefer operands that are themselves commutative to be first.
3311 This helps to make things linear. In particular,
3312 (and (and (reg) (reg)) (not (reg))) is canonical. */
3313 return 4;
3315 case RTX_BIN_ARITH:
3316 /* If only one operand is a binary expression, it will be the first
3317 operand. In particular, (plus (minus (reg) (reg)) (neg (reg)))
3318 is canonical, although it will usually be further simplified. */
3319 return 2;
3321 case RTX_UNARY:
3322 /* Then prefer NEG and NOT. */
3323 if (code == NEG || code == NOT)
3324 return 1;
3326 default:
3327 return 0;
3331 /* Return 1 iff it is necessary to swap the operands of a commutative
3332 operation in order to canonicalize the expression. */
3334 bool
3335 swap_commutative_operands_p (rtx x, rtx y)
3337 return (commutative_operand_precedence (x)
3338 < commutative_operand_precedence (y));
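/* Illustrative sketch (not part of the original file): callers that build
   commutative expressions typically canonicalize the operand order with the
   predicate above before constructing the rtx, roughly:

     if (swap_commutative_operands_p (op0, op1))
       {
         rtx tem = op0;
         op0 = op1;
         op1 = tem;
       }

   so that, e.g., a CONST_INT (precedence -8) always ends up as the second
   operand.  */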
3341 /* Return 1 if X is an autoincrement side effect and the register is
3342 not the stack pointer. */
3344 auto_inc_p (const_rtx x)
3346 switch (GET_CODE (x))
3348 case PRE_INC:
3349 case POST_INC:
3350 case PRE_DEC:
3351 case POST_DEC:
3352 case PRE_MODIFY:
3353 case POST_MODIFY:
3354 /* There are no REG_INC notes for SP. */
3355 if (XEXP (x, 0) != stack_pointer_rtx)
3356 return 1;
3357 default:
3358 break;
3360 return 0;
3363 /* Return nonzero if IN contains a piece of rtl that has the address LOC. */
3365 loc_mentioned_in_p (rtx *loc, const_rtx in)
3367 enum rtx_code code;
3368 const char *fmt;
3369 int i, j;
3371 if (!in)
3372 return 0;
3374 code = GET_CODE (in);
3375 fmt = GET_RTX_FORMAT (code);
3376 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3378 if (fmt[i] == 'e')
3380 if (loc == &XEXP (in, i) || loc_mentioned_in_p (loc, XEXP (in, i)))
3381 return 1;
3383 else if (fmt[i] == 'E')
3384 for (j = XVECLEN (in, i) - 1; j >= 0; j--)
3385 if (loc == &XVECEXP (in, i, j)
3386 || loc_mentioned_in_p (loc, XVECEXP (in, i, j)))
3387 return 1;
3389 return 0;
3392 /* Helper function for subreg_lsb. Given a subreg's OUTER_MODE, INNER_MODE,
3393 and SUBREG_BYTE, return the bit offset where the subreg begins
3394 (counting from the least significant bit of the operand). */
3396 unsigned int
3397 subreg_lsb_1 (machine_mode outer_mode,
3398 machine_mode inner_mode,
3399 unsigned int subreg_byte)
3401 unsigned int bitpos;
3402 unsigned int byte;
3403 unsigned int word;
3405 /* A paradoxical subreg begins at bit position 0. */
3406 if (GET_MODE_PRECISION (outer_mode) > GET_MODE_PRECISION (inner_mode))
3407 return 0;
3409 if (WORDS_BIG_ENDIAN != BYTES_BIG_ENDIAN)
3410 /* If the subreg crosses a word boundary ensure that
3411 it also begins and ends on a word boundary. */
3412 gcc_assert (!((subreg_byte % UNITS_PER_WORD
3413 + GET_MODE_SIZE (outer_mode)) > UNITS_PER_WORD
3414 && (subreg_byte % UNITS_PER_WORD
3415 || GET_MODE_SIZE (outer_mode) % UNITS_PER_WORD)));
3417 if (WORDS_BIG_ENDIAN)
3418 word = (GET_MODE_SIZE (inner_mode)
3419 - (subreg_byte + GET_MODE_SIZE (outer_mode))) / UNITS_PER_WORD;
3420 else
3421 word = subreg_byte / UNITS_PER_WORD;
3422 bitpos = word * BITS_PER_WORD;
3424 if (BYTES_BIG_ENDIAN)
3425 byte = (GET_MODE_SIZE (inner_mode)
3426 - (subreg_byte + GET_MODE_SIZE (outer_mode))) % UNITS_PER_WORD;
3427 else
3428 byte = subreg_byte % UNITS_PER_WORD;
3429 bitpos += byte * BITS_PER_UNIT;
3431 return bitpos;
3434 /* Given a subreg X, return the bit offset where the subreg begins
3435 (counting from the least significant bit of the reg). */
3437 unsigned int
3438 subreg_lsb (const_rtx x)
3440 return subreg_lsb_1 (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
3441 SUBREG_BYTE (x));
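/* Worked example (not part of the original file), assuming UNITS_PER_WORD
   == 4 and BITS_PER_WORD == 32: for (subreg:SI (reg:DI r) 4) on a target
   where neither WORDS_BIG_ENDIAN nor BYTES_BIG_ENDIAN is set, word = 4 / 4
   = 1 and byte = 4 % 4 = 0, so subreg_lsb returns 32: byte offset 4 names
   the high half of the DImode value.  With both endianness flags set,
   word = (8 - (4 + 4)) / 4 = 0 and byte = (8 - (4 + 4)) % 4 = 0, so the
   same byte offset names the low half and subreg_lsb returns 0.  */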
3444 /* Fill in information about a subreg of a hard register.
3445 xregno - A regno of an inner hard subreg_reg (or what will become one).
3446 xmode - The mode of xregno.
3447 offset - The byte offset.
3448 ymode - The mode of a top level SUBREG (or what may become one).
3449 info - Pointer to structure to fill in.
3451 Rather than considering one particular inner register (and thus one
3452 particular "outer" register) in isolation, this function really uses
3453 XREGNO as a model for a sequence of isomorphic hard registers. Thus the
3454 function does not check whether adding INFO->offset to XREGNO gives
3455 a valid hard register; even if INFO->offset + XREGNO is out of range,
3456 there might be another register of the same type that is in range.
3457 Likewise it doesn't check whether HARD_REGNO_MODE_OK accepts the new
3458 register, since that can depend on things like whether the final
3459 register number is even or odd. Callers that want to check whether
3460 this particular subreg can be replaced by a simple (reg ...) should
3461 use simplify_subreg_regno. */
3463 void
3464 subreg_get_info (unsigned int xregno, machine_mode xmode,
3465 unsigned int offset, machine_mode ymode,
3466 struct subreg_info *info)
3468 int nregs_xmode, nregs_ymode;
3469 int mode_multiple, nregs_multiple;
3470 int offset_adj, y_offset, y_offset_adj;
3471 int regsize_xmode, regsize_ymode;
3472 bool rknown;
3474 gcc_assert (xregno < FIRST_PSEUDO_REGISTER);
3476 rknown = false;
3478 /* If there are holes in a non-scalar mode in registers, we expect
3479 that it is made up of its units concatenated together. */
3480 if (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode))
3482 machine_mode xmode_unit;
3484 nregs_xmode = HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode);
3485 if (GET_MODE_INNER (xmode) == VOIDmode)
3486 xmode_unit = xmode;
3487 else
3488 xmode_unit = GET_MODE_INNER (xmode);
3489 gcc_assert (HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode_unit));
3490 gcc_assert (nregs_xmode
3491 == (GET_MODE_NUNITS (xmode)
3492 * HARD_REGNO_NREGS_WITH_PADDING (xregno, xmode_unit)));
3493 gcc_assert (hard_regno_nregs[xregno][xmode]
3494 == (hard_regno_nregs[xregno][xmode_unit]
3495 * GET_MODE_NUNITS (xmode)));
3497 /* You can only ask for a SUBREG of a value with holes in the middle
3498 if you don't cross the holes. (Such a SUBREG should be done by
3499 picking a different register class, or doing it in memory if
3500 necessary.) An example of a value with holes is XCmode on 32-bit
3501 x86 with -m128bit-long-double; it's represented in 6 32-bit registers,
3502 3 for each part, but in memory it's two 128-bit parts.
3503 Padding is assumed to be at the end (not necessarily the 'high part')
3504 of each unit. */
3505 if ((offset / GET_MODE_SIZE (xmode_unit) + 1
3506 < GET_MODE_NUNITS (xmode))
3507 && (offset / GET_MODE_SIZE (xmode_unit)
3508 != ((offset + GET_MODE_SIZE (ymode) - 1)
3509 / GET_MODE_SIZE (xmode_unit))))
3511 info->representable_p = false;
3512 rknown = true;
3515 else
3516 nregs_xmode = hard_regno_nregs[xregno][xmode];
3518 nregs_ymode = hard_regno_nregs[xregno][ymode];
3520 /* Paradoxical subregs are otherwise valid. */
3521 if (!rknown
3522 && offset == 0
3523 && GET_MODE_PRECISION (ymode) > GET_MODE_PRECISION (xmode))
3525 info->representable_p = true;
3526 /* If this is a big endian paradoxical subreg, which uses more
3527 actual hard registers than the original register, we must
3528 return a negative offset so that we find the proper highpart
3529 of the register. */
3530 if (GET_MODE_SIZE (ymode) > UNITS_PER_WORD
3531 ? REG_WORDS_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3532 info->offset = nregs_xmode - nregs_ymode;
3533 else
3534 info->offset = 0;
3535 info->nregs = nregs_ymode;
3536 return;
3539 /* If registers store different numbers of bits in the different
3540 modes, we cannot generally form this subreg. */
3541 if (!HARD_REGNO_NREGS_HAS_PADDING (xregno, xmode)
3542 && !HARD_REGNO_NREGS_HAS_PADDING (xregno, ymode)
3543 && (GET_MODE_SIZE (xmode) % nregs_xmode) == 0
3544 && (GET_MODE_SIZE (ymode) % nregs_ymode) == 0)
3546 regsize_xmode = GET_MODE_SIZE (xmode) / nregs_xmode;
3547 regsize_ymode = GET_MODE_SIZE (ymode) / nregs_ymode;
3548 if (!rknown && regsize_xmode > regsize_ymode && nregs_ymode > 1)
3550 info->representable_p = false;
3551 info->nregs
3552 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3553 info->offset = offset / regsize_xmode;
3554 return;
3556 if (!rknown && regsize_ymode > regsize_xmode && nregs_xmode > 1)
3558 info->representable_p = false;
3559 info->nregs
3560 = (GET_MODE_SIZE (ymode) + regsize_xmode - 1) / regsize_xmode;
3561 info->offset = offset / regsize_xmode;
3562 return;
3566 /* Lowpart subregs are otherwise valid. */
3567 if (!rknown && offset == subreg_lowpart_offset (ymode, xmode))
3569 info->representable_p = true;
3570 rknown = true;
3572 if (offset == 0 || nregs_xmode == nregs_ymode)
3574 info->offset = 0;
3575 info->nregs = nregs_ymode;
3576 return;
3580 /* This should always pass, otherwise we don't know how to verify
3581 the constraint. These conditions may be relaxed but
3582 subreg_regno_offset would need to be redesigned. */
3583 gcc_assert ((GET_MODE_SIZE (xmode) % GET_MODE_SIZE (ymode)) == 0);
3584 gcc_assert ((nregs_xmode % nregs_ymode) == 0);
3586 if (WORDS_BIG_ENDIAN != REG_WORDS_BIG_ENDIAN
3587 && GET_MODE_SIZE (xmode) > UNITS_PER_WORD)
3589 HOST_WIDE_INT xsize = GET_MODE_SIZE (xmode);
3590 HOST_WIDE_INT ysize = GET_MODE_SIZE (ymode);
3591 HOST_WIDE_INT off_low = offset & (ysize - 1);
3592 HOST_WIDE_INT off_high = offset & ~(ysize - 1);
3593 offset = (xsize - ysize - off_high) | off_low;
3595 /* The XMODE value can be seen as a vector of NREGS_XMODE
3596 values. The subreg must represent a lowpart of given field.
3597 Compute what field it is. */
3598 offset_adj = offset;
3599 offset_adj -= subreg_lowpart_offset (ymode,
3600 mode_for_size (GET_MODE_BITSIZE (xmode)
3601 / nregs_xmode,
3602 MODE_INT, 0));
3604 /* Size of ymode must not be greater than the size of xmode. */
3605 mode_multiple = GET_MODE_SIZE (xmode) / GET_MODE_SIZE (ymode);
3606 gcc_assert (mode_multiple != 0);
3608 y_offset = offset / GET_MODE_SIZE (ymode);
3609 y_offset_adj = offset_adj / GET_MODE_SIZE (ymode);
3610 nregs_multiple = nregs_xmode / nregs_ymode;
3612 gcc_assert ((offset_adj % GET_MODE_SIZE (ymode)) == 0);
3613 gcc_assert ((mode_multiple % nregs_multiple) == 0);
3615 if (!rknown)
3617 info->representable_p = (!(y_offset_adj % (mode_multiple / nregs_multiple)));
3618 rknown = true;
3620 info->offset = (y_offset / (mode_multiple / nregs_multiple)) * nregs_ymode;
3621 info->nregs = nregs_ymode;
3624 /* This function returns the regno offset of a subreg expression.
3625 xregno - A regno of an inner hard subreg_reg (or what will become one).
3626 xmode - The mode of xregno.
3627 offset - The byte offset.
3628 ymode - The mode of a top level SUBREG (or what may become one).
3629 RETURN - The regno offset which would be used. */
3630 unsigned int
3631 subreg_regno_offset (unsigned int xregno, machine_mode xmode,
3632 unsigned int offset, machine_mode ymode)
3634 struct subreg_info info;
3635 subreg_get_info (xregno, xmode, offset, ymode, &info);
3636 return info.offset;
3639 /* This function returns true when the offset is representable via
3640 subreg_offset in the given regno.
3641 xregno - A regno of an inner hard subreg_reg (or what will become one).
3642 xmode - The mode of xregno.
3643 offset - The byte offset.
3644 ymode - The mode of a top level SUBREG (or what may become one).
3645 RETURN - Whether the offset is representable. */
3646 bool
3647 subreg_offset_representable_p (unsigned int xregno, machine_mode xmode,
3648 unsigned int offset, machine_mode ymode)
3650 struct subreg_info info;
3651 subreg_get_info (xregno, xmode, offset, ymode, &info);
3652 return info.representable_p;
3655 /* Return the number of a YMODE register to which
3657 (subreg:YMODE (reg:XMODE XREGNO) OFFSET)
3659 can be simplified. Return -1 if the subreg can't be simplified.
3661 XREGNO is a hard register number. */
3664 simplify_subreg_regno (unsigned int xregno, machine_mode xmode,
3665 unsigned int offset, machine_mode ymode)
3667 struct subreg_info info;
3668 unsigned int yregno;
3670 #ifdef CANNOT_CHANGE_MODE_CLASS
3671 /* Give the backend a chance to disallow the mode change. */
3672 if (GET_MODE_CLASS (xmode) != MODE_COMPLEX_INT
3673 && GET_MODE_CLASS (xmode) != MODE_COMPLEX_FLOAT
3674 && REG_CANNOT_CHANGE_MODE_P (xregno, xmode, ymode)
3675 /* We can use mode change in LRA for some transformations. */
3676 && ! lra_in_progress)
3677 return -1;
3678 #endif
3680 /* We shouldn't simplify stack-related registers. */
3681 if ((!reload_completed || frame_pointer_needed)
3682 && xregno == FRAME_POINTER_REGNUM)
3683 return -1;
3685 if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
3686 && xregno == ARG_POINTER_REGNUM)
3687 return -1;
3689 if (xregno == STACK_POINTER_REGNUM
3690 /* We should convert hard stack register in LRA if it is
3691 possible. */
3692 && ! lra_in_progress)
3693 return -1;
3695 /* Try to get the register offset. */
3696 subreg_get_info (xregno, xmode, offset, ymode, &info);
3697 if (!info.representable_p)
3698 return -1;
3700 /* Make sure that the offsetted register value is in range. */
3701 yregno = xregno + info.offset;
3702 if (!HARD_REGISTER_NUM_P (yregno))
3703 return -1;
3705 /* See whether (reg:YMODE YREGNO) is valid.
3707 ??? We allow invalid registers if (reg:XMODE XREGNO) is also invalid.
3708 This is a kludge to work around how complex FP arguments are passed
3709 on IA-64 and should be fixed. See PR target/49226. */
3710 if (!HARD_REGNO_MODE_OK (yregno, ymode)
3711 && HARD_REGNO_MODE_OK (xregno, xmode))
3712 return -1;
3714 return (int) yregno;
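/* Illustrative sketch (not part of the original file): a caller that wants to
   rewrite (subreg:YMODE (reg:XMODE XREGNO) OFFSET) as a plain hard register
   typically asks simplify_subreg_regno first, e.g.:

     int yregno = simplify_subreg_regno (xregno, xmode, offset, ymode);
     if (yregno >= 0)
       the SUBREG can be replaced by gen_rtx_REG (ymode, yregno)

   whereas subreg_regno_offset and subreg_offset_representable_p expose the
   raw offset and the representability answer separately.  */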
3717 /* Return the final regno that a subreg expression refers to. */
3718 unsigned int
3719 subreg_regno (const_rtx x)
3721 unsigned int ret;
3722 rtx subreg = SUBREG_REG (x);
3723 int regno = REGNO (subreg);
3725 ret = regno + subreg_regno_offset (regno,
3726 GET_MODE (subreg),
3727 SUBREG_BYTE (x),
3728 GET_MODE (x));
3729 return ret;
3733 /* Return the number of registers that a subreg expression refers
3734 to. */
3735 unsigned int
3736 subreg_nregs (const_rtx x)
3738 return subreg_nregs_with_regno (REGNO (SUBREG_REG (x)), x);
3741 /* Return the number of registers that a subreg expression with register
3742 number REGNO refers to. This is a copy of rtlanal.c:subreg_nregs,
3743 changed so that the regno can be passed in. */
3745 unsigned int
3746 subreg_nregs_with_regno (unsigned int regno, const_rtx x)
3748 struct subreg_info info;
3749 rtx subreg = SUBREG_REG (x);
3751 subreg_get_info (regno, GET_MODE (subreg), SUBREG_BYTE (x), GET_MODE (x),
3752 &info);
3753 return info.nregs;
3757 struct parms_set_data
3759 int nregs;
3760 HARD_REG_SET regs;
3763 /* Helper function for noticing stores to parameter registers. */
3764 static void
3765 parms_set (rtx x, const_rtx pat ATTRIBUTE_UNUSED, void *data)
3767 struct parms_set_data *const d = (struct parms_set_data *) data;
3768 if (REG_P (x) && REGNO (x) < FIRST_PSEUDO_REGISTER
3769 && TEST_HARD_REG_BIT (d->regs, REGNO (x)))
3771 CLEAR_HARD_REG_BIT (d->regs, REGNO (x));
3772 d->nregs--;
3776 /* Look backward for first parameter to be loaded.
3777 Note that loads of all parameters will not necessarily be
3778 found if CSE has eliminated some of them (e.g., an argument
3779 to the outer function is passed down as a parameter).
3780 Do not skip BOUNDARY. */
3781 rtx_insn *
3782 find_first_parameter_load (rtx_insn *call_insn, rtx_insn *boundary)
3784 struct parms_set_data parm;
3785 rtx p;
3786 rtx_insn *before, *first_set;
3788 /* Since different machines initialize their parameter registers
3789 in different orders, assume nothing. Collect the set of all
3790 parameter registers. */
3791 CLEAR_HARD_REG_SET (parm.regs);
3792 parm.nregs = 0;
3793 for (p = CALL_INSN_FUNCTION_USAGE (call_insn); p; p = XEXP (p, 1))
3794 if (GET_CODE (XEXP (p, 0)) == USE
3795 && REG_P (XEXP (XEXP (p, 0), 0)))
3797 gcc_assert (REGNO (XEXP (XEXP (p, 0), 0)) < FIRST_PSEUDO_REGISTER);
3799 /* We only care about registers which can hold function
3800 arguments. */
3801 if (!FUNCTION_ARG_REGNO_P (REGNO (XEXP (XEXP (p, 0), 0))))
3802 continue;
3804 SET_HARD_REG_BIT (parm.regs, REGNO (XEXP (XEXP (p, 0), 0)));
3805 parm.nregs++;
3807 before = call_insn;
3808 first_set = call_insn;
3810 /* Search backward for the first set of a register in this set. */
3811 while (parm.nregs && before != boundary)
3813 before = PREV_INSN (before);
3815 /* It is possible that some loads got CSEed from one call to
3816 another. Stop in that case. */
3817 if (CALL_P (before))
3818 break;
3820 /* Our caller must either ensure that we will find all sets
3821 (in case the code has not been optimized yet), or take care
3822 of possible labels by setting BOUNDARY to the preceding
3823 CODE_LABEL. */
3824 if (LABEL_P (before))
3826 gcc_assert (before == boundary);
3827 break;
3830 if (INSN_P (before))
3832 int nregs_old = parm.nregs;
3833 note_stores (PATTERN (before), parms_set, &parm);
3834 /* If we found something that did not set a parameter reg,
3835 we're done. Do not keep going, as that might result
3836 in hoisting an insn before the setting of a pseudo
3837 that is used by the hoisted insn. */
3838 if (nregs_old != parm.nregs)
3839 first_set = before;
3840 else
3841 break;
3844 return first_set;
3847 /* Return true if we should avoid inserting code between INSN and preceding
3848 call instruction. */
3850 bool
3851 keep_with_call_p (const rtx_insn *insn)
3853 rtx set;
3855 if (INSN_P (insn) && (set = single_set (insn)) != NULL)
3857 if (REG_P (SET_DEST (set))
3858 && REGNO (SET_DEST (set)) < FIRST_PSEUDO_REGISTER
3859 && fixed_regs[REGNO (SET_DEST (set))]
3860 && general_operand (SET_SRC (set), VOIDmode))
3861 return true;
3862 if (REG_P (SET_SRC (set))
3863 && targetm.calls.function_value_regno_p (REGNO (SET_SRC (set)))
3864 && REG_P (SET_DEST (set))
3865 && REGNO (SET_DEST (set)) >= FIRST_PSEUDO_REGISTER)
3866 return true;
3867 /* There may be a stack pop just after the call and before the store
3868 of the return register. Search for the actual store when deciding
3869 if we can break or not. */
3870 if (SET_DEST (set) == stack_pointer_rtx)
3872 /* This CONST_CAST is okay because next_nonnote_insn just
3873 returns its argument and we assign it to a const_rtx
3874 variable. */
3875 const rtx_insn *i2
3876 = next_nonnote_insn (const_cast<rtx_insn *> (insn));
3877 if (i2 && keep_with_call_p (i2))
3878 return true;
3881 return false;
3884 /* Return true if LABEL is a target of JUMP_INSN. This applies only
3885 to non-complex jumps. That is, direct unconditional, conditional,
3886 and tablejumps, but not computed jumps or returns. It also does
3887 not apply to the fallthru case of a conditional jump. */
3889 bool
3890 label_is_jump_target_p (const_rtx label, const rtx_insn *jump_insn)
3892 rtx tmp = JUMP_LABEL (jump_insn);
3893 rtx_jump_table_data *table;
3895 if (label == tmp)
3896 return true;
3898 if (tablejump_p (jump_insn, NULL, &table))
3900 rtvec vec = table->get_labels ();
3901 int i, veclen = GET_NUM_ELEM (vec);
3903 for (i = 0; i < veclen; ++i)
3904 if (XEXP (RTVEC_ELT (vec, i), 0) == label)
3905 return true;
3908 if (find_reg_note (jump_insn, REG_LABEL_TARGET, label))
3909 return true;
3911 return false;
3915 /* Return an estimate of the cost of computing rtx X.
3916 One use is in cse, to decide which expression to keep in the hash table.
3917 Another is in rtl generation, to pick the cheapest way to multiply.
3918 Other uses like the latter are expected in the future.
3920 X appears as operand OPNO in an expression with code OUTER_CODE.
3921 SPEED specifies whether costs optimized for speed or size should
3922 be returned. */
3925 rtx_cost (rtx x, enum rtx_code outer_code, int opno, bool speed)
3927 int i, j;
3928 enum rtx_code code;
3929 const char *fmt;
3930 int total;
3931 int factor;
3933 if (x == 0)
3934 return 0;
3936 /* A size N times larger than UNITS_PER_WORD likely needs N times as
3937 many insns, taking N times as long. */
3938 factor = GET_MODE_SIZE (GET_MODE (x)) / UNITS_PER_WORD;
3939 if (factor == 0)
3940 factor = 1;
3942 /* Compute the default costs of certain things.
3943 Note that targetm.rtx_costs can override the defaults. */
3945 code = GET_CODE (x);
3946 switch (code)
3948 case MULT:
3949 /* Multiplication has time-complexity O(N*N), where N is the
3950 number of units (translated from digits) when using
3951 schoolbook long multiplication. */
3952 total = factor * factor * COSTS_N_INSNS (5);
3953 break;
3954 case DIV:
3955 case UDIV:
3956 case MOD:
3957 case UMOD:
3958 /* Similarly, complexity for schoolbook long division. */
3959 total = factor * factor * COSTS_N_INSNS (7);
3960 break;
3961 case USE:
3962 /* Used in combine.c as a marker. */
3963 total = 0;
3964 break;
3965 case SET:
3966 /* A SET doesn't have a mode, so let's look at the SET_DEST to get
3967 the mode for the factor. */
3968 factor = GET_MODE_SIZE (GET_MODE (SET_DEST (x))) / UNITS_PER_WORD;
3969 if (factor == 0)
3970 factor = 1;
3971 /* Fall through. */
3972 default:
3973 total = factor * COSTS_N_INSNS (1);
3976 switch (code)
3978 case REG:
3979 return 0;
3981 case SUBREG:
3982 total = 0;
3983 /* If we can't tie these modes, make this expensive. The larger
3984 the mode, the more expensive it is. */
3985 if (! MODES_TIEABLE_P (GET_MODE (x), GET_MODE (SUBREG_REG (x))))
3986 return COSTS_N_INSNS (2 + factor);
3987 break;
3989 default:
3990 if (targetm.rtx_costs (x, code, outer_code, opno, &total, speed))
3991 return total;
3992 break;
3995 /* Sum the costs of the sub-rtx's, plus cost of this operation,
3996 which is already in total. */
3998 fmt = GET_RTX_FORMAT (code);
3999 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
4000 if (fmt[i] == 'e')
4001 total += rtx_cost (XEXP (x, i), code, i, speed);
4002 else if (fmt[i] == 'E')
4003 for (j = 0; j < XVECLEN (x, i); j++)
4004 total += rtx_cost (XVECEXP (x, i, j), code, i, speed);
4006 return total;
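/* Illustrative sketch (not part of the original file): a typical use of
   rtx_cost is to compare two candidate forms of the same value before
   committing to a replacement.  For a value that will appear as the source
   of a SET (outer code SET, operand number 1), the comparison looks roughly
   like this, where optimize_insn_for_speed_p is assumed as the usual way to
   choose between the speed and size cost models:

     bool speed = optimize_insn_for_speed_p ();
     bool better = (rtx_cost (new_rtx, SET, 1, speed)
                    < rtx_cost (old_rtx, SET, 1, speed));  */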
4009 /* Fill in the structure C with information about both speed and size rtx
4010 costs for X, which is operand OPNO in an expression with code OUTER. */
4012 void
4013 get_full_rtx_cost (rtx x, enum rtx_code outer, int opno,
4014 struct full_rtx_costs *c)
4016 c->speed = rtx_cost (x, outer, opno, true);
4017 c->size = rtx_cost (x, outer, opno, false);
4021 /* Return cost of address expression X.
4022 Expect that X is a properly formed address reference.
4024 The SPEED parameter specifies whether costs optimized for speed or size
4025 should be returned. */
4028 address_cost (rtx x, machine_mode mode, addr_space_t as, bool speed)
4030 /* We may be asked for the cost of various unusual addresses, such as the
4031 operands of a push instruction. It is not worthwhile to complicate the
4032 target hook to handle such cases. */
4034 if (!memory_address_addr_space_p (mode, x, as))
4035 return 1000;
4037 return targetm.address_cost (x, mode, as, speed);
4040 /* If the target doesn't override, compute the cost as with arithmetic. */
4043 default_address_cost (rtx x, machine_mode, addr_space_t, bool speed)
4045 return rtx_cost (x, MEM, 0, speed);
4049 unsigned HOST_WIDE_INT
4050 nonzero_bits (const_rtx x, machine_mode mode)
4052 return cached_nonzero_bits (x, mode, NULL_RTX, VOIDmode, 0);
4055 unsigned int
4056 num_sign_bit_copies (const_rtx x, machine_mode mode)
4058 return cached_num_sign_bit_copies (x, mode, NULL_RTX, VOIDmode, 0);
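/* Illustrative sketch (not part of the original file): the two wrappers above
   answer questions such as "does this value certainly fit in the low byte?".
   Assuming X is an rtx of integer mode MODE:

     unsigned HOST_WIDE_INT nz = nonzero_bits (x, mode);
     bool fits_uchar = (nz & ~(unsigned HOST_WIDE_INT) 0xff) == 0;
     bool fits_schar
       = num_sign_bit_copies (x, mode) >= GET_MODE_PRECISION (mode) - 7;

   The first test says every bit above the low eight is known to be zero; the
   second says all but (at most) the low eight bits are copies of the sign
   bit.  */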
4061 /* The function cached_nonzero_bits is a wrapper around nonzero_bits1.
4062 It avoids exponential behavior in nonzero_bits1 when X has
4063 identical subexpressions on the first or the second level. */
4065 static unsigned HOST_WIDE_INT
4066 cached_nonzero_bits (const_rtx x, machine_mode mode, const_rtx known_x,
4067 machine_mode known_mode,
4068 unsigned HOST_WIDE_INT known_ret)
4070 if (x == known_x && mode == known_mode)
4071 return known_ret;
4073 /* Try to find identical subexpressions. If found call
4074 nonzero_bits1 on X with the subexpressions as KNOWN_X and the
4075 precomputed value for the subexpression as KNOWN_RET. */
4077 if (ARITHMETIC_P (x))
4079 rtx x0 = XEXP (x, 0);
4080 rtx x1 = XEXP (x, 1);
4082 /* Check the first level. */
4083 if (x0 == x1)
4084 return nonzero_bits1 (x, mode, x0, mode,
4085 cached_nonzero_bits (x0, mode, known_x,
4086 known_mode, known_ret));
4088 /* Check the second level. */
4089 if (ARITHMETIC_P (x0)
4090 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4091 return nonzero_bits1 (x, mode, x1, mode,
4092 cached_nonzero_bits (x1, mode, known_x,
4093 known_mode, known_ret));
4095 if (ARITHMETIC_P (x1)
4096 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4097 return nonzero_bits1 (x, mode, x0, mode,
4098 cached_nonzero_bits (x0, mode, known_x,
4099 known_mode, known_ret));
4102 return nonzero_bits1 (x, mode, known_x, known_mode, known_ret);
4105 /* We let num_sign_bit_copies recur into nonzero_bits as that is useful.
4106 We don't let nonzero_bits recur into num_sign_bit_copies, because that
4107 is less useful. We can't allow both, because that results in exponential
4108 run time recursion. There is a nullstone testcase that triggered
4109 this. This macro avoids accidental uses of num_sign_bit_copies. */
4110 #define cached_num_sign_bit_copies sorry_i_am_preventing_exponential_behavior
4112 /* Given an expression, X, compute which bits in X can be nonzero.
4113 We don't care about bits outside of those defined in MODE.
4115 For most X this is simply GET_MODE_MASK (MODE), but if X is
4116 an arithmetic operation, we can do better. */
4118 static unsigned HOST_WIDE_INT
4119 nonzero_bits1 (const_rtx x, machine_mode mode, const_rtx known_x,
4120 machine_mode known_mode,
4121 unsigned HOST_WIDE_INT known_ret)
4123 unsigned HOST_WIDE_INT nonzero = GET_MODE_MASK (mode);
4124 unsigned HOST_WIDE_INT inner_nz;
4125 enum rtx_code code;
4126 machine_mode inner_mode;
4127 unsigned int mode_width = GET_MODE_PRECISION (mode);
4129 /* For floating-point and vector values, assume all bits are needed. */
4130 if (FLOAT_MODE_P (GET_MODE (x)) || FLOAT_MODE_P (mode)
4131 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4132 return nonzero;
4134 /* If X is wider than MODE, use its mode instead. */
4135 if (GET_MODE_PRECISION (GET_MODE (x)) > mode_width)
4137 mode = GET_MODE (x);
4138 nonzero = GET_MODE_MASK (mode);
4139 mode_width = GET_MODE_PRECISION (mode);
4142 if (mode_width > HOST_BITS_PER_WIDE_INT)
4143 /* Our only callers in this case look for single bit values. So
4144 just return the mode mask. Those tests will then be false. */
4145 return nonzero;
4147 #ifndef WORD_REGISTER_OPERATIONS
4148 /* If MODE is wider than X, but both are a single word for both the host
4149 and target machines, we can compute this from which bits of the
4150 object might be nonzero in its own mode, taking into account the fact
4151 that on many CISC machines, accessing an object in a wider mode
4152 causes the high-order bits to become undefined. So they are
4153 not known to be zero. */
4155 if (GET_MODE (x) != VOIDmode && GET_MODE (x) != mode
4156 && GET_MODE_PRECISION (GET_MODE (x)) <= BITS_PER_WORD
4157 && GET_MODE_PRECISION (GET_MODE (x)) <= HOST_BITS_PER_WIDE_INT
4158 && GET_MODE_PRECISION (mode) > GET_MODE_PRECISION (GET_MODE (x)))
4160 nonzero &= cached_nonzero_bits (x, GET_MODE (x),
4161 known_x, known_mode, known_ret);
4162 nonzero |= GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x));
4163 return nonzero;
4165 #endif
4167 code = GET_CODE (x);
4168 switch (code)
4170 case REG:
4171 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4172 /* If pointers extend unsigned and this is a pointer in Pmode, say that
4173 all the bits above ptr_mode are known to be zero. */
4174 /* As we do not know which address space the pointer is referring to,
4175 we can do this only if the target does not support different pointer
4176 or address modes depending on the address space. */
4177 if (target_default_pointer_address_modes_p ()
4178 && POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4179 && REG_POINTER (x))
4180 nonzero &= GET_MODE_MASK (ptr_mode);
4181 #endif
4183 /* Include declared information about alignment of pointers. */
4184 /* ??? We don't properly preserve REG_POINTER changes across
4185 pointer-to-integer casts, so we can't trust it except for
4186 things that we know must be pointers. See execute/960116-1.c. */
4187 if ((x == stack_pointer_rtx
4188 || x == frame_pointer_rtx
4189 || x == arg_pointer_rtx)
4190 && REGNO_POINTER_ALIGN (REGNO (x)))
4192 unsigned HOST_WIDE_INT alignment
4193 = REGNO_POINTER_ALIGN (REGNO (x)) / BITS_PER_UNIT;
4195 #ifdef PUSH_ROUNDING
4196 /* If PUSH_ROUNDING is defined, it is possible for the
4197 stack to be momentarily aligned only to that amount,
4198 so we pick the least alignment. */
4199 if (x == stack_pointer_rtx && PUSH_ARGS)
4200 alignment = MIN ((unsigned HOST_WIDE_INT) PUSH_ROUNDING (1),
4201 alignment);
4202 #endif
4204 nonzero &= ~(alignment - 1);
4208 unsigned HOST_WIDE_INT nonzero_for_hook = nonzero;
4209 rtx new_rtx = rtl_hooks.reg_nonzero_bits (x, mode, known_x,
4210 known_mode, known_ret,
4211 &nonzero_for_hook);
4213 if (new_rtx)
4214 nonzero_for_hook &= cached_nonzero_bits (new_rtx, mode, known_x,
4215 known_mode, known_ret);
4217 return nonzero_for_hook;
4220 case CONST_INT:
4221 #ifdef SHORT_IMMEDIATES_SIGN_EXTEND
4222 /* If X is negative in MODE, sign-extend the value. */
4223 if (INTVAL (x) > 0
4224 && mode_width < BITS_PER_WORD
4225 && (UINTVAL (x) & ((unsigned HOST_WIDE_INT) 1 << (mode_width - 1)))
4226 != 0)
4227 return UINTVAL (x) | (HOST_WIDE_INT_M1U << mode_width);
4228 #endif
4230 return UINTVAL (x);
4232 case MEM:
4233 #ifdef LOAD_EXTEND_OP
4234 /* In many, if not most, RISC machines, reading a byte from memory
4235 zeros the rest of the register. Noticing that fact saves a lot
4236 of extra zero-extends. */
4237 if (LOAD_EXTEND_OP (GET_MODE (x)) == ZERO_EXTEND)
4238 nonzero &= GET_MODE_MASK (GET_MODE (x));
4239 #endif
4240 break;
4242 case EQ: case NE:
4243 case UNEQ: case LTGT:
4244 case GT: case GTU: case UNGT:
4245 case LT: case LTU: case UNLT:
4246 case GE: case GEU: case UNGE:
4247 case LE: case LEU: case UNLE:
4248 case UNORDERED: case ORDERED:
4249 /* If this produces an integer result, we know which bits are set.
4250 Code here used to clear bits outside the mode of X, but that is
4251 now done above. */
4252 /* Mind that MODE is the mode the caller wants to look at this
4253 operation in, and not the actual operation mode. We can wind
4254 up with (subreg:DI (gt:V4HI x y)), and we don't have anything
4255 that describes the results of a vector compare. */
4256 if (GET_MODE_CLASS (GET_MODE (x)) == MODE_INT
4257 && mode_width <= HOST_BITS_PER_WIDE_INT)
4258 nonzero = STORE_FLAG_VALUE;
4259 break;
4261 case NEG:
4262 #if 0
4263 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4264 and num_sign_bit_copies. */
4265 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4266 == GET_MODE_PRECISION (GET_MODE (x)))
4267 nonzero = 1;
4268 #endif
4270 if (GET_MODE_PRECISION (GET_MODE (x)) < mode_width)
4271 nonzero |= (GET_MODE_MASK (mode) & ~GET_MODE_MASK (GET_MODE (x)));
4272 break;
4274 case ABS:
4275 #if 0
4276 /* Disabled to avoid exponential mutual recursion between nonzero_bits
4277 and num_sign_bit_copies. */
4278 if (num_sign_bit_copies (XEXP (x, 0), GET_MODE (x))
4279 == GET_MODE_PRECISION (GET_MODE (x)))
4280 nonzero = 1;
4281 #endif
4282 break;
4284 case TRUNCATE:
4285 nonzero &= (cached_nonzero_bits (XEXP (x, 0), mode,
4286 known_x, known_mode, known_ret)
4287 & GET_MODE_MASK (mode));
4288 break;
4290 case ZERO_EXTEND:
4291 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4292 known_x, known_mode, known_ret);
4293 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4294 nonzero &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4295 break;
4297 case SIGN_EXTEND:
4298 /* If the sign bit is known clear, this is the same as ZERO_EXTEND.
4299 Otherwise, show that all the bits in the outer mode but not in
4300 the inner mode may be nonzero. */
4301 inner_nz = cached_nonzero_bits (XEXP (x, 0), mode,
4302 known_x, known_mode, known_ret);
4303 if (GET_MODE (XEXP (x, 0)) != VOIDmode)
4305 inner_nz &= GET_MODE_MASK (GET_MODE (XEXP (x, 0)));
4306 if (val_signbit_known_set_p (GET_MODE (XEXP (x, 0)), inner_nz))
4307 inner_nz |= (GET_MODE_MASK (mode)
4308 & ~GET_MODE_MASK (GET_MODE (XEXP (x, 0))));
4311 nonzero &= inner_nz;
4312 break;
4314 case AND:
4315 nonzero &= cached_nonzero_bits (XEXP (x, 0), mode,
4316 known_x, known_mode, known_ret)
4317 & cached_nonzero_bits (XEXP (x, 1), mode,
4318 known_x, known_mode, known_ret);
4319 break;
4321 case XOR: case IOR:
4322 case UMIN: case UMAX: case SMIN: case SMAX:
4324 unsigned HOST_WIDE_INT nonzero0
4325 = cached_nonzero_bits (XEXP (x, 0), mode,
4326 known_x, known_mode, known_ret);
4328 /* Don't call nonzero_bits for the second time if it cannot change
4329 anything. */
4330 if ((nonzero & nonzero0) != nonzero)
4331 nonzero &= nonzero0
4332 | cached_nonzero_bits (XEXP (x, 1), mode,
4333 known_x, known_mode, known_ret);
4335 break;
4337 case PLUS: case MINUS:
4338 case MULT:
4339 case DIV: case UDIV:
4340 case MOD: case UMOD:
4341 /* We can apply the rules of arithmetic to compute the number of
4342 high- and low-order zero bits of these operations. We start by
4343 computing the width (position of the highest-order nonzero bit)
4344 and the number of low-order zero bits for each value. */
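/* Editorial sketch, not part of GCC: a minimal illustration of the
   width/low-bit arithmetic used below, assuming a 64-bit
   HOST_WIDE_INT and ignoring the final clamping against MODE_WIDTH.
   For nz0 == 0x0f (width 4, no trailing zeros) and nz1 == 0xf0
   (width 8, 4 trailing zeros), the PLUS rule gives the mask 0x1ff
   and the MULT rule gives the mask 0xff0.  The helper name
   example_plus_nonzero is hypothetical.  */
#if 0
static unsigned HOST_WIDE_INT
example_plus_nonzero (unsigned HOST_WIDE_INT nz0, unsigned HOST_WIDE_INT nz1)
{
  int width0 = floor_log2 (nz0) + 1;
  int width1 = floor_log2 (nz1) + 1;
  int low0 = floor_log2 (nz0 & -nz0);
  int low1 = floor_log2 (nz1 & -nz1);
  /* A sum is at most one bit wider than its wider operand...  */
  int result_width = MAX (width0, width1) + 1;
  /* ...and keeps the smaller number of trailing zero bits.  */
  int result_low = MIN (low0, low1);
  unsigned HOST_WIDE_INT nonzero
    = ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
  return nonzero & ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
}
#endif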
4346 unsigned HOST_WIDE_INT nz0
4347 = cached_nonzero_bits (XEXP (x, 0), mode,
4348 known_x, known_mode, known_ret);
4349 unsigned HOST_WIDE_INT nz1
4350 = cached_nonzero_bits (XEXP (x, 1), mode,
4351 known_x, known_mode, known_ret);
4352 int sign_index = GET_MODE_PRECISION (GET_MODE (x)) - 1;
4353 int width0 = floor_log2 (nz0) + 1;
4354 int width1 = floor_log2 (nz1) + 1;
4355 int low0 = floor_log2 (nz0 & -nz0);
4356 int low1 = floor_log2 (nz1 & -nz1);
4357 unsigned HOST_WIDE_INT op0_maybe_minusp
4358 = nz0 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4359 unsigned HOST_WIDE_INT op1_maybe_minusp
4360 = nz1 & ((unsigned HOST_WIDE_INT) 1 << sign_index);
4361 unsigned int result_width = mode_width;
4362 int result_low = 0;
4364 switch (code)
4366 case PLUS:
4367 result_width = MAX (width0, width1) + 1;
4368 result_low = MIN (low0, low1);
4369 break;
4370 case MINUS:
4371 result_low = MIN (low0, low1);
4372 break;
4373 case MULT:
4374 result_width = width0 + width1;
4375 result_low = low0 + low1;
4376 break;
4377 case DIV:
4378 if (width1 == 0)
4379 break;
4380 if (!op0_maybe_minusp && !op1_maybe_minusp)
4381 result_width = width0;
4382 break;
4383 case UDIV:
4384 if (width1 == 0)
4385 break;
4386 result_width = width0;
4387 break;
4388 case MOD:
4389 if (width1 == 0)
4390 break;
4391 if (!op0_maybe_minusp && !op1_maybe_minusp)
4392 result_width = MIN (width0, width1);
4393 result_low = MIN (low0, low1);
4394 break;
4395 case UMOD:
4396 if (width1 == 0)
4397 break;
4398 result_width = MIN (width0, width1);
4399 result_low = MIN (low0, low1);
4400 break;
4401 default:
4402 gcc_unreachable ();
4405 if (result_width < mode_width)
4406 nonzero &= ((unsigned HOST_WIDE_INT) 1 << result_width) - 1;
4408 if (result_low > 0)
4409 nonzero &= ~(((unsigned HOST_WIDE_INT) 1 << result_low) - 1);
4411 break;
4413 case ZERO_EXTRACT:
4414 if (CONST_INT_P (XEXP (x, 1))
4415 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT)
4416 nonzero &= ((unsigned HOST_WIDE_INT) 1 << INTVAL (XEXP (x, 1))) - 1;
4417 break;
4419 case SUBREG:
4420 /* If this is a SUBREG formed for a promoted variable that has
4421 been zero-extended, we know that at least the high-order bits
4422 are zero, though others might be too. */
4424 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_UNSIGNED_P (x))
4425 nonzero = GET_MODE_MASK (GET_MODE (x))
4426 & cached_nonzero_bits (SUBREG_REG (x), GET_MODE (x),
4427 known_x, known_mode, known_ret);
4429 inner_mode = GET_MODE (SUBREG_REG (x));
4430 /* If the inner mode is a single word for both the host and target
4431 machines, we can compute this from which bits of the inner
4432 object might be nonzero. */
4433 if (GET_MODE_PRECISION (inner_mode) <= BITS_PER_WORD
4434 && (GET_MODE_PRECISION (inner_mode) <= HOST_BITS_PER_WIDE_INT))
4436 nonzero &= cached_nonzero_bits (SUBREG_REG (x), mode,
4437 known_x, known_mode, known_ret);
4439 #if defined (WORD_REGISTER_OPERATIONS) && defined (LOAD_EXTEND_OP)
4440 /* If this is a typical RISC machine, we only have to worry
4441 about the way loads are extended. */
4442 if ((LOAD_EXTEND_OP (inner_mode) == SIGN_EXTEND
4443 ? val_signbit_known_set_p (inner_mode, nonzero)
4444 : LOAD_EXTEND_OP (inner_mode) != ZERO_EXTEND)
4445 || !MEM_P (SUBREG_REG (x)))
4446 #endif
4448 /* On many CISC machines, accessing an object in a wider mode
4449 causes the high-order bits to become undefined. So they are
4450 not known to be zero. */
4451 if (GET_MODE_PRECISION (GET_MODE (x))
4452 > GET_MODE_PRECISION (inner_mode))
4453 nonzero |= (GET_MODE_MASK (GET_MODE (x))
4454 & ~GET_MODE_MASK (inner_mode));
4457 break;
4459 case ASHIFTRT:
4460 case LSHIFTRT:
4461 case ASHIFT:
4462 case ROTATE:
4463 /* The nonzero bits are in two classes: any bits within MODE
4464 that aren't in GET_MODE (x) are always significant. The rest of the
4465 nonzero bits are those that are significant in the operand of
4466 the shift when shifted the appropriate number of bits. This
4467 shows that high-order bits are cleared by the right shift and
4468 low-order bits by left shifts. */
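/* Editorial example, not part of GCC: with x in SImode and
   nonzero_bits (x) == 0xff00, (lshiftrt:SI x (const_int 4)) can only
   have the bits 0x0ff0 set, while (ashift:SI x (const_int 8)) can
   only have the bits 0xff0000 set (before masking against MODE).  */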
4469 if (CONST_INT_P (XEXP (x, 1))
4470 && INTVAL (XEXP (x, 1)) >= 0
4471 && INTVAL (XEXP (x, 1)) < HOST_BITS_PER_WIDE_INT
4472 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4474 machine_mode inner_mode = GET_MODE (x);
4475 unsigned int width = GET_MODE_PRECISION (inner_mode);
4476 int count = INTVAL (XEXP (x, 1));
4477 unsigned HOST_WIDE_INT mode_mask = GET_MODE_MASK (inner_mode);
4478 unsigned HOST_WIDE_INT op_nonzero
4479 = cached_nonzero_bits (XEXP (x, 0), mode,
4480 known_x, known_mode, known_ret);
4481 unsigned HOST_WIDE_INT inner = op_nonzero & mode_mask;
4482 unsigned HOST_WIDE_INT outer = 0;
4484 if (mode_width > width)
4485 outer = (op_nonzero & nonzero & ~mode_mask);
4487 if (code == LSHIFTRT)
4488 inner >>= count;
4489 else if (code == ASHIFTRT)
4491 inner >>= count;
4493 /* If the sign bit may have been nonzero before the shift, we
4494 need to mark all the places it could have been copied to
4495 by the shift as possibly nonzero. */
4496 if (inner & ((unsigned HOST_WIDE_INT) 1 << (width - 1 - count)))
4497 inner |= (((unsigned HOST_WIDE_INT) 1 << count) - 1)
4498 << (width - count);
4500 else if (code == ASHIFT)
4501 inner <<= count;
4502 else
4503 inner = ((inner << (count % width)
4504 | (inner >> (width - (count % width)))) & mode_mask);
4506 nonzero &= (outer | inner);
4508 break;
4510 case FFS:
4511 case POPCOUNT:
4512 /* This is at most the number of bits in the mode. */
4513 nonzero = ((unsigned HOST_WIDE_INT) 2 << (floor_log2 (mode_width))) - 1;
4514 break;
4516 case CLZ:
4517 /* If CLZ has a known value at zero, then the nonzero bits are
4518 that value, plus the number of bits in the mode minus one. */
4519 if (CLZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4520 nonzero
4521 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4522 else
4523 nonzero = -1;
4524 break;
4526 case CTZ:
4527 /* If CTZ has a known value at zero, then the nonzero bits are
4528 that value, plus the number of bits in the mode minus one. */
4529 if (CTZ_DEFINED_VALUE_AT_ZERO (mode, nonzero))
4530 nonzero
4531 |= ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4532 else
4533 nonzero = -1;
4534 break;
4536 case CLRSB:
4537 /* This is at most the number of bits in the mode minus 1. */
4538 nonzero = ((unsigned HOST_WIDE_INT) 1 << (floor_log2 (mode_width))) - 1;
4539 break;
4541 case PARITY:
4542 nonzero = 1;
4543 break;
4545 case IF_THEN_ELSE:
4547 unsigned HOST_WIDE_INT nonzero_true
4548 = cached_nonzero_bits (XEXP (x, 1), mode,
4549 known_x, known_mode, known_ret);
4551 /* Don't call nonzero_bits for the second time if it cannot change
4552 anything. */
4553 if ((nonzero & nonzero_true) != nonzero)
4554 nonzero &= nonzero_true
4555 | cached_nonzero_bits (XEXP (x, 2), mode,
4556 known_x, known_mode, known_ret);
4558 break;
4560 default:
4561 break;
4564 return nonzero;
4567 /* See the macro definition above. */
4568 #undef cached_num_sign_bit_copies
4571 /* The function cached_num_sign_bit_copies is a wrapper around
4572 num_sign_bit_copies1. It avoids exponential behavior in
4573 num_sign_bit_copies1 when X has identical subexpressions on the
4574 first or the second level. */
4576 static unsigned int
4577 cached_num_sign_bit_copies (const_rtx x, machine_mode mode, const_rtx known_x,
4578 machine_mode known_mode,
4579 unsigned int known_ret)
4581 if (x == known_x && mode == known_mode)
4582 return known_ret;
4584 /* Try to find identical subexpressions. If found, call
4585 num_sign_bit_copies1 on X with the subexpressions as KNOWN_X and
4586 the precomputed value for the subexpression as KNOWN_RET. */
4588 if (ARITHMETIC_P (x))
4590 rtx x0 = XEXP (x, 0);
4591 rtx x1 = XEXP (x, 1);
4593 /* Check the first level. */
4594 if (x0 == x1)
4595 return
4596 num_sign_bit_copies1 (x, mode, x0, mode,
4597 cached_num_sign_bit_copies (x0, mode, known_x,
4598 known_mode,
4599 known_ret));
4601 /* Check the second level. */
4602 if (ARITHMETIC_P (x0)
4603 && (x1 == XEXP (x0, 0) || x1 == XEXP (x0, 1)))
4604 return
4605 num_sign_bit_copies1 (x, mode, x1, mode,
4606 cached_num_sign_bit_copies (x1, mode, known_x,
4607 known_mode,
4608 known_ret));
4610 if (ARITHMETIC_P (x1)
4611 && (x0 == XEXP (x1, 0) || x0 == XEXP (x1, 1)))
4612 return
4613 num_sign_bit_copies1 (x, mode, x0, mode,
4614 cached_num_sign_bit_copies (x0, mode, known_x,
4615 known_mode,
4616 known_ret));
4619 return num_sign_bit_copies1 (x, mode, known_x, known_mode, known_ret);
4622 /* Return the number of bits at the high-order end of X that are known to
4623 be equal to the sign bit. X will be used in mode MODE; if MODE is
4624 VOIDmode, X will be used in its own mode. The returned value will always
4625 be between 1 and the number of bits in MODE. */
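/* Editorial examples, not part of GCC: in SImode, (const_int -1) has
   32 sign-bit copies and (const_int 3) has 30, while
   (sign_extend:SI (reg:HI r)) is known to have at least
   32 - 16 + 1 == 17.  */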
4627 static unsigned int
4628 num_sign_bit_copies1 (const_rtx x, machine_mode mode, const_rtx known_x,
4629 machine_mode known_mode,
4630 unsigned int known_ret)
4632 enum rtx_code code = GET_CODE (x);
4633 unsigned int bitwidth = GET_MODE_PRECISION (mode);
4634 int num0, num1, result;
4635 unsigned HOST_WIDE_INT nonzero;
4637 /* If we weren't given a mode, use the mode of X. If the mode is still
4638 VOIDmode, we don't know anything. Likewise if one of the modes is
4639 floating-point. */
4641 if (mode == VOIDmode)
4642 mode = GET_MODE (x);
4644 if (mode == VOIDmode || FLOAT_MODE_P (mode) || FLOAT_MODE_P (GET_MODE (x))
4645 || VECTOR_MODE_P (GET_MODE (x)) || VECTOR_MODE_P (mode))
4646 return 1;
4648 /* For a smaller object, just ignore the high bits. */
4649 if (bitwidth < GET_MODE_PRECISION (GET_MODE (x)))
4651 num0 = cached_num_sign_bit_copies (x, GET_MODE (x),
4652 known_x, known_mode, known_ret);
4653 return MAX (1,
4654 num0 - (int) (GET_MODE_PRECISION (GET_MODE (x)) - bitwidth));
4657 if (GET_MODE (x) != VOIDmode && bitwidth > GET_MODE_PRECISION (GET_MODE (x)))
4659 #ifndef WORD_REGISTER_OPERATIONS
4660 /* If this machine does not do all register operations on the entire
4661 register and MODE is wider than the mode of X, we can say nothing
4662 at all about the high-order bits. */
4663 return 1;
4664 #else
4665 /* Likewise on machines that do, if the mode of the object is smaller
4666 than a word and loads of that size don't sign extend, we can say
4667 nothing about the high order bits. */
4668 if (GET_MODE_PRECISION (GET_MODE (x)) < BITS_PER_WORD
4669 #ifdef LOAD_EXTEND_OP
4670 && LOAD_EXTEND_OP (GET_MODE (x)) != SIGN_EXTEND
4671 #endif
4673 return 1;
4674 #endif
4677 switch (code)
4679 case REG:
4681 #if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
4682 /* If pointers extend signed and this is a pointer in Pmode, say that
4683 all the bits above ptr_mode are known to be sign bit copies. */
4684 /* As we do not know which address space the pointer is referring to,
4685 we can do this only if the target does not support different pointer
4686 or address modes depending on the address space. */
4687 if (target_default_pointer_address_modes_p ()
4688 && ! POINTERS_EXTEND_UNSIGNED && GET_MODE (x) == Pmode
4689 && mode == Pmode && REG_POINTER (x))
4690 return GET_MODE_PRECISION (Pmode) - GET_MODE_PRECISION (ptr_mode) + 1;
4691 #endif
4694 unsigned int copies_for_hook = 1, copies = 1;
4695 rtx new_rtx = rtl_hooks.reg_num_sign_bit_copies (x, mode, known_x,
4696 known_mode, known_ret,
4697 &copies_for_hook);
4699 if (new_rtx)
4700 copies = cached_num_sign_bit_copies (new_rtx, mode, known_x,
4701 known_mode, known_ret);
4703 if (copies > 1 || copies_for_hook > 1)
4704 return MAX (copies, copies_for_hook);
4706 /* Else, use nonzero_bits to guess num_sign_bit_copies (see below). */
4708 break;
4710 case MEM:
4711 #ifdef LOAD_EXTEND_OP
4712 /* Some RISC machines sign-extend all loads of smaller than a word. */
4713 if (LOAD_EXTEND_OP (GET_MODE (x)) == SIGN_EXTEND)
4714 return MAX (1, ((int) bitwidth
4715 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1));
4716 #endif
4717 break;
4719 case CONST_INT:
4720 /* If the constant is negative, take its 1's complement and remask.
4721 Then see how many zero bits we have. */
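/* Editorial example, not part of GCC: for (const_int -4) in SImode,
   the masked value is 0xfffffffc; its complement is 0x3, so the
   result is 32 - floor_log2 (3) - 1 == 30 sign-bit copies.  */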
4722 nonzero = UINTVAL (x) & GET_MODE_MASK (mode);
4723 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4724 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4725 nonzero = (~nonzero) & GET_MODE_MASK (mode);
4727 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
4729 case SUBREG:
4730 /* If this is a SUBREG for a promoted object that is sign-extended
4731 and we are looking at it in a wider mode, we know that at least the
4732 high-order bits are known to be sign bit copies. */
4734 if (SUBREG_PROMOTED_VAR_P (x) && SUBREG_PROMOTED_SIGNED_P (x))
4736 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4737 known_x, known_mode, known_ret);
4738 return MAX ((int) bitwidth
4739 - (int) GET_MODE_PRECISION (GET_MODE (x)) + 1,
4740 num0);
4743 /* For a smaller object, just ignore the high bits. */
4744 if (bitwidth <= GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))))
4746 num0 = cached_num_sign_bit_copies (SUBREG_REG (x), VOIDmode,
4747 known_x, known_mode, known_ret);
4748 return MAX (1, (num0
4749 - (int) (GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x)))
4750 - bitwidth)));
4753 #ifdef WORD_REGISTER_OPERATIONS
4754 #ifdef LOAD_EXTEND_OP
4755 /* For paradoxical SUBREGs on machines where all register operations
4756 affect the entire register, just look inside. Note that we are
4757 passing MODE to the recursive call, so the number of sign bit copies
4758 will remain relative to that mode, not the inner mode. */
4760 /* This works only if loads sign extend. Otherwise, if we get a
4761 reload for the inner part, it may be loaded from the stack, and
4762 then we lose all sign bit copies that existed before the store
4763 to the stack. */
4765 if (paradoxical_subreg_p (x)
4766 && LOAD_EXTEND_OP (GET_MODE (SUBREG_REG (x))) == SIGN_EXTEND
4767 && MEM_P (SUBREG_REG (x)))
4768 return cached_num_sign_bit_copies (SUBREG_REG (x), mode,
4769 known_x, known_mode, known_ret);
4770 #endif
4771 #endif
4772 break;
4774 case SIGN_EXTRACT:
4775 if (CONST_INT_P (XEXP (x, 1)))
4776 return MAX (1, (int) bitwidth - INTVAL (XEXP (x, 1)));
4777 break;
4779 case SIGN_EXTEND:
4780 return (bitwidth - GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4781 + cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4782 known_x, known_mode, known_ret));
4784 case TRUNCATE:
4785 /* For a smaller object, just ignore the high bits. */
4786 num0 = cached_num_sign_bit_copies (XEXP (x, 0), VOIDmode,
4787 known_x, known_mode, known_ret);
4788 return MAX (1, (num0 - (int) (GET_MODE_PRECISION (GET_MODE (XEXP (x, 0)))
4789 - bitwidth)));
4791 case NOT:
4792 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4793 known_x, known_mode, known_ret);
4795 case ROTATE: case ROTATERT:
4796 /* If we are rotating left by a number of bits less than the number
4797 of sign bit copies, we can just subtract that amount from the
4798 number. */
4799 if (CONST_INT_P (XEXP (x, 1))
4800 && INTVAL (XEXP (x, 1)) >= 0
4801 && INTVAL (XEXP (x, 1)) < (int) bitwidth)
4803 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4804 known_x, known_mode, known_ret);
4805 return MAX (1, num0 - (code == ROTATE ? INTVAL (XEXP (x, 1))
4806 : (int) bitwidth - INTVAL (XEXP (x, 1))));
4808 break;
4810 case NEG:
4811 /* In general, this subtracts one sign bit copy. But if the value
4812 is known to be positive, the number of sign bit copies is the
4813 same as that of the input. Finally, if the input has just one bit
4814 that might be nonzero, all the bits are copies of the sign bit. */
4815 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4816 known_x, known_mode, known_ret);
4817 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4818 return num0 > 1 ? num0 - 1 : 1;
4820 nonzero = nonzero_bits (XEXP (x, 0), mode);
4821 if (nonzero == 1)
4822 return bitwidth;
4824 if (num0 > 1
4825 && (((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero))
4826 num0--;
4828 return num0;
4830 case IOR: case AND: case XOR:
4831 case SMIN: case SMAX: case UMIN: case UMAX:
4832 /* Logical operations will preserve the number of sign-bit copies.
4833 MIN and MAX operations always return one of the operands. */
4834 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4835 known_x, known_mode, known_ret);
4836 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4837 known_x, known_mode, known_ret);
4839 /* If num1 is clearing some of the top bits then regardless of
4840 the other term, we are guaranteed to have at least that many
4841 high-order zero bits. */
4842 if (code == AND
4843 && num1 > 1
4844 && bitwidth <= HOST_BITS_PER_WIDE_INT
4845 && CONST_INT_P (XEXP (x, 1))
4846 && (UINTVAL (XEXP (x, 1))
4847 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) == 0)
4848 return num1;
4850 /* Similarly for IOR when setting high-order bits. */
4851 if (code == IOR
4852 && num1 > 1
4853 && bitwidth <= HOST_BITS_PER_WIDE_INT
4854 && CONST_INT_P (XEXP (x, 1))
4855 && (UINTVAL (XEXP (x, 1))
4856 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4857 return num1;
4859 return MIN (num0, num1);
4861 case PLUS: case MINUS:
4862 /* For addition and subtraction, we can have a 1-bit carry. However,
4863 if we are subtracting 1 from a positive number, there will not
4864 be such a carry. Furthermore, if the positive number is known to
4865 be 0 or 1, we know the result is either -1 or 0. */
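/* Editorial example, not part of GCC: for (plus:SI x (const_int -1))
   where nonzero_bits (x) == 1 (so x is 0 or 1), the result is 0 or
   -1 and all 32 bits are copies of the sign bit.  */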
4867 if (code == PLUS && XEXP (x, 1) == constm1_rtx
4868 && bitwidth <= HOST_BITS_PER_WIDE_INT)
4870 nonzero = nonzero_bits (XEXP (x, 0), mode);
4871 if ((((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)) & nonzero) == 0)
4872 return (nonzero == 1 || nonzero == 0 ? bitwidth
4873 : bitwidth - floor_log2 (nonzero) - 1);
4876 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4877 known_x, known_mode, known_ret);
4878 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4879 known_x, known_mode, known_ret);
4880 result = MAX (1, MIN (num0, num1) - 1);
4882 return result;
4884 case MULT:
4885 /* The number of bits of the product is the sum of the number of
4886 bits of both terms. However, unless one of the terms is known
4887 to be positive, we must allow for an additional bit since negating
4888 a negative number can remove one sign bit copy. */
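/* Editorial example, not part of GCC: with bitwidth 32, num0 == 24
   and num1 == 16, the product needs at most (32 - 24) + (32 - 16)
   == 24 significant bits, leaving 32 - 24 - 16 + 32 == 8 sign-bit
   copies; if both operands might be negative, one more copy is
   given up, leaving 7.  */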
4890 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4891 known_x, known_mode, known_ret);
4892 num1 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4893 known_x, known_mode, known_ret);
4895 result = bitwidth - (bitwidth - num0) - (bitwidth - num1);
4896 if (result > 0
4897 && (bitwidth > HOST_BITS_PER_WIDE_INT
4898 || (((nonzero_bits (XEXP (x, 0), mode)
4899 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4900 && ((nonzero_bits (XEXP (x, 1), mode)
4901 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1)))
4902 != 0))))
4903 result--;
4905 return MAX (1, result);
4907 case UDIV:
4908 /* The result must be <= the first operand. If the first operand
4909 has the high bit set, we know nothing about the number of sign
4910 bit copies. */
4911 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4912 return 1;
4913 else if ((nonzero_bits (XEXP (x, 0), mode)
4914 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4915 return 1;
4916 else
4917 return cached_num_sign_bit_copies (XEXP (x, 0), mode,
4918 known_x, known_mode, known_ret);
4920 case UMOD:
4921 /* The result must be <= the second operand. If the second operand
4922 has (or just might have) the high bit set, we know nothing about
4923 the number of sign bit copies. */
4924 if (bitwidth > HOST_BITS_PER_WIDE_INT)
4925 return 1;
4926 else if ((nonzero_bits (XEXP (x, 1), mode)
4927 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4928 return 1;
4929 else
4930 return cached_num_sign_bit_copies (XEXP (x, 1), mode,
4931 known_x, known_mode, known_ret);
4933 case DIV:
4934 /* Similar to unsigned division, except that we have to worry about
4935 the case where the divisor is negative, in which case we have
4936 to add 1. */
4937 result = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4938 known_x, known_mode, known_ret);
4939 if (result > 1
4940 && (bitwidth > HOST_BITS_PER_WIDE_INT
4941 || (nonzero_bits (XEXP (x, 1), mode)
4942 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4943 result--;
4945 return result;
4947 case MOD:
4948 result = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4949 known_x, known_mode, known_ret);
4950 if (result > 1
4951 && (bitwidth > HOST_BITS_PER_WIDE_INT
4952 || (nonzero_bits (XEXP (x, 1), mode)
4953 & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0))
4954 result--;
4956 return result;
4958 case ASHIFTRT:
4959 /* Shifts by a constant add to the number of bits equal to the
4960 sign bit. */
4961 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4962 known_x, known_mode, known_ret);
4963 if (CONST_INT_P (XEXP (x, 1))
4964 && INTVAL (XEXP (x, 1)) > 0
4965 && INTVAL (XEXP (x, 1)) < GET_MODE_PRECISION (GET_MODE (x)))
4966 num0 = MIN ((int) bitwidth, num0 + INTVAL (XEXP (x, 1)));
4968 return num0;
4970 case ASHIFT:
4971 /* Left shifts destroy copies. */
4972 if (!CONST_INT_P (XEXP (x, 1))
4973 || INTVAL (XEXP (x, 1)) < 0
4974 || INTVAL (XEXP (x, 1)) >= (int) bitwidth
4975 || INTVAL (XEXP (x, 1)) >= GET_MODE_PRECISION (GET_MODE (x)))
4976 return 1;
4978 num0 = cached_num_sign_bit_copies (XEXP (x, 0), mode,
4979 known_x, known_mode, known_ret);
4980 return MAX (1, num0 - INTVAL (XEXP (x, 1)));
4982 case IF_THEN_ELSE:
4983 num0 = cached_num_sign_bit_copies (XEXP (x, 1), mode,
4984 known_x, known_mode, known_ret);
4985 num1 = cached_num_sign_bit_copies (XEXP (x, 2), mode,
4986 known_x, known_mode, known_ret);
4987 return MIN (num0, num1);
4989 case EQ: case NE: case GE: case GT: case LE: case LT:
4990 case UNEQ: case LTGT: case UNGE: case UNGT: case UNLE: case UNLT:
4991 case GEU: case GTU: case LEU: case LTU:
4992 case UNORDERED: case ORDERED:
4993 /* If the constant is negative, take its 1's complement and remask.
4994 Then see how many zero bits we have. */
4995 nonzero = STORE_FLAG_VALUE;
4996 if (bitwidth <= HOST_BITS_PER_WIDE_INT
4997 && (nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))) != 0)
4998 nonzero = (~nonzero) & GET_MODE_MASK (mode);
5000 return (nonzero == 0 ? bitwidth : bitwidth - floor_log2 (nonzero) - 1);
5002 default:
5003 break;
5006 /* If we haven't been able to figure it out by one of the above rules,
5007 see if some of the high-order bits are known to be zero. If so,
5008 count those bits and return one less than that amount. If we can't
5009 safely compute the mask for this mode, always return BITWIDTH. */
5011 bitwidth = GET_MODE_PRECISION (mode);
5012 if (bitwidth > HOST_BITS_PER_WIDE_INT)
5013 return 1;
5015 nonzero = nonzero_bits (x, mode);
5016 return nonzero & ((unsigned HOST_WIDE_INT) 1 << (bitwidth - 1))
5017 ? 1 : bitwidth - floor_log2 (nonzero) - 1;
5020 /* Calculate the rtx_cost of a single instruction. A return value of
5021 zero indicates an instruction pattern without a known cost. */
5023 int
5024 insn_rtx_cost (rtx pat, bool speed)
5026 int i, cost;
5027 rtx set;
5029 /* Extract the single set rtx from the instruction pattern.
5030 We can't use single_set since we only have the pattern. */
5031 if (GET_CODE (pat) == SET)
5032 set = pat;
5033 else if (GET_CODE (pat) == PARALLEL)
5035 set = NULL_RTX;
5036 for (i = 0; i < XVECLEN (pat, 0); i++)
5038 rtx x = XVECEXP (pat, 0, i);
5039 if (GET_CODE (x) == SET)
5041 if (set)
5042 return 0;
5043 set = x;
5046 if (!set)
5047 return 0;
5049 else
5050 return 0;
5052 cost = set_src_cost (SET_SRC (set), speed);
5053 return cost > 0 ? cost : COSTS_N_INSNS (1);
5056 /* Return an estimate of the cost of computing SEQ. */
5058 unsigned
5059 seq_cost (const rtx_insn *seq, bool speed)
5061 unsigned cost = 0;
5062 rtx set;
5064 for (; seq; seq = NEXT_INSN (seq))
5066 set = single_set (seq);
5067 if (set)
5068 cost += set_rtx_cost (set, speed);
5069 else
5070 cost++;
5073 return cost;
5076 /* Given an insn INSN and condition COND, return the condition in a
5077 canonical form to simplify testing by callers. Specifically:
5079 (1) The code will always be a comparison operation (EQ, NE, GT, etc.).
5080 (2) Both operands will be machine operands; (cc0) will have been replaced.
5081 (3) If an operand is a constant, it will be the second operand.
5082 (4) (LE x const) will be replaced with (LT x <const+1>) and similarly
5083 for GE, GEU, and LEU.
5085 If the condition cannot be understood, or is an inequality floating-point
5086 comparison which needs to be reversed, 0 will be returned.
5088 If REVERSE is nonzero, then reverse the condition prior to canonicalizing it.
5090 If EARLIEST is nonzero, it is a pointer to a place where the earliest
5091 insn used in locating the condition was found. If a replacement test
5092 of the condition is desired, it should be placed in front of that
5093 insn and we will be sure that the inputs are still valid.
5095 If WANT_REG is nonzero, we wish the condition to be relative to that
5096 register, if possible. Therefore, do not canonicalize the condition
5097 further. If ALLOW_CC_MODE is nonzero, allow the condition returned
5098 to be a compare to a CC mode register.
5100 If VALID_AT_INSN_P, the condition must be valid at both *EARLIEST
5101 and at INSN. */
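/* Editorial examples, not part of GCC: rule (3) turns
   (eq (const_int 0) (reg)) into (eq (reg) (const_int 0)); rule (4)
   turns (le (reg:SI r) (const_int 4)) into
   (lt (reg:SI r) (const_int 5)) and (geu (reg:SI r) (const_int 1))
   into (gtu (reg:SI r) (const_int 0)).  */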
5103 rtx
5104 canonicalize_condition (rtx_insn *insn, rtx cond, int reverse,
5105 rtx_insn **earliest,
5106 rtx want_reg, int allow_cc_mode, int valid_at_insn_p)
5108 enum rtx_code code;
5109 rtx_insn *prev = insn;
5110 const_rtx set;
5111 rtx tem;
5112 rtx op0, op1;
5113 int reverse_code = 0;
5114 machine_mode mode;
5115 basic_block bb = BLOCK_FOR_INSN (insn);
5117 code = GET_CODE (cond);
5118 mode = GET_MODE (cond);
5119 op0 = XEXP (cond, 0);
5120 op1 = XEXP (cond, 1);
5122 if (reverse)
5123 code = reversed_comparison_code (cond, insn);
5124 if (code == UNKNOWN)
5125 return 0;
5127 if (earliest)
5128 *earliest = insn;
5130 /* If we are comparing a register with zero, see if the register is set
5131 in the previous insn to a COMPARE or a comparison operation. Perform
5132 the same tests as a function of STORE_FLAG_VALUE as find_comparison_args
5133 in cse.c */
5135 while ((GET_RTX_CLASS (code) == RTX_COMPARE
5136 || GET_RTX_CLASS (code) == RTX_COMM_COMPARE)
5137 && op1 == CONST0_RTX (GET_MODE (op0))
5138 && op0 != want_reg)
5140 /* Set nonzero when we find something of interest. */
5141 rtx x = 0;
5143 #ifdef HAVE_cc0
5144 /* If comparison with cc0, import actual comparison from compare
5145 insn. */
5146 if (op0 == cc0_rtx)
5148 if ((prev = prev_nonnote_insn (prev)) == 0
5149 || !NONJUMP_INSN_P (prev)
5150 || (set = single_set (prev)) == 0
5151 || SET_DEST (set) != cc0_rtx)
5152 return 0;
5154 op0 = SET_SRC (set);
5155 op1 = CONST0_RTX (GET_MODE (op0));
5156 if (earliest)
5157 *earliest = prev;
5159 #endif
5161 /* If this is a COMPARE, pick up the two things being compared. */
5162 if (GET_CODE (op0) == COMPARE)
5164 op1 = XEXP (op0, 1);
5165 op0 = XEXP (op0, 0);
5166 continue;
5168 else if (!REG_P (op0))
5169 break;
5171 /* Go back to the previous insn. Stop if it is not an INSN. We also
5172 stop if it isn't a single set or if it has a REG_INC note because
5173 we don't want to bother dealing with it. */
5175 prev = prev_nonnote_nondebug_insn (prev);
5177 if (prev == 0
5178 || !NONJUMP_INSN_P (prev)
5179 || FIND_REG_INC_NOTE (prev, NULL_RTX)
5180 /* In cfglayout mode, there do not have to be labels at the
5181 beginning of a block, or jumps at the end, so the previous
5182 conditions would not stop us when we reach bb boundary. */
5183 || BLOCK_FOR_INSN (prev) != bb)
5184 break;
5186 set = set_of (op0, prev);
5188 if (set
5189 && (GET_CODE (set) != SET
5190 || !rtx_equal_p (SET_DEST (set), op0)))
5191 break;
5193 /* If this is setting OP0, get what it sets it to if it looks
5194 relevant. */
5195 if (set)
5197 machine_mode inner_mode = GET_MODE (SET_DEST (set));
5198 #ifdef FLOAT_STORE_FLAG_VALUE
5199 REAL_VALUE_TYPE fsfv;
5200 #endif
5202 /* ??? We may not combine comparisons done in a CCmode with
5203 comparisons not done in a CCmode. This is to aid targets
5204 like Alpha that have an IEEE compliant EQ instruction, and
5205 a non-IEEE compliant BEQ instruction. The use of CCmode is
5206 actually artificial, simply to prevent the combination, but
5207 should not affect other platforms.
5209 However, we must allow VOIDmode comparisons to match either
5210 CCmode or non-CCmode comparison, because some ports have
5211 modeless comparisons inside branch patterns.
5213 ??? This mode check should perhaps look more like the mode check
5214 in simplify_comparison in combine. */
5215 if (((GET_MODE_CLASS (mode) == MODE_CC)
5216 != (GET_MODE_CLASS (inner_mode) == MODE_CC))
5217 && mode != VOIDmode
5218 && inner_mode != VOIDmode)
5219 break;
5220 if (GET_CODE (SET_SRC (set)) == COMPARE
5221 || (((code == NE
5222 || (code == LT
5223 && val_signbit_known_set_p (inner_mode,
5224 STORE_FLAG_VALUE))
5225 #ifdef FLOAT_STORE_FLAG_VALUE
5226 || (code == LT
5227 && SCALAR_FLOAT_MODE_P (inner_mode)
5228 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5229 REAL_VALUE_NEGATIVE (fsfv)))
5230 #endif
5232 && COMPARISON_P (SET_SRC (set))))
5233 x = SET_SRC (set);
5234 else if (((code == EQ
5235 || (code == GE
5236 && val_signbit_known_set_p (inner_mode,
5237 STORE_FLAG_VALUE))
5238 #ifdef FLOAT_STORE_FLAG_VALUE
5239 || (code == GE
5240 && SCALAR_FLOAT_MODE_P (inner_mode)
5241 && (fsfv = FLOAT_STORE_FLAG_VALUE (inner_mode),
5242 REAL_VALUE_NEGATIVE (fsfv)))
5243 #endif
5245 && COMPARISON_P (SET_SRC (set)))
5247 reverse_code = 1;
5248 x = SET_SRC (set);
5250 else if ((code == EQ || code == NE)
5251 && GET_CODE (SET_SRC (set)) == XOR)
5252 /* Handle sequences like:
5254 (set op0 (xor X Y))
5255 ...(eq|ne op0 (const_int 0))...
5257 in which case:
5259 (eq op0 (const_int 0)) reduces to (eq X Y)
5260 (ne op0 (const_int 0)) reduces to (ne X Y)
5262 This is the form used by MIPS16, for example. */
5263 x = SET_SRC (set);
5264 else
5265 break;
5268 else if (reg_set_p (op0, prev))
5269 /* If this sets OP0, but not directly, we have to give up. */
5270 break;
5272 if (x)
5274 /* If the caller is expecting the condition to be valid at INSN,
5275 make sure X doesn't change before INSN. */
5276 if (valid_at_insn_p)
5277 if (modified_in_p (x, prev) || modified_between_p (x, prev, insn))
5278 break;
5279 if (COMPARISON_P (x))
5280 code = GET_CODE (x);
5281 if (reverse_code)
5283 code = reversed_comparison_code (x, prev);
5284 if (code == UNKNOWN)
5285 return 0;
5286 reverse_code = 0;
5289 op0 = XEXP (x, 0), op1 = XEXP (x, 1);
5290 if (earliest)
5291 *earliest = prev;
5295 /* If constant is first, put it last. */
5296 if (CONSTANT_P (op0))
5297 code = swap_condition (code), tem = op0, op0 = op1, op1 = tem;
5299 /* If OP0 is the result of a comparison, we weren't able to find what
5300 was really being compared, so fail. */
5301 if (!allow_cc_mode
5302 && GET_MODE_CLASS (GET_MODE (op0)) == MODE_CC)
5303 return 0;
5305 /* Canonicalize any ordered comparison with integers involving equality
5306 if we can do computations in the relevant mode and we do not
5307 overflow. */
5309 if (GET_MODE_CLASS (GET_MODE (op0)) != MODE_CC
5310 && CONST_INT_P (op1)
5311 && GET_MODE (op0) != VOIDmode
5312 && GET_MODE_PRECISION (GET_MODE (op0)) <= HOST_BITS_PER_WIDE_INT)
5314 HOST_WIDE_INT const_val = INTVAL (op1);
5315 unsigned HOST_WIDE_INT uconst_val = const_val;
5316 unsigned HOST_WIDE_INT max_val
5317 = (unsigned HOST_WIDE_INT) GET_MODE_MASK (GET_MODE (op0));
5319 switch (code)
5321 case LE:
5322 if ((unsigned HOST_WIDE_INT) const_val != max_val >> 1)
5323 code = LT, op1 = gen_int_mode (const_val + 1, GET_MODE (op0));
5324 break;
5326 /* When cross-compiling, const_val might be sign-extended from
5327 BITS_PER_WORD to HOST_BITS_PER_WIDE_INT */
5328 case GE:
5329 if ((const_val & max_val)
5330 != ((unsigned HOST_WIDE_INT) 1
5331 << (GET_MODE_PRECISION (GET_MODE (op0)) - 1)))
5332 code = GT, op1 = gen_int_mode (const_val - 1, GET_MODE (op0));
5333 break;
5335 case LEU:
5336 if (uconst_val < max_val)
5337 code = LTU, op1 = gen_int_mode (uconst_val + 1, GET_MODE (op0));
5338 break;
5340 case GEU:
5341 if (uconst_val != 0)
5342 code = GTU, op1 = gen_int_mode (uconst_val - 1, GET_MODE (op0));
5343 break;
5345 default:
5346 break;
5350 /* Never return CC0; return zero instead. */
5351 if (CC0_P (op0))
5352 return 0;
5354 return gen_rtx_fmt_ee (code, VOIDmode, op0, op1);
5357 /* Given a jump insn JUMP, return the condition that will cause it to branch
5358 to its JUMP_LABEL. If the condition cannot be understood, or is an
5359 inequality floating-point comparison which needs to be reversed, 0 will
5360 be returned.
5362 If EARLIEST is nonzero, it is a pointer to a place where the earliest
5363 insn used in locating the condition was found. If a replacement test
5364 of the condition is desired, it should be placed in front of that
5365 insn and we will be sure that the inputs are still valid. If EARLIEST
5366 is null, the returned condition will be valid at INSN.
5368 If ALLOW_CC_MODE is nonzero, allow the condition returned to be a
5369 compare CC mode register.
5371 VALID_AT_INSN_P is the same as for canonicalize_condition. */
5373 rtx
5374 get_condition (rtx_insn *jump, rtx_insn **earliest, int allow_cc_mode,
5375 int valid_at_insn_p)
5377 rtx cond;
5378 int reverse;
5379 rtx set;
5381 /* If this is not a standard conditional jump, we can't parse it. */
5382 if (!JUMP_P (jump)
5383 || ! any_condjump_p (jump))
5384 return 0;
5385 set = pc_set (jump);
5387 cond = XEXP (SET_SRC (set), 0);
5389 /* If this branches to JUMP_LABEL when the condition is false, reverse
5390 the condition. */
5391 reverse
5392 = GET_CODE (XEXP (SET_SRC (set), 2)) == LABEL_REF
5393 && LABEL_REF_LABEL (XEXP (SET_SRC (set), 2)) == JUMP_LABEL (jump);
5395 return canonicalize_condition (jump, cond, reverse, earliest, NULL_RTX,
5396 allow_cc_mode, valid_at_insn_p);
5399 /* Initialize the table NUM_SIGN_BIT_COPIES_IN_REP based on
5400 TARGET_MODE_REP_EXTENDED.
5402 Note that we assume that the property of
5403 TARGET_MODE_REP_EXTENDED(B, C) is sticky to the integral modes
5404 narrower than mode B. I.e., if A is a mode narrower than B then in
5405 order to be able to operate on it in mode B, mode A needs to
5406 satisfy the requirements set by the representation of mode B. */
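/* Editorial example, not part of GCC: on a target whose
   TARGET_MODE_REP_EXTENDED (SImode, DImode) is SIGN_EXTEND (MIPS64
   is the usual example), num_sign_bit_copies_in_rep[DImode][SImode]
   ends up as 64 - 32 == 32, i.e. every DImode bit above the SImode
   value must be a copy of the sign bit.  */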
5408 static void
5409 init_num_sign_bit_copies_in_rep (void)
5411 machine_mode mode, in_mode;
5413 for (in_mode = GET_CLASS_NARROWEST_MODE (MODE_INT); in_mode != VOIDmode;
5414 in_mode = GET_MODE_WIDER_MODE (mode))
5415 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT); mode != in_mode;
5416 mode = GET_MODE_WIDER_MODE (mode))
5418 machine_mode i;
5420 /* Currently, it is assumed that TARGET_MODE_REP_EXTENDED
5421 extends to the next widest mode. */
5422 gcc_assert (targetm.mode_rep_extended (mode, in_mode) == UNKNOWN
5423 || GET_MODE_WIDER_MODE (mode) == in_mode);
5425 /* We are in in_mode. Count how many bits outside of mode
5426 have to be copies of the sign-bit. */
5427 for (i = mode; i != in_mode; i = GET_MODE_WIDER_MODE (i))
5429 machine_mode wider = GET_MODE_WIDER_MODE (i);
5431 if (targetm.mode_rep_extended (i, wider) == SIGN_EXTEND
5432 /* We can only check sign-bit copies starting from the
5433 top-bit. In order to be able to check the bits we
5434 have already seen we pretend that subsequent bits
5435 have to be sign-bit copies too. */
5436 || num_sign_bit_copies_in_rep [in_mode][mode])
5437 num_sign_bit_copies_in_rep [in_mode][mode]
5438 += GET_MODE_PRECISION (wider) - GET_MODE_PRECISION (i);
5443 /* Suppose that truncation from the machine mode of X to MODE is not a
5444 no-op. See if there is anything special about X so that we can
5445 assume it already contains a truncated value of MODE. */
5447 bool
5448 truncated_to_mode (machine_mode mode, const_rtx x)
5450 /* This register has already been used in MODE without explicit
5451 truncation. */
5452 if (REG_P (x) && rtl_hooks.reg_truncated_to_mode (mode, x))
5453 return true;
5455 /* See if we already satisfy the requirements of MODE. If yes we
5456 can just switch to MODE. */
5457 if (num_sign_bit_copies_in_rep[GET_MODE (x)][mode]
5458 && (num_sign_bit_copies (x, GET_MODE (x))
5459 >= num_sign_bit_copies_in_rep[GET_MODE (x)][mode] + 1))
5460 return true;
5462 return false;
5465 /* Return true if RTX code CODE has a single sequence of zero or more
5466 "e" operands and no rtvec operands. Initialize its rtx_all_subrtx_bounds
5467 entry in that case. */
5469 static bool
5470 setup_reg_subrtx_bounds (unsigned int code)
5472 const char *format = GET_RTX_FORMAT ((enum rtx_code) code);
5473 unsigned int i = 0;
5474 for (; format[i] != 'e'; ++i)
5476 if (!format[i])
5477 /* No subrtxes. Leave start and count as 0. */
5478 return true;
5479 if (format[i] == 'E' || format[i] == 'V')
5480 return false;
5483 /* Record the sequence of 'e's. */
5484 rtx_all_subrtx_bounds[code].start = i;
5485 do
5486 ++i;
5487 while (format[i] == 'e');
5488 rtx_all_subrtx_bounds[code].count = i - rtx_all_subrtx_bounds[code].start;
5489 /* rtl-iter.h relies on this. */
5490 gcc_checking_assert (rtx_all_subrtx_bounds[code].count <= 3);
5492 for (; format[i]; ++i)
5493 if (format[i] == 'E' || format[i] == 'V' || format[i] == 'e')
5494 return false;
5496 return true;
5499 /* Initialize non_rtx_starting_operands, which is used to speed up
5500 for_each_rtx, and rtx_all_subrtx_bounds. */
5501 void
5502 init_rtlanal (void)
5504 int i;
5505 for (i = 0; i < NUM_RTX_CODE; i++)
5507 const char *format = GET_RTX_FORMAT (i);
5508 const char *first = strpbrk (format, "eEV");
5509 non_rtx_starting_operands[i] = first ? first - format : -1;
5510 if (!setup_reg_subrtx_bounds (i))
5511 rtx_all_subrtx_bounds[i].count = UCHAR_MAX;
5512 if (GET_RTX_CLASS (i) != RTX_CONST_OBJ)
5513 rtx_nonconst_subrtx_bounds[i] = rtx_all_subrtx_bounds[i];
5516 init_num_sign_bit_copies_in_rep ();
5519 /* Check whether this is a constant pool constant. */
5520 bool
5521 constant_pool_constant_p (rtx x)
5523 x = avoid_constant_pool_reference (x);
5524 return CONST_DOUBLE_P (x);
5527 /* If M is a bitmask that selects a field of low-order bits within an item but
5528 not the entire word, return the length of the field. Return -1 otherwise.
5529 M is used in machine mode MODE. */
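/* Editorial examples, not part of GCC: low_bitmask_len (SImode, 0x3f)
   is 6 and low_bitmask_len (SImode, 0x7f) is 7, while
   low_bitmask_len (SImode, 0x70) is -1 because the field does not
   start at bit 0.  */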
5531 int
5532 low_bitmask_len (machine_mode mode, unsigned HOST_WIDE_INT m)
5534 if (mode != VOIDmode)
5536 if (GET_MODE_PRECISION (mode) > HOST_BITS_PER_WIDE_INT)
5537 return -1;
5538 m &= GET_MODE_MASK (mode);
5541 return exact_log2 (m + 1);
5544 /* Return the mode of MEM's address. */
5546 machine_mode
5547 get_address_mode (rtx mem)
5549 machine_mode mode;
5551 gcc_assert (MEM_P (mem));
5552 mode = GET_MODE (XEXP (mem, 0));
5553 if (mode != VOIDmode)
5554 return mode;
5555 return targetm.addr_space.address_mode (MEM_ADDR_SPACE (mem));
5558 /* Split up a CONST_DOUBLE or integer constant rtx
5559 into two rtx's for single words,
5560 storing in *FIRST the word that comes first in memory in the target
5561 and in *SECOND the other.
5563 TODO: This function needs to be rewritten to work on any size
5564 integer. */
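/* Editorial example, not part of GCC: assuming 32-bit words and a
   64-bit HOST_WIDE_INT, splitting (const_int 0x123456789) on a
   little-endian target gives *FIRST == (const_int 0x23456789) and
   *SECOND == (const_int 0x1); a big-endian target swaps the two.  */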
5566 void
5567 split_double (rtx value, rtx *first, rtx *second)
5569 if (CONST_INT_P (value))
5571 if (HOST_BITS_PER_WIDE_INT >= (2 * BITS_PER_WORD))
5573 /* In this case the CONST_INT holds both target words.
5574 Extract the bits from it into two word-sized pieces.
5575 Sign extend each half to HOST_WIDE_INT. */
5576 unsigned HOST_WIDE_INT low, high;
5577 unsigned HOST_WIDE_INT mask, sign_bit, sign_extend;
5578 unsigned bits_per_word = BITS_PER_WORD;
5580 /* Set sign_bit to the most significant bit of a word. */
5581 sign_bit = 1;
5582 sign_bit <<= bits_per_word - 1;
5584 /* Set mask so that all bits of the word are set. We could
5585 have used 1 << BITS_PER_WORD instead of basing the
5586 calculation on sign_bit. However, on machines where
5587 HOST_BITS_PER_WIDE_INT == BITS_PER_WORD, it could cause a
5588 compiler warning, even though the code would never be
5589 executed. */
5590 mask = sign_bit << 1;
5591 mask--;
5593 /* Set sign_extend as any remaining bits. */
5594 sign_extend = ~mask;
5596 /* Pick the lower word and sign-extend it. */
5597 low = INTVAL (value);
5598 low &= mask;
5599 if (low & sign_bit)
5600 low |= sign_extend;
5602 /* Pick the higher word, shifted to the least significant
5603 bits, and sign-extend it. */
5604 high = INTVAL (value);
5605 high >>= bits_per_word - 1;
5606 high >>= 1;
5607 high &= mask;
5608 if (high & sign_bit)
5609 high |= sign_extend;
5611 /* Store the words in the target machine order. */
5612 if (WORDS_BIG_ENDIAN)
5614 *first = GEN_INT (high);
5615 *second = GEN_INT (low);
5617 else
5619 *first = GEN_INT (low);
5620 *second = GEN_INT (high);
5623 else
5625 /* The rule for using CONST_INT for a wider mode
5626 is that we regard the value as signed.
5627 So sign-extend it. */
5628 rtx high = (INTVAL (value) < 0 ? constm1_rtx : const0_rtx);
5629 if (WORDS_BIG_ENDIAN)
5631 *first = high;
5632 *second = value;
5634 else
5636 *first = value;
5637 *second = high;
5641 else if (GET_CODE (value) == CONST_WIDE_INT)
5643 /* All of this is scary code and needs to be converted to
5644 properly work with any size integer. */
5645 gcc_assert (CONST_WIDE_INT_NUNITS (value) == 2);
5646 if (WORDS_BIG_ENDIAN)
5648 *first = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
5649 *second = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
5651 else
5653 *first = GEN_INT (CONST_WIDE_INT_ELT (value, 0));
5654 *second = GEN_INT (CONST_WIDE_INT_ELT (value, 1));
5657 else if (!CONST_DOUBLE_P (value))
5659 if (WORDS_BIG_ENDIAN)
5661 *first = const0_rtx;
5662 *second = value;
5664 else
5666 *first = value;
5667 *second = const0_rtx;
5670 else if (GET_MODE (value) == VOIDmode
5671 /* This is the old way we did CONST_DOUBLE integers. */
5672 || GET_MODE_CLASS (GET_MODE (value)) == MODE_INT)
5674 /* In an integer, the words are defined as most and least significant.
5675 So order them by the target's convention. */
5676 if (WORDS_BIG_ENDIAN)
5678 *first = GEN_INT (CONST_DOUBLE_HIGH (value));
5679 *second = GEN_INT (CONST_DOUBLE_LOW (value));
5681 else
5683 *first = GEN_INT (CONST_DOUBLE_LOW (value));
5684 *second = GEN_INT (CONST_DOUBLE_HIGH (value));
5687 else
5689 REAL_VALUE_TYPE r;
5690 long l[2];
5691 REAL_VALUE_FROM_CONST_DOUBLE (r, value);
5693 /* Note, this converts the REAL_VALUE_TYPE to the target's
5694 format, splits up the floating point double and outputs
5695 exactly 32 bits of it into each of l[0] and l[1] --
5696 not necessarily BITS_PER_WORD bits. */
5697 REAL_VALUE_TO_TARGET_DOUBLE (r, l);
5699 /* If 32 bits is an entire word for the target, but not for the host,
5700 then sign-extend on the host so that the number will look the same
5701 way on the host that it would on the target. See for instance
5702 simplify_unary_operation. The #if is needed to avoid compiler
5703 warnings. */
5705 #if HOST_BITS_PER_LONG > 32
5706 if (BITS_PER_WORD < HOST_BITS_PER_LONG && BITS_PER_WORD == 32)
5708 if (l[0] & ((long) 1 << 31))
5709 l[0] |= ((long) (-1) << 32);
5710 if (l[1] & ((long) 1 << 31))
5711 l[1] |= ((long) (-1) << 32);
5713 #endif
5715 *first = GEN_INT (l[0]);
5716 *second = GEN_INT (l[1]);
5720 /* Return true if X is a sign_extract or zero_extract from the least
5721 significant bit. */
5723 static bool
5724 lsb_bitfield_op_p (rtx x)
5726 if (GET_RTX_CLASS (GET_CODE (x)) == RTX_BITFIELD_OPS)
5728 machine_mode mode = GET_MODE (XEXP (x, 0));
5729 HOST_WIDE_INT len = INTVAL (XEXP (x, 1));
5730 HOST_WIDE_INT pos = INTVAL (XEXP (x, 2));
5732 return (pos == (BITS_BIG_ENDIAN ? GET_MODE_PRECISION (mode) - len : 0));
5734 return false;
5737 /* Strip outer address "mutations" from LOC and return a pointer to the
5738 inner value. If OUTER_CODE is nonnull, store the code of the innermost
5739 stripped expression there.
5741 "Mutations" either convert between modes or apply some kind of
5742 extension, truncation or alignment. */
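/* Editorial example, not part of GCC: given
   (and:DI (subreg:DI (plus:SI (reg:SI r) (const_int 4)) 0)
	   (const_int -8)),
   the AND (an alignment mask) and the lowpart SUBREG are stripped,
   a pointer to the location of the inner PLUS is returned, and
   *OUTER_CODE is set to SUBREG, the innermost code stripped.  */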
5744 rtx *
5745 strip_address_mutations (rtx *loc, enum rtx_code *outer_code)
5747 for (;;)
5749 enum rtx_code code = GET_CODE (*loc);
5750 if (GET_RTX_CLASS (code) == RTX_UNARY)
5751 /* Things like SIGN_EXTEND, ZERO_EXTEND and TRUNCATE can be
5752 used to convert between pointer sizes. */
5753 loc = &XEXP (*loc, 0);
5754 else if (lsb_bitfield_op_p (*loc))
5755 /* A [SIGN|ZERO]_EXTRACT from the least significant bit effectively
5756 acts as a combined truncation and extension. */
5757 loc = &XEXP (*loc, 0);
5758 else if (code == AND && CONST_INT_P (XEXP (*loc, 1)))
5759 /* (and ... (const_int -X)) is used to align to X bytes. */
5760 loc = &XEXP (*loc, 0);
5761 else if (code == SUBREG
5762 && !OBJECT_P (SUBREG_REG (*loc))
5763 && subreg_lowpart_p (*loc))
5764 /* (subreg (operator ...) ...) inside an AND is used for mode
5765 conversion too. */
5766 loc = &SUBREG_REG (*loc);
5767 else
5768 return loc;
5769 if (outer_code)
5770 *outer_code = code;
5774 /* Return true if CODE applies some kind of scale. The scaled value is
5775 the first operand and the scale is the second. */
5777 static bool
5778 binary_scale_code_p (enum rtx_code code)
5780 return (code == MULT
5781 || code == ASHIFT
5782 /* Needed by ARM targets. */
5783 || code == ASHIFTRT
5784 || code == LSHIFTRT
5785 || code == ROTATE
5786 || code == ROTATERT);
5789 /* If *INNER can be interpreted as a base, return a pointer to the inner term
5790 (see address_info). Return null otherwise. */
5792 static rtx *
5793 get_base_term (rtx *inner)
5795 if (GET_CODE (*inner) == LO_SUM)
5796 inner = strip_address_mutations (&XEXP (*inner, 0));
5797 if (REG_P (*inner)
5798 || MEM_P (*inner)
5799 || GET_CODE (*inner) == SUBREG
5800 || GET_CODE (*inner) == SCRATCH)
5801 return inner;
5802 return 0;
5805 /* If *INNER can be interpreted as an index, return a pointer to the inner term
5806 (see address_info). Return null otherwise. */
5808 static rtx *
5809 get_index_term (rtx *inner)
5811 /* At present, only constant scales are allowed. */
5812 if (binary_scale_code_p (GET_CODE (*inner)) && CONSTANT_P (XEXP (*inner, 1)))
5813 inner = strip_address_mutations (&XEXP (*inner, 0));
5814 if (REG_P (*inner)
5815 || MEM_P (*inner)
5816 || GET_CODE (*inner) == SUBREG)
5817 return inner;
5818 return 0;
5821 /* Set the segment part of address INFO to LOC, given that INNER is the
5822 unmutated value. */
5824 static void
5825 set_address_segment (struct address_info *info, rtx *loc, rtx *inner)
5827 gcc_assert (!info->segment);
5828 info->segment = loc;
5829 info->segment_term = inner;
5832 /* Set the base part of address INFO to LOC, given that INNER is the
5833 unmutated value. */
5835 static void
5836 set_address_base (struct address_info *info, rtx *loc, rtx *inner)
5838 gcc_assert (!info->base);
5839 info->base = loc;
5840 info->base_term = inner;
5843 /* Set the index part of address INFO to LOC, given that INNER is the
5844 unmutated value. */
5846 static void
5847 set_address_index (struct address_info *info, rtx *loc, rtx *inner)
5849 gcc_assert (!info->index);
5850 info->index = loc;
5851 info->index_term = inner;
5854 /* Set the displacement part of address INFO to LOC, given that INNER
5855 is the constant term. */
5857 static void
5858 set_address_disp (struct address_info *info, rtx *loc, rtx *inner)
5860 gcc_assert (!info->disp);
5861 info->disp = loc;
5862 info->disp_term = inner;
5865 /* INFO->INNER describes a {PRE,POST}_{INC,DEC} address. Set up the
5866 rest of INFO accordingly. */
5868 static void
5869 decompose_incdec_address (struct address_info *info)
5871 info->autoinc_p = true;
5873 rtx *base = &XEXP (*info->inner, 0);
5874 set_address_base (info, base, base);
5875 gcc_checking_assert (info->base == info->base_term);
5877 /* These addresses are only valid when the size of the addressed
5878 value is known. */
5879 gcc_checking_assert (info->mode != VOIDmode);
5882 /* INFO->INNER describes a {PRE,POST}_MODIFY address. Set up the rest
5883 of INFO accordingly. */
5885 static void
5886 decompose_automod_address (struct address_info *info)
5888 info->autoinc_p = true;
5890 rtx *base = &XEXP (*info->inner, 0);
5891 set_address_base (info, base, base);
5892 gcc_checking_assert (info->base == info->base_term);
5894 rtx plus = XEXP (*info->inner, 1);
5895 gcc_assert (GET_CODE (plus) == PLUS);
5897 info->base_term2 = &XEXP (plus, 0);
5898 gcc_checking_assert (rtx_equal_p (*info->base_term, *info->base_term2));
5900 rtx *step = &XEXP (plus, 1);
5901 rtx *inner_step = strip_address_mutations (step);
5902 if (CONSTANT_P (*inner_step))
5903 set_address_disp (info, step, inner_step);
5904 else
5905 set_address_index (info, step, inner_step);
5908 /* Treat *LOC as a tree of PLUS operands and store pointers to the summed
5909 values in [PTR, END). Return a pointer to the end of the used array. */
5911 static rtx **
5912 extract_plus_operands (rtx *loc, rtx **ptr, rtx **end)
5914 rtx x = *loc;
5915 if (GET_CODE (x) == PLUS)
5917 ptr = extract_plus_operands (&XEXP (x, 0), ptr, end);
5918 ptr = extract_plus_operands (&XEXP (x, 1), ptr, end);
5920 else
5922 gcc_assert (ptr != end);
5923 *ptr++ = loc;
5925 return ptr;
5928 /* Evaluate the likelihood of X being a base or index value, returning
5929 positive if it is likely to be a base, negative if it is likely to be
5930 an index, and 0 if we can't tell. Make the magnitude of the return
5931 value reflect the amount of confidence we have in the answer.
5933 MODE, AS, OUTER_CODE and INDEX_CODE are as for ok_for_base_p_1. */
5935 static int
5936 baseness (rtx x, machine_mode mode, addr_space_t as,
5937 enum rtx_code outer_code, enum rtx_code index_code)
5939 /* Believe *_POINTER unless the address shape requires otherwise. */
5940 if (REG_P (x) && REG_POINTER (x))
5941 return 2;
5942 if (MEM_P (x) && MEM_POINTER (x))
5943 return 2;
5945 if (REG_P (x) && HARD_REGISTER_P (x))
5947 /* X is a hard register. If it only fits one of the base
5948 or index classes, choose that interpretation. */
5949 int regno = REGNO (x);
5950 bool base_p = ok_for_base_p_1 (regno, mode, as, outer_code, index_code);
5951 bool index_p = REGNO_OK_FOR_INDEX_P (regno);
5952 if (base_p != index_p)
5953 return base_p ? 1 : -1;
5955 return 0;
5958 /* INFO->INNER describes a normal, non-automodified address.
5959 Fill in the rest of INFO accordingly. */
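/* Editorial example, not part of GCC: for the address
   (plus:SI (reg:SI r1) (const_int 16)), the REG term is recorded as
   the base and the CONST_INT as the displacement; for
   (plus:SI (reg:SI r1) (mult:SI (reg:SI r2) (const_int 4))) the MULT
   term is recorded as the index and R1 as the base.  */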
5961 static void
5962 decompose_normal_address (struct address_info *info)
5964 /* Treat the address as the sum of up to four values. */
5965 rtx *ops[4];
5966 size_t n_ops = extract_plus_operands (info->inner, ops,
5967 ops + ARRAY_SIZE (ops)) - ops;
5969 /* If there is more than one component, any base component is in a PLUS. */
5970 if (n_ops > 1)
5971 info->base_outer_code = PLUS;
5973 /* Try to classify each sum operand now. Leave those that could be
5974 either a base or an index in OPS. */
5975 rtx *inner_ops[4];
5976 size_t out = 0;
5977 for (size_t in = 0; in < n_ops; ++in)
5979 rtx *loc = ops[in];
5980 rtx *inner = strip_address_mutations (loc);
5981 if (CONSTANT_P (*inner))
5982 set_address_disp (info, loc, inner);
5983 else if (GET_CODE (*inner) == UNSPEC)
5984 set_address_segment (info, loc, inner);
5985 else
5987 /* The only other possibilities are a base or an index. */
5988 rtx *base_term = get_base_term (inner);
5989 rtx *index_term = get_index_term (inner);
5990 gcc_assert (base_term || index_term);
5991 if (!base_term)
5992 set_address_index (info, loc, index_term);
5993 else if (!index_term)
5994 set_address_base (info, loc, base_term);
5995 else
5997 gcc_assert (base_term == index_term);
5998 ops[out] = loc;
5999 inner_ops[out] = base_term;
6000 ++out;
6005 /* Classify the remaining OPS members as bases and indexes. */
6006 if (out == 1)
6008 /* If we haven't seen a base or an index yet, assume that this is
6009 the base. If we were confident that another term was the base
6010 or index, treat the remaining operand as the other kind. */
6011 if (!info->base)
6012 set_address_base (info, ops[0], inner_ops[0]);
6013 else
6014 set_address_index (info, ops[0], inner_ops[0]);
6016 else if (out == 2)
6018 /* In the event of a tie, assume the base comes first. */
6019 if (baseness (*inner_ops[0], info->mode, info->as, PLUS,
6020 GET_CODE (*ops[1]))
6021 >= baseness (*inner_ops[1], info->mode, info->as, PLUS,
6022 GET_CODE (*ops[0])))
6024 set_address_base (info, ops[0], inner_ops[0]);
6025 set_address_index (info, ops[1], inner_ops[1]);
6027 else
6029 set_address_base (info, ops[1], inner_ops[1]);
6030 set_address_index (info, ops[0], inner_ops[0]);
6033 else
6034 gcc_assert (out == 0);
6037 /* Describe address *LOC in *INFO. MODE is the mode of the addressed value,
6038 or VOIDmode if not known. AS is the address space associated with LOC.
6039 OUTER_CODE is MEM if *LOC is a MEM address and ADDRESS otherwise. */
6041 void
6042 decompose_address (struct address_info *info, rtx *loc, machine_mode mode,
6043 addr_space_t as, enum rtx_code outer_code)
6045 memset (info, 0, sizeof (*info));
6046 info->mode = mode;
6047 info->as = as;
6048 info->addr_outer_code = outer_code;
6049 info->outer = loc;
6050 info->inner = strip_address_mutations (loc, &outer_code);
6051 info->base_outer_code = outer_code;
6052 switch (GET_CODE (*info->inner))
6054 case PRE_DEC:
6055 case PRE_INC:
6056 case POST_DEC:
6057 case POST_INC:
6058 decompose_incdec_address (info);
6059 break;
6061 case PRE_MODIFY:
6062 case POST_MODIFY:
6063 decompose_automod_address (info);
6064 break;
6066 default:
6067 decompose_normal_address (info);
6068 break;
6072 /* Describe address operand LOC in INFO. */
6074 void
6075 decompose_lea_address (struct address_info *info, rtx *loc)
6077 decompose_address (info, loc, VOIDmode, ADDR_SPACE_GENERIC, ADDRESS);
6080 /* Describe the address of MEM X in INFO. */
6082 void
6083 decompose_mem_address (struct address_info *info, rtx x)
6085 gcc_assert (MEM_P (x));
6086 decompose_address (info, &XEXP (x, 0), GET_MODE (x),
6087 MEM_ADDR_SPACE (x), MEM);
6090 /* Update INFO after a change to the address it describes. */
6092 void
6093 update_address (struct address_info *info)
6095 decompose_address (info, info->outer, info->mode, info->as,
6096 info->addr_outer_code);
6099 /* Return the scale applied to *INFO->INDEX_TERM, or 0 if the index is
6100 more complicated than that. */
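/* Editorial examples, not part of GCC: an index of
   (mult (reg) (const_int 4)) has scale 4, (ashift (reg) (const_int 3))
   has scale 8, a bare (reg) has scale 1, and anything else yields 0.  */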
6102 HOST_WIDE_INT
6103 get_index_scale (const struct address_info *info)
6105 rtx index = *info->index;
6106 if (GET_CODE (index) == MULT
6107 && CONST_INT_P (XEXP (index, 1))
6108 && info->index_term == &XEXP (index, 0))
6109 return INTVAL (XEXP (index, 1));
6111 if (GET_CODE (index) == ASHIFT
6112 && CONST_INT_P (XEXP (index, 1))
6113 && info->index_term == &XEXP (index, 0))
6114 return (HOST_WIDE_INT) 1 << INTVAL (XEXP (index, 1));
6116 if (info->index == info->index_term)
6117 return 1;
6119 return 0;
6122 /* Return the "index code" of INFO, in the form required by
6123 ok_for_base_p_1. */
6125 enum rtx_code
6126 get_index_code (const struct address_info *info)
6128 if (info->index)
6129 return GET_CODE (*info->index);
6131 if (info->disp)
6132 return GET_CODE (*info->disp);
6134 return SCRATCH;
6137 /* Return true if X contains a thread-local symbol. */
6139 bool
6140 tls_referenced_p (const_rtx x)
6142 if (!targetm.have_tls)
6143 return false;
6145 subrtx_iterator::array_type array;
6146 FOR_EACH_SUBRTX (iter, array, x, ALL)
6147 if (GET_CODE (*iter) == SYMBOL_REF && SYMBOL_REF_TLS_MODEL (*iter) != 0)
6148 return true;
6149 return false;