/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "stringpool.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "rtx-vector-builder.h"
#include "gimple-ssa.h"
struct target_rtl default_target_rtl;
struct target_rtl *this_target_rtl = &default_target_rtl;

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

scalar_int_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
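/* As an illustration of the sharing scheme above: rtl.h defines
   const0_rtx as const_int_rtx[MAX_SAVED_CONST_INT], so every
   (const_int 0) in the compiler is one shared object and small
   CONST_INTs can be compared by pointer equality.  */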
/* Standard pieces of rtx, to be substituted directly into things.  */
rtx simple_return_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.  */
profile_probability split_branch_probability;
/* Returns a hash code for X (which is a really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is a really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;

  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif

/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}

/* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */

bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || known_eq (p->offset, q->offset))
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || known_eq (p->size, q->size))
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}
/* Returns a hash code for X (which is a really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
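/* Note that, like the constant tables above, reg_attrs structures are
   shared: two registers that refer to the same decl at the same offset
   receive the same reg_attrs pointer from get_reg_attrs.  */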
#ifndef HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs (regno, mode)
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}

/* Initialize a fresh REG rtx with mode MODE and register REGNO.  */

rtx
init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
{
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  init_raw_REG (x, mode, regno);
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}
rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
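/* Note that CONST_INTs are modeless: the MODE argument above is unused
   and the cached nodes are shared between modes.  When the desired mode
   is known, gen_int_mode below should be preferred, since it first
   canonicalizes the value for that mode.  */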
rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}
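/* For example, gen_int_mode (255, QImode) truncates 255 to QImode's
   8-bit precision, which sign-extends to -1, and therefore returns the
   shared constm1_rtx rather than a fresh (const_int 255).  */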
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif
/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ...  */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif

/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
							h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
		   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
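/* For instance, gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) normally
   returns the shared stack_pointer_rtx, so explicit stack pointer
   references can be recognized by pointer comparison throughout the
   compiler.  */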
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, poly_uint64 offset)
{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);

  /* The sizes must be ordered, so that we know whether the subreg
     is partial, paradoxical or complete.  */
  if (!ordered_p (isize, osize))
    return false;

  /* All subregs must be aligned.  */
  if (!multiple_p (offset, osize))
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (maybe_ge (offset, isize))
    return false;

  poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (known_ge (osize, regsize) && known_ge (isize, osize))
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0).  This
     surely isn't the cleanest way to represent this.  It's questionable
     if this ought to be represented at all -- why can't this all be hidden
     in post-reload splitters that make arbitrarily mode changes to the
     registers themselves.  */
  else if (VECTOR_MODE_P (omode)
	   && GET_MODE_INNER (omode) == GET_MODE_INNER (imode))
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (known_eq (isize, osize)
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register
		that should be used in a different mode in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (maybe_gt (osize, isize))
    return known_eq (offset, 0U);

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
	return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* The outer size must be ordered wrt the register size, otherwise
     we wouldn't know at compile time how many registers the outer
     mode occupies.  */
  if (!ordered_p (osize, regsize))
    return false;

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (maybe_lt (osize, regsize)
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      /* It is invalid for the target to pick a register size for a mode
	 that isn't ordered wrt to the size of that mode.  */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
	  || (BYTES_BIG_ENDIAN
	      ? maybe_ne (offset_within_reg, block_size - osize)
	      : maybe_ne (offset_within_reg, 0U)))
	return false;
    }
  return true;
}
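/* As an example of the rules above: (subreg:DI (reg:SI) 0) is a
   paradoxical subreg and is only valid at offset 0, while
   (subreg:SI (reg:DI) 4) is a normal subreg that selects the most
   significant word on a little-endian target.  */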
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

poly_int64
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
}
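/* For example, byte_lowpart_offset (SImode, DImode) is 4 on a
   big-endian target and 0 on a little-endian one.  */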
/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

poly_int64
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
		      poly_uint64 offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (known_eq (offset, 0U));
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}

/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

poly_int64
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
			       SUBREG_BYTE (x));
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
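/* For instance, while generating_concat_p is set, gen_reg_rtx (DCmode)
   returns (concat:DC (reg:DF) (reg:DF)) instead of a single DCmode
   pseudo, so the real and imaginary parts can be allocated
   independently.  */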
/* Make sure m_regno_pointer_align, and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    poly_int64 offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
	   || (paradoxical_subreg_p (x)
	       && ! (SUBREG_PROMOTED_VAR_P (x)
		     && SUBREG_CHECK_PROMOTED_SIGN (x,
						    POINTERS_EXTEND_UNSIGNED))))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* For use by the RTL function loader, when mingling with normal
   functions.
   Ensure that label_num is greater than the label num of X, to avoid
   duplicate labels in the generated assembler.  */

void
maybe_set_max_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) >= label_num)
    label_num = CODE_LABEL_NUMBER (x) + 1;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && known_le (msize * BITS_PER_UNIT,
		   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* The size of the outer and inner modes must be ordered.  */
  poly_uint64 xsize = GET_MODE_SIZE (innermode);
  if (!ordered_p (msize, xsize))
    return 0;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (maybe_gt (msize, xsize))
	return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.  */
      poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs, xregs;
      if (!can_div_away_from_zero_p (msize, regsize, &mregs)
	  || !can_div_away_from_zero_p (xsize, regsize, &xregs)
	  || mregs > xregs)
	return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
	return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
	return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
	return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
	   || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}

rtx
gen_highpart (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
	      || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */

rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}
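/* For example, with a 4-byte outer mode inside an 8-byte inner mode,
   the lowpart offset computed above is 4 on a big-endian target and 0
   on a little-endian one.  */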
/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_assert (known_ge (inner_bytes, outer_bytes));

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
					(inner_bytes - outer_bytes)
					* BITS_PER_UNIT);
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

bool
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return true;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return false;

  return known_eq (subreg_lowpart_offset (GET_MODE (x),
					  GET_MODE (SUBREG_REG (x))),
		   SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, poly_uint64 offset, int validate_address,
		 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != VOIDmode
      && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
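/* For example, on a 32-bit little-endian target,
   operand_subword (op, 1, 0, DImode) of a DImode pseudo yields
   (subreg:SI (reg:DI) 4), i.e. the high-order word.  */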
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
mem_attrs::mem_attrs ()
  : expr (NULL_TREE),
    offset (0),
    size (0),
    alias (0),
    align (0),
    addrspace (ADDR_SPACE_GENERIC),
    offset_known_p (false),
    size_known_p (false)
{}

/* Returns 1 if both MEM_EXPR can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  poly_uint64 offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
		  MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  poly_uint64 suboffset;
	  if (!byte_offset
	      || !poly_int_tree_p (byte_offset, &suboffset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += suboffset;
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  HOST_WIDE_INT misalign;
  if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
    return -1;
  return misalign;
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 poly_int64 bitpos)
{
  poly_int64 apply_bitpos = 0;
  tree type;
  class mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object or if
     this is an INDIRECT_REF.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF)
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  new_size = DECL_SIZE_UNIT (t);
	}

      /* ??? If we end up with a constant or a descriptor do not
	 record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t)
	       || TREE_CODE (t) == CONSTRUCTOR)
	;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
	}

      /* Else record it.  */
      else
	{
	  gcc_assert (handled_component_p (t)
		      || TREE_CODE (t) == MEM_REF
		      || TREE_CODE (t) == TARGET_MEM_REF);
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      /* If this is a reference based on a partitioned decl replace the
	 base with a MEM_REF of the pointer representative we created
	 during stack slot partitioning.  */
      if (attrs.expr
	  && VAR_P (base)
	  && ! is_global_var (base)
	  && cfun->gimple_df->decls_to_pointers != NULL)
	{
	  tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
	  if (namep)
	    {
	      attrs.expr = unshare_expr (attrs.expr);
	      tree *orig_base = &attrs.expr;
	      while (handled_component_p (*orig_base))
		orig_base = &TREE_OPERAND (*orig_base, 0);
	      tree aptrt = reference_alias_ptr_type (*orig_base);
	      *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
				   build_int_cst (aptrt, 0));
	    }
	}

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
      if (diff_align != 0)
	obj_align = MIN (obj_align, diff_align);
      attrs.align = MAX (attrs.align, obj_align);
    }

  poly_uint64 const_size;
  if (poly_int_tree_p (new_size, &const_size))
    {
      attrs.size_known_p = true;
      attrs.size = const_size;
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (maybe_ne (apply_bitpos, 0))
    {
      gcc_assert (attrs.offset_known_p);
      poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
      attrs.offset -= bytepos;
      if (attrs.size_known_p)
	attrs.size += bytepos;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
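/* An illustrative sketch (not a quote of any caller in this file) of
   how an expander might pair a fresh MEM with its tree-level
   attributes; DECL and ADDR are hypothetical locals:

     tree decl = ...;		   (some VAR_DECL being expanded)
     rtx addr = ...;		   (its address, already in Pmode)
     rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
     set_mem_attributes (mem, decl, 1);	  (objectp: MEM is the whole decl)

   Afterwards MEM_EXPR, MEM_OFFSET, MEM_SIZE, MEM_ALIGN and the address
   space of MEM reflect DECL rather than just the bare machine mode.  */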
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}
/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}
/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}
/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}
/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, poly_int64 offset)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}
/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, poly_int64 size)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}
/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
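/* A minimal sketch of the setter helpers above: each one copies the
   current attribute block, edits a single field, and installs the
   result, e.g.

     set_mem_align (mem, 32);	     (MEM is known 4-byte aligned)
     set_mem_offset (mem, 8);	     (8 bytes into MEM_EXPR)
     clear_mem_size (mem);	     (size no longer known)

   Note that set_mem_attrs simply clears MEM_ATTRS again when the
   edited block matches the mode's default attributes, so repeated
   default settings cost nothing.  */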
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
		  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
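/* Illustrative sketch (hypothetical caller): VALIDATE decides whether
   the new address is legitimized for the target.  On a machine whose
   loads accept only (reg) or (reg+const), a request such as

     rtx m = change_address (mem, SImode,
			     gen_rtx_PLUS (Pmode, r1, r2));

   may go through memory_address_addr_space and force R1+R2 into a
   fresh pseudo first, whereas the *_nv (no-validate) entry points
   further below keep the address exactly as given.  */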
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  class mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
		  int validate, int adjust_address, int adjust_object,
		  poly_int64 size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  scalar_int_mode address_mode;
  class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  scalar_int_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref)
      && known_eq (offset, 0)
      && (known_eq (size, 0)
	  || (attrs.size_known_p && known_eq (attrs.size, size)))
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  offset = trunc_int_for_mode (offset, address_mode);

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode
	  && GET_CODE (addr) == LO_SUM
	  && known_in_range_p (offset,
			       0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
				   / BITS_PER_UNIT)))
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (address_mode,
					      XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
	 the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
	       && GET_CODE (addr) == ZERO_EXTEND
	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
	       && known_eq (trunc_int_for_mode (offset, pointer_mode), offset))
	addr = gen_rtx_ZERO_EXTEND (address_mode,
				    plus_constant (pointer_mode,
						   XEXP (addr, 0), offset));
#endif
      else
	addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && maybe_ne (offset, 0))
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && maybe_lt (attrs.offset, 0))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (maybe_ne (offset, 0))
    {
      max_align = known_alignment (offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (maybe_ne (size, 0))
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && maybe_gt (offset + size, attrs.size))
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
	 so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
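/* Illustrative sketch: the usual entry points are the adjust_address
   and adjust_address_nv macros, which wrap adjust_address_1.  For
   example, splitting a DImode MEM into word-sized halves:

     rtx lo = adjust_address (mem, SImode, 0);
     rtx hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   Both halves keep MEM_EXPR and the alias information, while
   MEM_OFFSET, MEM_SIZE and MEM_ALIGN are updated as described
   above.  */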
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
			     poly_int64 offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  class mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  poly_uint64 size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  mem_attrs attrs (*get_mem_attrs (new_rtx));

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (poly_int_tree_p (DECL_SIZE_UNIT (field))
	      && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
	      && known_ge (attrs.offset, 0))
	    break;

	  poly_uint64 suboffset;
	  if (!poly_int_tree_p (offset, &suboffset))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += suboffset;
	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
	       && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
			    size)
	       && known_ge (attrs.offset, 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
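/* Sketch (hypothetical use): a target without byte loads could widen a
   QImode access and mask the result, e.g.

     rtx wide = widen_memory_access (mem, SImode, 0);
     ... emit an SImode load from WIDE, then AND with 0xff ...

   Note that the alias set of WIDE is zapped above, since the wider
   access may touch neighbouring objects.  */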
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}
/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  rtx addr;

  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
	(mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  strip_offset (addr, &attrs.offset);

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
				NULL, label_num++, NULL));
}
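/* Illustrative sketch of the typical emit-time pattern for a forward
   branch target:

     rtx_code_label *label = gen_label_rtx ();
     emit_jump (label);		(jump forward to LABEL)
     ...
     emit_label (label);	(bind LABEL here)

   LABEL_NUSES is maintained by mark_jump_label and friends, not by
   gen_label_rtx itself.  */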
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = param_min_nondebug_insn_uid - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < param_min_nondebug_insn_uid)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  unsigned int i;
  rtx temp;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (CALL_P (p))
	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  rtx temp;
  unsigned int i;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    reset_used_flags (temp);

  unshare_all_rtl_1 (insn);
}
unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());

  for (tree decl = DECL_ARGUMENTS (cfun->decl); decl;
       decl = DECL_CHAIN (decl))
    {
      if (DECL_RTL_SET_P (decl))
	SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
      DECL_INCOMING_RTL (decl)
	= copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
    }

  return 0;
}
/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
   Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
  if (flag_checking && RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}
/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  reset_insn_used_flags (p);
	else
	  {
	    gcc_assert (REG_NOTES (p) == NULL);
	    for (int i = 0; i < XVECLEN (pat, 0); i++)
	      {
		rtx insn = XVECEXP (pat, 0, i);
		if (INSN_P (insn))
		  reset_insn_used_flags (insn);
	      }
	  }
      }
}
/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}
/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  verify_insn_sharing (p);
	else
	  for (int i = 0; i < XVECLEN (pat, 0); i++)
	    {
	      rtx insn = XVECEXP (pat, 0, i);
	      if (INSN_P (insn))
		verify_insn_sharing (insn);
	    }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
	if (CALL_P (insn))
	  CALL_INSN_FUNCTION_USAGE (insn)
	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}
/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
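/* Sketch of the used-flag protocol around this routine -- always three
   steps: clear the marks, mark what must stay unique, then copy:

     reset_used_flags (PATTERN (insn));	   (step 1: clear marks)
     set_used_flags (DECL_RTL (decl));	   (step 2: protect DECL_RTL)
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  (step 3)

   unshare_all_rtl_again above is the whole-function version of this
   idiom.  */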
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
	return;
      break;

    case CONST:
      if (shared_const_p (x))
	return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (last_ptr)
	    copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

	      /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
		{
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
		  last_ptr = &XVECEXP (x, i, j);
		}
	    }
	  break;
	}
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (i == length - 1)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }
	  mark_used_flags (XEXP (x, i), flag);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_used_flags (XVECEXP (x, i, j), flag);
	  break;
	}
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }

  return x;
}
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *seq;
  for (seq = get_current_sequence (); seq; seq = seq->next)
    if (seq->last != 0)
      return seq->last;
  return 0;
}
/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = next_insn (insn);
	     insn && NOTE_P (insn);
	     insn = next_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
	}
    }

  return insn;
}
/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn))
	    if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	      insn = seq->insn (seq->len () - 1);
	}
    }

  return insn;
}
/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
    n -= cur_debug_insn_uid;
  else
    n -= param_min_nondebug_insn_uid;

  return n;
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	  insn = seq->insn (seq->len () - 1);
    }

  return insn;
}
/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
   but stop the search before we enter another basic block.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (!insn)
	break;
      if (DEBUG_INSN_P (insn))
	continue;
      if (!NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor
   DEBUG_INSN, but stop the search before we enter another basic
   block.  This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (!insn)
	break;
      if (DEBUG_INSN_P (insn))
	continue;
      if (!NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL;
    }

  return insn;
}
/* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || NONDEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || NONDEBUG_INSN_P (insn))
	break;
    }

  return insn;
}
/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}
/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

int
active_insn_p (const rtx_insn *insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
	  || JUMP_TABLE_DATA_P (insn) /* FIXME */
	  || (NONJUMP_INSN_P (insn)
	      && (! reload_completed
		  || (GET_CODE (PATTERN (insn)) != USE
		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

rtx_insn *
prev_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}
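/* Sketch of a typical peephole-style walk using these helpers; it
   visits the active insns that follow the head of the current chain
   and ignores notes, and after reload also standalone USEs/CLOBBERs:

     for (rtx_insn *insn = next_active_insn (get_insns ());
	  insn != NULL;
	  insn = next_active_insn (insn))
       ... inspect PATTERN (insn) ...
*/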
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static bool
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
	  && rtx_equal_p (reg, XEXP (x, 0)))
	return true;
    }
  return false;
}
/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
    LABEL_NUSES (label_ref_label (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before, *after;
  rtx note;
  rtx_insn *seq, *tem;
  profile_probability probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability
      = profile_probability::from_reg_br_prob_note (XINT (note, 0));
  else
    split_branch_probability = profile_probability::uninitialized ();

  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = profile_probability::uninitialized ();

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  if (JUMP_P (trial))
	    CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability.initialized_p ()
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_reg_br_prob_note (insn, probability);
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    gcc_assert (call_insn == NULL_RTX);
	    call_insn = insn;

	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
	       target may have explicitly specified.  */
	    rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);

	    /* If the old call was a sibling call, the new one must
	       be too.  */
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	case REG_TM:
	case REG_CALL_NOCF_CHECK:
	case REG_CALL_ARG_LOCATION:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	case REG_LABEL_TARGET:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_INC:
	  if (!AUTO_INC_DEC)
	    break;

	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && find_auto_inc (PATTERN (insn), reg))
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;

	case REG_ARGS_SIZE:
	  fixup_args_size_notes (NULL, insn_last, get_args_size (note));
	  break;

	case REG_CALL_DECL:
	  gcc_assert (call_insn != NULL_RTX);
	  add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  before = PREV_INSN (trial);
  after = NEXT_INSN (trial);

  emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
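/* Illustrative sketch of a caller: the split passes hand each
   candidate insn back through try_split; the result is either the
   original insn (no matching define_split fired) or the last insn of
   the fully split replacement sequence:

     rtx_insn *last = try_split (PATTERN (insn), insn, 1);
     if (last != insn)
       ... INSN has been deleted and replaced by the sequence ...
*/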
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
	      && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
   but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
	}
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = insn;
	}
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}
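/* Sketch of the invariants established above: after
   link_insn_into_chain (i, p, n) we have

     NEXT_INSN (p) == i, PREV_INSN (i) == p,
     NEXT_INSN (i) == n, PREV_INSN (n) == i,

   and, when P, I or N is a delay-slot SEQUENCE, the same links are
   mirrored onto the first/last insns inside the SEQUENCE body so that
   walks through filled delay slots stay consistent.  */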
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (get_insns () == NULL)
    set_first_insn (insn);
  set_last_insn (insn);
}
/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (after == seq->last)
	  {
	    seq->last = insn;
	    break;
	  }
    }
}
/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (before == seq->first)
	  {
	    seq->first = insn;
	    break;
	  }

      gcc_assert (seq);
    }
}
/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from before.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
{
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }
}
/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from before.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
{
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx_insn *insn)
{
  if (INSN_P (insn))
    df_insn_delete (insn);
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emit it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx_insn *insn)
{
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->first)
	  {
	    seq->first = next;
	    break;
	  }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = prev;
	}
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
	if (insn == seq->last)
	  {
	    seq->last = prev;
	    break;
	  }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}
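/* Illustrative sketch of the usual pattern for speculative expansion:
   snapshot the last insn, try an expansion, and roll back on failure
   (maybe_expand_something is a hypothetical helper):

     rtx_insn *mark = get_last_insn ();
     rtx result = maybe_expand_something (...);
     if (!result)
       delete_insns_since (mark);	(discard the partial expansion)
*/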
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  if (flag_checking)
    {
      for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
	gcc_assert (after != x);
      gcc_assert (after != to);
    }

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}
/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
			   basic_block bb,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn_before (insn, before, bb);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}
/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}
/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_before_noloc (x, before, NULL, NULL,
					   make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL, NULL,
				    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL, NULL,
				    make_debug_insn_raw);
}
/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx_insn *before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}
/* Emit the label LABEL before the insn BEFORE.  */

rtx_code_label *
emit_label_before (rtx_code_label *label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return label;
}
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
{
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}
static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
                          rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_jump_insn *
emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
{
  return as_a <rtx_jump_insn *> (
                emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx_insn *after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}
/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx_insn *after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx_insn *label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return label;
}
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	return true;

      /* Notes for var tracking and EH region markers can appear between or
	 inside basic blocks.  If the caller is emitting on the basic block
	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
	return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
	return false;
    }
}
/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx_insn *after)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
          && !JUMP_TABLE_DATA_P (after) /* FIXME */
          && !INSN_LOCATION (after))
        INSN_LOCATION (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
                    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
                                      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_insn_after (rtx pattern, rtx_insn *after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_jump_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
  return as_a <rtx_jump_insn *> (
        emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_jump_insn *
emit_jump_insn_after (rtx pattern, rtx_insn *after)
{
  return as_a <rtx_jump_insn *> (
        emit_pattern_after (pattern, after, true, make_jump_insn_raw));
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_call_insn_after (rtx pattern, rtx_insn *after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_debug_insn_after (rtx pattern, rtx_insn *after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
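
/* Illustrative sketch (an editor's addition): the usual way a pass adds
   a new insn after an existing one, letting the *_after entry point
   inherit an INSN_LOCATION; DEST and SRC are hypothetical operands:

	rtx pattern = gen_rtx_SET (dest, src);
	rtx_insn *new_insn = emit_insn_after (pattern, after);

   Because emit_insn_after passes skip_debug_insns == true, the location
   is taken from the first non-debug insn at or before AFTER, so -g and
   -g0 compilations assign the same locations.  */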
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
                            bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
                                              insnp ? before : NULL,
                                              NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first)
          && !JUMP_TABLE_DATA_P (first) /* FIXME */
          && !INSN_LOCATION (first))
        INSN_LOCATION (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
                     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
                                       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
                                      insnp ? before : NULL,
                                      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
                                     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_jump_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return as_a <rtx_jump_insn *> (
        emit_pattern_before_setloc (pattern, before, loc, false,
                                    make_jump_insn_raw));
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before (rtx pattern, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
        emit_pattern_before (pattern, before, true, false,
                             make_jump_insn_raw));
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_debug_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, false, false,
                              make_debug_insn_raw);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}
/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}
/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
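
/* Illustrative sketch (an editor's addition): gen_clobber and gen_use
   wrap the emit_* forms in a start_sequence/end_sequence pair, so the
   caller receives a detached insn list instead of emitting into the
   current chain; REG and SPOT are hypothetical:

	rtx_insn *seq = gen_clobber (reg);
	emit_insn_before (seq, spot);
*/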
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

static rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
      if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
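
/* Illustrative sketch (an editor's addition): after expanding a
   multi-insn computation of DEST, a REG_EQUAL note records the overall
   value for later passes; LAST, FULL_VALUE and DEST are hypothetical:

	set_dst_reg_note (last, REG_EQUAL, full_value, dest);

   set_dst_reg_note quietly returns NULL_RTX when LAST does not in fact
   set DEST, which makes it safe to call on the last insn of an
   arbitrary expansion.  */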
/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
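
/* Illustrative sketch (an editor's addition) of the canonical pairing
   of the sequence functions above; SPOT, DST and SRC are hypothetical:

	start_sequence ();
	emit_move_insn (dst, src);
	... more emissions ...
	rtx_insn *seq = get_insns ();	/- must precede end_sequence -/
	end_sequence ();
	emit_insn_before (seq, spot);
*/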
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 'p':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}

/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}
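
/* Illustrative sketch (an editor's addition): duplicating an existing
   insn's pattern elsewhere in the stream.  copy_insn, rather than
   copy_rtx, is required so SCRATCHes and ASM_OPERANDs stay matched
   between the copied pattern and any copied notes; INSN and AFTER are
   hypothetical:

	rtx pat = copy_insn (PATTERN (insn));
	emit_insn_after (pat, after);
*/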
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (param_min_nondebug_insn_uid)
    cur_insn_uid = param_min_nondebug_insn_uid;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Return the value of element I of CONST_VECTOR X as a wide_int.  */

static wide_int
const_vector_int_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
  if (i < (unsigned int) XVECLEN (x, 0))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int encoded_nelts = const_vector_encoded_nelts (x);
  unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
  unsigned int count = i / npatterns;
  unsigned int pattern = i % npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);

  /* Otherwise work out the value from the last two encoded elements.  */
  rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
  rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
  wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
			   rtx_mode_t (v1, elt_mode));
  return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
}
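
/* A worked example (an editor's addition): with a single pattern
   (NPATTERNS == 1) whose three encoded elements are 10, 13 and 16, the
   vector is the series 10, 13, 16, 19, ...  For element I we get
   count == I, final_i == 2, v1 == 13, v2 == 16 and diff == 3, so the
   result is 16 + (I - 2) * 3, which agrees with 10 + 3 * I.  */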
/* Return the value of element I of CONST_VECTOR X.  */

rtx
const_vector_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  if (i < (unsigned int) XVECLEN (x, 0))
    return CONST_VECTOR_ENCODED_ELT (x, i);

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    {
      /* Identify the pattern that contains element I and work out the index of
	 the last encoded element for that pattern.  */
      unsigned int encoded_nelts = const_vector_encoded_nelts (x);
      unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
      unsigned int pattern = i % npatterns;
      unsigned int final_i = encoded_nelts - npatterns + pattern;
      return CONST_VECTOR_ENCODED_ELT (x, final_i);
    }

  /* Otherwise work out the value from the last two encoded elements.  */
  return immed_wide_int_const (const_vector_int_elt (x, i),
			       GET_MODE_INNER (GET_MODE (x)));
}
/* Return true if X is a valid element for a CONST_VECTOR of the given
   mode.  */

bool
valid_for_const_vector_p (machine_mode, rtx x)
{
  return (CONST_SCALAR_INT_P (x)
	  || CONST_DOUBLE_AS_FLOAT_P (x)
	  || CONST_FIXED_P (x));
}
/* Generate a vector constant of mode MODE in which every element has
   value ELT.  */

rtx
gen_const_vec_duplicate (machine_mode mode, rtx elt)
{
  rtx_vector_builder builder (mode, 1, 1);
  builder.quick_push (elt);
  return builder.build ();
}

/* Return a vector rtx of mode MODE in which every element has value X.
   The result will be a constant if X is constant.  */

rtx
gen_vec_duplicate (machine_mode mode, rtx x)
{
  if (valid_for_const_vector_p (mode, x))
    return gen_const_vec_duplicate (mode, x);
  return gen_rtx_VEC_DUPLICATE (mode, x);
}
/* A subroutine of const_vec_series_p that handles the case in which:

     (GET_CODE (X) == CONST_VECTOR
      && CONST_VECTOR_NPATTERNS (X) == 1
      && !CONST_VECTOR_DUPLICATE_P (X))

   is known to hold.  */

bool
const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
{
  /* Stepped sequences are only defined for integers, to avoid specifying
     rounding behavior.  */
  if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
    return false;

  /* A non-duplicated vector with two elements can always be seen as a
     series with a nonzero step.  Longer vectors must have a stepped
     encoding.  */
  if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
      && !CONST_VECTOR_STEPPED_P (x))
    return false;

  /* Calculate the step between the first and second elements.  */
  scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
  rtx base = CONST_VECTOR_ELT (x, 0);
  rtx step = simplify_binary_operation (MINUS, inner,
					CONST_VECTOR_ENCODED_ELT (x, 1), base);
  if (rtx_equal_p (step, CONST0_RTX (inner)))
    return false;

  /* If we have a stepped encoding, check that the step between the
     second and third elements is the same as STEP.  */
  if (CONST_VECTOR_STEPPED_P (x))
    {
      rtx diff = simplify_binary_operation (MINUS, inner,
					    CONST_VECTOR_ENCODED_ELT (x, 2),
					    CONST_VECTOR_ENCODED_ELT (x, 1));
      if (!rtx_equal_p (step, diff))
	return false;
    }

  *base_out = base;
  *step_out = step;
  return true;
}
/* Generate a vector constant of mode MODE in which element I has
   the value BASE + I * STEP.  */

rtx
gen_const_vec_series (machine_mode mode, rtx base, rtx step)
{
  gcc_assert (valid_for_const_vector_p (mode, base)
	      && valid_for_const_vector_p (mode, step));

  rtx_vector_builder builder (mode, 1, 3);
  builder.quick_push (base);
  for (int i = 1; i < 3; ++i)
    builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
					     builder[i - 1], step));
  return builder.build ();
}
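
/* Illustrative sketch (an editor's addition): the builder above encodes
   just three elements, BASE, BASE + STEP and BASE + 2 * STEP; the
   stepped encoding then extends the series to any number of units.
   For example, on a target providing V4SImode:

	rtx v = gen_const_vec_series (V4SImode, const0_rtx, GEN_INT (4));

   yields the constant vector {0, 4, 8, 12}.  */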
/* Generate a vector of mode MODE in which element I has the value
   BASE + I * STEP.  The result will be a constant if BASE and STEP
   are both constants.  */

rtx
gen_vec_series (machine_mode mode, rtx base, rtx step)
{
  if (step == const0_rtx)
    return gen_vec_duplicate (mode, base);
  if (valid_for_const_vector_p (mode, base)
      && valid_for_const_vector_p (mode, step))
    return gen_const_vec_series (mode, base, step);
  return gen_rtx_VEC_SERIES (mode, base, step);
}
/* Generate a new vector constant for mode MODE and constant value
   CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  machine_mode inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  rtx el = const_tiny_rtx[constant][(int) inner];
  gcc_assert (el);

  return gen_const_vec_duplicate (mode, el);
}
/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (rtvec_all_equal_p (v))
    return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));

  unsigned int nunits = GET_NUM_ELEM (v);
  rtx_vector_builder builder (mode, nunits, 1);
  for (unsigned int i = 0; i < nunits; ++i)
    builder.quick_push (RTVEC_ELT (v, i));
  return builder.build (v);
}
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode && mode != VOIDmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }

  split_branch_probability = profile_probability::uninitialized ();
}
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && !opt_byte_mode.exists ())
	opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && !opt_word_mode.exists ())
	opt_word_mode = mode;
    }

  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* For BImode, 1 and -1 are unsigned and signed interpretations
     of the same value.  */
  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  const_tiny_rtx[1][(int) BImode] = const_true_rtx;
  const_tiny_rtx[3][(int) BImode] = const_true_rtx;
  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  /* As for BImode, "all 1" and "all -1" are unsigned and signed
     interpretations of the same value.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
      const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   /*code=*/-1,
				   /*reg_notes=*/NULL_RTX);
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	*ptail = duplicate_reg_note (link);
	ptail = &XEXP (*ptail, 1);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

location_t prologue_location;
location_t epilogue_location;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
static location_t curr_location;
/* Allocate insn location datastructure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
/* Set the location of the insn chain starting at INSN to LOC.  */
void
set_insn_locations (rtx_insn *insn, location_t loc)
{
  while (insn)
    {
      if (INSN_P (insn))
	INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}

/* Return lexical scope block insn belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
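
/* A worked summary (an editor's addition) of the mapping implemented
   above:

	model		pre-barrier	post-barrier
	RELAXED		no		no
	CONSUME		no		no
	RELEASE		yes		no
	ACQUIRE		no		yes
	ACQ_REL		yes		yes
	SEQ_CST		yes		yes

   That is, the function returns PRE for MEMMODEL_RELEASE, !PRE for
   MEMMODEL_ACQUIRE, false for the relaxed models, and true for the two
   full barriers.  */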
/* Return a constant shift amount for shifting a value of mode MODE
   by VALUE bits.  */

rtx
gen_int_shift_amount (machine_mode, poly_int64 value)
{
  /* Use a 64-bit mode, to avoid any truncation.

     ??? Perhaps this should be automatically derived from the .md files
     instead, or perhaps have a target hook.  */
  scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
				? DImode
				: int_mode_for_size (64, 0).require ());
  return gen_int_mode (value, shift_mode);
}
/* Initialize fields of rtl_data related to stack alignment.  */

void
rtl_data::init_stack_alignment ()
{
  stack_alignment_needed = STACK_BOUNDARY;
  max_used_stack_slot_alignment = STACK_BOUNDARY;
  stack_alignment_estimated = 0;
  preferred_stack_boundary = STACK_BOUNDARY;
}


#include "gt-emit-rtl.h"