/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "coretypes.h"
#include "stringpool.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "rtx-vector-builder.h"
#include "gimple-ssa.h"
struct target_rtl default_target_rtl;

struct target_rtl *this_target_rtl = &default_target_rtl;

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

scalar_int_mode byte_mode;  /* Mode whose width is BITS_PER_UNIT.  */
scalar_int_mode word_mode;  /* Mode whose width is BITS_PER_WORD.  */
scalar_int_mode ptr_mode;   /* Mode whose width is POINTER_SIZE.  */
/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;
/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;
/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
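/* Usage sketch (illustrative, not part of the original source): because
   small constants are pooled in const_int_rtx, pointer identity doubles
   as value identity for them:

     rtx a = gen_rtx_CONST_INT (VOIDmode, 5);
     rtx b = GEN_INT (5);
     gcc_checking_assert (a == b);

   Values outside the saved range fall through to the hash table
   declared further below.  */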
/* Standard pieces of rtx, to be substituted directly into things.  */
rtx simple_return_rtx;
/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
struct const_poly_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef std::pair<machine_mode, poly_wide_int_ref> compare_type;

  static hashval_t hash (rtx x);
  static bool equal (rtx x, const compare_type &y);
};

static GTY ((cache)) hash_table<const_poly_int_hasher> *const_poly_int_htab;
/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.  */
profile_probability split_branch_probability;
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}
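/* Usage sketch (illustrative): keying equal () on a plain HOST_WIDE_INT
   lets callers probe the table without building a temporary rtx, e.g.
   (assuming const_int_htab has been initialized):

     HOST_WIDE_INT v = 123456789;
     rtx *slot = const_int_htab->find_slot_with_hash (v, (hashval_t) v,
                                                      INSERT);

   This is exactly the pattern gen_rtx_CONST_INT uses further below.  */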
#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;
  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif
/* Returns a hash code for CONST_POLY_INT X.  */

hashval_t
const_poly_int_hasher::hash (rtx x)
{
  inchash::hash h;
  h.add_int (GET_MODE (x));
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (CONST_POLY_INT_COEFFS (x)[i]);
  return h.end ();
}
/* Returns nonzero if CONST_POLY_INT X is an rtx representation of Y.  */

bool
const_poly_int_hasher::equal (rtx x, const compare_type &y)
{
  if (GET_MODE (x) != y.first)
    return false;
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    if (CONST_POLY_INT_COEFFS (x)[i] != y.second.coeffs[i])
      return false;
  return true;
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */

bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}
/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const class mem_attrs *p, const class mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || known_eq (p->offset, q->offset))
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || known_eq (p->size, q->size))
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  inchash::hash h;
  h.add_ptr (p->decl);
  h.add_poly_hwi (p->offset);
  return h.end ();
}
/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && known_eq (p->offset, q->offset));
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, poly_int64 offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && known_eq (offset, 0))
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
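/* Illustrative sketch (not in the original source): reg_attrs records are
   hash-consed, so two requests with the same decl and offset yield the
   same pointer:

     reg_attrs *a = get_reg_attrs (decl, 0);   -- "decl" is hypothetical
     reg_attrs *b = get_reg_attrs (decl, 0);
     gcc_checking_assert (a == b);

   Pointer comparison of REG_ATTRS is therefore meaningful.  */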
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
                        ? hard_regno_nregs (regno, mode)
                        : 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}
/* Initialize a fresh REG rtx with mode MODE and register REGNO.  */

rtx
init_raw_REG (rtx x, machine_mode mode, unsigned int regno)
{
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc (REG MEM_STAT_INFO);
  init_raw_REG (x, mode, regno);
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
                                                 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
                                                 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
              basic_block bb, rtx pattern, int location, int code,
              rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
                                                 prev_insn, next_insn,
                                                 bb, pattern, location, code,
                                                 reg_notes));
}
rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
                                                   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
rtx
gen_int_mode (poly_int64 c, machine_mode mode)
{
  c = trunc_int_for_mode (c, mode);
  if (c.is_constant ())
    return GEN_INT (c.coeffs[0]);
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));
  return immed_wide_int_const (poly_wide_int::from (c, prec, SIGNED), mode);
}
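/* Worked example (illustrative): gen_int_mode truncates to MODE first,
   so out-of-range values wrap to their mode-sized two's-complement
   representation:

     rtx x = gen_int_mode (0x1ff, QImode);   -- INTVAL (x) == -1
     rtx y = gen_int_mode (128, QImode);     -- INTVAL (y) == -128

   i.e. the stored HOST_WIDE_INT is the sign-extended low 8 bits.  */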
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif
#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif
/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

static rtx
immed_wide_int_const_1 (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only from copies of the sign bit, and sign
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  scalar_mode smode;
  if (is_a <scalar_mode> (mode, &smode)
      && GET_MODE_BITSIZE (smode) <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (i0, mode);

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
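/* Worked example (illustrative, assuming a 64-bit HOST_WIDE_INT and
   !TARGET_SUPPORTS_WIDE_INT), for a TImode constant:

     immed_double_const (5, 0, TImode);   -- case 2: yields (const_int 5)
     immed_double_const (0, 1, TImode);   -- case 3: yields a CONST_DOUBLE
                                             with low == 0, high == 1

   The second call takes case 3 because i1 is not a sign-extension of
   i0's sign bit.  */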
/* Return an rtx representation of C in mode MODE.  */

rtx
immed_wide_int_const (const poly_wide_int_ref &c, machine_mode mode)
{
  if (c.is_constant ())
    return immed_wide_int_const_1 (c.coeffs[0], mode);

  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  unsigned int prec = GET_MODE_PRECISION (as_a <scalar_mode> (mode));

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= c.coeffs[0].get_precision ());
  poly_wide_int newc = poly_wide_int::from (c, prec, SIGNED);

  /* See whether we already have an rtx for this constant.  */
  inchash::hash h;
  h.add_int (mode);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    h.add_wide_int (newc.coeffs[i]);
  const_poly_int_hasher::compare_type typed_value (mode, newc);
  rtx *slot = const_poly_int_htab->find_slot_with_hash (typed_value,
                                                        h.end (), INSERT);
  rtx x = *slot;
  if (x)
    return x;

  /* Create a new rtx.  There's a choice to be made here between installing
     the actual mode of the rtx or leaving it as VOIDmode (for consistency
     with CONST_INT).  In practice the handling of the codes is different
     enough that we get no benefit from using VOIDmode, and various places
     assume that VOIDmode implies CONST_INT.  Using the real mode seems like
     the right long-term direction anyway.  */
  typedef trailing_wide_ints<NUM_POLY_INT_COEFFS> twi;
  size_t extra_size = twi::extra_size (prec);
  x = rtx_alloc_v (CONST_POLY_INT,
                   sizeof (struct const_poly_int_def) + extra_size);
  PUT_MODE (x, mode);
  CONST_POLY_INT_COEFFS (x).set_precision (prec);
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    CONST_POLY_INT_COEFFS (x)[i] = newc.coeffs[i];

  *slot = x;
  return x;
}
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
          && regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          && regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
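/* Usage sketch (illustrative): outside of reload/LRA, requesting the
   stack pointer in Pmode returns the one shared rtx, so the result may
   be compared by address:

     rtx sp = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
     gcc_checking_assert (sp == stack_pointer_rtx);

   A request in some other mode bypasses the sharing and builds a fresh
   REG via gen_raw_REG.  */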
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
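/* Usage sketch (illustrative): the helpers above differ only in the
   attributes they preset on the fresh MEM, e.g. for some address rtx
   "addr" (hypothetical):

     rtx c = gen_const_mem (SImode, addr);   -- read-only, cannot trap
     rtx f = gen_frame_mem (SImode, addr);   -- no trap, frame alias set

   gen_tmp_stack_mem skips the frame alias set when the function calls
   alloca, since such slots can alias dynamically allocated stack.  */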
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
                 const_rtx reg, poly_uint64 offset)
{
  poly_uint64 isize = GET_MODE_SIZE (imode);
  poly_uint64 osize = GET_MODE_SIZE (omode);

  /* The sizes must be ordered, so that we know whether the subreg
     is partial, paradoxical or complete.  */
  if (!ordered_p (isize, osize))
    return false;

  /* All subregs must be aligned.  */
  if (!multiple_p (offset, osize))
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (maybe_ge (offset, isize))
    return false;

  poly_uint64 regsize = REGMODE_NATURAL_SIZE (imode);

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (known_ge (osize, regsize) && known_ge (isize, osize))
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0) or (subreg:V4SF (reg:V2SF) 0).  This
     surely isn't the cleanest way to represent this.  It's questionable
     if this ought to be represented at all -- why can't this all be hidden
     in post-reload splitters that make arbitrarily mode changes to the
     registers themselves.  */
  else if (VECTOR_MODE_P (omode)
           && GET_MODE_INNER (omode) == GET_MODE_INNER (imode))
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (known_eq (isize, osize)
             /* LRA can use subreg to store a floating point value in
                an integer mode.  Although the floating point and the
                integer modes need the same number of hard registers,
                the size of floating point mode can be less than the
                integer mode.  LRA also uses subregs for a register
                that should be used in different modes in one insn.  */
             || lra_in_progress))
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (maybe_gt (osize, isize))
    return known_eq (offset, 0U);

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (!REG_CAN_CHANGE_MODE_P (regno, imode, omode))
        return false;

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* The outer size must be ordered wrt the register size, otherwise
     we wouldn't know at compile time how many registers the outer
     mode occupies.  */
  if (!ordered_p (osize, regsize))
    return false;

  /* For pseudo registers, we want most of the same checks.  Namely:

     Assume that the pseudo register will be allocated to hard registers
     that can hold REGSIZE bytes each.  If OSIZE is not a multiple of REGSIZE,
     the remainder must correspond to the lowpart of the containing hard
     register.  If BYTES_BIG_ENDIAN, the lowpart is at the highest offset,
     otherwise it is at the lowest offset.

     Given that we've already checked the mode and offset alignment,
     we only have to check subblock subregs here.  */
  if (maybe_lt (osize, regsize)
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      /* It is invalid for the target to pick a register size for a mode
         that isn't ordered wrt to the size of that mode.  */
      poly_uint64 block_size = ordered_min (isize, regsize);
      unsigned int start_reg;
      poly_uint64 offset_within_reg;
      if (!can_div_trunc_p (offset, block_size, &start_reg, &offset_within_reg)
          || (BYTES_BIG_ENDIAN
              ? maybe_ne (offset_within_reg, block_size - osize)
              : maybe_ne (offset_within_reg, 0U)))
        return false;
    }
  return true;
}
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, poly_uint64 offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
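/* Illustrative sketch: extracting the low SImode part of a DImode value
   (byte offset 0 on little-endian targets, 4 on big-endian ones):

     rtx di = gen_reg_rtx (DImode);   -- hypothetical pseudo
     rtx lo = gen_rtx_SUBREG (SImode, di,
                              subreg_lowpart_offset (SImode, DImode));

   An offset that validate_subreg rejects would trip the assert above;
   gen_lowpart_SUBREG below computes the offset for you.  */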
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
                      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

poly_int64
byte_lowpart_offset (machine_mode outer_mode,
                     machine_mode inner_mode)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    return -subreg_lowpart_offset (inner_mode, outer_mode);
  else
    return subreg_lowpart_offset (outer_mode, inner_mode);
}
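/* Worked example (illustrative): for the normal direction, SImode as a
   lowpart of DImode, this returns subreg_lowpart_offset: 0 on a
   little-endian target, 4 on a big-endian one.  For the paradoxical
   direction, DImode viewed as a lowpart of SImode, the sign flips:
   0 on little-endian, -4 on big-endian, as the comment above notes.  */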
/* Return the offset of (subreg:OUTER_MODE (mem:INNER_MODE X) OFFSET)
   from address X.  For paradoxical big-endian subregs this is a
   negative value, otherwise it's the same as OFFSET.  */

poly_int64
subreg_memory_offset (machine_mode outer_mode, machine_mode inner_mode,
                      poly_uint64 offset)
{
  if (paradoxical_subreg_p (outer_mode, inner_mode))
    {
      gcc_assert (known_eq (offset, 0U));
      return -subreg_lowpart_offset (inner_mode, outer_mode);
    }
  return offset;
}

/* As above, but return the offset that existing subreg X would have
   if SUBREG_REG (X) were stored in memory.  The only significant thing
   about the current SUBREG_REG is its mode.  */

poly_int64
subreg_memory_offset (const_rtx x)
{
  return subreg_memory_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)),
                               SUBREG_BYTE (x));
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  crtl->emit.ensure_regno_capacity ();
  gcc_assert (reg_rtx_no < crtl->emit.regno_pointer_align_length);

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
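/* Usage sketch (illustrative): with generating_concat_p set, a complex
   pseudo comes back as a CONCAT of two independent part pseudos:

     rtx c = gen_reg_rtx (DCmode);

   where GET_CODE (c) == CONCAT and XEXP (c, 0) / XEXP (c, 1) are
   separate DFmode pseudos that may be allocated noncontiguously.  */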
/* Make sure m_regno_pointer_align, and regno_reg_rtx are large
   enough to have elements in the range 0 <= idx <= reg_rtx_no.  */

void
emit_status::ensure_regno_capacity ()
{
  int old_size = regno_pointer_align_length;

  if (reg_rtx_no < old_size)
    return;

  int new_size = old_size * 2;
  while (reg_rtx_no >= new_size)
    new_size *= 2;

  char *tmp = XRESIZEVEC (char, regno_pointer_align, new_size);
  memset (tmp + old_size, 0, new_size - old_size);
  regno_pointer_align = (unsigned char *) tmp;

  rtx *new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, new_size);
  memset (new1 + old_size, 0, (new_size - old_size) * sizeof (rtx));
  regno_reg_rtx = new1;

  crtl->emit.regno_pointer_align_length = new_size;
}
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, poly_int64 offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}
/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
                    poly_int64 offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  poly_int64 offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
           || (GET_CODE (x) == ZERO_EXTEND && ! POINTERS_EXTEND_UNSIGNED)
           || (paradoxical_subreg_p (x)
               && ! (SUBREG_PROMOTED_VAR_P (x)
                     && SUBREG_CHECK_PROMOTED_SIGN (x,
                                                    POINTERS_EXTEND_UNSIGNED))))
          && !targetm.have_ptr_extend ())
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_P (tdecl)
                                               ? DECL_MODE (tdecl)
                                               : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* For use by the RTL function loader, when mingling with normal
   functions.
   Ensure that label_num is greater than the label num of X, to avoid
   duplicate labels in the generated assembler.  */

void
maybe_set_max_label_num (rtx_code_label *x)
{
  if (CODE_LABEL_NUMBER (x) >= label_num)
    label_num = CODE_LABEL_NUMBER (x) + 1;
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && known_le (msize * BITS_PER_UNIT,
                   (unsigned HOST_WIDE_INT) HOST_BITS_PER_WIDE_INT))
    innermode = int_mode_for_size (HOST_BITS_PER_WIDE_INT, 0).require ();
  else if (innermode == VOIDmode)
    innermode = int_mode_for_size (HOST_BITS_PER_DOUBLE_INT, 0).require ();

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* The size of the outer and inner modes must be ordered.  */
  poly_uint64 xsize = GET_MODE_SIZE (innermode);
  if (!ordered_p (msize, xsize))
    return 0;

  if (SCALAR_FLOAT_MODE_P (mode))
    {
      /* Don't allow paradoxical FLOAT_MODE subregs.  */
      if (maybe_gt (msize, xsize))
        return 0;
    }
  else
    {
      /* MODE must occupy no more of the underlying registers than X.  */
      poly_uint64 regsize = REGMODE_NATURAL_SIZE (innermode);
      unsigned int mregs, xregs;
      if (!can_div_away_from_zero_p (msize, regsize, &mregs)
          || !can_div_away_from_zero_p (xsize, regsize, &xregs)
          || mregs > xregs)
        return 0;
    }

  scalar_int_mode int_mode, int_innermode, from_mode;
  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && is_a <scalar_int_mode> (mode, &int_mode)
      && is_a <scalar_int_mode> (innermode, &int_innermode)
      && is_a <scalar_int_mode> (GET_MODE (XEXP (x, 0)), &from_mode))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (from_mode == int_mode)
        return XEXP (x, 0);
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (from_mode))
        return gen_lowpart_common (int_mode, XEXP (x, 0));
      else if (GET_MODE_SIZE (int_mode) < GET_MODE_SIZE (int_innermode))
        return gen_rtx_fmt_e (GET_CODE (x), int_mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x)
           || CONST_POLY_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
rtx
gen_highpart (machine_mode mode, rtx x)
{
  poly_uint64 msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (known_le (msize, (unsigned int) UNITS_PER_WORD)
              || known_eq (msize, GET_MODE_UNIT_SIZE (GET_MODE (x))));

  /* gen_lowpart_common handles a lot of special cases due to needing to handle
     paradoxical subregs; it only calls simplify_gen_subreg when certain that
     it will produce something meaningful.  The only case we need to handle
     specially here is MEM.  */
  if (MEM_P (x))
    {
      poly_int64 offset = subreg_highpart_offset (mode, GET_MODE (x));
      return adjust_address (x, mode, offset);
    }

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  /* Since we handle MEM directly above, we should never get a MEM back
     from simplify_gen_subreg.  */
  gcc_assert (result && !MEM_P (result));

  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for a lowpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_lowpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_checking_assert (ordered_p (outer_bytes, inner_bytes));
  if (maybe_gt (outer_bytes, inner_bytes))
    /* Paradoxical subregs always have a SUBREG_BYTE of 0.  */
    return 0;

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return 0;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes, 0);
}
/* Return the SUBREG_BYTE for a highpart subreg whose outer mode has
   OUTER_BYTES bytes and whose inner mode has INNER_BYTES bytes.  */

poly_uint64
subreg_size_highpart_offset (poly_uint64 outer_bytes, poly_uint64 inner_bytes)
{
  gcc_assert (known_ge (inner_bytes, outer_bytes));

  if (BYTES_BIG_ENDIAN && WORDS_BIG_ENDIAN)
    return 0;
  else if (!BYTES_BIG_ENDIAN && !WORDS_BIG_ENDIAN)
    return inner_bytes - outer_bytes;
  else
    return subreg_size_offset_from_lsb (outer_bytes, inner_bytes,
                                        (inner_bytes - outer_bytes)
                                        * BITS_PER_UNIT);
}
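/* Worked example (illustrative): outer_bytes == 4, inner_bytes == 8.
   On a fully little-endian target the lowpart offset is 0 and the
   highpart offset is 8 - 4 == 4; on a fully big-endian target the two
   values swap.  Mixed-endian targets defer to
   subreg_size_offset_from_lsb in both functions.  */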
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

bool
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return true;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return false;

  return known_eq (subreg_lowpart_offset (GET_MODE (x),
                                          GET_MODE (SUBREG_REG (x))),
                   SUBREG_BYTE (x));
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, poly_uint64 offset, int validate_address,
                 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && maybe_lt (GET_MODE_SIZE (mode), UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && maybe_gt ((offset + 1) * UNITS_PER_WORD, GET_MODE_SIZE (mode)))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
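/* Usage sketch (illustrative): pulling the two word_mode halves out of
   a DImode operand on a 32-bit target:

     rtx lo = operand_subword (op, 0, 1, DImode);      -- may be zero
     rtx hi = operand_subword_force (op, 1, DImode);   -- never zero

   The _force variant below copies OP into a register and retries when
   the plain call fails.  */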
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, poly_uint64 offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
mem_attrs::mem_attrs ()
  : expr (NULL_TREE),
    offset (0),
    size (0),
    alias (0),
    align (0),
    addrspace (ADDR_SPACE_GENERIC),
    offset_known_p (false),
    size_known_p (false)
{}
/* Returns 1 if both MEM_EXPR can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  poly_uint64 offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
         || (MAX (MEM_ALIGN (mem),
                  MAX (align, get_object_alignment (MEM_EXPR (mem))))
             < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          poly_uint64 suboffset;
          if (!byte_offset
              || !poly_int_tree_p (byte_offset, &suboffset)
              || !tree_fits_uhwi_p (bit_offset))
            return -1;

          offset += suboffset;
          offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  HOST_WIDE_INT misalign;
  if (!known_misalignment (offset, align / BITS_PER_UNIT, &misalign))
    return -1;
  return misalign;
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 poly_int64 bitpos)
{
  poly_int64 apply_bitpos = 0;
  tree type;
  class mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object or if
     this is an INDIRECT_REF.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF)
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          new_size = DECL_SIZE_UNIT (t);
        }

      /* ??? If we end up with a constant or a descriptor do not
         record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t)
               || TREE_CODE (t) == CONSTRUCTOR)
        ;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
            new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
        }

      /* Else record it.  */
      else
        {
          gcc_assert (handled_component_p (t)
                      || TREE_CODE (t) == MEM_REF
                      || TREE_CODE (t) == TARGET_MEM_REF);
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* If this is a reference based on a partitioned decl replace the
         base with a MEM_REF of the pointer representative we created
         during stack slot partitioning.  */
      if (attrs.expr
          && VAR_P (base)
          && ! is_global_var (base)
          && cfun->gimple_df->decls_to_pointers != NULL)
        {
          tree *namep = cfun->gimple_df->decls_to_pointers->get (base);
          if (namep)
            {
              attrs.expr = unshare_expr (attrs.expr);
              tree *orig_base = &attrs.expr;
              while (handled_component_p (*orig_base))
                orig_base = &TREE_OPERAND (*orig_base, 0);
              tree aptrt = reference_alias_ptr_type (*orig_base);
              *orig_base = build2 (MEM_REF, TREE_TYPE (*orig_base), *namep,
                                   build_int_cst (aptrt, 0));
            }
        }

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      unsigned int diff_align = known_alignment (obj_bitpos - bitpos);
      if (diff_align != 0)
        obj_align = MIN (obj_align, diff_align);
      attrs.align = MAX (attrs.align, obj_align);
    }

  poly_uint64 const_size;
  if (poly_int_tree_p (new_size, &const_size))
    {
      attrs.size_known_p = true;
      attrs.size = const_size;
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (maybe_ne (apply_bitpos, 0))
    {
      gcc_assert (attrs.offset_known_p);
      poly_int64 bytepos = bits_to_bytes_round_down (apply_bitpos);
      attrs.offset -= bytepos;
      if (attrs.size_known_p)
        attrs.size += bytepos;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}
/* Identical to set_mem_attributes_minus_bitpos, except that we are
   assuming that BITPOS is 0.  */

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
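
/* A minimal usage sketch (illustrative only; not a call site in this file):
   an expander that has just wrapped a declaration's address in a MEM would
   typically do

	rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
	set_mem_attributes (mem, decl, 1);

   so that the MEM inherits alias set, alignment and size information from
   DECL rather than keeping the bare mode defaults.  */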
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, poly_int64 offset)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, poly_int64 size)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
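
/* Sketch of the accessor discipline above (values illustrative): each
   setter copies the MEM's current attribute block, updates one field and
   re-installs the result, e.g.

	set_mem_align (mem, 32);
	set_mem_size (mem, GET_MODE_SIZE (SImode));
	clear_mem_offset (mem);

   Attribute blocks are shared between MEMs, which is why callers must go
   through set_mem_attrs instead of modifying a mem_attrs in place.  */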
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
                  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  class mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, poly_int64 offset,
                  int validate, int adjust_address, int adjust_object,
                  poly_int64 size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  scalar_int_mode address_mode;
  class mem_attrs attrs (*get_mem_attrs (memref)), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  scalar_int_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref)
      && known_eq (offset, 0)
      && (known_eq (size, 0)
          || (attrs.size_known_p && known_eq (attrs.size, size)))
      && (!validate || memory_address_addr_space_p (mode, addr,
                                                    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  offset = trunc_int_for_mode (offset, address_mode);

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode
          && GET_CODE (addr) == LO_SUM
          && known_in_range_p (offset,
                               0, (GET_MODE_ALIGNMENT (GET_MODE (memref))
                                   / BITS_PER_UNIT)))
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (address_mode,
                                              XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
         in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
         the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
               && GET_CODE (addr) == ZERO_EXTEND
               && GET_MODE (XEXP (addr, 0)) == pointer_mode
               && known_eq (trunc_int_for_mode (offset, pointer_mode),
                            offset))
        addr = gen_rtx_ZERO_EXTEND (address_mode,
                                    plus_constant (pointer_mode,
                                                   XEXP (addr, 0), offset));
#endif
      else
        addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && maybe_ne (offset, 0))
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && maybe_lt (attrs.offset, 0))
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (maybe_ne (offset, 0))
    {
      max_align = known_alignment (offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (maybe_ne (size, 0))
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && maybe_gt (offset + size, attrs.size))
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
         so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
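
/* Sketch: the adjust_address/adjust_address_nv macros in expr.h are the
   usual entry points to the function above.  For example, reading the
   second SImode word of a DImode MEM might be written

	rtx high = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   which changes the mode, offsets the address by four bytes and updates
   MEM_OFFSET, MEM_SIZE and MEM_ALIGN to match.  */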
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
                             poly_int64 offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  class mem_attrs *defattrs;

  mem_attrs attrs (*get_mem_attrs (memref));
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
                                     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, poly_int64 offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  poly_uint64 size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  mem_attrs attrs (*get_mem_attrs (new_rtx));

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (attrs.expr, 1);
          tree offset = component_ref_field_offset (attrs.expr);

          if (! DECL_SIZE_UNIT (field))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (poly_int_tree_p (DECL_SIZE_UNIT (field))
              && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (field)), size)
              && known_ge (attrs.offset, 0))
            break;

          poly_uint64 suboffset;
          if (!poly_int_tree_p (offset, &suboffset))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          attrs.expr = TREE_OPERAND (attrs.expr, 0);
          attrs.offset += suboffset;
          attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                           / BITS_PER_UNIT);
        }
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
               && DECL_SIZE_UNIT (attrs.expr)
               && poly_int_tree_p (DECL_SIZE_UNIT (attrs.expr))
               && known_ge (wi::to_poly_offset (DECL_SIZE_UNIT (attrs.expr)),
                            size)
               && known_ge (attrs.offset, 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          attrs.expr = NULL_TREE;
          break;
        }
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
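
/* Sketch (hypothetical target scenario): a port without byte memory
   operations could widen a QImode reference

	rtx wide = widen_memory_access (byte_mem, SImode, 0);

   and then mask or shift the loaded word; the code above makes sure the
   widened MEM does not keep a MEM_EXPR or alias set that is too narrow
   for the new access.  */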
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  mem_attrs attrs (*mode_mem_attrs[(int) BLKmode]);
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}
/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  rtx addr;

  mem_attrs attrs (*get_mem_attrs (mem));
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
       (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  strip_offset (addr, &attrs.offset);

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
            gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
                                NULL, label_num++, NULL));
}
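
/* Sketch of typical use when expanding control flow (illustrative):

	rtx_code_label *lab = gen_label_rtx ();
	emit_jump (lab);
	... emit the code that is skipped ...
	emit_label (lab);

   Each call draws a fresh number from label_num, which is unique across
   the whole compilation.  */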
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (param_min_nondebug_insn_uid || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = param_min_nondebug_insn_uid - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < param_min_nondebug_insn_uid)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = param_min_nondebug_insn_uid + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  unsigned int i;
  rtx temp;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    (*stack_slot_list)[i] = copy_rtx_if_shared (temp);
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (CALL_P (p))
          reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  rtx temp;
  unsigned int i;
  FOR_EACH_VEC_SAFE_ELT (stack_slot_list, i, temp)
    reset_used_flags (temp);

  unshare_all_rtl_1 (insn);
}
unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());

  for (tree decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    {
      if (DECL_RTL_SET_P (decl))
        SET_DECL_RTL (decl, copy_rtx_if_shared (DECL_RTL (decl)));
      DECL_INCOMING_RTL (decl) = copy_rtx_if_shared (DECL_INCOMING_RTL (decl));
    }

  return 0;
}
/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
   Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers, but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
          && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
          && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
  if (flag_checking && RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
}
/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          reset_insn_used_flags (p);
        else
          {
            gcc_assert (REG_NOTES (p) == NULL);
            for (int i = 0; i < XVECLEN (pat, 0); i++)
              {
                rtx insn = XVECEXP (pat, 0, i);
                if (INSN_P (insn))
                  reset_insn_used_flags (insn);
              }
          }
      }
}
/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          verify_insn_sharing (p);
        else
          for (int i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx insn = XVECEXP (pat, 0, i);
              if (INSN_P (insn))
                verify_insn_sharing (insn);
            }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        if (CALL_P (insn))
          CALL_INSN_FUNCTION_USAGE (insn)
            = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;
  copied = 0;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers, but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0))
          && HARD_REGISTER_NUM_P (REGNO (XEXP (x, 0)))
          && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (x, 0))))
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
}
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          mark_used_flags (XEXP (x, i), flag);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_used_flags (XVECEXP (x, i, j), flag);
          break;
        }
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
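
/* Sketch of the marking protocol these helpers implement (illustrative;
   see unshare_all_rtl_again for the real sequence): clear the flags,
   re-mark what must legitimately stay shared, then copy whatever is
   seen a second time:

	reset_used_flags (PATTERN (insn));
	set_used_flags (DECL_RTL (decl));
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */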
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;

      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;

      default:
        goto done;
      }

 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *seq;
  for (seq = get_current_sequence (); seq; seq = seq->next)
    if (seq->last != 0)
      return seq->last;
  return 0;
}
/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn))
            if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
              insn = seq->insn (seq->len () - 1);
        }
    }

  return insn;
}
/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
    n -= cur_debug_insn_uid;
  else
    n -= param_min_nondebug_insn_uid;

  return n;
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
        if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
          insn = seq->insn (seq->len () - 1);
    }

  return insn;
}
/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN,
   but stop the search before we enter another basic block.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (!insn)
        break;
      if (DEBUG_INSN_P (insn))
        continue;
      if (!NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor
   DEBUG_INSN, but stop the search before we enter another basic
   block.  This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (!insn)
        break;
      if (DEBUG_INSN_P (insn))
        continue;
      if (!NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}
/* Return the next INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN, JUMP_INSN or DEBUG_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || NONDEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_nondebug_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || NONDEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}
/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

int
active_insn_p (const rtx_insn *insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || JUMP_TABLE_DATA_P (insn) /* FIXME */
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
prev_active_insn (rtx_insn *insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
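
/* Sketch: a typical scan that skips notes and debug insns, as many RTL
   passes do (illustrative loop, not a call site in this file):

	for (rtx_insn *insn = get_insns (); insn;
	     insn = next_nonnote_nondebug_insn (insn))
	  process (insn);

   where process stands for any per-insn analysis.  The _bb variants above
   stop at basic block boundaries instead of walking the whole chain.  */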
/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static bool
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
          && rtx_equal_p (reg, XEXP (x, 0)))
        return true;
    }
  return false;
}
/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (label_ref_label (x)))
    LABEL_NUSES (label_ref_label (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before, *after;
  rtx note;
  rtx_insn *seq, *tem;
  profile_probability probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability
      = profile_probability::from_reg_br_prob_note (XINT (note, 0));
  else
    split_branch_probability = profile_probability::uninitialized ();

  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = profile_probability::uninitialized ();

  if (!seq)
    return trial;

  int split_insn_count = 0;
  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      split_insn_count++;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We're not good at redistributing frame information if
     the split occurs before reload or if it results in more
     than one insn.  */
  if (RTX_FRAME_RELATED_P (trial))
    {
      if (!reload_completed || split_insn_count != 1)
        return trial;

      rtx_insn *new_insn = seq;
      rtx_insn *old_insn = trial;
      copy_frame_info_to_split_insn (old_insn, new_insn);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          if (JUMP_P (trial))
            CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability.initialized_p ()
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_reg_br_prob_note (insn, probability);
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            gcc_assert (call_insn == NULL_RTX);
            call_insn = insn;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
        case REG_CALL_NOCF_CHECK:
        case REG_CALL_ARG_LOCATION:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
        case REG_LABEL_TARGET:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_INC:
          if (!AUTO_INC_DEC)
            break;

          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && find_auto_inc (PATTERN (insn), reg))
                add_reg_note (insn, REG_INC, reg);
            }
          break;

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL, insn_last, get_args_size (note));
          break;

        case REG_CALL_DECL:
        case REG_UNTYPED_CALL:
          gcc_assert (call_insn != NULL_RTX);
          add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  before = PREV_INSN (trial);
  after = NEXT_INSN (trial);

  emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
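
/* Sketch of the usual call pattern (this is how the insn-splitting
   machinery in recog.c drives it; loop illustrative):

	rtx_insn *last = try_split (PATTERN (insn), insn, 1);
	if (last != insn)
	  ... INSN was replaced; continue scanning after LAST ...

   A return value equal to TRIAL means nothing was split.  */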
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}
/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > param_min_nondebug_insn_uid)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
              && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P
   objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be
   NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
        }
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = insn;
        }
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (get_insns () == NULL)
    set_first_insn (insn);
  set_last_insn (insn);
}
/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (after == seq->last)
          {
            seq->last = insn;
            break;
          }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (before == seq->first)
          {
            seq->first = insn;
            break;
          }
    }
}
/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx_insn *insn, rtx_insn *after, basic_block bb)
{
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx_insn *insn, rtx_insn *before, basic_block bb)
{
  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx_insn *insn)
{
  if (INSN_P (insn))
    df_insn_delete (insn);
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emit it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx_insn *insn)
{
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->first)
          {
            seq->first = next;
            break;
          }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = prev;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->last)
          {
            seq->last = prev;
            break;
          }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
4338 add_function_usage_to (rtx call_insn
, rtx call_fusage
)
4340 gcc_assert (call_insn
&& CALL_P (call_insn
));
4342 /* Put the register usage information on the CALL. If there is already
4343 some usage information, put ours at the end. */
4344 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
4348 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
4349 link
= XEXP (link
, 1))
4352 XEXP (link
, 1) = call_fusage
;
4355 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}
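
/* Sketch of the speculative-expansion idiom built on this function
   (illustrative, not a call site here):

	rtx_insn *last = get_last_insn ();
	rtx result = maybe_expand_something ();
	if (result == NULL_RTX)
	  delete_insns_since (last);

   where maybe_expand_something stands for any expander that can fail
   after having already emitted some insns.  */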
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  if (flag_checking)
    {
      for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
        gcc_assert (after != x);
      gcc_assert (after != to);
    }

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}
/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx_insn *before, rtx_insn *last,
                           basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}
/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
            emit_pattern_before_noloc (x, before, NULL, NULL,
                                       make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL, NULL,
                                    make_debug_insn_raw);
}
/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx_insn *before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_code_label *
emit_label_before (rtx_code_label *label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return label;
}
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx_insn *after, basic_block bb)
{
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}
static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx_insn *after, basic_block bb,
			  rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}

/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx_insn *after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_jump_insn *
emit_jump_insn_after_noloc (rtx x, rtx_insn *after)
{
  return as_a <rtx_jump_insn *> (
		emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx_insn *after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx_insn *after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx_insn *after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_code_label *
emit_label_after (rtx_code_label *label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return label;
}
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	return true;

      /* Notes for var tracking and EH region markers can appear between or
	 inside basic blocks.  If the caller is emitting on the basic block
	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
	return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
	return false;
    }
}
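/* A sketch of what this predicate means in practice (illustrative;
   INSN is assumed to be the last insn of its basic block):

	rtx_note *note = emit_note_after (NOTE_INSN_VAR_LOCATION, insn);

   Here note_outside_basic_block_p returns true, so the new note gets
   no BLOCK_FOR_INSN; the same note emitted in the middle of a block
   would have its BLOCK_FOR_INSN set.  */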
/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx_insn *after)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx_insn *after, location_t loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
	  && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx_insn *after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_insn_after (rtx pattern, rtx_insn *after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_jump_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_jump_insn *
emit_jump_insn_after (rtx pattern, rtx_insn *after)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_call_insn_after (rtx pattern, rtx_insn *after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx_insn *after, location_t loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_debug_insn_after (rtx pattern, rtx_insn *after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx_insn *before, location_t loc,
			    bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL,
					      NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first)
	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
	  && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx_insn *before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL,
				      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_jump_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before_setloc (pattern, before, loc, false,
				    make_jump_insn_raw));
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before (rtx pattern, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before (pattern, before, true, false,
			     make_jump_insn_raw));
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */

rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx_insn *before, location_t loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */

rtx_insn *
emit_debug_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}

/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

static rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* We need to support the REG_EQUAL on USE trick of find_reloads.  */
      if (!set_for_reg_notes (insn) && GET_CODE (PATTERN (insn)) != USE)
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
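/* Usage sketch (illustrative; INSN is assumed to be a single-set insn
   whose destination is a register): record that the destination ends
   up holding the constant 42, replacing any earlier note of the same
   kind:

	set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));  */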
/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
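/* Usage sketch (illustrative; DEST and SRC are placeholders for valid
   operands, not names defined in this file): a plain SET is classified
   as an INSN and emitted as such,

	emit (gen_rtx_SET (dest, src), true);

   while an unconditional jump pattern would additionally get a
   following barrier here, because ALLOW_BARRIER_P is true.  */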
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
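/* Usage sketch (illustrative): sequences nest, so code can build an
   insn list off to the side and splice it in later:

	start_sequence ();
	... emit insns into the new, empty chain ...
	rtx_insn *seq = get_insns ();
	end_sequence ();	/- restores the previous chain -/
	emit_insn (seq);	/- splices SEQ into the restored chain -/

   (The /- -/ markers stand in for nested comments, which C forbids.)  */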
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}


/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers, but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0))
	  && HARD_REGISTER_NUM_P (REGNO (XEXP (orig, 0)))
	  && HARD_REGISTER_NUM_P (ORIGINAL_REGNO (XEXP (orig, 0))))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 'p':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
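/* Usage sketch (illustrative; INSN is assumed valid): duplicate an
   insn body so that SCRATCHes stay shared within the copy but are not
   shared with the original:

	rtx pat = copy_insn (PATTERN (insn));
	rtx_insn *dup = emit_insn_after (pat, insn);  */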
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (param_min_nondebug_insn_uid)
    cur_insn_uid = param_min_nondebug_insn_uid;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx
    = ggc_cleared_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Return the value of element I of CONST_VECTOR X as a wide_int.  */

static wide_int
const_vector_int_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  machine_mode elt_mode = GET_MODE_INNER (GET_MODE (x));
  if (i < (unsigned int) XVECLEN (x, 0))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, i), elt_mode);

  /* Identify the pattern that contains element I and work out the index of
     the last encoded element for that pattern.  */
  unsigned int encoded_nelts = const_vector_encoded_nelts (x);
  unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
  unsigned int count = i / npatterns;
  unsigned int pattern = i % npatterns;
  unsigned int final_i = encoded_nelts - npatterns + pattern;

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    return rtx_mode_t (CONST_VECTOR_ENCODED_ELT (x, final_i), elt_mode);

  /* Otherwise work out the value from the last two encoded elements.  */
  rtx v1 = CONST_VECTOR_ENCODED_ELT (x, final_i - npatterns);
  rtx v2 = CONST_VECTOR_ENCODED_ELT (x, final_i);
  wide_int diff = wi::sub (rtx_mode_t (v2, elt_mode),
			   rtx_mode_t (v1, elt_mode));
  return wi::add (rtx_mode_t (v2, elt_mode), (count - 2) * diff);
}
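/* Worked example (illustrative): take a stepped encoding of the vector
   { 0, 1, 2, 3, 4, 5, ... } with NPATTERNS == 2, so the encoded
   elements are { 0, 1, 2, 3, 4, 5 }.  For I == 6: COUNT == 3,
   PATTERN == 0 and FINAL_I == 4, so V1 == 2, V2 == 4 and DIFF == 2;
   the result is 4 + (3 - 2) * 2 == 6, as expected.  */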
/* Return the value of element I of CONST_VECTOR X.  */

rtx
const_vector_elt (const_rtx x, unsigned int i)
{
  /* First handle elements that are directly encoded.  */
  if (i < (unsigned int) XVECLEN (x, 0))
    return CONST_VECTOR_ENCODED_ELT (x, i);

  /* If there are no steps, the final encoded value is the right one.  */
  if (!CONST_VECTOR_STEPPED_P (x))
    {
      /* Identify the pattern that contains element I and work out the index of
	 the last encoded element for that pattern.  */
      unsigned int encoded_nelts = const_vector_encoded_nelts (x);
      unsigned int npatterns = CONST_VECTOR_NPATTERNS (x);
      unsigned int pattern = i % npatterns;
      unsigned int final_i = encoded_nelts - npatterns + pattern;
      return CONST_VECTOR_ENCODED_ELT (x, final_i);
    }

  /* Otherwise work out the value from the last two encoded elements.  */
  return immed_wide_int_const (const_vector_int_elt (x, i),
			       GET_MODE_INNER (GET_MODE (x)));
}
/* Return true if X is a valid element for a CONST_VECTOR of the given
   mode.  */

bool
valid_for_const_vector_p (machine_mode, rtx x)
{
  return (CONST_SCALAR_INT_P (x)
	  || CONST_POLY_INT_P (x)
	  || CONST_DOUBLE_AS_FLOAT_P (x)
	  || CONST_FIXED_P (x));
}

/* Generate a vector constant of mode MODE in which every element has
   value ELT.  */

rtx
gen_const_vec_duplicate (machine_mode mode, rtx elt)
{
  rtx_vector_builder builder (mode, 1, 1);
  builder.quick_push (elt);
  return builder.build ();
}

/* Return a vector rtx of mode MODE in which every element has value X.
   The result will be a constant if X is constant.  */

rtx
gen_vec_duplicate (machine_mode mode, rtx x)
{
  if (valid_for_const_vector_p (mode, x))
    return gen_const_vec_duplicate (mode, x);
  return gen_rtx_VEC_DUPLICATE (mode, x);
}
/* A subroutine of const_vec_series_p that handles the case in which:

     (GET_CODE (X) == CONST_VECTOR
      && CONST_VECTOR_NPATTERNS (X) == 1
      && !CONST_VECTOR_DUPLICATE_P (X))

   is known to hold.  */

bool
const_vec_series_p_1 (const_rtx x, rtx *base_out, rtx *step_out)
{
  /* Stepped sequences are only defined for integers, to avoid specifying
     rounding behavior.  */
  if (GET_MODE_CLASS (GET_MODE (x)) != MODE_VECTOR_INT)
    return false;

  /* A non-duplicated vector with two elements can always be seen as a
     series with a nonzero step.  Longer vectors must have a stepped
     encoding.  */
  if (maybe_ne (CONST_VECTOR_NUNITS (x), 2)
      && !CONST_VECTOR_STEPPED_P (x))
    return false;

  /* Calculate the step between the first and second elements.  */
  scalar_mode inner = GET_MODE_INNER (GET_MODE (x));
  rtx base = CONST_VECTOR_ELT (x, 0);
  rtx step = simplify_binary_operation (MINUS, inner,
					CONST_VECTOR_ENCODED_ELT (x, 1), base);
  if (rtx_equal_p (step, CONST0_RTX (inner)))
    return false;

  /* If we have a stepped encoding, check that the step between the
     second and third elements is the same as STEP.  */
  if (CONST_VECTOR_STEPPED_P (x))
    {
      rtx diff = simplify_binary_operation (MINUS, inner,
					    CONST_VECTOR_ENCODED_ELT (x, 2),
					    CONST_VECTOR_ENCODED_ELT (x, 1));
      if (!rtx_equal_p (step, diff))
	return false;
    }

  *base_out = base;
  *step_out = step;
  return true;
}
/* Generate a vector constant of mode MODE in which element I has
   the value BASE + I * STEP.  */

rtx
gen_const_vec_series (machine_mode mode, rtx base, rtx step)
{
  gcc_assert (valid_for_const_vector_p (mode, base)
	      && valid_for_const_vector_p (mode, step));

  rtx_vector_builder builder (mode, 1, 3);
  builder.quick_push (base);
  for (int i = 1; i < 3; ++i)
    builder.quick_push (simplify_gen_binary (PLUS, GET_MODE_INNER (mode),
					     builder[i - 1], step));
  return builder.build ();
}
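/* Sketch (illustrative): gen_const_vec_series (V4SImode, const0_rtx,
   const1_rtx) pushes the three encoded elements { 0, 1, 2 }; the
   builder then represents { 0, 1, 2, 3 } as a single stepped pattern,
   and const_vector_elt recovers element I as 0 + I * 1.  */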
/* Generate a vector of mode MODE in which element I has the value
   BASE + I * STEP.  The result will be a constant if BASE and STEP
   are both constants.  */

rtx
gen_vec_series (machine_mode mode, rtx base, rtx step)
{
  if (step == const0_rtx)
    return gen_vec_duplicate (mode, base);
  if (valid_for_const_vector_p (mode, base)
      && valid_for_const_vector_p (mode, step))
    return gen_const_vec_series (mode, base, step);
  return gen_rtx_VEC_SERIES (mode, base, step);
}

/* Generate a new vector constant for mode MODE and constant value
   CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  machine_mode inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  rtx el = const_tiny_rtx[constant][(int) inner];
  gcc_assert (el);

  return gen_const_vec_duplicate (mode, el);
}

/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  gcc_assert (known_eq (GET_MODE_NUNITS (mode), GET_NUM_ELEM (v)));

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (rtvec_all_equal_p (v))
    return gen_const_vec_duplicate (mode, RTVEC_ELT (v, 0));

  unsigned int nunits = GET_NUM_ELEM (v);
  rtx_vector_builder builder (mode, nunits, 1);
  for (unsigned int i = 0; i < nunits; ++i)
    builder.quick_push (RTVEC_ELT (v, i));
  return builder.build (v);
}
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode && mode != VOIDmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }

  split_branch_probability = profile_probability::uninitialized ();
}
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  opt_scalar_int_mode mode_iter, opt_byte_mode, opt_word_mode;
  FOR_EACH_MODE_IN_CLASS (mode_iter, MODE_INT)
    {
      scalar_int_mode mode = mode_iter.require ();

      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && !opt_byte_mode.exists ())
	opt_byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && !opt_word_mode.exists ())
	opt_word_mode = mode;
    }

  byte_mode = opt_byte_mode.require ();
  word_mode = opt_word_mode.require ();
  ptr_mode = as_a <scalar_int_mode>
    (mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0).require ());
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  scalar_float_mode double_mode;
  opt_scalar_mode smode_iter;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  if (NUM_POLY_INT_COEFFS > 1)
    const_poly_int_htab = hash_table<const_poly_int_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Process stack-limiting command-line options.  */
  if (opt_fstack_limit_symbol_arg != NULL)
    stack_limit_rtx
      = gen_rtx_SYMBOL_REF (Pmode, ggc_strdup (opt_fstack_limit_symbol_arg));
  if (opt_fstack_limit_register_no >= 0)
    stack_limit_rtx = gen_rtx_REG (Pmode, opt_fstack_limit_register_no);

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
  double_mode = float_mode_for_size (DOUBLE_TYPE_SIZE).require ();

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_DECIMAL_FLOAT)
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_INT)
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  /* For BImode, 1 and -1 are unsigned and signed interpretations
     of the same value.  */
  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  const_tiny_rtx[1][(int) BImode] = const_true_rtx;
  const_tiny_rtx[3][(int) BImode] = const_true_rtx;
  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_INT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_COMPLEX_FLOAT)
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  /* As for BImode, "all 1" and "all -1" are unsigned and signed
     interpretations of the same value.  */
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_BOOL)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
      const_tiny_rtx[1][(int) mode] = const_tiny_rtx[3][(int) mode];
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_INT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FLOAT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_FRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UFRACT)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_ACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }

  FOR_EACH_MODE_IN_CLASS (smode_iter, MODE_UACCUM)
    {
      scalar_mode smode = smode_iter.require ();
      FCONST0 (smode).data.high = 0;
      FCONST0 (smode).data.low = 0;
      FCONST0 (smode).mode = smode;
      const_tiny_rtx[0][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST0 (smode), smode);

      /* We store the value 1.  */
      FCONST1 (smode).data.high = 0;
      FCONST1 (smode).data.low = 0;
      FCONST1 (smode).mode = smode;
      FCONST1 (smode).data
	= double_int_one.lshift (GET_MODE_FBIT (smode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (smode));
      const_tiny_rtx[1][(int) smode]
	= CONST_FIXED_FROM_FIXED_VALUE (FCONST1 (smode), smode);
    }
  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_FRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UFRACT)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_ACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  FOR_EACH_MODE_IN_CLASS (mode, MODE_VECTOR_UACCUM)
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*insn_uid=*/0,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   /*code=*/-1,
				   /*reg_notes=*/NULL_RTX);
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Locate the end of existing REG_NOTES in NEW_RTX.  */
  rtx *ptail = &REG_NOTES (new_rtx);
  while (*ptail != NULL_RTX)
    ptail = &XEXP (*ptail, 1);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	*ptail = duplicate_reg_note (link);
	ptail = &XEXP (*ptail, 1);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
location_t prologue_location;
location_t epilogue_location;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
static location_t curr_location;

/* Allocate insn location datastructure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
/* Set the location of the insn chain starting at INSN to LOC.  */
void
set_insn_locations (rtx_insn *insn, location_t loc)
{
  while (insn)
    {
      if (INSN_P (insn))
	INSN_LOCATION (insn) = loc;
      insn = NEXT_INSN (insn);
    }
}

/* Return lexical scope block insn belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
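/* For example (sketch): with MODEL == MEMMODEL_RELEASE,
   need_atomic_barrier_p (model, true) is true (a barrier is needed
   before the operation) while need_atomic_barrier_p (model, false) is
   false; MEMMODEL_ACQUIRE is the mirror image, and MEMMODEL_SEQ_CST
   requires a barrier on both sides.  */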
/* Return a constant shift amount for shifting a value of mode MODE
   by VALUE bits.  */

rtx
gen_int_shift_amount (machine_mode, poly_int64 value)
{
  /* Use a 64-bit mode, to avoid any truncation.

     ??? Perhaps this should be automatically derived from the .md files
     instead, or perhaps have a target hook.  */
  scalar_int_mode shift_mode = (BITS_PER_UNIT == 8
				? DImode
				: int_mode_for_size (64, 0).require ());
  return gen_int_mode (value, shift_mode);
}
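/* Sketch (assuming BITS_PER_UNIT == 8, so SHIFT_MODE is DImode):
   gen_int_shift_amount (V4SImode, 3) yields (const_int 3); the 64-bit
   mode only determines how gen_int_mode reduces the value, so no
   plausible shift amount is truncated.  */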
/* Initialize fields of rtl_data related to stack alignment.  */

void
rtl_data::init_stack_alignment ()
{
  stack_alignment_needed = STACK_BOUNDARY;
  max_used_stack_slot_alignment = STACK_BOUNDARY;
  stack_alignment_estimated = 0;
  preferred_stack_boundary = STACK_BOUNDARY;
}


#include "gt-emit-rtl.h"