/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "double-int.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "stringpool.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "langhooks.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

machine_mode byte_mode;		/* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;		/* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;		/* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx simple_return_rtx;
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_hasher<rtx>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;

struct const_wide_int_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;

/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_hasher<reg_attrs *>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently proceeded by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;

  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */

bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}

rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}
rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
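
/* Illustrative sketch (not part of the original source): every
   CONST_INT is shared, so equal values compare pointer-equal.  Small
   values come from the const_int_rtx array, larger ones from
   const_int_htab:

     gcc_assert (GEN_INT (0) == const0_rtx);
     gcc_assert (GEN_INT (12345678) == GEN_INT (12345678));
*/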
rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
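
/* Illustrative sketch (not part of the original source): gen_int_mode
   sign-extends C from MODE's precision, so the returned constant is
   canonical for that mode.  In QImode the bit pattern 0xff denotes -1:

     gcc_assert (gen_int_mode (0xff, QImode) == constm1_rtx);
*/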
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif
/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
		  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
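
/* Illustrative sketch (not part of the original source): on a host
   with 64-bit HOST_WIDE_INT and a target providing TImode, passing
   i0 == -1 with i1 == ~0 hits case 2 above, so a shared CONST_INT
   comes back instead of a CONST_DOUBLE:

     rtx c = immed_double_const (-1, ~(HOST_WIDE_INT) 0, TImode);
     gcc_assert (c == constm1_rtx);
*/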
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register
		that should be used in different modes in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
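
/* Illustrative sketch (not part of the original source): on a 32-bit
   little-endian target, both word-sized pieces of a DImode register
   are valid subreg targets, while a misaligned offset is rejected by
   the first check above:

     validate_subreg (SImode, DImode, x, 0);   // true
     validate_subreg (SImode, DImode, x, 4);   // true
     validate_subreg (SImode, DImode, x, 2);   // false: 2 % 4 != 0
*/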
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
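
/* Illustrative sketch (not part of the original source): building a
   two-element PARALLEL from a pair of hypothetical SET rtxes 'a' and
   'b':

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, a, b));
*/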
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
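
/* Illustrative sketch (not part of the original source): requesting a
   complex pseudo while generating_concat_p is set yields a CONCAT of
   two scalar pseudos rather than one wide register:

     rtx r = gen_reg_rtx (DCmode);
     // r is (concat:DC (reg:DF n) (reg:DF n+1))
*/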
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */

rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
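
/* Illustrative sketch (not part of the original source): for the
   SImode lowpart of a DImode value on a target with 4-byte words,
   difference is 4, so the offset is 4 on a fully big-endian target
   and 0 on a little-endian one:

     unsigned int off = subreg_lowpart_offset (SImode, DImode);
*/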
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Return true if X is a paradoxical subreg, false otherwise.  */

bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
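
/* Illustrative sketch (not part of the original source): splitting a
   hypothetical DImode operand 'op' into its two word-sized pieces on
   a 32-bit target (word 0 is the low word unless WORDS_BIG_ENDIAN):

     rtx lo = operand_subword (op, 0, 1, DImode);
     rtx hi = operand_subword (op, 1, 1, DImode);
*/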
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Returns 1 if both MEM_EXPR can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
		  MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !tree_fits_uhwi_p (byte_offset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += tree_to_uhwi (byte_offset);
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  new_size = DECL_SIZE_UNIT (t);
	}

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
	;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2)
	      || TREE_CODE (t2) == COMPONENT_REF)
	    {
	      attrs.expr = t2;
	      attrs.offset_known_p = false;
	      if (tree_fits_uhwi_p (off_tree))
		{
		  attrs.offset_known_p = true;
		  attrs.offset = tree_to_uhwi (off_tree);
		  apply_bitpos = bitpos;
		}
	    }
	  /* Else do not record a MEM_EXPR.  */
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
	obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
	attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
		  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
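
/* Illustrative sketch (not part of the original source): re-basing a
   hypothetical memory reference 'mem' on a freshly computed address
   'new_addr' while keeping only its alias set:

     rtx wide = change_address (mem, DImode, new_addr);
*/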
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust_address, int adjust_object,
		  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
		>> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (address_mode,
					      XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
	 the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
	       && GET_CODE (addr) == ZERO_EXTEND
	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
	       && trunc_int_for_mode (offset, pointer_mode) == offset)
	addr = gen_rtx_ZERO_EXTEND (address_mode,
				    plus_constant (pointer_mode,
						   XEXP (addr, 0), offset));
#endif
      else
	addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this
     adjustment.  */
;
2277 /* Compute the new values of the memory attributes due to this adjustment.
2278 We add the offsets and update the alignment. */
2279 if (attrs
.offset_known_p
)
2281 attrs
.offset
+= offset
;
2283 /* Drop the object if the new left end is not within its bounds. */
2284 if (adjust_object
&& attrs
.offset
< 0)
2286 attrs
.expr
= NULL_TREE
;
2291 /* Compute the new alignment by taking the MIN of the alignment and the
2292 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2296 max_align
= (offset
& -offset
) * BITS_PER_UNIT
;
2297 attrs
.align
= MIN (attrs
.align
, max_align
);
2302 /* Drop the object if the new right end is not within its bounds. */
2303 if (adjust_object
&& (offset
+ size
) > attrs
.size
)
2305 attrs
.expr
= NULL_TREE
;
2308 attrs
.size_known_p
= true;
2311 else if (attrs
.size_known_p
)
2313 gcc_assert (!adjust_object
);
2314 attrs
.size
-= offset
;
2315 /* ??? The store_by_pieces machinery generates negative sizes,
2316 so don't assert for that here. */
2319 set_mem_attrs (new_rtx
, &attrs
);
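
/* A minimal arithmetic sketch (hypothetical values, kept disabled) of
   the offset-narrowing step above: with a 64-bit HOST_WIDE_INT and a
   32-bit address space, shifting up and arithmetically back down
   sign-extends bit 31, turning a large unsigned offset into the
   equivalent negative one.  */
#if 0
  HOST_WIDE_INT off = (HOST_WIDE_INT) 0xfffffffc;  /* 2^32 - 4.  */
  int shift = 64 - 32;
  off = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) off << shift))
         >> shift);                                /* Now -4.  */
#endif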
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
                                     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (attrs.expr, 1);
          tree offset = component_ref_field_offset (attrs.expr);

          if (! DECL_SIZE_UNIT (field))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && attrs.offset >= 0)
            break;

          if (! tree_fits_uhwi_p (offset))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          attrs.expr = TREE_OPERAND (attrs.expr, 0);
          attrs.offset += tree_to_uhwi (offset);
          attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                           / BITS_PER_UNIT);
        }
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
               && DECL_SIZE_UNIT (attrs.expr)
               && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
               && (! attrs.offset_known_p || attrs.offset >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          attrs.expr = NULL_TREE;
          break;
        }
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
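
/* A minimal usage sketch (BYTE_MEM is hypothetical, kept disabled):
   widen a QImode reference to SImode so a target without byte loads
   can load a full word and mask out the byte afterwards.  */
#if 0
  rtx wide = widen_memory_access (byte_mem, SImode, 0);
#endif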
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
        (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
            gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
                                NULL, label_num++, NULL));
}
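
/* A minimal sketch of the usual pattern (the skipped code is
   hypothetical, kept disabled): branch over a region and bind the
   label at its end.  */
#if 0
  rtx_code_label *over = gen_label_rtx ();
  emit_jump (over);
  /* ... emit the code being skipped ... */
  emit_label (over);
#endif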
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = safe_as_a <rtx_expr_list *> (
                      copy_rtx_if_shared (stack_slot_list));
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (CALL_P (p))
          reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}
/* Check that ORIG is not marked when it should not be and mark ORIG as in
   use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;
      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
}
/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          reset_insn_used_flags (p);
        else
          {
            gcc_assert (REG_NOTES (p) == NULL);
            for (int i = 0; i < XVECLEN (pat, 0); i++)
              {
                rtx insn = XVECEXP (pat, 0, i);
                if (INSN_P (insn))
                  reset_insn_used_flags (insn);
              }
          }
      }
}

/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}
/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          verify_insn_sharing (p);
        else
          for (int i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx insn = XVECEXP (pat, 0, i);
              if (INSN_P (insn))
                verify_insn_sharing (insn);
            }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}

/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

static void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        if (CALL_P (insn))
          CALL_INSN_FUNCTION_USAGE (insn)
            = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}
/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
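
/* A minimal sketch of the mark-then-copy protocol (PAT hypothetical,
   kept disabled): clear the used bits first, or every node appears
   shared and is copied.  */
#if 0
  reset_used_flags (pat);
  pat = copy_rtx_if_shared (pat);
#endif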
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  copied = 0;
  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          mark_used_flags (XEXP (x, i), flag);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_used_flags (XVECEXP (x, i, j), flag);
          break;
        }
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }

 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }

  return x;
}
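
/* A minimal sketch (X, OTHER and NEW_VAL hypothetical, kept disabled):
   protect X before emitting a store into OTHER that might clobber it.  */
#if 0
  x = make_safe_from (x, other);
  emit_move_insn (other, new_val);
  /* X is still usable here even if OTHER overlapped it.  */
#endif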
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *seq;
  for (seq = get_current_sequence (); seq; seq = seq->next)
    if (seq->last != 0)
      return seq->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn))
            if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
              insn = seq->insn (seq->len () - 1);
        }
    }

  return insn;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
        if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
          insn = seq->insn (seq->len () - 1);
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn_bb (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}

/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || JUMP_TABLE_DATA_P (insn) /* FIXME */
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insns.  */

rtx_insn *
prev_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
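
/* A minimal sketch (kept disabled): count the insns that really do
   something in the current function using the predicate above.  */
#if 0
  int n_active = 0;
  for (rtx_insn *insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn) && active_insn_p (insn))
      n_active++;
#endif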
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
/* Find a RTX_AUTOINC class rtx which matches REG.  */

static bool
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
          && rtx_equal_p (reg, XEXP (x, 0)))
        return true;
    }
  return false;
}

/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
    LABEL_NUSES (LABEL_REF_LABEL (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx uncast_trial, int last)
{
  rtx_insn *trial = as_a <rtx_insn *> (uncast_trial);
  rtx_insn *before = PREV_INSN (trial);
  rtx_insn *after = NEXT_INSN (trial);
  rtx note;
  rtx_insn *seq, *tem;
  int probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx call_insn = NULL_RTX;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = XINT (note, 0);
  probability = split_branch_probability;

  seq = safe_as_a <rtx_insn *> (split_insns (pat, trial));

  split_branch_probability = -1;

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          if (JUMP_P (trial))
            CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_int_reg_note (insn, REG_BR_PROB, probability);
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx_insn *next;
            rtx *p;

            gcc_assert (call_insn == NULL_RTX);
            call_insn = insn;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && find_auto_inc (PATTERN (insn), reg))
                add_reg_note (insn, REG_INC, reg);
            }
          break;

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
          break;

        case REG_CALL_DECL:
          gcc_assert (call_insn != NULL_RTX);
          add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
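
/* A minimal caller sketch (TRIAL hypothetical, kept disabled): ask the
   machine description to split TRIAL; on success TRIAL is deleted and
   the first replacement insn comes back, otherwise TRIAL itself.  */
#if 0
  rtx_insn *first_new = try_split (PATTERN (trial), trial, 0);
  if (first_new != trial)
    {
      /* TRIAL is gone; continue scanning from FIRST_NEW.  */
    }
#endif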
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
              && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
   but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
        }
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = insn;
        }
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}
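
/* A minimal sketch (INSN, PREV and NEXT hypothetical, kept disabled) of
   the invariant link_insn_into_chain maintains once INSN is linked in
   between two ordinary insns.  */
#if 0
  gcc_checking_assert (NEXT_INSN (prev) == insn
                       && PREV_INSN (insn) == prev
                       && (next == NULL || PREV_INSN (next) == insn));
#endif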
/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (after == seq->last)
          {
            seq->last = insn;
            break;
          }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (before == seq->first)
          {
            seq->first = insn;
            break;
          }
    }
}
/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);

  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}

/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emit it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->first)
          {
            seq->first = next;
            break;
          }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = prev;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->last)
          {
            seq->last = prev;
            break;
          }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}
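
/* A minimal sketch of the usual backtracking idiom (the expansion
   helper is hypothetical, kept disabled): remember the last insn, try
   an expansion, and discard everything emitted if it fails.  */
#if 0
  rtx_insn *last = get_last_insn ();
  if (!try_some_expansion ())  /* Hypothetical helper.  */
    delete_insns_since (last);
#endif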
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
#ifdef ENABLE_CHECKING
  rtx_insn *x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}

/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);

  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_insn *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_insn *> (label);
}
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
                          rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);

  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  ON_BB_BOUNDARY_P is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
        return true;

      /* Notes for var tracking and EH region markers can appear between or
         inside basic blocks.  If the caller is emitting on the basic block
         boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_CALL_ARG_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
        return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
        return false;
    }
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx uncast_after)
{
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx uncast_before)
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return safe_as_a <rtx_insn *> (last);

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }

  return safe_as_a <rtx_insn *> (last);
}
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
}
/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}
/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
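/* Usage sketch (editorial, not from the original source): the three
   flavors differ only in how INSN_LOCATION is chosen.  REG, VALUE, AFTER
   and LOC are illustrative placeholders, and the three-operand
   gen_rtx_SET shown is the form used in this era of the tree.

       emit_insn_after_noloc (gen_rtx_SET (VOIDmode, reg, value), after, NULL);
       emit_insn_after_setloc (gen_rtx_SET (VOIDmode, reg, value), after, loc);
       emit_insn_after (gen_rtx_SET (VOIDmode, reg, value), after);

   The last call inherits the location of the nearest non-debug insn at or
   before AFTER, via emit_pattern_after.  */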
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
			    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL_RTX,
					      NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }

  return last;
}
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL_RTX,
				      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}
/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_jump_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_jump_insn_raw);
}
/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */

rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}
/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */

rtx_insn *
emit_debug_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
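/* Usage sketch (editorial, not from the original source): the before
   variants mirror the after variants; the extra INSNP flag is what lets
   emit_pattern_before_noloc treat plain INSNs differently from jumps and
   calls.  REG and BEFORE are illustrative placeholders.

       emit_insn_before (gen_rtx_USE (VOIDmode, reg), before);

   The emitted insn inherits INSN_LOCATION from BEFORE, or from the
   nearest non-debug insn before it.  */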
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
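/* Usage sketch (editorial, not from the original source): emit_insn
   accepts either a bare pattern, which is wrapped via make_insn_raw, or a
   pre-built insn chain, which is spliced in whole.  SEQ is an
   illustrative chain, typically obtained from get_insns () inside a
   start_sequence ()/end_sequence () pair.

       emit_insn (gen_rtx_USE (VOIDmode, gen_reg_rtx (word_mode)));
       emit_insn (seq);

   Both calls return the last insn added to the chain.  */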
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_insn *
emit_label (rtx label)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (as_a <rtx_insn *> (label));
  return as_a <rtx_insn *> (label);
}
/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}
/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}
/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
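/* Usage sketch (editorial, not from the original source): CONCATs are
   split recursively, so clobbering a complex value clobbers both halves.
   CREAL and CIMAG are illustrative pseudos; SCmode availability depends
   on the target.

       rtx creal = gen_reg_rtx (SFmode);
       rtx cimag = gen_reg_rtx (SFmode);
       emit_clobber (gen_rtx_CONCAT (SCmode, creal, cimag));

   This emits two CLOBBER insns, one per component.  */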
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

static rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      if (!set_for_reg_notes (insn))
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
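/* Usage sketch (editorial, not from the original source): recording that
   the single SET destination of INSN is known to equal a constant.  INSN
   and the value 42 are illustrative.

       set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   Any pre-existing REG_EQUAL note on INSN is overwritten in place, and
   the note is dropped entirely if INSN has no set usable for notes.  */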
/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL;
}
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (ANY_RETURN_P (x))
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
	return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
	return CALL_INSN;
      else
	return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
	  return CALL_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
	  return JUMP_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
	  return CALL_INSN;
    }
  return INSN;
}
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
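/* Usage sketch (editorial, not from the original source): emit () picks
   the insn code from the shape of the pattern alone.  A SET whose
   destination is pc_rtx classifies as a JUMP_INSN, so the call below
   behaves like emit_jump_insn and, being an unconditional jump, is
   followed by a barrier.  LABEL is illustrative, and the three-operand
   gen_rtx_SET is the form used in this era of the tree.

       emit (gen_rtx_SET (VOIDmode, pc_rtx,
                          gen_rtx_LABEL_REF (VOIDmode, label)));
*/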
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */
void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc <sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
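/* Usage sketch (editorial, not from the original source): the canonical
   pattern for building insns off to the side.  Note that get_insns ()
   must be called before end_sequence (), as documented above.

       rtx_insn *seq;
       start_sequence ();
       emit_use (gen_reg_rtx (word_mode));
       seq = get_insns ();
       end_sequence ();
       emit_insn (seq);

   After end_sequence, SEQ is a detached chain that can be emitted into
   the main stream or inserted with the emit_*_before/after routines.  */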
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
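/* Usage sketch (editorial, not from the original source): duplicating a
   pattern for re-emission.  Unlike plain copy_rtx, a SCRATCH that appears
   several times in the original maps to one shared SCRATCH in the copy,
   and ASM_OPERANDS input vectors stay consistent.  INSN is illustrative.

       rtx copy = copy_insn (PATTERN (insn));
       emit_insn (copy);
*/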
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  machine_mode inner;
  rtvec v;
  int units, i;
  rtx tem;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */

rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
	return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
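/* Usage sketch (editorial, not from the original source): building a
   V4SImode all-zero vector.  Because every element is CONST0_RTX (SImode),
   the shared CONST0_RTX (V4SImode) is returned instead of a fresh
   CONST_VECTOR.  V4SImode availability is target-dependent.

       rtvec v = rtvec_alloc (4);
       for (int k = 0; k < 4; k++)
         RTVEC_ELT (v, k) = const0_rtx;
       rtx zero = gen_rtx_CONST_VECTOR (V4SImode, v);
*/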
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif
  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }
  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
      const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
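/* Usage sketch (editorial, not from the original source): duplicating an
   insn during block duplication while keeping its location,
   frame-relatedness and REG_NOTES in sync.  INSN and AFTER are
   illustrative.

       rtx_insn *dup = emit_copy_of_insn_after (insn, after);
       gcc_checking_assert (INSN_CODE (dup) == INSN_CODE (insn));
*/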
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
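/* Usage sketch (editorial, not from the original source): because the
   CLOBBER is cached per (mode, regno) pair, repeated calls return the
   same shared rtx, so callers must not modify it in place.  Register
   number 0 is illustrative.

       rtx c1 = gen_hard_reg_clobber (word_mode, 0);
       rtx c2 = gen_hard_reg_clobber (word_mode, 0);
       gcc_checking_assert (c1 == c2);
*/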
location_t prologue_location;
location_t epilogue_location;

/* Hold current location information and last location information, so that
   the data structures are built lazily only when instructions in a given
   place are needed.  */
static location_t curr_location;
/* Allocate the insn location data structure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}
/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
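/* Usage sketch (editorial, not from the original source): a debug dump of
   where an insn came from, using the accessors above.  INSN is
   illustrative; the location must be checked first since insn_file can
   return NULL for unknown locations.

       if (INSN_HAS_LOCATION (insn))
         fprintf (stderr, "%s:%d\n", insn_file (insn), insn_line (insn));
*/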
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
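/* Usage sketch (editorial, not from the original source): a target
   expanding an atomic operation can use this predicate to decide whether
   to place a fence before the operation.  MODEL is illustrative;
   expand_mem_thread_fence is the generic fence expander from optabs.

       if (need_atomic_barrier_p (model, true))
         expand_mem_thread_fence (model);
*/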
#include "gt-emit-rtl.h"