/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "stringpool.h"
#include "insn-config.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "stor-layout.h"
struct target_rtl default_target_rtl;
struct target_rtl *this_target_rtl = &default_target_rtl;

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

machine_mode byte_mode;		/* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;		/* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;		/* Mode whose width is POINTER_SIZE.  */
/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   not able to deal with a length attribute nested in top level
   structures.  */

rtx *regno_reg_rtx;
/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
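/* Layout sketch (illustrative): the cached rtx for (const_int C) lives
   at const_int_rtx[C + MAX_SAVED_CONST_INT], so e.g. const0_rtx sits in
   the middle of the array and requests for small constants never
   allocate; see gen_rtx_CONST_INT below.  */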
/* Standard pieces of rtx, to be substituted directly into things.  */
rtx simple_return_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
struct const_wide_int_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_ptr_hash<reg_attrs>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_ptr_hash<rtx_def>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;

  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */
bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */
hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}
/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */
bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  if (p == q)
    return true;
  if (!p || !q)
    return false;
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}
/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
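/* Example (illustrative): a pseudo that holds bytes 4..7 of a DImode
   variable V would carry reg_attrs { decl = V, offset = 4 }; identical
   (decl, offset) pairs share a single structure via reg_attrs_htab.  */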
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
			? hard_regno_nregs[regno][mode]
			: 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
						 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
						 insn_list));
}
rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
	      basic_block bb, rtx pattern, int location, int code,
	      rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
						 prev_insn, next_insn,
						 bb, pattern, location, code,
						 reg_notes));
}
rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
						   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
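/* Example (illustrative): gen_int_mode (0xff, QImode) yields
   (const_int -1) on a target with an 8-bit QImode, because the value is
   sign-truncated to the mode's width before the shared CONST_INT is
   looked up.  */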
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif
#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif
/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ...  */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
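/* Example (illustrative): immed_wide_int_const
   (wi::shwi (5, GET_MODE_PRECISION (SImode)), SImode) takes the
   gen_int_mode path and returns the shared (const_int 5); only values
   needing more than one HOST_WIDE_INT block reach the CONST_WIDE_INT
   (or CONST_DOUBLE) path.  */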
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   const_double_from_real_value.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only from copies of the sign bit, and sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
		  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
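/* Example (illustrative, assuming a 64-bit HOST_WIDE_INT and
   !TARGET_SUPPORTS_WIDE_INT): immed_double_const (-1, 0, TImode) builds
   a VOIDmode CONST_DOUBLE with low word all ones and high word zero,
   i.e. the 128-bit value 2**64 - 1; neither CONST_INT shortcut applies
   because i0 is negative while i1 is not its sign extension.  */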
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
	  && regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
	  && regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
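/* Example (illustrative): constant-pool loads are typically built this
   way; gen_const_mem (SImode, addr) yields a MEM with MEM_READONLY_P
   and MEM_NOTRAP_P set, so later passes may CSE or hoist it freely.  */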
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register
		that should be used in a different mode in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
		      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
		     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
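/* Example (illustrative): byte_lowpart_offset (DImode, SImode) names a
   paradoxical lowpart and returns 0 on a little-endian target but -4 on
   a big-endian one, since the wider value would start four bytes before
   the SImode value.  */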
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}
/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED)
      if (((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	   || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	  && !targetm.have_ptr_extend ())
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (!t)
    return;
  tree tdecl = t;
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_P (tdecl)
					       ? DECL_MODE (tdecl)
					       : TYPE_MODE (TREE_TYPE (tdecl))));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return lowpart_subreg (mode, x, innermode);

  /* Otherwise, we can't do this.  */
  return 0;
}
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
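/* Example (illustrative): (subreg:DI (reg:SI 100) 0) is paradoxical,
   since the outer mode is wider than the mode of the inner register;
   (subreg:SI (reg:DI 100) 0) is not.  */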
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
		 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
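/* Example (illustrative, assuming a 32-bit SImode word_mode):
   operand_subword (op, 0, 1, DImode) returns the low SImode word of OP
   and offset 1 the high word on a little-endian target; with
   WORDS_BIG_ENDIAN the numbering is reversed.  */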
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
/* Returns 1 if both MEM_EXPR can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
	          MAX (align, get_object_alignment (MEM_EXPR (mem))))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !tree_fits_uhwi_p (byte_offset)
	      || !tree_fits_uhwi_p (bit_offset))
	    return -1;

	  offset += tree_to_uhwi (byte_offset);
	  offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  new_size = DECL_SIZE_UNIT (t);
	}

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
	;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2)
	      || TREE_CODE (t2) == COMPONENT_REF)
	    {
	      attrs.expr = t2;
	      attrs.offset_known_p = false;
	      if (tree_fits_uhwi_p (off_tree))
		{
		  attrs.offset_known_p = true;
		  attrs.offset = tree_to_uhwi (off_tree);
		  apply_bitpos = bitpos;
		}
	    }
	  /* Else do not record a MEM_EXPR.  */
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
	obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
	attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}
/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}
/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}
/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}
/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}
/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}
/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
		  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust_address, int adjust_object,
                  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
                                                    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (address_mode,
                                              XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
         in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
         the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
               && GET_CODE (addr) == ZERO_EXTEND
               && GET_MODE (XEXP (addr, 0)) == pointer_mode
               && trunc_int_for_mode (offset, pointer_mode) == offset)
        addr = gen_rtx_ZERO_EXTEND (address_mode,
                                    plus_constant (pointer_mode,
                                                   XEXP (addr, 0), offset));
#endif
      else
        addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && attrs.offset < 0)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (size)
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && (offset + size) > attrs.size)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
         so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
                                     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}

/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (attrs.expr, 1);
          tree offset = component_ref_field_offset (attrs.expr);

          if (! DECL_SIZE_UNIT (field))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && attrs.offset >= 0)
            break;

          if (! tree_fits_uhwi_p (offset))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          attrs.expr = TREE_OPERAND (attrs.expr, 0);
          attrs.offset += tree_to_uhwi (offset);
          attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                           / BITS_PER_UNIT);
        }
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
               && DECL_SIZE_UNIT (attrs.expr)
               && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
               && (! attrs.offset_known_p || attrs.offset >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          attrs.expr = NULL_TREE;
          break;
        }
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
        (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}

/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
            gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
                                NULL, label_num++, NULL));
}
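/* Illustrative sketch (an editorial example, not original code):  a
   label is created and placed in the stream in two separate steps,

       rtx_code_label *lab = gen_label_rtx ();
       ...
       emit_label (lab);

   gen_label_rtx only allocates the CODE_LABEL and assigns it a unique
   number; nothing reaches the insn chain until the label is emitted.  */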
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = safe_as_a <rtx_expr_list *> (
                      copy_rtx_if_shared (stack_slot_list));
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (CALL_P (p))
          reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}
/* Check that ORIG is not marked when it should not be and mark ORIG as in
   use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
  if (flag_checking && RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
  return;
}
/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          reset_insn_used_flags (p);
        else
          {
            gcc_assert (REG_NOTES (p) == NULL);
            for (int i = 0; i < XVECLEN (pat, 0); i++)
              {
                rtx insn = XVECEXP (pat, 0, i);
                if (INSN_P (insn))
                  reset_insn_used_flags (insn);
              }
          }
      }
}

/* Verify sharing in INSN.  Note: the original text called
   reset_used_flags here, which made this a no-op as a verifier; it must
   recurse with verify_rtx_sharing for the check to have any effect.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          verify_insn_sharing (p);
        else
          for (int i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx insn = XVECEXP (pat, 0, i);
              if (INSN_P (insn))
                verify_insn_sharing (insn);
            }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        if (CALL_P (insn))
          CALL_INSN_FUNCTION_USAGE (insn)
            = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}

/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
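/* Illustrative sketch (an editorial example, not original code):  the
   used-flag machinery is always driven in pairs -- clear the marks,
   then copy anything that turns up marked a second time:

       reset_used_flags (PATTERN (insn));
       PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   unshare_all_rtl_in_chain above is essentially this pattern applied
   to every insn body, note list and call-usage list in a function.  */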
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied = 0;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}

/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          mark_used_flags (XEXP (x, i), flag);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_used_flags (XVECEXP (x, i, j), flag);
          break;
        }
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
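/* Illustrative sketch (an editorial example, not original code):
   expand-time code uses this when a value Y must survive a store to
   TARGET that is about to be emitted:

       y = make_safe_from (y, target);
       emit_move_insn (target, x);
       ... Y is still usable here ...

   If Y mentioned TARGET (or TARGET was a MEM), Y now lives in a fresh
   pseudo instead.  */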
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *seq;
  for (seq = get_current_sequence (); seq; seq = seq->next)
    if (seq->last != 0)
      return seq->last;
  return 0;
}

/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn))
            if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
              insn = seq->insn (seq->len () - 1);
        }
    }

  return insn;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}

/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
        if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
          insn = seq->insn (seq->len () - 1);
    }

  return insn;
}
/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn_bb (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}
/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || JUMP_TABLE_DATA_P (insn) /* FIXME */
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

rtx_insn *
prev_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
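/* Illustrative sketch (an editorial example, not original code):  a
   walk over the "real work" of a function, skipping notes and (after
   reload) standalone USE/CLOBBER insns, would look like

       for (rtx_insn *insn = get_insns (); insn;
            insn = next_active_insn (insn))
         if (active_insn_p (insn))
           ... process insn ...

   Before reload, USE and CLOBBER still count as active.  */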
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}

/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static bool
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
          && rtx_equal_p (reg, XEXP (x, 0)))
        return true;
    }
  return false;
}

/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
    LABEL_NUSES (LABEL_REF_LABEL (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before = PREV_INSN (trial);
  rtx_insn *after = NEXT_INSN (trial);
  rtx note;
  rtx_insn *seq, *tem;
  int probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = XINT (note, 0);
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          if (JUMP_P (trial))
            CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_int_reg_note (insn, REG_BR_PROB, probability);
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx_insn *next;
            rtx *p;

            gcc_assert (call_insn == NULL_RTX);
            call_insn = insn;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && find_auto_inc (PATTERN (insn), reg))
                add_reg_note (insn, REG_INC, reg);
            }
          break;

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
          break;

        case REG_CALL_DECL:
          gcc_assert (call_insn != NULL_RTX);
          add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
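/* Illustrative sketch (an editorial example, not original code):  the
   caller-side idiom, e.g. in the scheduler, is simply

       insn = try_split (PATTERN (insn), insn, 1);

   i.e. re-point INSN at the fully split last replacement insn; if
   nothing could be split, TRIAL comes back unchanged.  */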
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
              && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}

/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
   but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
        }
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = insn;
        }
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}

/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (after == seq->last)
          {
            seq->last = insn;
            break;
          }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (before == seq->first)
          {
            seq->first = insn;
            break;
          }
    }
}

/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from before.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  add_insn_after_nobb (insn, after);
  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from before.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);

  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}

/* Replace INSN with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emit it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->first)
          {
            seq->first = next;
            break;
          }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = prev;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->last)
          {
            seq->last = prev;
            break;
          }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
        df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
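/* Illustrative sketch (an editorial example, not original code):  to
   move an insn without touching its dataflow info, unlink it, clear
   the stale links as the comment above requires, then re-add it:

       remove_insn (insn);
       SET_PREV_INSN (insn) = NULL;
       SET_NEXT_INSN (insn) = NULL;
       add_insn_after (insn, new_place, NULL);

   Use delete_insn instead when the insn is really going away.  */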
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}

/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  if (flag_checking)
    {
      for (rtx_insn *x = from; x != to; x = NEXT_INSN (x))
        gcc_assert (after != x);
      gcc_assert (after != to);
    }

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}
/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
                emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                           make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_code_label *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_code_label *> (label);
}
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
                          rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
4534 /* Make X be output after the insn AFTER and set the BB of insn. If
4535 BB is NULL, an attempt is made to infer the BB from AFTER. */
4538 emit_insn_after_noloc (rtx x
, rtx after
, basic_block bb
)
4540 return emit_pattern_after_noloc (x
, after
, bb
, make_insn_raw
);
4544 /* Make an insn of code JUMP_INSN with body X
4545 and output it after the insn AFTER. */
4548 emit_jump_insn_after_noloc (rtx x
, rtx after
)
4550 return as_a
<rtx_jump_insn
*> (
4551 emit_pattern_after_noloc (x
, after
, NULL
, make_jump_insn_raw
));
4554 /* Make an instruction with body X and code CALL_INSN
4555 and output it after the instruction AFTER. */
4558 emit_call_insn_after_noloc (rtx x
, rtx after
)
4560 return emit_pattern_after_noloc (x
, after
, NULL
, make_call_insn_raw
);
4563 /* Make an instruction with body X and code CALL_INSN
4564 and output it after the instruction AFTER. */
4567 emit_debug_insn_after_noloc (rtx x
, rtx after
)
4569 return emit_pattern_after_noloc (x
, after
, NULL
, make_debug_insn_raw
);
4572 /* Make an insn of code BARRIER
4573 and output it after the insn AFTER. */
4576 emit_barrier_after (rtx after
)
4578 rtx_barrier
*insn
= as_a
<rtx_barrier
*> (rtx_alloc (BARRIER
));
4580 INSN_UID (insn
) = cur_insn_uid
++;
4582 add_insn_after (insn
, after
, NULL
);
4586 /* Emit the label LABEL after the insn AFTER. */
4589 emit_label_after (rtx label
, rtx_insn
*after
)
4591 gcc_checking_assert (INSN_UID (label
) == 0);
4592 INSN_UID (label
) = cur_insn_uid
++;
4593 add_insn_after (label
, after
, NULL
);
4594 return as_a
<rtx_insn
*> (label
);
4597 /* Notes require a bit of special handling: Some notes need to have their
4598 BLOCK_FOR_INSN set, others should never have it set, and some should
4599 have it set or clear depending on the context. */
4601 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4602 that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the
4603 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4606 note_outside_basic_block_p (enum insn_note subtype
, bool on_bb_boundary_p
)
4610 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4611 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
4614 /* Notes for var tracking and EH region markers can appear between or
4615 inside basic blocks. If the caller is emitting on the basic block
4616 boundary, do not set BLOCK_FOR_INSN on the new note. */
4617 case NOTE_INSN_VAR_LOCATION
:
4618 case NOTE_INSN_CALL_ARG_LOCATION
:
4619 case NOTE_INSN_EH_REGION_BEG
:
4620 case NOTE_INSN_EH_REGION_END
:
4621 return on_bb_boundary_p
;
4623 /* Otherwise, BLOCK_FOR_INSN must be set. */
4629 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4632 emit_note_after (enum insn_note subtype
, rtx_insn
*after
)
4634 rtx_note
*note
= make_note_raw (subtype
);
4635 basic_block bb
= BARRIER_P (after
) ? NULL
: BLOCK_FOR_INSN (after
);
4636 bool on_bb_boundary_p
= (bb
!= NULL
&& BB_END (bb
) == after
);
4638 if (note_outside_basic_block_p (subtype
, on_bb_boundary_p
))
4639 add_insn_after_nobb (note
, after
);
4641 add_insn_after (note
, after
, bb
);
/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
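
/* A sketch of how the two emitters above interact with
   note_outside_basic_block_p.  BB is an illustrative basic block that
   is assumed to contain more than one insn:

     emit_note_after (NOTE_INSN_EH_REGION_END, BB_END (bb));
     emit_note_after (NOTE_INSN_VAR_LOCATION, BB_HEAD (bb));

   The first call lands on the block boundary, so it goes through
   add_insn_after_nobb and BLOCK_FOR_INSN stays clear; the second lands
   inside the block, so add_insn_after sets BLOCK_FOR_INSN to BB.  */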
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
	  && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
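
/* A sketch of the SKIP_DEBUG_INSNS behavior above; INSN_1 and DEBUG_1
   are illustrative, with INSN_1 assumed to carry a location and DEBUG_1
   assumed to carry none:

     insn_1      (INSN_LOCATION == LOC1)
     debug_1     (this is AFTER)

   emit_pattern_after (pat, debug_1, true, make_insn_raw) walks back
   over DEBUG_1 to INSN_1 and stamps the new insn with LOC1 via
   emit_pattern_after_setloc.  If AFTER were a note or label instead,
   the function would fall back to emit_pattern_after_noloc.  */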
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_jump_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_jump_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
			    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL_RTX,
					      NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first)
	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
	  && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL_RTX,
				      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_jump_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before_setloc (pattern, before, loc, false,
				    make_jump_insn_raw));
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before (pattern, before, true, false,
			     make_jump_insn_raw));
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */

rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */

rtx_insn *
emit_debug_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}
/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}
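
/* A sketch of building a simple dispatch table with the function above;
   L1 and L2 are illustrative code labels assumed to exist already:

     rtvec vec = gen_rtvec (2, gen_rtx_LABEL_REF (Pmode, l1),
			    gen_rtx_LABEL_REF (Pmode, l2));
     emit_jump_table_data (gen_rtx_ADDR_VEC (Pmode, vec));  */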
/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}
/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
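
/* A sketch contrasting the two forms above; REG is an illustrative
   register rtx:

     emit_use (reg);                  appended to the current chain
     rtx_insn *seq = gen_use (reg);   built as a detached sequence
     emit_insn (seq);                 ... spliced in wherever needed

   gen_clobber relates to emit_clobber in the same way.  */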
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

static rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART
      || GET_CODE (reg) == ZERO_EXTRACT)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      if (!set_for_reg_notes (insn))
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
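
/* A sketch of recording an equivalence with the function above.  INSN,
   SRC and MODE are illustrative; INSN is assumed to be the last insn of
   a shift-and-add expansion of a multiplication by 9:

     set_unique_reg_note (insn, REG_EQUAL,
			  gen_rtx_MULT (mode, src, GEN_INT (9)));

   Later passes may then substitute the MULT for the expanded form when
   profitable; a second call with kind REG_EQUAL replaces the first
   note rather than accumulating another one.  */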
/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
/* Emit the rtl pattern X as an appropriate kind of insn.  Also emit a
   following barrier if the instruction needs one and if ALLOW_BARRIER_P
   is true.

   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x, bool allow_barrier_p)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (allow_barrier_p
	    && (any_uncondjump_p (insn) || GET_CODE (x) == RETURN))
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
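
/* A sketch of how the classifier above routes different patterns; REG
   is an illustrative register rtx:

     emit (gen_rtx_USE (VOIDmode, reg), true);   classified as INSN
     emit (gen_label_rtx (), true);              added via emit_label
     emit (ret_rtx, true);                       JUMP_INSN plus barrier

   The last call also emits the barrier because RETURN is an
   unconditional transfer and ALLOW_BARRIER_P is true.  */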
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
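
/* A sketch of the usual pairing of start_sequence and end_sequence;
   DEST and SRC are illustrative operands:

     rtx_insn *seq;
     start_sequence ();
     emit_move_insn (dest, src);
     seq = get_insns ();    must be read before end_sequence
     end_sequence ();
     emit_insn (seq);       splice the sequence into the main chain  */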
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;

    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
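
/* A sketch of the difference from copy_rtx; INSN is an illustrative
   insn whose pattern mentions one SCRATCH twice:

     rtx pat = copy_insn (PATTERN (insn));

   In PAT both mentions still refer to a single new SCRATCH, whereas
   copy_rtx would have produced two unrelated copies; likewise any
   ASM_OPERANDS keep sharing their operand and constraint vectors just
   as in the original.  */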
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */

rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
	return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
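
/* A sketch of the sharing the function above provides; V4SImode is
   illustrative and target-dependent:

     rtvec v = rtvec_alloc (4);
     for (int i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const0_rtx;
     rtx zero = gen_rtx_CONST_VECTOR (V4SImode, v);

   ZERO is the shared CONST0_RTX (V4SImode), not a freshly allocated
   vector constant.  */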
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  const_double_from_real_value (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
      const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   /*code=*/-1,
				   /*reg_notes=*/NULL_RTX);
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
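
/* A sketch of the caching behavior above; SImode and register number 0
   are illustrative:

     rtx c1 = gen_hard_reg_clobber (SImode, 0);
     rtx c2 = gen_hard_reg_clobber (SImode, 0);
     gcc_assert (c1 == c2);

   There is a single shared CLOBBER per (mode, regno) pair, so repeated
   requests do not allocate new rtl.  */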
location_t prologue_location;
location_t epilogue_location;

/* Hold the current location and the last location, so that the
   location data structures are built lazily only when instructions in
   a given place are actually needed.  */
static location_t curr_location;

/* Allocate insn location datastructure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
/* Return lexical scope block insn belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
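
/* A sketch of how a backend might consult the predicate above when
   expanding an atomic store; emit_memory_barrier stands in for a
   hypothetical target-specific helper:

     if (need_atomic_barrier_p (model, true))
       emit_memory_barrier ();   release-style barrier before the store
     emit_move_insn (mem, val);
     if (need_atomic_barrier_p (model, false))
       emit_memory_barrier ();   acquire-style barrier after the store  */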
#include "gt-emit-rtl.h"