/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "stringpool.h"
#include "insn-config.h"
#include "langhooks.h"
struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Datastructures maintained for currently processed function in RTL form.  */

struct rtl_data x_rtl;
/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top level structures.  */

rtx *regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
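/* Illustrative example, not part of the original source: because every
   CONST_INT in this range is shared, callers may rely on pointer
   equality, e.g.

     rtx zero = GEN_INT (0);
     gcc_assert (zero == const0_rtx);   // same interned rtx

   which is why the compiler can compare small integer constants with ==
   rather than rtx_equal_p.  */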
/* Standard pieces of rtx, to be substituted directly into things.  */
rtx simple_return_rtx;

/* Marker used for denoting an INSN, which should never be accessed (i.e.,
   this pointer should normally never be dereferenced), but is required to be
   distinct from NULL_RTX.  Currently used by peephole2 pass.  */
rtx_insn *invalid_insn_rtx;
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

struct const_int_hasher : ggc_cache_hasher<rtx>
{
  typedef HOST_WIDE_INT compare_type;

  static hashval_t hash (rtx i);
  static bool equal (rtx i, HOST_WIDE_INT h);
};

static GTY ((cache)) hash_table<const_int_hasher> *const_int_htab;
struct const_wide_int_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_wide_int_hasher> *const_wide_int_htab;
/* A hash table storing register attribute structures.  */
struct reg_attr_hasher : ggc_cache_hasher<reg_attrs *>
{
  static hashval_t hash (reg_attrs *x);
  static bool equal (reg_attrs *a, reg_attrs *b);
};

static GTY ((cache)) hash_table<reg_attr_hasher> *reg_attrs_htab;
/* A hash table storing all CONST_DOUBLEs.  */
struct const_double_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_double_hasher> *const_double_htab;
/* A hash table storing all CONST_FIXEDs.  */
struct const_fixed_hasher : ggc_cache_hasher<rtx>
{
  static hashval_t hash (rtx x);
  static bool equal (rtx x, rtx y);
};

static GTY ((cache)) hash_table<const_fixed_hasher> *const_fixed_htab;
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static void set_used_decls (tree);
static void mark_label_nuses (rtx);
#if TARGET_SUPPORTS_WIDE_INT
static rtx lookup_const_wide_int (rtx);
#endif
static rtx lookup_const_double (rtx);
static rtx lookup_const_fixed (rtx);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

hashval_t
const_int_hasher::hash (rtx x)
{
  return (hashval_t) INTVAL (x);
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT).  */

bool
const_int_hasher::equal (rtx x, HOST_WIDE_INT y)
{
  return (INTVAL (x) == y);
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

hashval_t
const_wide_int_hasher::hash (rtx x)
{
  int i;
  unsigned HOST_WIDE_INT hash = 0;
  const_rtx xr = x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

bool
const_wide_int_hasher::equal (rtx x, rtx y)
{
  int i;
  const_rtx xr = x;
  const_rtx yr = y;

  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return false;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return false;

  return true;
}
#endif
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

hashval_t
const_double_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */

bool
const_double_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

hashval_t
const_fixed_hasher::hash (rtx x)
{
  const_rtx const value = x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}
/* Returns nonzero if the value represented by X is the same as that
   represented by Y.  */

bool
const_fixed_hasher::equal (rtx x, rtx y)
{
  const_rtx const a = x, b = y;

  if (GET_MODE (a) != GET_MODE (b))
    return false;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

hashval_t
reg_attr_hasher::hash (reg_attrs *x)
{
  const reg_attrs *const p = x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}
/* Returns nonzero if the value represented by X is the same as that given by
   Y.  */

bool
reg_attr_hasher::equal (reg_attrs *x, reg_attrs *y)
{
  const reg_attrs *const p = x;
  const reg_attrs *const q = y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  reg_attrs **slot = reg_attrs_htab->find_slot (&attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return *slot;
}
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
/* Set the mode and register number of X to MODE and REGNO.  */

void
set_mode_and_regno (rtx x, machine_mode mode, unsigned int regno)
{
  unsigned int nregs = (HARD_REGISTER_NUM_P (regno)
                        ? hard_regno_nregs[regno][mode]
                        : 1);
  PUT_MODE_RAW (x, mode);
  set_regno_raw (x, regno, nregs);
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (machine_mode mode, unsigned int regno)
{
  rtx x = rtx_alloc_stat (REG MEM_STAT_INFO);
  set_mode_and_regno (x, mode, regno);
  REG_ATTRS (x) = NULL;
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
                                                 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
                                                 insn_list));
}
rtx_insn *
gen_rtx_INSN (machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
              basic_block bb, rtx pattern, int location, int code,
              rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
                                                 prev_insn, next_insn,
                                                 bb, pattern, location, code,
                                                 reg_notes));
}
rtx
gen_rtx_CONST_INT (machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  rtx *slot = const_int_htab->find_slot_with_hash (arg, (hashval_t) arg,
                                                   INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return *slot;
}
rtx
gen_int_mode (HOST_WIDE_INT c, machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
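/* Illustrative example, not part of the original source: gen_int_mode
   sign-truncates the constant to MODE's precision before interning it,
   so with an 8-bit QImode

     rtx x = gen_int_mode (0x1ff, QImode);
     gcc_assert (x == constm1_rtx);   // 0x1ff truncated to 8 bits is -1

   Prefer gen_int_mode over raw GEN_INT whenever the mode is known.  */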
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  rtx *slot = const_double_htab->find_slot (real, INSERT);
  if (*slot == 0)
    *slot = real;

  return *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  rtx *slot = const_fixed_htab->find_slot (fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return *slot;
}
/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif
#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  rtx *slot = const_wide_int_htab->find_slot (wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return *slot;
}
#endif
/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
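/* Illustrative sketch, not part of the original source: building a
   two-block constant on a TARGET_SUPPORTS_WIDE_INT target, assuming the
   target provides TImode:

     wide_int w = wi::shifted_mask (64, 64, false, 128);  // upper half set
     rtx c = immed_wide_int_const (w, TImode);            // CONST_WIDE_INT

   A value whose significant bits fit in one HOST_WIDE_INT instead comes
   back as a shared CONST_INT via gen_int_mode above.  */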
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only from copies of the sign bit, and sign
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT
                  || GET_MODE_CLASS (mode) == MODE_POINTER_BOUNDS);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
rtx
gen_rtx_REG (machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;

      if (!HARD_FRAME_POINTER_IS_FRAME_POINTER
          && regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;

#if !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
          && regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif

#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif

      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
rtx
gen_rtx_MEM (machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
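/* Illustrative example, not part of the original source: the helpers
   above just layer flags onto gen_rtx_MEM, e.g.

     rtx addr = gen_rtx_REG (Pmode, STACK_POINTER_REGNUM);
     rtx m = gen_const_mem (Pmode, addr);
     // MEM_READONLY_P (m) and MEM_NOTRAP_P (m) are both set now

   letting later passes hoist or CSE the load knowing it can neither
   trap nor be overwritten.  */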
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (machine_mode omode, machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
             /* LRA can use subreg to store a floating point value in
                an integer mode.  Although the floating point and the
                integer modes need the same number of hard registers,
                the size of floating point mode can be less than the
                integer mode.  LRA also uses subregs for a register
                that should be used in different modes in one insn.  */
             || lra_in_progress))
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
rtx
gen_rtx_SUBREG (machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (machine_mode mode, rtx reg)
{
  machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
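/* Illustrative example, not part of the original source: taking the low
   32 bits of a 64-bit pseudo:

     rtx r = gen_reg_rtx (DImode);
     rtx lo = gen_lowpart_SUBREG (SImode, r);
     // little endian: (subreg:SI (reg:DI r) 0); big endian: byte offset 4

   Callers go through this helper precisely so the endian-dependent
   SUBREG_BYTE is computed for them.  */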
rtx
gen_rtx_VAR_LOCATION (machine_mode mode, tree decl, rtx loc,
                      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
/* Create an rtvec and stores within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (machine_mode outer_mode,
                     machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
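/* Worked example, not part of the original source: with BYTES_BIG_ENDIAN,
   byte_lowpart_offset (DImode, SImode) describes a paradoxical lowpart
   (the outer mode is wider) and returns
   -subreg_lowpart_offset (SImode, DImode) = -4, i.e. the 64-bit view
   starts four bytes before the 32-bit value.  On a little-endian target
   both directions yield 0.  */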
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
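/* Illustrative example, not part of the original source: while
   generating_concat_p is true during expansion, a complex pseudo is
   really two independent pseudos:

     rtx c = gen_reg_rtx (DCmode);   // (concat:DC (reg:DF i) (reg:DF j))
     rtx re = XEXP (c, 0), im = XEXP (c, 1);

   so the register allocator can place the real and imaginary parts in
   unrelated hard registers.  */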
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}
/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
          || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
rtx
gen_highpart (machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */

rtx
gen_highpart_mode (machine_mode outermode, machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (machine_mode outermode, machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
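/* Worked example, not part of the original source: for SImode inside
   DImode with 4-byte words, difference = 8 - 4 = 4, so

     subreg_lowpart_offset  (SImode, DImode) -> 0 little endian, 4 big endian
     subreg_highpart_offset (SImode, DImode) -> 4 little endian, 0 big endian

   i.e. the two offsets always partition the inner value's bytes.  */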
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}
/* Return true if X is a paradoxical subreg, false otherwise.  */

bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
                 machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
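/* Illustrative example, not part of the original source: splitting a
   DImode operand into its two words on a 32-bit target:

     rtx lo = operand_subword (op, 0, 1, DImode);   // low-order word
     rtx hi = operand_subword (op, 1, 1, DImode);   // high-order word

   Either call may return 0; operand_subword_force below retries after
   copying OP into a register and therefore never fails.  */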
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
         || (MAX (MEM_ALIGN (mem),
                  MAX (align, get_object_alignment (MEM_EXPR (mem))))
             < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !tree_fits_uhwi_p (byte_offset)
              || !tree_fits_uhwi_p (bit_offset))
            return -1;

          offset += tree_to_uhwi (byte_offset);
          offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          new_size = DECL_SIZE_UNIT (t);
        }

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
        ;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
            new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2)
              || TREE_CODE (t2) == COMPONENT_REF)
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (tree_fits_uhwi_p (off_tree))
                {
                  attrs.offset_known_p = true;
                  attrs.offset = tree_to_uhwi (off_tree);
                  apply_bitpos = bitpos;
                }
            }
          /* Else do not record a MEM_EXPR.  */
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
        obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
        attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
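/* Illustrative sketch, not part of the original source: a typical
   expansion pairs a fresh MEM with the tree it implements.  Assuming
   some VAR_DECL `decl' and a hypothetical address rtx `slot_addr':

     rtx mem = gen_rtx_MEM (DECL_MODE (decl), slot_addr);
     set_mem_attributes (mem, decl, 1);  // alias set, align, size, expr
     SET_DECL_RTL (decl, mem);           // only after setting attributes

   The ordering matters because of the DECL_RTL_IF_SET assertion in
   set_mem_attributes_minus_bitpos above.  */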
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}
/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}
/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}
/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}
/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}
/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}
/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, machine_mode mode, rtx addr, int validate,
                  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting the MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust_address, int adjust_object,
                  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  machine_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
                                                    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (address_mode,
                                              XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
         in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
         the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
               && GET_CODE (addr) == ZERO_EXTEND
               && GET_MODE (XEXP (addr, 0)) == pointer_mode
               && trunc_int_for_mode (offset, pointer_mode) == offset)
        addr = gen_rtx_ZERO_EXTEND (address_mode,
                                    plus_constant (pointer_mode,
                                                   XEXP (addr, 0), offset));
#endif
      else
        addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && attrs.offset < 0)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (size)
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && offset + size > attrs.size)
        {
          attrs.expr = NULL_TREE;
          attrs.alias = 0;
        }
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
         so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is nonzero, the memory
   address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, machine_mode mode, rtx addr,
                             HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
                                     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
        {
          tree field = TREE_OPERAND (attrs.expr, 1);
          tree offset = component_ref_field_offset (attrs.expr);

          if (! DECL_SIZE_UNIT (field))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          /* Is the field at least as large as the access?  If so, ok,
             otherwise strip back to the containing structure.  */
          if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
              && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
              && attrs.offset >= 0)
            break;

          if (! tree_fits_uhwi_p (offset))
            {
              attrs.expr = NULL_TREE;
              break;
            }

          attrs.expr = TREE_OPERAND (attrs.expr, 0);
          attrs.offset += tree_to_uhwi (offset);
          attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
                           / BITS_PER_UNIT);
        }
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
               && DECL_SIZE_UNIT (attrs.expr)
               && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
               && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
               && (! attrs.offset_known_p || attrs.offset >= 0))
        break;
      else
        {
          /* The widened memory access overflows the expression, which means
             that it could alias another expression.  Zap it.  */
          attrs.expr = NULL_TREE;
          break;
        }
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}

/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
        (mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
            gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
                                NULL, label_num++, NULL));
}
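
/* Example (an illustrative sketch): the common emit-a-branch-target
   pattern in expanders looks like

     rtx_code_label *label = gen_label_rtx ();
     ... emit a conditional branch to LABEL ...
     emit_label (label);

   The label number comes from the compilation-wide label_num counter,
   so it stays unique across functions.  */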
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
        if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
          cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
        else
          {
            cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
            if (DEBUG_INSN_P (insn))
              debug_count++;
          }

      if (debug_count)
        cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
        cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = safe_as_a <rtx_expr_list *> (
                      copy_rtx_if_shared (stack_slot_list));
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        reset_used_flags (PATTERN (p));
        reset_used_flags (REG_NOTES (p));
        if (CALL_P (p))
          reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}
/* Check that ORIG is not marked when it should not be and mark ORIG as in
   use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (orig))
        return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
          || reload_completed || reload_in_progress)
        return;
      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          verify_rtx_sharing (XEXP (x, i), insn);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              for (j = 0; j < len; j++)
                {
                  /* We allow sharing of ASM_OPERANDS inside single
                     instruction.  */
                  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
                      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
                          == ASM_OPERANDS))
                    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
                  else
                    verify_rtx_sharing (XVECEXP (x, i, j), insn);
                }
            }
          break;
        }
    }
}
/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          reset_insn_used_flags (p);
        else
          {
            gcc_assert (REG_NOTES (p) == NULL);
            for (int i = 0; i < XVECLEN (pat, 0); i++)
              {
                rtx insn = XVECEXP (pat, 0, i);
                if (INSN_P (insn))
                  reset_insn_used_flags (insn);
              }
          }
      }
}

/* Verify sharing in INSN.  Note that, unlike reset_insn_used_flags above,
   this must recurse with verify_rtx_sharing, not reset_used_flags, or the
   check would silently do nothing.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
        rtx pat = PATTERN (p);
        if (GET_CODE (pat) != SEQUENCE)
          verify_insn_sharing (p);
        else
          for (int i = 0; i < XVECLEN (pat, 0); i++)
            {
              rtx insn = XVECEXP (pat, 0, i);
              if (INSN_P (insn))
                verify_insn_sharing (insn);
            }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}
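
/* Usage note (a sketch, under the assumption that checking is enabled):
   a pass author suspecting invalid sharing after a transformation can
   verify directly:

     #ifdef ENABLE_CHECKING
       verify_rtl_sharing ();
     #endif

   Any unexpected sharing takes the internal_error path in
   verify_rtx_sharing above.  */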
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

static void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
        PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
        REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
        if (CALL_P (insn))
          CALL_INSN_FUNCTION_USAGE (insn)
            = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}

/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
        return;
      break;

    case CONST:
      if (shared_const_p (x))
        return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  copied = 0;
  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (last_ptr)
            copy_rtx_if_shared_1 (last_ptr);
          last_ptr = &XEXP (x, i);
          break;

        case 'E':
          if (XVEC (x, i) != NULL)
            {
              int j;
              int len = XVECLEN (x, i);

              /* Copy the vector iff I copied the rtx and the length
                 is nonzero.  */
              if (copied && len > 0)
                XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

              /* Call recursively on all inside the vector.  */
              for (j = 0; j < len; j++)
                {
                  if (last_ptr)
                    copy_rtx_if_shared_1 (last_ptr);
                  last_ptr = &XVECEXP (x, i, j);
                }
            }
          break;
        }
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
}
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
        {
        case 'e':
          if (i == length - 1)
            {
              x = XEXP (x, i);
              goto repeat;
            }
          mark_used_flags (XEXP (x, i), flag);
          break;

        case 'E':
          for (j = 0; j < XVECLEN (x, i); j++)
            mark_used_flags (XVECEXP (x, i, j), flag);
          break;
        }
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
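
/* Example (an illustrative sketch): the mark-then-copy protocol used
   throughout this file.  To unshare an expression X against a stream
   whose used bits are already in a known cleared state:

     reset_used_flags (x);
     x = copy_rtx_if_shared (x);

   copy_rtx_if_shared copies any subpart it sees marked, so it relies on
   the used bits being consistent, which is why the reset_used_flags and
   set_used_flags helpers exist as the only mutators of that bit.  */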
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
        other = SUBREG_REG (other);
        break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
        other = XEXP (other, 0);
        break;
      default:
        goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
          && (REGNO (other) < FIRST_PSEUDO_REGISTER
              || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}

/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *seq;
  for (seq = get_current_sequence (); seq; seq = seq->next)
    if (seq->last != 0)
      return seq->last;
  return 0;
}
/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_first_nonnote_insn (void)
{
  rtx_insn *insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = next_insn (insn);
             insn && NOTE_P (insn);
             insn = next_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn)
              && GET_CODE (PATTERN (insn)) == SEQUENCE)
            insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
        }
    }

  return insn;
}

/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx_insn *
get_last_nonnote_insn (void)
{
  rtx_insn *insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
        for (insn = previous_insn (insn);
             insn && NOTE_P (insn);
             insn = previous_insn (insn))
          continue;
      else
        {
          if (NONJUMP_INSN_P (insn))
            if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
              insn = seq->insn (seq->len () - 1);
        }
    }

  return insn;
}

/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
          && GET_CODE (PATTERN (insn)) == SEQUENCE)
        insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
        if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
          insn = seq->insn (seq->len () - 1);
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_insn_bb (rtx_insn *insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_insn_bb (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
        break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
        return NULL;
    }

  return insn;
}

/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
next_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx_insn *
prev_nonnote_nondebug_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
        break;
    }

  return insn;
}

/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
next_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx_insn *
prev_real_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
        break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx_call_insn *
last_call_insn (void)
{
  rtx_insn *insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return safe_as_a <rtx_call_insn *> (insn);
}
/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
          || JUMP_TABLE_DATA_P (insn) /* FIXME */
          || (NONJUMP_INSN_P (insn)
              && (! reload_completed
                  || (GET_CODE (PATTERN (insn)) != USE
                      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx_insn *
next_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

rtx_insn *
prev_active_insn (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
        break;
    }

  return insn;
}
/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx_insn *
next_cc0_user (rtx uncast_insn)
{
  rtx_insn *insn = safe_as_a <rtx_insn *> (uncast_insn);

  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx_insn *
prev_cc0_setter (rtx_insn *insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return safe_as_a <rtx_insn *> (XEXP (note, 0));

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
/* Return true if X contains an RTX_AUTOINC class rtx whose operand
   matches REG.  */

static bool
find_auto_inc (const_rtx x, const_rtx reg)
{
  subrtx_iterator::array_type array;
  FOR_EACH_SUBRTX (iter, array, x, NONCONST)
    {
      const_rtx x = *iter;
      if (GET_RTX_CLASS (GET_CODE (x)) == RTX_AUTOINC
          && rtx_equal_p (reg, XEXP (x, 0)))
        return true;
    }
  return false;
}

/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (LABEL_REF_LABEL (x)))
    LABEL_NUSES (LABEL_REF_LABEL (x))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
        mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
        for (j = XVECLEN (x, i) - 1; j >= 0; j--)
          mark_label_nuses (XVECEXP (x, i, j));
    }
}
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx_insn *
try_split (rtx pat, rtx_insn *trial, int last)
{
  rtx_insn *before = PREV_INSN (trial);
  rtx_insn *after = NEXT_INSN (trial);
  rtx note;
  rtx_insn *seq, *tem;
  int probability;
  rtx_insn *insn_last, *insn;
  int njumps = 0;
  rtx_insn *call_insn = NULL;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = XINT (note, 0);
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
          && rtx_equal_p (PATTERN (insn_last), pat))
        return trial;
      if (!NEXT_INSN (insn_last))
        break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels and copy flags.  */
  for (insn = insn_last; insn; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
        {
          CROSSING_JUMP_P (insn) = CROSSING_JUMP_P (trial);
          mark_jump_label (PATTERN (insn), insn, 0);
          njumps++;
          if (probability != -1
              && any_condjump_p (insn)
              && !find_reg_note (insn, REG_BR_PROB, 0))
            {
              /* We can preserve the REG_BR_PROB notes only if exactly
                 one jump is created, otherwise the machine description
                 is responsible for this step using
                 split_branch_probability variable.  */
              gcc_assert (njumps == 1);
              add_int_reg_note (insn, REG_BR_PROB, probability);
            }
        }
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn; insn = PREV_INSN (insn))
        if (CALL_P (insn))
          {
            rtx_insn *next;
            rtx *p;

            gcc_assert (call_insn == NULL_RTX);
            call_insn = insn;

            /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
               target may have explicitly specified.  */
            p = &CALL_INSN_FUNCTION_USAGE (insn);
            while (*p)
              p = &XEXP (*p, 1);
            *p = CALL_INSN_FUNCTION_USAGE (trial);

            /* If the old call was a sibling call, the new one must
               be too.  */
            SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

            /* If the new call is the last instruction in the sequence,
               it will effectively replace the old call in-situ.  Otherwise
               we must move any following NOTE_INSN_CALL_ARG_LOCATION note
               so that it comes immediately after the new call.  */
            if (NEXT_INSN (insn))
              for (next = NEXT_INSN (trial);
                   next && NOTE_P (next);
                   next = NEXT_INSN (next))
                if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
                  {
                    remove_insn (next);
                    add_insn_after (next, insn, NULL);
                    break;
                  }
          }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
        {
        case REG_EH_REGION:
          copy_reg_eh_region_note_backward (note, insn_last, NULL);
          break;

        case REG_NORETURN:
        case REG_SETJMP:
        case REG_TM:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (CALL_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_NON_LOCAL_GOTO:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              if (JUMP_P (insn))
                add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
            }
          break;

        case REG_INC:
          for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
            {
              rtx reg = XEXP (note, 0);
              if (!FIND_REG_INC_NOTE (insn, reg)
                  && find_auto_inc (PATTERN (insn), reg))
                add_reg_note (insn, REG_INC, reg);
            }
          break;

        case REG_ARGS_SIZE:
          fixup_args_size_notes (NULL, insn_last, INTVAL (XEXP (note, 0)));
          break;

        case REG_CALL_DECL:
          gcc_assert (call_insn != NULL_RTX);
          add_reg_note (call_insn, REG_NOTE_KIND (note), XEXP (note, 0));
          break;

        default:
          break;
        }
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
        {
          /* JUMP_P insns have already been "marked" above.  */
          if (NONJUMP_INSN_P (insn))
            mark_label_nuses (PATTERN (insn));

          insn = PREV_INSN (insn);
        }
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));

  delete_insn (trial);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! tem->deleted () && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
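
/* Usage note (a sketch): try_split is normally driven by the
   split_all_insns machinery; a caller holding a single insn would
   write

     rtx_insn *first = try_split (PATTERN (insn), insn, 0);

   and receive either INSN unchanged (no splitter matched, or splitting
   was refused) or the first insn of the fully split replacement
   sequence.  Passing LAST = 1 returns the last replacement insn
   instead.  */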
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx_insn *
make_insn_raw (rtx pattern)
{
  rtx_insn *insn;

  insn = as_a <rtx_insn *> (rtx_alloc (INSN));

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
          || (GET_CODE (insn) == SET
              && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx_insn *
make_debug_insn_raw (rtx pattern)
{
  rtx_debug_insn *insn;

  insn = as_a <rtx_debug_insn *> (rtx_alloc (DEBUG_INSN));
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx_insn *
make_jump_insn_raw (rtx pattern)
{
  rtx_jump_insn *insn;

  insn = as_a <rtx_jump_insn *> (rtx_alloc (JUMP_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx_insn *
make_call_insn_raw (rtx pattern)
{
  rtx_call_insn *insn;

  insn = as_a <rtx_call_insn *> (rtx_alloc (CALL_INSN));
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATION (insn) = curr_insn_location ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a NOTE instead of an insn.  */

static rtx_note *
make_note_raw (enum insn_note subtype)
{
  /* Some notes are never created this way at all.  These notes are
     only created by patching out insns.  */
  gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
              && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);

  rtx_note *note = as_a <rtx_note *> (rtx_alloc (NOTE));
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  return note;
}
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P
   objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be
   NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
        }
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = insn;
        }
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}
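
/* Example (an illustrative sketch): clients of the raw constructors
   build an insn and append it to the current sequence:

     rtx_insn *insn = make_insn_raw (pattern);
     add_insn (insn);

   The emit_* entry points further down wrap exactly this pairing,
   adding location bookkeeping and basic-block maintenance on top.  */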
/* Add INSN into the doubly-linked list after insn AFTER.  */

static void
add_insn_after_nobb (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *next = NEXT_INSN (after);

  gcc_assert (!optimize || !after->deleted ());

  link_insn_into_chain (insn, after, next);

  if (next == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (after == seq->last)
          {
            seq->last = insn;
            break;
          }
    }
}

/* Add INSN into the doubly-linked list before insn BEFORE.  */

static void
add_insn_before_nobb (rtx_insn *insn, rtx_insn *before)
{
  rtx_insn *prev = PREV_INSN (before);

  gcc_assert (!optimize || !before->deleted ());

  link_insn_into_chain (insn, prev, before);

  if (prev == NULL)
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (before == seq->first)
          {
            seq->first = insn;
            break;
          }
    }
}
/* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from AFTER.

   This and the next function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_after (rtx uncast_insn, rtx uncast_after, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);

  add_insn_after_nobb (insn, after);

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
         either NOTE or LABEL.  */
      if (BB_END (bb) == after
          /* Avoid clobbering of structure when creating new BB.  */
          && !BARRIER_P (insn)
          && !NOTE_INSN_BASIC_BLOCK_P (insn))
        BB_END (bb) = insn;
    }
}

/* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
   If BB is NULL, an attempt is made to infer the bb from BEFORE.

   This and the previous function should be the only functions called
   to insert an insn once delay slots have been filled since only
   they know how to update a SEQUENCE.  */

void
add_insn_before (rtx uncast_insn, rtx uncast_before, basic_block bb)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);

  add_insn_before_nobb (insn, before);

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
        df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
         LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
                  /* Avoid clobbering of structure when creating new BB.  */
                  || BARRIER_P (insn)
                  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }
}
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  if (INSN_P (insn))
    df_insn_delete (as_a <rtx_insn *> (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}

/* Unlink INSN from the insn chain.

   This function knows how to handle sequences.

   This function does not invalidate data flow information associated with
   INSN (i.e. does not call df_insn_delete).  That makes this function
   usable for only disconnecting an insn from the chain, and re-emitting it
   elsewhere later.

   To later insert INSN elsewhere in the insn chain via add_insn and
   similar functions, PREV_INSN and NEXT_INSN must be nullified by
   the caller.  Nullifying them here breaks many insn chain walks.

   To really delete an insn and related DF information, use delete_insn.  */

void
remove_insn (rtx uncast_insn)
{
  rtx_insn *insn = as_a <rtx_insn *> (uncast_insn);
  rtx_insn *next = NEXT_INSN (insn);
  rtx_insn *prev = PREV_INSN (insn);
  basic_block bb;

  if (prev)
    {
      SET_NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
          SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->first)
          {
            seq->first = next;
            break;
          }

      gcc_assert (seq);
    }

  if (next)
    {
      SET_PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
        {
          rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
          SET_PREV_INSN (sequence->insn (0)) = prev;
        }
    }
  else
    {
      struct sequence_stack *seq;

      for (seq = get_current_sequence (); seq; seq = seq->next)
        if (insn == seq->last)
          {
            seq->last = prev;
            break;
          }

      gcc_assert (seq);
    }

  /* Fix up basic block boundaries, if necessary.  */
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (INSN_P (insn))
        df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
        {
          /* Never ever delete the basic block note without deleting whole
             basic block.  */
          gcc_assert (!NOTE_P (insn));
          BB_HEAD (bb) = next;
        }
      if (BB_END (bb) == insn)
        BB_END (bb) = prev;
    }
}
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
           link = XEXP (link, 1))
        ;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx_insn *from)
{
  if (from == 0)
    set_first_insn (0);
  else
    SET_NEXT_INSN (from) = 0;
  set_last_insn (from);
}
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
#ifdef ENABLE_CHECKING
  rtx_insn *x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    SET_NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    SET_PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    SET_PREV_INSN (NEXT_INSN (after)) = to;

  SET_NEXT_INSN (to) = NEXT_INSN (after);
  SET_PREV_INSN (from) = after;
  SET_NEXT_INSN (after) = from;
  if (after == get_last_insn ())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx_insn *from, rtx_insn *to, rtx_insn *after)
{
  rtx_insn *prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx_insn *x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
          && (bb2 = BLOCK_FOR_INSN (from)))
        {
          if (BB_END (bb2) == to)
            BB_END (bb2) = prev;
          df_set_bb_dirty (bb2);
        }

      if (BB_END (bb) == after)
        BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
        if (!BARRIER_P (x))
          df_insn_change_bb (x, bb);
    }
}
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

        start_sequence ();
        ... emit the new instructions ...
        insns_head = get_insns ();
        end_sequence ();

        emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx_insn *
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
                           rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return safe_as_a <rtx_insn *> (last);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
        {
          rtx_insn *next = NEXT_INSN (insn);
          add_insn_before (insn, before, bb);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return safe_as_a <rtx_insn *> (last);
}
/* Make X be output before the instruction BEFORE.  */

rtx_insn *
emit_insn_before_noloc (rtx x, rtx_insn *before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before_noloc (rtx x, rtx_insn *before)
{
  return as_a <rtx_jump_insn *> (
                emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                           make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_call_insn_before_noloc (rtx x, rtx_insn *before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx_insn *
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
                                    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx_barrier *
emit_barrier_before (rtx before)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);

  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx_code_label *
emit_label_before (rtx label, rtx_insn *before)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_before (label, before, NULL);
  return as_a <rtx_code_label *> (label);
}
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx_insn *
emit_insn_after_1 (rtx_insn *first, rtx uncast_after, basic_block bb)
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last;
  rtx_insn *after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
        if (!BARRIER_P (last))
          {
            set_block_for_insn (last, bb);
            df_insn_rescan (last);
          }
      if (!BARRIER_P (last))
        {
          set_block_for_insn (last, bb);
          df_insn_rescan (last);
        }
      if (BB_END (bb) == after)
        BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  SET_NEXT_INSN (after) = first;
  SET_PREV_INSN (first) = after;
  SET_NEXT_INSN (last) = after_after;
  if (after_after)
    SET_PREV_INSN (after_after) = last;

  if (after == get_last_insn ())
    set_last_insn (last);

  return last;
}

static rtx_insn *
emit_pattern_after_noloc (rtx x, rtx uncast_after, basic_block bb,
                          rtx_insn *(*make_raw)(rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (as_a <rtx_insn *> (x), after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx_insn *
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx_jump_insn *
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return as_a <rtx_jump_insn *> (
                emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw));
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx_insn *
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx_barrier *
emit_barrier_after (rtx after)
{
  rtx_barrier *insn = as_a <rtx_barrier *> (rtx_alloc (BARRIER));

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);

  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx_insn *
emit_label_after (rtx label, rtx_insn *after)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn_after (label, after, NULL);
  return as_a <rtx_insn *> (label);
}
4610 /* Notes require a bit of special handling: Some notes need to have their
4611 BLOCK_FOR_INSN set, others should never have it set, and some should
4612 have it set or clear depending on the context. */
4614 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4615 that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the
4616 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4619 note_outside_basic_block_p (enum insn_note subtype
, bool on_bb_boundary_p
)
4623 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4624 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
4627 /* Notes for var tracking and EH region markers can appear between or
4628 inside basic blocks. If the caller is emitting on the basic block
4629 boundary, do not set BLOCK_FOR_INSN on the new note. */
4630 case NOTE_INSN_VAR_LOCATION
:
4631 case NOTE_INSN_CALL_ARG_LOCATION
:
4632 case NOTE_INSN_EH_REGION_BEG
:
4633 case NOTE_INSN_EH_REGION_END
:
4634 return on_bb_boundary_p
;
4636 /* Otherwise, BLOCK_FOR_INSN must be set. */
4642 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4645 emit_note_after (enum insn_note subtype
, rtx_insn
*after
)
4647 rtx_note
*note
= make_note_raw (subtype
);
4648 basic_block bb
= BARRIER_P (after
) ? NULL
: BLOCK_FOR_INSN (after
);
4649 bool on_bb_boundary_p
= (bb
!= NULL
&& BB_END (bb
) == after
);
4651 if (note_outside_basic_block_p (subtype
, on_bb_boundary_p
))
4652 add_insn_after_nobb (note
, after
);
4654 add_insn_after (note
, after
, bb
);
/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx_insn *before)
{
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
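
/* Illustrative sketch only (hypothetical helper, not part of the
   original file): emitting a var-tracking note at the end of a basic
   block takes the "nobb" path above, so the new note deliberately gets
   no BLOCK_FOR_INSN.  */

static void ATTRIBUTE_UNUSED
example_note_on_bb_boundary (basic_block bb)
{
  rtx_note *note = emit_note_after (NOTE_INSN_VAR_LOCATION, BB_END (bb));

  /* note_outside_basic_block_p returned true, so no block was recorded.  */
  gcc_checking_assert (BLOCK_FOR_INSN (note) == NULL);
}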
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after)
	  && !JUMP_TABLE_DATA_P (after) /* FIXME */
	  && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_jump_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw));
}
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_jump_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_after (pattern, after, true, make_jump_insn_raw));
}
/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}
/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}
/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}
/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
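
/* Illustrative sketch only (hypothetical helper): contrast of the two
   insertion flavors defined above.  emit_insn_after inherits
   INSN_LOCATION from AFTER (skipping debug insns); the _noloc variant
   leaves the location for the caller to set.  */

static void ATTRIBUTE_UNUSED
example_insert_after_variants (rtx pattern, rtx_insn *after)
{
  /* The new insn inherits the location of AFTER.  */
  emit_insn_after (copy_rtx (pattern), after);

  /* The new insn gets no location; its BB is inferred from AFTER.  */
  emit_insn_after_noloc (copy_rtx (pattern), after, NULL);
}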
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
			    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL_RTX,
					      NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first)
	  && !JUMP_TABLE_DATA_P (first) /* FIXME */
	  && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL_RTX,
				      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}
/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_jump_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before_setloc (pattern, before, loc, false,
				    make_jump_insn_raw));
}
/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_jump_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return as_a <rtx_jump_insn *> (
	emit_pattern_before (pattern, before, true, false,
			     make_jump_insn_raw));
}
/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}
/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}
/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}
/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_debug_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);

	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_code_label *
emit_label (rtx uncast_label)
{
  rtx_code_label *label = as_a <rtx_code_label *> (uncast_label);

  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}
/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}
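
/* Illustrative sketch only (hypothetical helper): a typical caller
   wraps label references in an ADDR_VEC and hands it to
   emit_jump_table_data.  Real callers use CASE_VECTOR_MODE rather than
   the Pmode assumed here.  */

static rtx_jump_table_data * ATTRIBUTE_UNUSED
example_emit_dispatch_table (rtx *labels, int n)
{
  rtvec v = rtvec_alloc (n);

  for (int i = 0; i < n; i++)
    RTVEC_ELT (v, i) = gen_rtx_LABEL_REF (Pmode, labels[i]);
  return emit_jump_table_data (gen_rtx_ADDR_VEC (Pmode, v));
}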
/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}
/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

static rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      if (!set_for_reg_notes (insn))
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
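
/* Illustrative sketch only (hypothetical helper and operands): suppose
   INSN computes its destination from SRC by a left shift.  Attaching a
   REG_EQUAL note records that the result also equals
   SRC * (1 << SHIFT), letting later passes pick whichever form is
   cheaper.  */

static void ATTRIBUTE_UNUSED
example_record_mult_equivalence (rtx_insn *insn, rtx src, int shift)
{
  rtx equiv = gen_rtx_MULT (GET_MODE (src), src,
			    GEN_INT ((HOST_WIDE_INT) 1 << shift));

  set_unique_reg_note (insn, REG_EQUAL, equiv);
}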
/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = get_current_sequence ()->next;
  tem->first = get_insns ();
  tem->last = get_last_insn ();
  get_current_sequence ()->next = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *top;

  start_sequence ();

  top = get_topmost_sequence ();
  set_first_insn (top->first);
  set_last_insn (top->last);
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *top;

  top = get_topmost_sequence ();
  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
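
/* Illustrative sketch only (hypothetical helper): code running inside a
   nested sequence can still append an insn to the function's outermost
   chain by bracketing the emission with the pair above.  */

static void ATTRIBUTE_UNUSED
example_emit_to_outer_chain (rtx pattern)
{
  push_topmost_sequence ();
  emit_insn (pattern);
  pop_topmost_sequence ();
}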
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = get_current_sequence ()->next;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  get_current_sequence ()->next = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
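
/* Illustrative sketch only (hypothetical helper): the canonical
   start_sequence / get_insns / end_sequence protocol.  The insns are
   collected on a detached chain instead of the function's main one;
   note that get_insns must be called *before* end_sequence.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_copy_to_new_pseudo (rtx src)
{
  rtx temp = gen_reg_rtx (GET_MODE (src));
  rtx_insn *seq;

  start_sequence ();
  emit_move_insn (temp, src);
  seq = get_insns ();
  end_sequence ();
  return seq;
}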
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return get_current_sequence ()->next != 0;
}
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;

    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
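
/* Illustrative sketch only (hypothetical helper): re-emitting an insn's
   pattern elsewhere.  Using copy_insn rather than copy_rtx keeps
   SCRATCHes unshared and consistently mapped, as described above.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_duplicate_after (rtx_insn *insn, rtx_insn *after)
{
  return emit_insn_after (copy_insn (PATTERN (insn)), after);
}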
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  get_current_sequence ()->next = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */

rtx
gen_rtx_CONST_VECTOR (machine_mode mode, rtvec v)
{
  machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
	return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
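
/* Illustrative sketch only (assumes an integer vector mode MODE, since
   CONSTM1_RTX is only initialized for MODE_INT and MODE_VECTOR_INT):
   a vector whose elements are all -1 collapses to the shared
   CONSTM1_RTX (MODE) instead of allocating a fresh CONST_VECTOR.  */

static rtx ATTRIBUTE_UNUSED
example_all_ones_vector (machine_mode mode)
{
  int n = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (n);

  for (int i = 0; i < n; i++)
    RTVEC_ELT (v, i) = CONSTM1_RTX (GET_MODE_INNER (mode));

  /* Every element matches, so this returns CONSTM1_RTX (mode).  */
  return gen_rtx_CONST_VECTOR (mode, v);
}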
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  reg_attrs_htab->empty ();

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  pic_offset_table_rtx = NULL_RTX;
  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  machine_mode mode;
  machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = hash_table<const_int_hasher>::create_ggc (37);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = hash_table<const_wide_int_hasher>::create_ggc (37);
#endif
  const_double_htab = hash_table<const_double_hasher>::create_ggc (37);

  const_fixed_htab = hash_table<const_fixed_hasher>::create_ggc (37);

  reg_attrs_htab = hash_table<reg_attr_hasher>::create_ggc (37);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif
  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);
  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }
  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }
  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_POINTER_BOUNDS);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      wide_int wi_zero = wi::zero (GET_MODE_PRECISION (mode));
      const_tiny_rtx[0][mode] = immed_wide_int_const (wi_zero, mode);
    }

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  invalid_insn_rtx = gen_rtx_INSN (VOIDmode,
				   /*insn_uid=*/-1,
				   /*prev_insn=*/NULL,
				   /*next_insn=*/NULL,
				   /*bb=*/NULL,
				   /*pattern=*/NULL_RTX,
				   /*location=*/-1,
				   /*code=*/-1,
				   /*reg_notes=*/NULL_RTX);
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
location_t prologue_location;
location_t epilogue_location;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in given
   place are needed.  */
static location_t curr_location;

/* Allocate insn location datastructure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}
/* Return lexical scope block insn belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
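
/* Illustrative sketch only (hypothetical helper): make_insn_raw stamps
   each new insn with the current location, so briefly switching
   curr_location lets a replacement insn inherit the location of the
   insn it replaces.  */

static rtx_insn * ATTRIBUTE_UNUSED
example_emit_with_location_of (rtx pattern, rtx_insn *old_insn)
{
  location_t saved = curr_insn_location ();
  rtx_insn *new_insn;

  set_curr_insn_location (INSN_LOCATION (old_insn));
  new_insn = emit_insn (pattern);
  set_curr_insn_location (saved);
  return new_insn;
}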
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_BASE_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
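
/* Illustrative sketch only (hypothetical expander): a backend whose
   plain stores are unordered might bracket an atomic store with
   explicit fences, consulting need_atomic_barrier_p for each side.
   expand_mem_thread_fence is the generic fence expander from optabs.  */

static void ATTRIBUTE_UNUSED
example_expand_atomic_store (rtx mem, rtx val, enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))
    expand_mem_thread_fence (model);	/* release-style barrier */
  emit_move_insn (mem, val);
  if (need_atomic_barrier_p (model, false))
    expand_mem_thread_fence (model);	/* acquire-style barrier */
}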
6298 #include "gt-emit-rtl.h"