/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "coretypes.h"
#include "diagnostic-core.h"
#include "hard-reg-set.h"
#include "basic-block.h"
#include "stringpool.h"
#include "insn-config.h"
#include "langhooks.h"
struct target_rtl default_target_rtl;

struct target_rtl *this_target_rtl = &default_target_rtl;

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in RTL
   form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top level structures.  */

rtx *regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
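/* For illustration: rtl.h exposes the cached entries through macros such as
   const0_rtx, which is simply const_int_rtx[MAX_SAVED_CONST_INT], so
   GEN_INT (0) and const0_rtx denote the same shared rtx.  */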
/* Standard pieces of rtx, to be substituted directly into things.  */
rtx simple_return_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_wide_int_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
#if TARGET_SUPPORTS_WIDE_INT
static hashval_t const_wide_int_htab_hash (const void *);
static int const_wide_int_htab_eq (const void *, const void *);
static rtx lookup_const_wide_int (rtx);
#endif
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

static hashval_t
const_wide_int_htab_hash (const void *x)
{
  int i;
  HOST_WIDE_INT hash = 0;
  const_rtx xr = (const_rtx) x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

static int
const_wide_int_htab_eq (const void *x, const void *y)
{
  int i;
  const_rtx xr = (const_rtx) x;
  const_rtx yr = (const_rtx) y;

  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return 0;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return 0;

  return 1;
}
#endif

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */

static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}

/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx_expr_list *
gen_rtx_EXPR_LIST (enum machine_mode mode, rtx expr, rtx expr_list)
{
  return as_a <rtx_expr_list *> (gen_rtx_fmt_ee (EXPR_LIST, mode, expr,
                                                 expr_list));
}

rtx_insn_list *
gen_rtx_INSN_LIST (enum machine_mode mode, rtx insn, rtx insn_list)
{
  return as_a <rtx_insn_list *> (gen_rtx_fmt_ue (INSN_LIST, mode, insn,
                                                 insn_list));
}

rtx_insn *
gen_rtx_INSN (enum machine_mode mode, rtx_insn *prev_insn, rtx_insn *next_insn,
              basic_block bb, rtx pattern, int location, int code,
              rtx reg_notes)
{
  return as_a <rtx_insn *> (gen_rtx_fmt_uuBeiie (INSN, mode,
                                                 prev_insn, next_insn,
                                                 bb, pattern, location, code,
                                                 reg_notes));
}

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
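/* Illustrative sketch of the sharing this provides: small values come from
   const_int_rtx[] above and larger ones from const_int_htab, so two requests
   for the same value return the same rtx and can be compared by pointer:

       rtx a = gen_rtx_CONST_INT (VOIDmode, 42);
       rtx b = GEN_INT (42);
       gcc_assert (a == b);  */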
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
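/* For example, gen_int_mode (0xff, QImode) sign-extends from QImode's top
   bit, so it returns (const_int -1) rather than (const_int 255), while
   gen_int_mode (0x7f, QImode) returns (const_int 127).  */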
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif

#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return (rtx) *slot;
}
#endif

/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ... */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only of copies of the sign bit, and the sign
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
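/* Worked example of the three cases above, assuming a 64-bit HOST_WIDE_INT:
   immed_double_const (5, 0, SImode) takes case 1 and yields (const_int 5);
   immed_double_const (-1, -1, TImode) takes case 2, because i1 is just the
   sign extension of i0, and yields (const_int -1); and
   immed_double_const (0, 1, TImode) takes case 3 and yields a VOIDmode
   CONST_DOUBLE whose low word is 0 and whose high word is 1, i.e. 2**64.  */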
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());

  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
             /* LRA can use subreg to store a floating point value in
                an integer mode.  Although the floating point and the
                integer modes need the same number of hard registers,
                the size of the floating point mode can be less than the
                integer mode.  LRA also uses subregs for a register that
                should be used in a different mode in one insn.  */
             || lra_in_progress))
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
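/* A few concrete cases of the rules above, on a little-endian 64-bit target
   (word_mode == DImode): (subreg:SI (reg:DI) 0) is a normal lowpart subreg
   and is valid; (subreg:DI (reg:SI) 0) is paradoxical, so only offset zero
   is accepted; (subreg:DI (reg:DF) 0) is valid because it does not change
   size; and (subreg:SI (reg:DF) 0) is rejected (outside of LRA) by the
   floating-point rule.  */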
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}

rtx
gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
                      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

rtvec
gen_rtvec_v (int n, rtx_insn **argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
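/* Worked example: byte_lowpart_offset (SImode, DImode) is 0 on a
   little-endian target and 4 on a (fully) big-endian one, since that is
   where the low four bytes of a DImode value live in memory.  In the
   paradoxical direction, byte_lowpart_offset (DImode, SImode) is 0 on
   little-endian and -4 on big-endian targets.  */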
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
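/* For example, with generating_concat_p set, gen_reg_rtx (DCmode) does not
   allocate a single DCmode pseudo; it returns (concat:DC (reg:DF) (reg:DF))
   built from two fresh DFmode pseudos, so the real and imaginary parts can
   be allocated independently.  */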
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
          || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
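/* For example, on a 32-bit little-endian target, gen_highpart (SImode, x)
   for a DImode pseudo x yields (subreg:SI (reg:DI) 4), the most significant
   word; on a comparable big-endian target the offset is 0.  For a DImode
   MEM the subword MEM is additionally passed through validize_mem.  */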
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */

rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode,
                   rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
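/* Worked example: subreg_lowpart_offset (SImode, DImode) is 0 on a
   little-endian target and 4 when BYTES_BIG_ENDIAN and WORDS_BIG_ENDIAN are
   both set, matching where the least significant four bytes are stored.  */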
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
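/* Worked example: subreg_highpart_offset (SImode, DImode) mirrors the
   lowpart case: 4 on a little-endian target and 0 when BYTES_BIG_ENDIAN and
   WORDS_BIG_ENDIAN are both set.  */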
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */

bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
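/* For example, (subreg:DI (reg:SI) 0) is paradoxical because the outer mode
   is wider than the inner one, whereas (subreg:SI (reg:DI) 0) is not.  */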
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
                 enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
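/* Worked example, on a 32-bit little-endian target: for a DImode pseudo P,
   operand_subword (P, 0, 1, DImode) yields (subreg:SI P 0), the low word,
   and operand_subword (P, 1, 1, DImode) yields (subreg:SI P 4), the high
   word; asking for word 2 returns const0_rtx because it lies outside P.  */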
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Returns 1 if both MEM_EXPR can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
           || (MAX (MEM_ALIGN (mem),
                    MAX (align, get_object_alignment (MEM_EXPR (mem))))
               < align))
         return -1;
       else
         return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !tree_fits_uhwi_p (byte_offset)
              || !tree_fits_uhwi_p (bit_offset))
            return -1;

          offset += tree_to_uhwi (byte_offset);
          offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          new_size = DECL_SIZE_UNIT (t);
        }

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
        ;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
            new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2)
              || TREE_CODE (t2) == COMPONENT_REF)
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (tree_fits_uhwi_p (off_tree))
                {
                  attrs.offset_known_p = true;
                  attrs.offset = tree_to_uhwi (off_tree);
                  apply_bitpos = bitpos;
                }
            }
          /* Else do not record a MEM_EXPR.  */
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
        obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
        attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
                  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}

/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  enum machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust_address, int adjust_object,
		  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  enum machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
		>> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (address_mode,
					      XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
	 in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
	 the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
	       && GET_CODE (addr) == ZERO_EXTEND
	       && GET_MODE (XEXP (addr, 0)) == pointer_mode
	       && trunc_int_for_mode (offset, pointer_mode) == offset)
	addr = gen_rtx_ZERO_EXTEND (address_mode,
				    plus_constant (pointer_mode,
						   XEXP (addr, 0), offset));
#endif
      else
	addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
      if (adjust_object && attrs.offset < 0)
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
    }

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  if (size)
    {
      /* Drop the object if the new right end is not within its bounds.  */
      if (adjust_object && (offset + size) > attrs.size)
	{
	  attrs.expr = NULL_TREE;
	  attrs.alias = 0;
	}
      attrs.size_known_p = true;
      attrs.size = size;
    }
  else if (attrs.size_known_p)
    {
      gcc_assert (!adjust_object);
      attrs.size -= offset;
      /* ??? The store_by_pieces machinery generates negative sizes,
	 so don't assert for that here.  */
    }

  set_mem_attrs (new_rtx, &attrs);

  return new_rtx;
}
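
/* For illustration, the usual way to reach adjust_address_1 is through the
   adjust_address macro.  A hypothetical caller splitting a DImode MEM into
   its two word-sized halves on a 32-bit target might write:

	rtx lo = adjust_address (mem, SImode, 0);
	rtx hi = adjust_address (mem, SImode, 4);

   Both halves inherit MEMREF's attributes, with the offset, size and
   alignment updated as described above.  */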
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
			     HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate, false);
  return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
}

/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  enum machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = get_address_mode (memref);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1, false);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  INPLACE is true if any changes
   can be made directly to MEMREF or false if MEMREF must be treated as
   immutable.  */

rtx
replace_equiv_address (rtx memref, rtx addr, bool inplace)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1, inplace);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr, bool inplace)
{
  return change_address_1 (memref, VOIDmode, addr, 0, inplace);
}

/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && attrs.offset >= 0)
	    break;

	  if (! tree_fits_uhwi_p (offset))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += tree_to_uhwi (offset);
	  attrs.offset += (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field))
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
	       && (! attrs.offset_known_p || attrs.offset >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
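
/* For illustration, a target that cannot issue byte loads might widen a
   hypothetical QImode MEM to a full word and mask out the byte it needs:

	rtx wide = widen_memory_access (mem, SImode, 0);

   The widened reference starts at the same address; the code above keeps
   its MEM_EXPR, size and alias set conservative because the wider access
   may touch neighboring objects.  */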
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}
/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
	(mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx_code_label *
gen_label_rtx (void)
{
  return as_a <rtx_code_label *> (
	    gen_rtx_CODE_LABEL (VOIDmode, NULL_RTX, NULL_RTX,
				NULL, label_num++, NULL));
}
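
/* For illustration, a typical caller pairs gen_label_rtx with the emit
   routines defined later in this file:

	rtx_code_label *label = gen_label_rtx ();
	...
	emit_jump_insn (gen_jump (label));
	...
	emit_label (label);

   gen_jump here is the target-generated expander for the "jump" pattern.
   The label only becomes part of the insn chain once emit_label is
   called; until then it merely reserves a unique label number.  */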
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx_insn *first, rtx_insn *last)
{
  rtx_insn *insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx_insn *insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = safe_as_a <rtx_expr_list *> (
		      copy_rtx_if_shared (stack_slot_list));
}

/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx_insn *insn)
{
  rtx_insn *p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (CALL_P (p))
	  reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}
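
/* For illustration, a pass that duplicates parts of the insn chain (for
   example by copying patterns with copy_rtx) can restore the sharing
   invariants for the whole function with:

	unshare_all_rtl_again (get_insns ());

   This clears the used flags, re-marks the DECL_RTLs that legitimately
   stay shared, and then copies anything in the chain that is still found
   marked.  */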
/* Check that ORIG is not marked when it should not be and mark ORIG as in use,
   Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
}
/* Reset used-flags for INSN.  */

static void
reset_insn_used_flags (rtx insn)
{
  gcc_assert (INSN_P (insn));
  reset_used_flags (PATTERN (insn));
  reset_used_flags (REG_NOTES (insn));
  if (CALL_P (insn))
    reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
}

/* Go through all the RTL insn bodies and clear all the USED bits.  */

static void
reset_all_used_flags (void)
{
  rtx_insn *p;

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  reset_insn_used_flags (p);
	else
	  {
	    gcc_assert (REG_NOTES (p) == NULL);
	    for (int i = 0; i < XVECLEN (pat, 0); i++)
	      {
		rtx insn = XVECEXP (pat, 0, i);
		if (INSN_P (insn))
		  reset_insn_used_flags (insn);
	      }
	  }
      }
}

/* Verify sharing in INSN.  */

static void
verify_insn_sharing (rtx insn)
{
  gcc_assert (INSN_P (insn));
  verify_rtx_sharing (PATTERN (insn), insn);
  verify_rtx_sharing (REG_NOTES (insn), insn);
  if (CALL_P (insn))
    verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (insn), insn);
}

/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx_insn *p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  reset_all_used_flags ();

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	rtx pat = PATTERN (p);
	if (GET_CODE (pat) != SEQUENCE)
	  verify_insn_sharing (p);
	else
	  for (int i = 0; i < XVECLEN (pat, 0); i++)
	    {
	      rtx insn = XVECEXP (pat, 0, i);
	      if (INSN_P (insn))
		verify_insn_sharing (insn);
	    }
      }

  reset_all_used_flags ();

  timevar_pop (TV_VERIFY_RTL_SHARING);
}
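
/* For illustration, a pass that performs manual RTL surgery can check the
   sharing invariants afterwards with something like:

	#ifdef ENABLE_CHECKING
	  verify_rtl_sharing ();
	#endif

   The verifier stops with "invalid rtl sharing found in the insn" as soon
   as a non-shareable rtx is reachable twice.  */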
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

static void
unshare_all_rtl_in_chain (rtx_insn *insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
	if (CALL_P (insn))
	  CALL_INSN_FUNCTION_USAGE (insn)
	    = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
      }
}

/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
2885 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2886 use. Recursively does the same for subexpressions. */
2889 copy_rtx_if_shared_1 (rtx
*orig1
)
2895 const char *format_ptr
;
2899 /* Repeat is used to turn tail-recursion into iteration. */
2906 code
= GET_CODE (x
);
2908 /* These types may be freely shared. */
2924 /* SCRATCH must be shared because they represent distinct values. */
2927 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2928 clobbers or clobbers of hard registers that originated as pseudos.
2929 This is needed to allow safe register renaming. */
2930 if (REG_P (XEXP (x
, 0)) && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2931 && ORIGINAL_REGNO (XEXP (x
, 0)) == REGNO (XEXP (x
, 0)))
2936 if (shared_const_p (x
))
2946 /* The chain of insns is not being copied. */
2953 /* This rtx may not be shared. If it has already been seen,
2954 replace it with a copy of itself. */
2956 if (RTX_FLAG (x
, used
))
2958 x
= shallow_copy_rtx (x
);
2961 RTX_FLAG (x
, used
) = 1;
2963 /* Now scan the subexpressions recursively.
2964 We can store any replaced subexpressions directly into X
2965 since we know X is not shared! Any vectors in X
2966 must be copied if X was copied. */
2968 format_ptr
= GET_RTX_FORMAT (code
);
2969 length
= GET_RTX_LENGTH (code
);
2972 for (i
= 0; i
< length
; i
++)
2974 switch (*format_ptr
++)
2978 copy_rtx_if_shared_1 (last_ptr
);
2979 last_ptr
= &XEXP (x
, i
);
2983 if (XVEC (x
, i
) != NULL
)
2986 int len
= XVECLEN (x
, i
);
2988 /* Copy the vector iff I copied the rtx and the length
2990 if (copied
&& len
> 0)
2991 XVEC (x
, i
) = gen_rtvec_v (len
, XVEC (x
, i
)->elem
);
2993 /* Call recursively on all inside the vector. */
2994 for (j
= 0; j
< len
; j
++)
2997 copy_rtx_if_shared_1 (last_ptr
);
2998 last_ptr
= &XVECEXP (x
, i
, j
);
3013 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
3016 mark_used_flags (rtx x
, int flag
)
3020 const char *format_ptr
;
3023 /* Repeat is used to turn tail-recursion into iteration. */
3028 code
= GET_CODE (x
);
3030 /* These types may be freely shared so we needn't do any resetting
3054 /* The chain of insns is not being copied. */
3061 RTX_FLAG (x
, used
) = flag
;
3063 format_ptr
= GET_RTX_FORMAT (code
);
3064 length
= GET_RTX_LENGTH (code
);
3066 for (i
= 0; i
< length
; i
++)
3068 switch (*format_ptr
++)
3076 mark_used_flags (XEXP (x
, i
), flag
);
3080 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3081 mark_used_flags (XVECEXP (x
, i
, j
), flag
);
/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
3105 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3106 Return X or the rtx for the pseudo reg the value of X was copied into.
3107 OTHER must be valid as a SET_DEST. */
3110 make_safe_from (rtx x
, rtx other
)
3113 switch (GET_CODE (other
))
3116 other
= SUBREG_REG (other
);
3118 case STRICT_LOW_PART
:
3121 other
= XEXP (other
, 0);
3130 && GET_CODE (x
) != SUBREG
)
3132 && (REGNO (other
) < FIRST_PSEUDO_REGISTER
3133 || reg_mentioned_p (other
, x
))))
3135 rtx temp
= gen_reg_rtx (GET_MODE (x
));
3136 emit_move_insn (temp
, x
);
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx_insn *
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;
  if (get_last_insn ())
    return get_last_insn ();
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}
3158 /* Return the first nonnote insn emitted in current sequence or current
3159 function. This routine looks inside SEQUENCEs. */
3162 get_first_nonnote_insn (void)
3164 rtx_insn
*insn
= get_insns ();
3169 for (insn
= next_insn (insn
);
3170 insn
&& NOTE_P (insn
);
3171 insn
= next_insn (insn
))
3175 if (NONJUMP_INSN_P (insn
)
3176 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3177 insn
= as_a
<rtx_sequence
*> (PATTERN (insn
))->insn (0);
3184 /* Return the last nonnote insn emitted in current sequence or current
3185 function. This routine looks inside SEQUENCEs. */
3188 get_last_nonnote_insn (void)
3190 rtx_insn
*insn
= get_last_insn ();
3195 for (insn
= previous_insn (insn
);
3196 insn
&& NOTE_P (insn
);
3197 insn
= previous_insn (insn
))
3201 if (NONJUMP_INSN_P (insn
))
3202 if (rtx_sequence
*seq
= dyn_cast
<rtx_sequence
*> (PATTERN (insn
)))
3203 insn
= seq
->insn (seq
->len () - 1);
3210 /* Return the number of actual (non-debug) insns emitted in this
3214 get_max_insn_count (void)
3216 int n
= cur_insn_uid
;
3218 /* The table size must be stable across -g, to avoid codegen
3219 differences due to debug insns, and not be affected by
3220 -fmin-insn-uid, to avoid excessive table size and to simplify
3221 debugging of -fcompare-debug failures. */
3222 if (cur_debug_insn_uid
> MIN_NONDEBUG_INSN_UID
)
3223 n
-= cur_debug_insn_uid
;
3225 n
-= MIN_NONDEBUG_INSN_UID
;
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx_insn *
next_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = as_a <rtx_sequence *> (PATTERN (insn))->insn (0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx_insn *
previous_insn (rtx_insn *insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn))
	if (rtx_sequence *seq = dyn_cast <rtx_sequence *> (PATTERN (insn)))
	  insn = seq->insn (seq->len () - 1);
    }

  return insn;
}
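
/* For illustration, the difference between NEXT_INSN and next_insn only
   matters once delay slots have been filled and a SEQUENCE stands for a
   group of insns.  A walk such as

	for (rtx_insn *insn = get_insns (); insn; insn = next_insn (insn))
	  ...

   descends into each SEQUENCE and visits the insns inside it, whereas
   stepping with plain NEXT_INSN would treat the whole group as one
   element of the chain.  */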
3265 /* Return the next insn after INSN that is not a NOTE. This routine does not
3266 look inside SEQUENCEs. */
3269 next_nonnote_insn (rtx uncast_insn
)
3271 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3274 insn
= NEXT_INSN (insn
);
3275 if (insn
== 0 || !NOTE_P (insn
))
3282 /* Return the next insn after INSN that is not a NOTE, but stop the
3283 search before we enter another basic block. This routine does not
3284 look inside SEQUENCEs. */
3287 next_nonnote_insn_bb (rtx_insn
*insn
)
3291 insn
= NEXT_INSN (insn
);
3292 if (insn
== 0 || !NOTE_P (insn
))
3294 if (NOTE_INSN_BASIC_BLOCK_P (insn
))
3301 /* Return the previous insn before INSN that is not a NOTE. This routine does
3302 not look inside SEQUENCEs. */
3305 prev_nonnote_insn (rtx uncast_insn
)
3307 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3311 insn
= PREV_INSN (insn
);
3312 if (insn
== 0 || !NOTE_P (insn
))
3319 /* Return the previous insn before INSN that is not a NOTE, but stop
3320 the search before we enter another basic block. This routine does
3321 not look inside SEQUENCEs. */
3324 prev_nonnote_insn_bb (rtx uncast_insn
)
3326 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3330 insn
= PREV_INSN (insn
);
3331 if (insn
== 0 || !NOTE_P (insn
))
3333 if (NOTE_INSN_BASIC_BLOCK_P (insn
))
3340 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3341 routine does not look inside SEQUENCEs. */
3344 next_nondebug_insn (rtx uncast_insn
)
3346 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3350 insn
= NEXT_INSN (insn
);
3351 if (insn
== 0 || !DEBUG_INSN_P (insn
))
3358 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3359 This routine does not look inside SEQUENCEs. */
3362 prev_nondebug_insn (rtx uncast_insn
)
3364 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3368 insn
= PREV_INSN (insn
);
3369 if (insn
== 0 || !DEBUG_INSN_P (insn
))
3376 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3377 This routine does not look inside SEQUENCEs. */
3380 next_nonnote_nondebug_insn (rtx uncast_insn
)
3382 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3386 insn
= NEXT_INSN (insn
);
3387 if (insn
== 0 || (!NOTE_P (insn
) && !DEBUG_INSN_P (insn
)))
3394 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3395 This routine does not look inside SEQUENCEs. */
3398 prev_nonnote_nondebug_insn (rtx uncast_insn
)
3400 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3404 insn
= PREV_INSN (insn
);
3405 if (insn
== 0 || (!NOTE_P (insn
) && !DEBUG_INSN_P (insn
)))
3412 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3413 or 0, if there is none. This routine does not look inside
3417 next_real_insn (rtx uncast_insn
)
3419 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3423 insn
= NEXT_INSN (insn
);
3424 if (insn
== 0 || INSN_P (insn
))
3431 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3432 or 0, if there is none. This routine does not look inside
3436 prev_real_insn (rtx uncast_insn
)
3438 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3442 insn
= PREV_INSN (insn
);
3443 if (insn
== 0 || INSN_P (insn
))
3450 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3451 This routine does not look inside SEQUENCEs. */
3454 last_call_insn (void)
3458 for (insn
= get_last_insn ();
3459 insn
&& !CALL_P (insn
);
3460 insn
= PREV_INSN (insn
))
3463 return safe_as_a
<rtx_call_insn
*> (insn
);
3466 /* Find the next insn after INSN that really does something. This routine
3467 does not look inside SEQUENCEs. After reload this also skips over
3468 standalone USE and CLOBBER insn. */
3471 active_insn_p (const_rtx insn
)
3473 return (CALL_P (insn
) || JUMP_P (insn
)
3474 || JUMP_TABLE_DATA_P (insn
) /* FIXME */
3475 || (NONJUMP_INSN_P (insn
)
3476 && (! reload_completed
3477 || (GET_CODE (PATTERN (insn
)) != USE
3478 && GET_CODE (PATTERN (insn
)) != CLOBBER
))));
3482 next_active_insn (rtx uncast_insn
)
3484 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3488 insn
= NEXT_INSN (insn
);
3489 if (insn
== 0 || active_insn_p (insn
))
3496 /* Find the last insn before INSN that really does something. This routine
3497 does not look inside SEQUENCEs. After reload this also skips over
3498 standalone USE and CLOBBER insn. */
3501 prev_active_insn (rtx uncast_insn
)
3503 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3507 insn
= PREV_INSN (insn
);
3508 if (insn
== 0 || active_insn_p (insn
))
3516 /* Return the next insn that uses CC0 after INSN, which is assumed to
3517 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3518 applied to the result of this function should yield INSN).
3520 Normally, this is simply the next insn. However, if a REG_CC_USER note
3521 is present, it contains the insn that uses CC0.
3523 Return 0 if we can't find the insn. */
3526 next_cc0_user (rtx uncast_insn
)
3528 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3530 rtx note
= find_reg_note (insn
, REG_CC_USER
, NULL_RTX
);
3533 return safe_as_a
<rtx_insn
*> (XEXP (note
, 0));
3535 insn
= next_nonnote_insn (insn
);
3536 if (insn
&& NONJUMP_INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3537 insn
= as_a
<rtx_sequence
*> (PATTERN (insn
))->insn (0);
3539 if (insn
&& INSN_P (insn
) && reg_mentioned_p (cc0_rtx
, PATTERN (insn
)))
3545 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3546 note, it is the previous insn. */
3549 prev_cc0_setter (rtx uncast_insn
)
3551 rtx_insn
*insn
= safe_as_a
<rtx_insn
*> (uncast_insn
);
3553 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
3556 return safe_as_a
<rtx_insn
*> (XEXP (note
, 0));
3558 insn
= prev_nonnote_insn (insn
);
3559 gcc_assert (sets_cc0_p (PATTERN (insn
)));
3566 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3569 find_auto_inc (const_rtx x
, const_rtx reg
)
3571 subrtx_iterator::array_type array
;
3572 FOR_EACH_SUBRTX (iter
, array
, x
, NONCONST
)
3574 const_rtx x
= *iter
;
3575 if (GET_RTX_CLASS (GET_CODE (x
)) == RTX_AUTOINC
3576 && rtx_equal_p (reg
, XEXP (x
, 0)))
3583 /* Increment the label uses for all labels present in rtx. */
3586 mark_label_nuses (rtx x
)
3592 code
= GET_CODE (x
);
3593 if (code
== LABEL_REF
&& LABEL_P (LABEL_REF_LABEL (x
)))
3594 LABEL_NUSES (LABEL_REF_LABEL (x
))++;
3596 fmt
= GET_RTX_FORMAT (code
);
3597 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3600 mark_label_nuses (XEXP (x
, i
));
3601 else if (fmt
[i
] == 'E')
3602 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
3603 mark_label_nuses (XVECEXP (x
, i
, j
));
3608 /* Try splitting insns that can be split for better scheduling.
3609 PAT is the pattern which might split.
3610 TRIAL is the insn providing PAT.
3611 LAST is nonzero if we should return the last insn of the sequence produced.
3613 If this routine succeeds in splitting, it returns the first or last
3614 replacement insn depending on the value of LAST. Otherwise, it
3615 returns TRIAL. If the insn to be returned can be split, it will be. */
3618 try_split (rtx pat
, rtx uncast_trial
, int last
)
3620 rtx_insn
*trial
= as_a
<rtx_insn
*> (uncast_trial
);
3621 rtx_insn
*before
= PREV_INSN (trial
);
3622 rtx_insn
*after
= NEXT_INSN (trial
);
3624 rtx_insn
*seq
, *tem
;
3626 rtx_insn
*insn_last
, *insn
;
3628 rtx call_insn
= NULL_RTX
;
3630 /* We're not good at redistributing frame information. */
3631 if (RTX_FRAME_RELATED_P (trial
))
3634 if (any_condjump_p (trial
)
3635 && (note
= find_reg_note (trial
, REG_BR_PROB
, 0)))
3636 split_branch_probability
= XINT (note
, 0);
3637 probability
= split_branch_probability
;
3639 seq
= safe_as_a
<rtx_insn
*> (split_insns (pat
, trial
));
3641 split_branch_probability
= -1;
3646 /* Avoid infinite loop if any insn of the result matches
3647 the original pattern. */
3651 if (INSN_P (insn_last
)
3652 && rtx_equal_p (PATTERN (insn_last
), pat
))
3654 if (!NEXT_INSN (insn_last
))
3656 insn_last
= NEXT_INSN (insn_last
);
3659 /* We will be adding the new sequence to the function. The splitters
3660 may have introduced invalid RTL sharing, so unshare the sequence now. */
3661 unshare_all_rtl_in_chain (seq
);
3663 /* Mark labels and copy flags. */
3664 for (insn
= insn_last
; insn
; insn
= PREV_INSN (insn
))
3669 CROSSING_JUMP_P (insn
) = CROSSING_JUMP_P (trial
);
3670 mark_jump_label (PATTERN (insn
), insn
, 0);
3672 if (probability
!= -1
3673 && any_condjump_p (insn
)
3674 && !find_reg_note (insn
, REG_BR_PROB
, 0))
3676 /* We can preserve the REG_BR_PROB notes only if exactly
3677 one jump is created, otherwise the machine description
3678 is responsible for this step using
3679 split_branch_probability variable. */
3680 gcc_assert (njumps
== 1);
3681 add_int_reg_note (insn
, REG_BR_PROB
, probability
);
3686 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3687 in SEQ and copy any additional information across. */
3690 for (insn
= insn_last
; insn
; insn
= PREV_INSN (insn
))
3696 gcc_assert (call_insn
== NULL_RTX
);
3699 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3700 target may have explicitly specified. */
3701 p
= &CALL_INSN_FUNCTION_USAGE (insn
);
3704 *p
= CALL_INSN_FUNCTION_USAGE (trial
);
3706 /* If the old call was a sibling call, the new one must
3708 SIBLING_CALL_P (insn
) = SIBLING_CALL_P (trial
);
3710 /* If the new call is the last instruction in the sequence,
3711 it will effectively replace the old call in-situ. Otherwise
3712 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3713 so that it comes immediately after the new call. */
3714 if (NEXT_INSN (insn
))
3715 for (next
= NEXT_INSN (trial
);
3716 next
&& NOTE_P (next
);
3717 next
= NEXT_INSN (next
))
3718 if (NOTE_KIND (next
) == NOTE_INSN_CALL_ARG_LOCATION
)
3721 add_insn_after (next
, insn
, NULL
);
3727 /* Copy notes, particularly those related to the CFG. */
3728 for (note
= REG_NOTES (trial
); note
; note
= XEXP (note
, 1))
3730 switch (REG_NOTE_KIND (note
))
3733 copy_reg_eh_region_note_backward (note
, insn_last
, NULL
);
3739 for (insn
= insn_last
; insn
!= NULL_RTX
; insn
= PREV_INSN (insn
))
3742 add_reg_note (insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3746 case REG_NON_LOCAL_GOTO
:
3747 for (insn
= insn_last
; insn
!= NULL_RTX
; insn
= PREV_INSN (insn
))
3750 add_reg_note (insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3756 for (insn
= insn_last
; insn
!= NULL_RTX
; insn
= PREV_INSN (insn
))
3758 rtx reg
= XEXP (note
, 0);
3759 if (!FIND_REG_INC_NOTE (insn
, reg
)
3760 && find_auto_inc (PATTERN (insn
), reg
))
3761 add_reg_note (insn
, REG_INC
, reg
);
3767 fixup_args_size_notes (NULL
, insn_last
, INTVAL (XEXP (note
, 0)));
3771 gcc_assert (call_insn
!= NULL_RTX
);
3772 add_reg_note (call_insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3780 /* If there are LABELS inside the split insns increment the
3781 usage count so we don't delete the label. */
3785 while (insn
!= NULL_RTX
)
3787 /* JUMP_P insns have already been "marked" above. */
3788 if (NONJUMP_INSN_P (insn
))
3789 mark_label_nuses (PATTERN (insn
));
3791 insn
= PREV_INSN (insn
);
3795 tem
= emit_insn_after_setloc (seq
, trial
, INSN_LOCATION (trial
));
3797 delete_insn (trial
);
3799 /* Recursively call try_split for each new insn created; by the
3800 time control returns here that insn will be fully split, so
3801 set LAST and continue from the insn after the one returned.
3802 We can't use next_active_insn here since AFTER may be a note.
3803 Ignore deleted insns, which can be occur if not optimizing. */
3804 for (tem
= NEXT_INSN (before
); tem
!= after
; tem
= NEXT_INSN (tem
))
3805 if (! tem
->deleted () && INSN_P (tem
))
3806 tem
= try_split (PATTERN (tem
), tem
, 1);
3808 /* Return either the first or the last insn, depending on which was
3811 ? (after
? PREV_INSN (after
) : get_last_insn ())
3812 : NEXT_INSN (before
);
3815 /* Make and return an INSN rtx, initializing all its slots.
3816 Store PATTERN in the pattern slots. */
3819 make_insn_raw (rtx pattern
)
3823 insn
= as_a
<rtx_insn
*> (rtx_alloc (INSN
));
3825 INSN_UID (insn
) = cur_insn_uid
++;
3826 PATTERN (insn
) = pattern
;
3827 INSN_CODE (insn
) = -1;
3828 REG_NOTES (insn
) = NULL
;
3829 INSN_LOCATION (insn
) = curr_insn_location ();
3830 BLOCK_FOR_INSN (insn
) = NULL
;
3832 #ifdef ENABLE_RTL_CHECKING
3835 && (returnjump_p (insn
)
3836 || (GET_CODE (insn
) == SET
3837 && SET_DEST (insn
) == pc_rtx
)))
3839 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3847 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3850 make_debug_insn_raw (rtx pattern
)
3852 rtx_debug_insn
*insn
;
3854 insn
= as_a
<rtx_debug_insn
*> (rtx_alloc (DEBUG_INSN
));
3855 INSN_UID (insn
) = cur_debug_insn_uid
++;
3856 if (cur_debug_insn_uid
> MIN_NONDEBUG_INSN_UID
)
3857 INSN_UID (insn
) = cur_insn_uid
++;
3859 PATTERN (insn
) = pattern
;
3860 INSN_CODE (insn
) = -1;
3861 REG_NOTES (insn
) = NULL
;
3862 INSN_LOCATION (insn
) = curr_insn_location ();
3863 BLOCK_FOR_INSN (insn
) = NULL
;
3868 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3871 make_jump_insn_raw (rtx pattern
)
3873 rtx_jump_insn
*insn
;
3875 insn
= as_a
<rtx_jump_insn
*> (rtx_alloc (JUMP_INSN
));
3876 INSN_UID (insn
) = cur_insn_uid
++;
3878 PATTERN (insn
) = pattern
;
3879 INSN_CODE (insn
) = -1;
3880 REG_NOTES (insn
) = NULL
;
3881 JUMP_LABEL (insn
) = NULL
;
3882 INSN_LOCATION (insn
) = curr_insn_location ();
3883 BLOCK_FOR_INSN (insn
) = NULL
;
3888 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3891 make_call_insn_raw (rtx pattern
)
3893 rtx_call_insn
*insn
;
3895 insn
= as_a
<rtx_call_insn
*> (rtx_alloc (CALL_INSN
));
3896 INSN_UID (insn
) = cur_insn_uid
++;
3898 PATTERN (insn
) = pattern
;
3899 INSN_CODE (insn
) = -1;
3900 REG_NOTES (insn
) = NULL
;
3901 CALL_INSN_FUNCTION_USAGE (insn
) = NULL
;
3902 INSN_LOCATION (insn
) = curr_insn_location ();
3903 BLOCK_FOR_INSN (insn
) = NULL
;
3908 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3911 make_note_raw (enum insn_note subtype
)
3913 /* Some notes are never created this way at all. These notes are
3914 only created by patching out insns. */
3915 gcc_assert (subtype
!= NOTE_INSN_DELETED_LABEL
3916 && subtype
!= NOTE_INSN_DELETED_DEBUG_LABEL
);
3918 rtx_note
*note
= as_a
<rtx_note
*> (rtx_alloc (NOTE
));
3919 INSN_UID (note
) = cur_insn_uid
++;
3920 NOTE_KIND (note
) = subtype
;
3921 BLOCK_FOR_INSN (note
) = NULL
;
3922 memset (&NOTE_DATA (note
), 0, sizeof (NOTE_DATA (note
)));
/* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
   INSN may be any object that can appear in the chain: INSN_P and NOTE_P
   objects, but also BARRIERs and JUMP_TABLE_DATAs.  PREV and NEXT may be
   NULL.  */

static inline void
link_insn_into_chain (rtx_insn *insn, rtx_insn *prev, rtx_insn *next)
{
  SET_PREV_INSN (insn) = prev;
  SET_NEXT_INSN (insn) = next;
  if (prev != NULL)
    {
      SET_NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (prev));
	  SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = insn;
	}
    }
  if (next != NULL)
    {
      SET_PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	{
	  rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (next));
	  SET_PREV_INSN (sequence->insn (0)) = insn;
	}
    }

  if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    {
      rtx_sequence *sequence = as_a <rtx_sequence *> (PATTERN (insn));
      SET_PREV_INSN (sequence->insn (0)) = prev;
      SET_NEXT_INSN (sequence->insn (sequence->len () - 1)) = next;
    }
}

/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx_insn *insn)
{
  rtx_insn *prev = get_last_insn ();
  link_insn_into_chain (insn, prev, NULL);
  if (NULL == get_insns ())
    set_first_insn (insn);
  set_last_insn (insn);
}
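
/* For illustration, add_insn is the primitive underneath the public emit_*
   entry points; a caller normally appends to the chain indirectly, e.g.
   with REG standing for some register rtx:

	emit_insn (gen_rtx_USE (VOIDmode, reg));

   which builds the raw insn via make_insn_raw and then calls add_insn to
   link it after the current last insn.  */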
3975 /* Add INSN into the doubly-linked list after insn AFTER. */
3978 add_insn_after_nobb (rtx_insn
*insn
, rtx_insn
*after
)
3980 rtx_insn
*next
= NEXT_INSN (after
);
3982 gcc_assert (!optimize
|| !after
->deleted ());
3984 link_insn_into_chain (insn
, after
, next
);
3988 if (get_last_insn () == after
)
3989 set_last_insn (insn
);
3992 struct sequence_stack
*stack
= seq_stack
;
3993 /* Scan all pending sequences too. */
3994 for (; stack
; stack
= stack
->next
)
3995 if (after
== stack
->last
)
4004 /* Add INSN into the doubly-linked list before insn BEFORE. */
4007 add_insn_before_nobb (rtx_insn
*insn
, rtx_insn
*before
)
4009 rtx_insn
*prev
= PREV_INSN (before
);
4011 gcc_assert (!optimize
|| !before
->deleted ());
4013 link_insn_into_chain (insn
, prev
, before
);
4017 if (get_insns () == before
)
4018 set_first_insn (insn
);
4021 struct sequence_stack
*stack
= seq_stack
;
4022 /* Scan all pending sequences too. */
4023 for (; stack
; stack
= stack
->next
)
4024 if (before
== stack
->first
)
4026 stack
->first
= insn
;
4035 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
4036 If BB is NULL, an attempt is made to infer the bb from before.
4038 This and the next function should be the only functions called
4039 to insert an insn once delay slots have been filled since only
4040 they know how to update a SEQUENCE. */
4043 add_insn_after (rtx uncast_insn
, rtx uncast_after
, basic_block bb
)
4045 rtx_insn
*insn
= as_a
<rtx_insn
*> (uncast_insn
);
4046 rtx_insn
*after
= as_a
<rtx_insn
*> (uncast_after
);
4047 add_insn_after_nobb (insn
, after
);
4048 if (!BARRIER_P (after
)
4049 && !BARRIER_P (insn
)
4050 && (bb
= BLOCK_FOR_INSN (after
)))
4052 set_block_for_insn (insn
, bb
);
4054 df_insn_rescan (insn
);
4055 /* Should not happen as first in the BB is always
4056 either NOTE or LABEL. */
4057 if (BB_END (bb
) == after
4058 /* Avoid clobbering of structure when creating new BB. */
4059 && !BARRIER_P (insn
)
4060 && !NOTE_INSN_BASIC_BLOCK_P (insn
))
4065 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4066 If BB is NULL, an attempt is made to infer the bb from before.
4068 This and the previous function should be the only functions called
4069 to insert an insn once delay slots have been filled since only
4070 they know how to update a SEQUENCE. */
4073 add_insn_before (rtx uncast_insn
, rtx uncast_before
, basic_block bb
)
4075 rtx_insn
*insn
= as_a
<rtx_insn
*> (uncast_insn
);
4076 rtx_insn
*before
= as_a
<rtx_insn
*> (uncast_before
);
4077 add_insn_before_nobb (insn
, before
);
4080 && !BARRIER_P (before
)
4081 && !BARRIER_P (insn
))
4082 bb
= BLOCK_FOR_INSN (before
);
4086 set_block_for_insn (insn
, bb
);
4088 df_insn_rescan (insn
);
4089 /* Should not happen as first in the BB is always either NOTE or
4091 gcc_assert (BB_HEAD (bb
) != insn
4092 /* Avoid clobbering of structure when creating new BB. */
4094 || NOTE_INSN_BASIC_BLOCK_P (insn
));
4098 /* Replace insn with an deleted instruction note. */
4101 set_insn_deleted (rtx insn
)
4104 df_insn_delete (as_a
<rtx_insn
*> (insn
));
4105 PUT_CODE (insn
, NOTE
);
4106 NOTE_KIND (insn
) = NOTE_INSN_DELETED
;
4110 /* Unlink INSN from the insn chain.
4112 This function knows how to handle sequences.
4114 This function does not invalidate data flow information associated with
4115 INSN (i.e. does not call df_insn_delete). That makes this function
4116 usable for only disconnecting an insn from the chain, and re-emit it
4119 To later insert INSN elsewhere in the insn chain via add_insn and
4120 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4121 the caller. Nullifying them here breaks many insn chain walks.
4123 To really delete an insn and related DF information, use delete_insn. */
4126 remove_insn (rtx uncast_insn
)
4128 rtx_insn
*insn
= as_a
<rtx_insn
*> (uncast_insn
);
4129 rtx_insn
*next
= NEXT_INSN (insn
);
4130 rtx_insn
*prev
= PREV_INSN (insn
);
4135 SET_NEXT_INSN (prev
) = next
;
4136 if (NONJUMP_INSN_P (prev
) && GET_CODE (PATTERN (prev
)) == SEQUENCE
)
4138 rtx_sequence
*sequence
= as_a
<rtx_sequence
*> (PATTERN (prev
));
4139 SET_NEXT_INSN (sequence
->insn (sequence
->len () - 1)) = next
;
4142 else if (get_insns () == insn
)
4145 SET_PREV_INSN (next
) = NULL
;
4146 set_first_insn (next
);
4150 struct sequence_stack
*stack
= seq_stack
;
4151 /* Scan all pending sequences too. */
4152 for (; stack
; stack
= stack
->next
)
4153 if (insn
== stack
->first
)
4155 stack
->first
= next
;
4164 SET_PREV_INSN (next
) = prev
;
4165 if (NONJUMP_INSN_P (next
) && GET_CODE (PATTERN (next
)) == SEQUENCE
)
4167 rtx_sequence
*sequence
= as_a
<rtx_sequence
*> (PATTERN (next
));
4168 SET_PREV_INSN (sequence
->insn (0)) = prev
;
4171 else if (get_last_insn () == insn
)
4172 set_last_insn (prev
);
4175 struct sequence_stack
*stack
= seq_stack
;
4176 /* Scan all pending sequences too. */
4177 for (; stack
; stack
= stack
->next
)
4178 if (insn
== stack
->last
)
4187 /* Fix up basic block boundaries, if necessary. */
4188 if (!BARRIER_P (insn
)
4189 && (bb
= BLOCK_FOR_INSN (insn
)))
4191 if (BB_HEAD (bb
) == insn
)
4193 /* Never ever delete the basic block note without deleting whole
4195 gcc_assert (!NOTE_P (insn
));
4196 BB_HEAD (bb
) = next
;
4198 if (BB_END (bb
) == insn
)
4203 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4206 add_function_usage_to (rtx call_insn
, rtx call_fusage
)
4208 gcc_assert (call_insn
&& CALL_P (call_insn
));
4210 /* Put the register usage information on the CALL. If there is already
4211 some usage information, put ours at the end. */
4212 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
4216 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
4217 link
= XEXP (link
, 1))
4220 XEXP (link
, 1) = call_fusage
;
4223 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
4226 /* Delete all insns made since FROM.
4227 FROM becomes the new last instruction. */
4230 delete_insns_since (rtx_insn
*from
)
4235 SET_NEXT_INSN (from
) = 0;
4236 set_last_insn (from
);
4239 /* This function is deprecated, please use sequences instead.
4241 Move a consecutive bunch of insns to a different place in the chain.
4242 The insns to be moved are those between FROM and TO.
4243 They are moved to a new position after the insn AFTER.
4244 AFTER must not be FROM or TO or any insn in between.
4246 This function does not know about SEQUENCEs and hence should not be
4247 called after delay-slot filling has been done. */
4250 reorder_insns_nobb (rtx_insn
*from
, rtx_insn
*to
, rtx_insn
*after
)
4252 #ifdef ENABLE_CHECKING
4254 for (x
= from
; x
!= to
; x
= NEXT_INSN (x
))
4255 gcc_assert (after
!= x
);
4256 gcc_assert (after
!= to
);
4259 /* Splice this bunch out of where it is now. */
4260 if (PREV_INSN (from
))
4261 SET_NEXT_INSN (PREV_INSN (from
)) = NEXT_INSN (to
);
4263 SET_PREV_INSN (NEXT_INSN (to
)) = PREV_INSN (from
);
4264 if (get_last_insn () == to
)
4265 set_last_insn (PREV_INSN (from
));
4266 if (get_insns () == from
)
4267 set_first_insn (NEXT_INSN (to
));
4269 /* Make the new neighbors point to it and it to them. */
4270 if (NEXT_INSN (after
))
4271 SET_PREV_INSN (NEXT_INSN (after
)) = to
;
4273 SET_NEXT_INSN (to
) = NEXT_INSN (after
);
4274 SET_PREV_INSN (from
) = after
;
4275 SET_NEXT_INSN (after
) = from
;
4276 if (after
== get_last_insn ())
4280 /* Same as function above, but take care to update BB boundaries. */
4282 reorder_insns (rtx_insn
*from
, rtx_insn
*to
, rtx_insn
*after
)
4284 rtx_insn
*prev
= PREV_INSN (from
);
4285 basic_block bb
, bb2
;
4287 reorder_insns_nobb (from
, to
, after
);
4289 if (!BARRIER_P (after
)
4290 && (bb
= BLOCK_FOR_INSN (after
)))
4293 df_set_bb_dirty (bb
);
4295 if (!BARRIER_P (from
)
4296 && (bb2
= BLOCK_FOR_INSN (from
)))
4298 if (BB_END (bb2
) == to
)
4299 BB_END (bb2
) = prev
;
4300 df_set_bb_dirty (bb2
);
4303 if (BB_END (bb
) == after
)
4306 for (x
= from
; x
!= NEXT_INSN (to
); x
= NEXT_INSN (x
))
4308 df_insn_change_bb (x
, bb
);
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */
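
/* For illustration, the idiom above applied to a concrete case: emitting a
   register copy before a hypothetical insn SPOT, with DST and SRC standing
   for register rtxes of the same mode:

	rtx_insn *insns_head;

	start_sequence ();
	emit_move_insn (dst, src);
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   Everything emitted between start_sequence and end_sequence is collected
   off to the side; emit_insn_before then splices that list into the main
   chain just ahead of SPOT.  */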
4339 emit_pattern_before_noloc (rtx x
, rtx before
, rtx last
, basic_block bb
,
4340 rtx_insn
*(*make_raw
) (rtx
))
4344 gcc_assert (before
);
4347 return safe_as_a
<rtx_insn
*> (last
);
4349 switch (GET_CODE (x
))
4358 insn
= as_a
<rtx_insn
*> (x
);
4361 rtx_insn
*next
= NEXT_INSN (insn
);
4362 add_insn_before (insn
, before
, bb
);
4368 #ifdef ENABLE_RTL_CHECKING
4375 last
= (*make_raw
) (x
);
4376 add_insn_before (last
, before
, bb
);
4380 return safe_as_a
<rtx_insn
*> (last
);
4383 /* Make X be output before the instruction BEFORE. */
4386 emit_insn_before_noloc (rtx x
, rtx_insn
*before
, basic_block bb
)
4388 return emit_pattern_before_noloc (x
, before
, before
, bb
, make_insn_raw
);
4391 /* Make an instruction with body X and code JUMP_INSN
4392 and output it before the instruction BEFORE. */
4395 emit_jump_insn_before_noloc (rtx x
, rtx_insn
*before
)
4397 return emit_pattern_before_noloc (x
, before
, NULL_RTX
, NULL
,
4398 make_jump_insn_raw
);
4401 /* Make an instruction with body X and code CALL_INSN
4402 and output it before the instruction BEFORE. */
4405 emit_call_insn_before_noloc (rtx x
, rtx_insn
*before
)
4407 return emit_pattern_before_noloc (x
, before
, NULL_RTX
, NULL
,
4408 make_call_insn_raw
);
4411 /* Make an instruction with body X and code DEBUG_INSN
4412 and output it before the instruction BEFORE. */
4415 emit_debug_insn_before_noloc (rtx x
, rtx before
)
4417 return emit_pattern_before_noloc (x
, before
, NULL_RTX
, NULL
,
4418 make_debug_insn_raw
);
4421 /* Make an insn of code BARRIER
4422 and output it before the insn BEFORE. */
4425 emit_barrier_before (rtx before
)
4427 rtx_barrier
*insn
= as_a
<rtx_barrier
*> (rtx_alloc (BARRIER
));
4429 INSN_UID (insn
) = cur_insn_uid
++;
4431 add_insn_before (insn
, before
, NULL
);
4435 /* Emit the label LABEL before the insn BEFORE. */
4438 emit_label_before (rtx label
, rtx_insn
*before
)
4440 gcc_checking_assert (INSN_UID (label
) == 0);
4441 INSN_UID (label
) = cur_insn_uid
++;
4442 add_insn_before (label
, before
, NULL
);
4443 return as_a
<rtx_insn
*> (label
);
4446 /* Helper for emit_insn_after, handles lists of instructions
4450 emit_insn_after_1 (rtx_insn
*first
, rtx uncast_after
, basic_block bb
)
4452 rtx_insn
*after
= safe_as_a
<rtx_insn
*> (uncast_after
);
4454 rtx_insn
*after_after
;
4455 if (!bb
&& !BARRIER_P (after
))
4456 bb
= BLOCK_FOR_INSN (after
);
4460 df_set_bb_dirty (bb
);
4461 for (last
= first
; NEXT_INSN (last
); last
= NEXT_INSN (last
))
4462 if (!BARRIER_P (last
))
4464 set_block_for_insn (last
, bb
);
4465 df_insn_rescan (last
);
4467 if (!BARRIER_P (last
))
4469 set_block_for_insn (last
, bb
);
4470 df_insn_rescan (last
);
4472 if (BB_END (bb
) == after
)
4476 for (last
= first
; NEXT_INSN (last
); last
= NEXT_INSN (last
))
4479 after_after
= NEXT_INSN (after
);
4481 SET_NEXT_INSN (after
) = first
;
4482 SET_PREV_INSN (first
) = after
;
4483 SET_NEXT_INSN (last
) = after_after
;
4485 SET_PREV_INSN (after_after
) = last
;
4487 if (after
== get_last_insn ())
4488 set_last_insn (last
);
4494 emit_pattern_after_noloc (rtx x
, rtx uncast_after
, basic_block bb
,
4495 rtx_insn
*(*make_raw
)(rtx
))
4497 rtx_insn
*after
= safe_as_a
<rtx_insn
*> (uncast_after
);
4498 rtx_insn
*last
= after
;
4505 switch (GET_CODE (x
))
4514 last
= emit_insn_after_1 (as_a
<rtx_insn
*> (x
), after
, bb
);
4517 #ifdef ENABLE_RTL_CHECKING
4524 last
= (*make_raw
) (x
);
4525 add_insn_after (last
, after
, bb
);
4532 /* Make X be output after the insn AFTER and set the BB of insn. If
4533 BB is NULL, an attempt is made to infer the BB from AFTER. */
4536 emit_insn_after_noloc (rtx x
, rtx after
, basic_block bb
)
4538 return emit_pattern_after_noloc (x
, after
, bb
, make_insn_raw
);
4542 /* Make an insn of code JUMP_INSN with body X
4543 and output it after the insn AFTER. */
4546 emit_jump_insn_after_noloc (rtx x
, rtx after
)
4548 return emit_pattern_after_noloc (x
, after
, NULL
, make_jump_insn_raw
);
4551 /* Make an instruction with body X and code CALL_INSN
4552 and output it after the instruction AFTER. */
4555 emit_call_insn_after_noloc (rtx x
, rtx after
)
4557 return emit_pattern_after_noloc (x
, after
, NULL
, make_call_insn_raw
);
4560 /* Make an instruction with body X and code CALL_INSN
4561 and output it after the instruction AFTER. */
4564 emit_debug_insn_after_noloc (rtx x
, rtx after
)
4566 return emit_pattern_after_noloc (x
, after
, NULL
, make_debug_insn_raw
);
4569 /* Make an insn of code BARRIER
4570 and output it after the insn AFTER. */
4573 emit_barrier_after (rtx after
)
4575 rtx_barrier
*insn
= as_a
<rtx_barrier
*> (rtx_alloc (BARRIER
));
4577 INSN_UID (insn
) = cur_insn_uid
++;
4579 add_insn_after (insn
, after
, NULL
);
4583 /* Emit the label LABEL after the insn AFTER. */
4586 emit_label_after (rtx label
, rtx_insn
*after
)
4588 gcc_checking_assert (INSN_UID (label
) == 0);
4589 INSN_UID (label
) = cur_insn_uid
++;
4590 add_insn_after (label
, after
, NULL
);
4591 return as_a
<rtx_insn
*> (label
);
/* Notes require a bit of special handling: Some notes need to have their
   BLOCK_FOR_INSN set, others should never have it set, and some should
   have it set or clear depending on the context.  */

/* Return true iff a note of kind SUBTYPE should be emitted with routines
   that never set BLOCK_FOR_INSN on NOTE.  BB_BOUNDARY is true if the
   caller is asked to emit a note before BB_HEAD, or after BB_END.  */

static bool
note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
{
  switch (subtype)
    {
      /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks.  */
      case NOTE_INSN_SWITCH_TEXT_SECTIONS:
	return true;

      /* Notes for var tracking and EH region markers can appear between or
	 inside basic blocks.  If the caller is emitting on the basic block
	 boundary, do not set BLOCK_FOR_INSN on the new note.  */
      case NOTE_INSN_VAR_LOCATION:
      case NOTE_INSN_CALL_ARG_LOCATION:
      case NOTE_INSN_EH_REGION_BEG:
      case NOTE_INSN_EH_REGION_END:
	return on_bb_boundary_p;

      /* Otherwise, BLOCK_FOR_INSN must be set.  */
      default:
	return false;
    }
}
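
/* For illustration, this predicate is what lets a debug-related note be
   emitted at a block boundary without being attached to the block; with
   CALL_INSN standing for a call that ends its basic block,

	emit_note_after (NOTE_INSN_CALL_ARG_LOCATION, call_insn);

   links the note with add_insn_after_nobb and leaves its BLOCK_FOR_INSN
   clear.  */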
/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx_note *
emit_note_after (enum insn_note subtype, rtx uncast_after)
{
  rtx_insn *after = as_a <rtx_insn *> (uncast_after);
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
  bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_after_nobb (note, after);
  else
    add_insn_after (note, after, bb);
  return note;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx_note *
emit_note_before (enum insn_note subtype, rtx uncast_before)
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_note *note = make_note_raw (subtype);
  basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
  bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);

  if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
    add_insn_before_nobb (note, before);
  else
    add_insn_before (note, before, bb);
  return note;
}
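
/* Usage sketch (illustrative only, not part of the original file): a caller
   does not need to choose between the _nobb and BB-setting insertion
   routines itself; it simply writes e.g.

     emit_note_after (NOTE_INSN_EH_REGION_BEG, insn);
     emit_note_before (NOTE_INSN_VAR_LOCATION, BB_HEAD (bb));

   and the two wrappers above consult note_outside_basic_block_p to decide
   whether the new note should get BLOCK_FOR_INSN.  */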
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx_insn *
emit_pattern_after_setloc (rtx pattern, rtx uncast_after, int loc,
			   rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return safe_as_a <rtx_insn *> (last);

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATION (after))
	INSN_LOCATION (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return safe_as_a <rtx_insn *> (last);
}
/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx_insn *
emit_pattern_after (rtx pattern, rtx uncast_after, bool skip_debug_insns,
		    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *after = safe_as_a <rtx_insn *> (uncast_after);
  rtx_insn *prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
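
/* Usage sketch (illustrative only, not part of the original file): the
   emit_*_after wrappers below forward here, so an insn inserted with

     rtx_insn *new_insn = emit_insn_after (pat, after);

   inherits INSN_LOCATION from the nearest non-debug insn at or before
   AFTER, while emit_insn_after_setloc (pat, after, loc) forces LOC.  PAT
   and AFTER stand for rtxes built by the caller.  */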
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_jump_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */

rtx_insn *
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before_setloc (rtx pattern, rtx uncast_before, int loc, bool insnp,
			    rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = as_a <rtx_insn *> (uncast_before);
  rtx_insn *first = PREV_INSN (before);
  rtx_insn *last = emit_pattern_before_noloc (pattern, before,
					      insnp ? before : NULL_RTX,
					      NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATION (first))
	INSN_LOCATION (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx_insn *
emit_pattern_before (rtx pattern, rtx uncast_before, bool skip_debug_insns,
		     bool insnp, rtx_insn *(*make_raw) (rtx))
{
  rtx_insn *before = safe_as_a <rtx_insn *> (uncast_before);
  rtx_insn *next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL_RTX,
				      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_jump_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_jump_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */

rtx_insn *
emit_jump_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_jump_insn_raw);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_call_insn_before_setloc (rtx pattern, rtx_insn *before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_location according to BEFORE.  */

rtx_insn *
emit_call_insn_before (rtx pattern, rtx_insn *before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */

rtx_insn *
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_location according to BEFORE.  */

rtx_insn *
emit_debug_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx_insn *
emit_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_debug_insn (rtx x)
{
  rtx_insn *last = get_last_insn ();
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_jump_insn (rtx x)
{
  rtx_insn *last = NULL;
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = as_a <rtx_insn *> (x);
      while (insn)
	{
	  rtx_insn *next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx_insn *
emit_call_insn (rtx x)
{
  rtx_insn *insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx_insn *
emit_label (rtx label)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (as_a <rtx_insn *> (label));
  return as_a <rtx_insn *> (label);
}
/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx_jump_table_data *
emit_jump_table_data (rtx table)
{
  rtx_jump_table_data *jump_table_data =
    as_a <rtx_jump_table_data *> (rtx_alloc (JUMP_TABLE_DATA));
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}
/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx_barrier *
emit_barrier (void)
{
  rtx_barrier *barrier = as_a <rtx_barrier *> (rtx_alloc (BARRIER));
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
/* Emit a copy of note ORIG.  */

rtx_note *
emit_note_copy (rtx_note *orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx_note *note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}

/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx_note *
emit_note (enum insn_note kind)
{
  rtx_note *note = make_note_raw (kind);
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx_insn *
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx_insn *
gen_clobber (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx_insn *
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx_insn *
gen_use (rtx x)
{
  rtx_insn *seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
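
/* Usage sketch (illustrative only, not part of the original file): a CONCAT,
   such as a complex-mode value represented as two halves, is split before
   the USE or CLOBBER is emitted, e.g.

     emit_clobber (gen_rtx_CONCAT (DCmode, real_part, imag_part));

   emits one CLOBBER per half rather than a CLOBBER of the CONCAT itself;
   real_part and imag_part stand for register rtxes built by the caller.  */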
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

static rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
	 registers.  REG_EQUAL and REG_EQUIV notes really do require the
	 PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
	return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      if (!set_for_reg_notes (insn))
	return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
	 It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
	return NULL_RTX;

      /* Notes with side effects are dangerous.  Even if the side-effect
	 initially mirrors one in PATTERN (INSN), later optimizations
	 might alter the way that the final register value is calculated
	 and so move or alter the side-effect in some way.  The note would
	 then no longer be a valid substitution for SET_SRC.  */
      if (side_effects_p (datum))
	return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (as_a <rtx_insn *> (insn));
      break;
    default:
      break;
    }

  return note;
}
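
/* Usage sketch (illustrative only, not part of the original file): a pass
   that knows the value computed by INSN can record it with

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   any existing REG_EQUAL note is overwritten, and the request is dropped
   when the insn has no suitable single SET, the datum is an ASM_OPERANDS,
   or the datum has side effects.  */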
/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);

  return NULL_RTX;
}
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (ANY_RETURN_P (x))
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
	return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
	return CALL_INSN;
      else
	return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
	if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
	  return CALL_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
	  return JUMP_INSN;
	else if (GET_CODE (XVECEXP (x, 0, j)) == SET
		 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
	  return CALL_INSN;
    }
  return INSN;
}
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx_insn *
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
	rtx_insn *insn = emit_jump_insn (x);
	if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
	  return emit_barrier ();
	return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
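
/* Usage sketch (illustrative only, not part of the original file): emit ()
   lets a caller hand over a bare pattern without knowing which insn code it
   needs, e.g.

     emit (gen_rtx_USE (VOIDmode, reg));      // classified as an INSN

   where reg stands for a register rtx built by the caller; patterns that set
   pc are routed through emit_jump_insn, and a BARRIER is appended
   automatically after an unconditional jump or a RETURN pattern.  */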
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx_insn *first)
{
  rtx_insn *last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx_insn *first, rtx_insn *last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
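
/* Usage sketch (illustrative only, not part of the original file): the usual
   idiom for building a detached insn list and splicing it in is

     rtx_insn *seq;
     start_sequence ();
     emit_insn (pat1);
     emit_insn (pat2);
     seq = get_insns ();      // must be called before end_sequence
     end_sequence ();
     emit_insn_before (seq, insn);

   where pat1, pat2 and insn stand for rtxes built by the caller.  */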
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;
    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
	 clobbers or clobbers of hard registers that originated as pseudos.
	 This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
	  && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
	return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
	if (copy_insn_scratch_in[i] == orig)
	  return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
	return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
	 the constant address may need to be reloaded.  If the mem is shared,
	 then reloading one copy of this mem will cause all copies to appear
	 to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	if (XEXP (orig, i) != NULL)
	  XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
	break;

      case 'E':
      case 'V':
	if (XVEC (orig, i) == orig_asm_constraints_vector)
	  XVEC (copy, i) = copy_asm_constraints_vector;
	else if (XVEC (orig, i) == orig_asm_operands_vector)
	  XVEC (copy, i) = copy_asm_operands_vector;
	else if (XVEC (orig, i) != NULL)
	  {
	    XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
	    for (j = 0; j < XVECLEN (copy, i); j++)
	      XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
	  }
	break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
	/* These are left unchanged.  */
	break;

      default:
	gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
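
/* Usage sketch (illustrative only, not part of the original file): as the
   comment above copy_insn_1 describes, a pattern is normally copied with
   copy_insn, and a REG_NOTE datum that might mention the same SCRATCHes can
   then be copied with copy_insn_1 so the SCRATCH sharing is preserved:

     rtx new_pat  = copy_insn (PATTERN (insn));
     rtx new_note = copy_insn_1 (XEXP (note, 0));   // note from REG_NOTES (insn)
*/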
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx_insn *
copy_delay_slot_insn (rtx_insn *insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = as_a <rtx_insn *> (copy_rtx (insn));
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
	  initial_regno_reg_rtx,
	  FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */

rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
	return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
	return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
	return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
	{
	  attrs->size_known_p = true;
	  attrs->size = GET_MODE_SIZE (mode);
	  if (STRICT_ALIGNMENT)
	    attrs->align = GET_MODE_ALIGNMENT (mode);
	}
      mode_mem_attrs[i] = attrs;
    }
}
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
	  && byte_mode == VOIDmode)
	byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
	  && word_mode == VOIDmode)
	word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
				    const_int_htab_eq, NULL);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
					 const_wide_int_htab_eq, NULL);
#endif
  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
				       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
				      const_fixed_htab_eq, NULL);

  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
				    reg_attrs_htab_eq, NULL);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
	(i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] =
	  CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   mode != VOIDmode;
	   mode = GET_MODE_WIDER_MODE (mode))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
	   mode <= MAX_MODE_PARTIAL_INT;
	   mode = (enum machine_mode)((int)(mode) + 1))
	const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
	= double_int_one.lshift (GET_MODE_FBIT (mode),
				 HOST_BITS_PER_DOUBLE_INT,
				 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
				      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update libcall regions if present.  */

rtx_insn *
emit_copy_of_insn_after (rtx_insn *insn, rtx_insn *after)
{
  rtx_insn *new_rtx;
  rtx link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
	CALL_INSN_FUNCTION_USAGE (new_rtx)
	  = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
	= RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
	if (GET_CODE (link) == EXPR_LIST)
	  add_reg_note (new_rtx, REG_NOTE_KIND (link),
			copy_insn_1 (XEXP (link, 0)));
	else
	  add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
	    gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
location_t prologue_location;
location_t epilogue_location;

/* Hold the current location information and the last location information,
   so the datastructures are built lazily only when some instructions in a
   given place are needed.  */
static location_t curr_location;

/* Allocate insn location datastructure.  */
void
insn_locations_init (void)
{
  prologue_location = epilogue_location = 0;
  curr_location = UNKNOWN_LOCATION;
}

/* At the end of emit stage, clear current location.  */
void
insn_locations_finalize (void)
{
  epilogue_location = curr_location;
  curr_location = UNKNOWN_LOCATION;
}

/* Set current location.  */
void
set_curr_insn_location (location_t location)
{
  curr_location = location;
}

/* Get current location.  */
location_t
curr_insn_location (void)
{
  return curr_location;
}

/* Return the lexical scope block the insn belongs to.  */
tree
insn_scope (const rtx_insn *insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return the line number of the statement that produced this insn.  */
int
insn_line (const rtx_insn *insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return the source file of the statement that produced this insn.  */
const char *
insn_file (const rtx_insn *insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return the expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const rtx_insn *insn)
{
  return expand_location (INSN_LOCATION (insn));
}
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches the behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
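
/* Usage sketch (illustrative only, not part of the original file): a target
   expander for an atomic store with memory model MODEL might bracket the
   store with fences based on this predicate:

     if (need_atomic_barrier_p (model, true))
       emit_pre_barrier ();                    // hypothetical target helper
     emit_move_insn (mem, val);
     if (need_atomic_barrier_p (model, false))
       emit_post_barrier ();                   // hypothetical target helper
*/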
#include "gt-emit-rtl.h"