/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2014 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "basic-block.h"
#include "function.h"
#include "stringpool.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "target.h"
struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

enum machine_mode byte_mode;    /* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;    /* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;  /* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;     /* Mode whose width is POINTER_SIZE.  */
/* Data structures maintained for the currently processed function in RTL form.  */

struct rtl_data x_rtl;
/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;
/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_wide_int_htab;
/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
#if TARGET_SUPPORTS_WIDE_INT
static hashval_t const_wide_int_htab_hash (const void *);
static int const_wide_int_htab_eq (const void *, const void *);
static rtx lookup_const_wide_int (rtx);
#endif
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
#if TARGET_SUPPORTS_WIDE_INT
/* Returns a hash code for X (which is really a CONST_WIDE_INT).  */

static hashval_t
const_wide_int_htab_hash (const void *x)
{
  int i;
  HOST_WIDE_INT hash = 0;
  const_rtx xr = (const_rtx) x;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    hash += CONST_WIDE_INT_ELT (xr, i);

  return (hashval_t) hash;
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_WIDE_INT) is the same as that given by Y (which is really a
   CONST_WIDE_INT).  */

static int
const_wide_int_htab_eq (const void *x, const void *y)
{
  int i;
  const_rtx xr = (const_rtx) x;
  const_rtx yr = (const_rtx) y;

  if (CONST_WIDE_INT_NUNITS (xr) != CONST_WIDE_INT_NUNITS (yr))
    return 0;

  for (i = 0; i < CONST_WIDE_INT_NUNITS (xr); i++)
    if (CONST_WIDE_INT_ELT (xr, i) != CONST_WIDE_INT_ELT (yr, i))
      return 0;

  return 1;
}
#endif
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */

static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (TARGET_SUPPORTS_WIDE_INT == 0 && GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
            && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
                           CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}
/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
          && p->offset_known_p == q->offset_known_p
          && (!p->offset_known_p || p->offset == q->offset)
          && p->size_known_p == q->size_known_p
          && (!p->size_known_p || p->size == q->size)
          && p->align == q->align
          && p->addrspace == q->addrspace
          && (p->expr == q->expr
              || (p->expr != NULL_TREE && q->expr != NULL_TREE
                  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  if (!MEM_ATTRS (mem)
      || !mem_attrs_eq_p (attrs, MEM_ATTRS (mem)))
    {
      MEM_ATTRS (mem) = ggc_alloc<mem_attrs> ();
      memcpy (MEM_ATTRS (mem), attrs, sizeof (mem_attrs));
    }
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}
/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc<reg_attrs> ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
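
/* Illustrative note (not in the original source): because reg_attrs are
   hash-consed here, two pseudos referring to the same declaration at the
   same offset share one record, so a sketch such as

     reg_attrs *a = get_reg_attrs (decl, 4);
     reg_attrs *b = get_reg_attrs (decl, 4);
     gcc_assert (a == b);

   would hold for any tree DECL; pointer equality then substitutes for a
   deep comparison wherever REG_ATTRS is consumed.  */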
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to block register equivalences to be seen across this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
                                   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
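
/* Illustrative sketch (not in the original source): gen_int_mode truncates
   C to MODE before sharing, so on a target with an 8-bit QImode

     rtx x = gen_int_mode (255, QImode);

   yields the shared (const_int -1), i.e. x == constm1_rtx, because
   trunc_int_for_mode sign-extends from the mode's top bit.  Small values
   come straight out of const_int_rtx[] without touching the hash table.  */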
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
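
/* Illustrative sketch (not in the original source): the usual way to obtain
   a floating-point constant is via the REAL_VALUE machinery, e.g.

     rtx one = const_double_from_real_value (dconst1, DFmode);

   and, because of lookup_const_double above, asking for dconst1 in DFmode
   a second time returns the identical rtx.  */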
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}
/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
      r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
#endif
#if TARGET_SUPPORTS_WIDE_INT
/* Determine whether CONST_WIDE_INT WINT already exists in the hash table.
   If so, return its counterpart; otherwise add it to the hash table and
   return it.  */

static rtx
lookup_const_wide_int (rtx wint)
{
  void **slot = htab_find_slot (const_wide_int_htab, wint, INSERT);
  if (*slot == 0)
    *slot = wint;

  return (rtx) *slot;
}
#endif
/* Return an rtx constant for V, given that the constant has mode MODE.
   The returned rtx will be a CONST_INT if V fits, otherwise it will be
   a CONST_DOUBLE (if !TARGET_SUPPORTS_WIDE_INT) or a CONST_WIDE_INT
   (if TARGET_SUPPORTS_WIDE_INT).  */

rtx
immed_wide_int_const (const wide_int_ref &v, enum machine_mode mode)
{
  unsigned int len = v.get_len ();
  unsigned int prec = GET_MODE_PRECISION (mode);

  /* Allow truncation but not extension since we do not know if the
     number is signed or unsigned.  */
  gcc_assert (prec <= v.get_precision ());

  if (len < 2 || prec <= HOST_BITS_PER_WIDE_INT)
    return gen_int_mode (v.elt (0), mode);

#if TARGET_SUPPORTS_WIDE_INT
  {
    unsigned int i;
    rtx value;
    unsigned int blocks_needed
      = (prec + HOST_BITS_PER_WIDE_INT - 1) / HOST_BITS_PER_WIDE_INT;

    if (len > blocks_needed)
      len = blocks_needed;

    value = const_wide_int_alloc (len);

    /* It is so tempting to just put the mode in here.  Must control
       myself ...  */
    PUT_MODE (value, VOIDmode);
    CWI_PUT_NUM_ELEM (value, len);

    for (i = 0; i < len; i++)
      CONST_WIDE_INT_ELT (value, i) = v.elt (i);

    return lookup_const_wide_int (value);
  }
#else
  return immed_double_const (v.elt (0), v.elt (1), mode);
#endif
}
#if TARGET_SUPPORTS_WIDE_INT == 0
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
        gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
        (i.e., i1 consists only from copies of the sign bit, and sign
        of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
                  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
                  /* We can get a 0 for an error mark.  */
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
                  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
        return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
#endif
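
/* Illustrative sketch (not in the original source), assuming a host with
   64-bit HOST_WIDE_INT: case 1 above gives
     immed_double_const (42, 0, SImode)    -> (const_int 42) via gen_int_mode;
   case 2 gives
     immed_double_const (-1, -1, VOIDmode) -> (const_int -1), since I1 is
   all copies of I0's sign bit; and only a genuinely two-word value such as
     immed_double_const (0, 1, TImode)
   actually allocates a CONST_DOUBLE.  */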
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
          && (!reload_completed || frame_pointer_needed))
        return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
        return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
        return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
          && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
          && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
        return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
        return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
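
/* Illustrative sketch (not in the original source): the sharing above means
   that, before reload (or while the frame pointer is still needed),

     rtx fp = gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM);

   returns the global frame_pointer_rtx itself rather than a fresh REG, so
   frame-pointer references can later be recognized by pointer comparison.  */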
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */
rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
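
/* Illustrative sketch (not in the original source): a caller saving a
   register below the frame pointer might write something like

     rtx slot = gen_frame_mem (Pmode,
                               plus_constant (Pmode, frame_pointer_rtx, -8));
     emit_move_insn (slot, gen_rtx_REG (Pmode, 0));

   relying on gen_frame_mem to mark the slot non-trapping and to give it
   the frame alias set.  (Register number 0 and offset -8 are arbitrary.)  */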
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
                 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
           && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrarily mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
             /* LRA can use subreg to store a floating point value in
                an integer mode.  Although the floating point and the
                integer modes need the same number of hard registers,
                the size of the floating point mode can be less than the
                integer mode.  LRA also uses subregs for a register that
                should be used in different modes in one insn.  */
             || lra_in_progress))
        return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
          && GET_MODE_INNER (imode) == omode)
        ;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
        return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
        return false;
    }
  return true;
}
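
/* Illustrative examples (not in the original source), assuming 32-bit
   SImode, 64-bit DImode/DFmode and UNITS_PER_WORD == 4:

     (subreg:SI (reg:DI) 0)  and  (subreg:SI (reg:DI) 4)  are valid;
     (subreg:SI (reg:DI) 2)  fails the offset % osize alignment check;
     (subreg:SI (reg:DF) 0)  is rejected because float modes may not change
                             size, except via the word_mode or
                             lra_in_progress escape hatches above.  */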
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
                         subreg_lowpart_offset (mode, inmode));
}
rtx
gen_rtx_VAR_LOCATION (enum machine_mode mode, tree decl, rtx loc,
                      enum var_init_status status)
{
  rtx x = gen_rtx_fmt_te (VAR_LOCATION, mode, decl, loc);
  PAT_VAR_LOCATION_STATUS (x) = status;
  return x;
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
                     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
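
/* Illustrative numbers (not in the original source): for a narrowing
   lowpart, byte_lowpart_offset (SImode, DImode) is 0 on a little-endian
   target and 4 on a fully big-endian one; for the paradoxical direction,
   byte_lowpart_offset (DImode, SImode) is correspondingly 0 or -4.  */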
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
        crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
          || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
         Instead, make a CONCAT of two pseudos.
         This allows noncontiguous allocation of the real and imaginary parts,
         which makes much better code.  Besides, allocating DCmode
         pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Do not call gen_reg_rtx with uninitialized crtl.  */
  gcc_assert (crtl->emit.regno_pointer_align_length);

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
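
/* Illustrative sketch (not in the original source): during expansion,

     rtx r = gen_reg_rtx (SImode);

   hands back (reg:SI N) for the next free pseudo number N, while

     rtx c = gen_reg_rtx (DCmode);

   (with generating_concat_p set) builds (concat:DC (reg:DF N) (reg:DF N+1)),
   so the real and imaginary halves can be allocated independently.  */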
/* Return TRUE if REG is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}
/* Update NEW with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
                                       REG_OFFSET (reg) + offset);
}
/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
                    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
         || GET_CODE (x) == ZERO_EXTEND
         || GET_CODE (x) == TRUNCATE
         || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
          || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
        can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
        REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
                                         MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
        mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
        update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
        mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
         parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
        {
          rtx x = XVECEXP (parm_rtx, 0, i);
          if (REG_P (XEXP (x, 0)))
            REG_ATTRS (XEXP (x, 0))
              = get_reg_attrs (MEM_EXPR (mem),
                               INTVAL (XEXP (x, 1)));
        }
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
                                               DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
        REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
        REG_ATTRS (XEXP (x, 1))
          = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
         both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
        start = 0;
      else
        start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
        {
          rtx y = XVECEXP (x, 0, i);
          if (REG_P (XEXP (y, 0)))
            REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
        }
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
        REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
          || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
         sign- or zero-extended, we can either just use the object being
         extended or make a narrower extension.  If we want an even smaller
         piece than the size of the object being extended, call ourselves
         recursively.

         This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
        return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
        return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
        return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
           || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
           || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
              || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
                                subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */
rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
                              subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
        offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
        offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
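
/* Illustrative numbers (not in the original source), for SImode inside
   DImode with 4-byte words: subreg_lowpart_offset is 0 and
   subreg_highpart_offset is 4 on a little-endian target; on a fully
   big-endian target the two values are swapped.  */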
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
          == SUBREG_BYTE (x));
}
/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
          > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
        return new_rtx;

      else if (reload_completed)
        {
          if (! strict_memory_address_addr_space_p (word_mode,
                                                    XEXP (new_rtx, 0),
                                                    MEM_ADDR_SPACE (op)))
            return 0;
        }
      else
        return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));

      return new_rtx;
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
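
/* Illustrative sketch (not in the original source): on a 32-bit,
   little-endian target, operand_subword (op, 1, 1, DImode) picks out the
   high-order word, e.g. (subreg:SI (reg:DI) 4) for a register operand; for
   a MEM it re-forms the reference at address + 4, validating the new
   address when VALIDATE_ADDRESS is nonzero.  */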
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which can not be accessed by words, copy it
         to a pseudo register.  */
      if (REG_P (op))
        op = copy_to_reg (op);
      else
        op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
         || (MAX (MEM_ALIGN (mem),
                  MAX (align, get_object_alignment (MEM_EXPR (mem))))
             < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
        return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
        return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
        {
          tree inner = TREE_OPERAND (expr, 0);
          tree field = TREE_OPERAND (expr, 1);
          tree byte_offset = component_ref_field_offset (expr);
          tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

          if (!byte_offset
              || !tree_fits_uhwi_p (byte_offset)
              || !tree_fits_uhwi_p (bit_offset))
            return -1;

          offset += tree_to_uhwi (byte_offset);
          offset += tree_to_uhwi (bit_offset) / BITS_PER_UNIT;

          if (inner == NULL_TREE)
            {
              if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
                  < (unsigned int) align)
                return -1;
              break;
            }
          else if (DECL_P (inner))
            {
              if (DECL_ALIGN (inner) < align)
                return -1;
              break;
            }
          else if (TREE_CODE (inner) != COMPONENT_REF)
            return -1;
          expr = inner;
        }
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
                                 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
         already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
         the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
        attrs.align = defattrs->align;
      else
        attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
         e.g. if the type carries an alignment attribute.  Should we be
         able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
        MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
         object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || TREE_CODE (t) == SAVE_EXPR)
        t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
        {
          if (DECL_P (base)
              && TREE_READONLY (base)
              && (TREE_STATIC (base) || DECL_EXTERNAL (base))
              && !TREE_THIS_VOLATILE (base))
            MEM_READONLY_P (ref) = 1;

          /* Mark static const strings readonly as well.  */
          if (TREE_CODE (base) == STRING_CST
              && TREE_READONLY (base)
              && TREE_STATIC (base))
            MEM_READONLY_P (ref) = 1;

          /* Address-space information is on the base object.  */
          if (TREE_CODE (base) == MEM_REF
              || TREE_CODE (base) == TARGET_MEM_REF)
            as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
                                                                      0))));
          else
            as = TYPE_ADDR_SPACE (TREE_TYPE (base));
        }

      /* If this expression uses its parent's alias set, mark it such
         that we won't change it.  */
      if (component_uses_parent_alias_set_from (t) != NULL_TREE)
        MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          new_size = DECL_SIZE_UNIT (t);
        }

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
        ;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
          if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
            new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
        }

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
        {
          tree off_tree = size_zero_node;
          /* We can't modify t, because we use it at the end of the
             function.  */
          tree t2 = t;

          do
            {
              tree index = TREE_OPERAND (t2, 1);
              tree low_bound = array_ref_low_bound (t2);
              tree unit_size = array_ref_element_size (t2);

              /* We assume all arrays have sizes that are a multiple of a byte.
                 First subtract the lower bound, if any, in the type of the
                 index, then convert to sizetype and multiply by the size of
                 the array element.  */
              if (! integer_zerop (low_bound))
                index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
                                     index, low_bound);

              off_tree = size_binop (PLUS_EXPR,
                                     size_binop (MULT_EXPR,
                                                 fold_convert (sizetype,
                                                               index),
                                                 unit_size),
                                     off_tree);
              t2 = TREE_OPERAND (t2, 0);
            }
          while (TREE_CODE (t2) == ARRAY_REF);

          if (DECL_P (t2)
              || TREE_CODE (t2) == COMPONENT_REF)
            {
              attrs.expr = t2;
              attrs.offset_known_p = false;
              if (tree_fits_uhwi_p (off_tree))
                {
                  attrs.offset_known_p = true;
                  attrs.offset = tree_to_uhwi (off_tree);
                  apply_bitpos = bitpos;
                }
            }
          /* Else do not record a MEM_EXPR.  */
        }

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
               || TREE_CODE (t) == TARGET_MEM_REF)
        {
          attrs.expr = t;
          attrs.offset_known_p = true;
          attrs.offset = 0;
          apply_bitpos = bitpos;
        }

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
        obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (tree_fits_uhwi_p (new_size))
    {
      attrs.size_known_p = true;
      attrs.size = tree_to_uhwi (new_size);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
        attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}
/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}
/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}
/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}
/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}
/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}
/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
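
/* Illustrative sketch (not in the original source): the setters above are
   the supported way to edit MEM attributes, since they funnel through
   set_mem_attrs and keep the shared mem_attrs records consistent.  E.g.,

     set_mem_align (mem, 64);      (mem is now known 64-bit aligned)
     set_mem_size (mem, 8);
     clear_mem_offset (mem);       (the offset is no longer known)

   Each call copies the current attributes, edits one field, and
   re-installs the result.  */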
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  INPLACE is true if any
   changes can be made directly to MEMREF or false if MEMREF must be treated
   as immutable.

   The memory attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate,
                  bool inplace)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  /* Don't validate address for LRA.  LRA can make the address valid
     by itself in most efficient way.  */
  if (validate && !lra_in_progress)
    {
      if (reload_in_progress || reload_completed)
        gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
        addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  if (inplace)
    {
      XEXP (memref, 0) = addr;
      return memref;
    }

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1, false);
  enum machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
        return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF base register.
   If ADJUST_OBJECT is zero, the underlying object associated with the
   memory reference is left unchanged and the caller is responsible for
   dealing with it.  Otherwise, if the new memory reference is outside
   the underlying object, even partially, then the object is dropped.
   SIZE, if nonzero, is the size of an access in cases where MODE
   has no inherent size.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
                  int validate, int adjust_address, int adjust_object,
                  HOST_WIDE_INT size)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  enum machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
  unsigned HOST_WIDE_INT max_align;
#ifdef POINTERS_EXTEND_UNSIGNED
  enum machine_mode pointer_mode
    = targetm.addr_space.pointer_mode (attrs.addrspace);
#endif

  /* VOIDmode means no mode change for change_address_1.  */
  if (mode == VOIDmode)
    mode = GET_MODE (memref);

  /* Take the size of non-BLKmode accesses from the mode.  */
  defattrs = mode_mem_attrs[(int) mode];
  if (defattrs->size_known_p)
    size = defattrs->size;

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (size == 0 || (attrs.size_known_p && attrs.size == size))
      && (!validate || memory_address_addr_space_p (mode, addr,
                                                    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  /* Convert a possibly large offset to a signed value within the
     range of the target address space.  */
  address_mode = get_address_mode (memref);
  pbits = GET_MODE_BITSIZE (address_mode);
  if (HOST_BITS_PER_WIDE_INT > pbits)
    {
      int shift = HOST_BITS_PER_WIDE_INT - pbits;
      offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
                >> shift);
    }

  if (adjust_address)
    {
      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
         object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
          && offset >= 0
          && (unsigned HOST_WIDE_INT) offset
             < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
        addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
                               plus_constant (address_mode,
                                              XEXP (addr, 1), offset));
#ifdef POINTERS_EXTEND_UNSIGNED
      /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
         in that mode, we merge it into the ZERO_EXTEND.  We take advantage of
         the fact that pointers are not allowed to overflow.  */
      else if (POINTERS_EXTEND_UNSIGNED > 0
               && GET_CODE (addr) == ZERO_EXTEND
               && GET_MODE (XEXP (addr, 0)) == pointer_mode
               && trunc_int_for_mode (offset, pointer_mode) == offset)
        addr = gen_rtx_ZERO_EXTEND (address_mode,
                                    plus_constant (pointer_mode,
                                                   XEXP (addr, 0), offset));
#endif
      else
        addr = plus_constant (address_mode, addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate, false);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Conservatively drop the object if we don't know where we start from.  */
  if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
    {
      attrs.expr = NULL_TREE;
      attrs.alias = 0;
    }

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    {
      attrs.offset += offset;

      /* Drop the object if the new left end is not within its bounds.  */
2203 /* Drop the object if the new left end is not within its bounds. */
2204 if (adjust_object
&& attrs
.offset
< 0)
2206 attrs
.expr
= NULL_TREE
;
2211 /* Compute the new alignment by taking the MIN of the alignment and the
2212 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2216 max_align
= (offset
& -offset
) * BITS_PER_UNIT
;
2217 attrs
.align
= MIN (attrs
.align
, max_align
);
2222 /* Drop the object if the new right end is not within its bounds. */
2223 if (adjust_object
&& (offset
+ size
) > attrs
.size
)
2225 attrs
.expr
= NULL_TREE
;
2228 attrs
.size_known_p
= true;
2231 else if (attrs
.size_known_p
)
2233 gcc_assert (!adjust_object
);
2234 attrs
.size
-= offset
;
2235 /* ??? The store_by_pieces machinery generates negative sizes,
2236 so don't assert for that here. */
2239 set_mem_attrs (new_rtx
, &attrs
);
2244 /* Return a memory reference like MEMREF, but with its mode changed
2245 to MODE and its address changed to ADDR, which is assumed to be
2246 MEMREF offset by OFFSET bytes. If VALIDATE is
2247 nonzero, the memory address is forced to be valid. */
2250 adjust_automodify_address_1 (rtx memref
, enum machine_mode mode
, rtx addr
,
2251 HOST_WIDE_INT offset
, int validate
)
2253 memref
= change_address_1 (memref
, VOIDmode
, addr
, validate
, false);
2254 return adjust_address_1 (memref
, mode
, offset
, validate
, 0, 0, 0);
2257 /* Return a memory reference like MEMREF, but whose address is changed by
2258 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2259 known to be in OFFSET (possibly 1). */
2262 offset_address (rtx memref
, rtx offset
, unsigned HOST_WIDE_INT pow2
)
2264 rtx new_rtx
, addr
= XEXP (memref
, 0);
2265 enum machine_mode address_mode
;
2266 struct mem_attrs attrs
, *defattrs
;
2268 attrs
= *get_mem_attrs (memref
);
2269 address_mode
= get_address_mode (memref
);
2270 new_rtx
= simplify_gen_binary (PLUS
, address_mode
, addr
, offset
);
2272 /* At this point we don't know _why_ the address is invalid. It
2273 could have secondary memory references, multiplies or anything.
2275 However, if we did go and rearrange things, we can wind up not
2276 being able to recognize the magic around pic_offset_table_rtx.
2277 This stuff is fragile, and is yet another example of why it is
2278 bad to expose PIC machinery too early. */
2279 if (! memory_address_addr_space_p (GET_MODE (memref
), new_rtx
,
2281 && GET_CODE (addr
) == PLUS
2282 && XEXP (addr
, 0) == pic_offset_table_rtx
)
2284 addr
= force_reg (GET_MODE (addr
), addr
);
2285 new_rtx
= simplify_gen_binary (PLUS
, address_mode
, addr
, offset
);
2288 update_temp_slot_address (XEXP (memref
, 0), new_rtx
);
2289 new_rtx
= change_address_1 (memref
, VOIDmode
, new_rtx
, 1, false);
2291 /* If there are no changes, just return the original memory reference. */
2292 if (new_rtx
== memref
)
2295 /* Update the alignment to reflect the offset. Reset the offset, which
2297 defattrs
= mode_mem_attrs
[(int) GET_MODE (new_rtx
)];
2298 attrs
.offset_known_p
= false;
2299 attrs
.size_known_p
= defattrs
->size_known_p
;
2300 attrs
.size
= defattrs
->size
;
2301 attrs
.align
= MIN (attrs
.align
, pow2
* BITS_PER_UNIT
);
2302 set_mem_attrs (new_rtx
, &attrs
);
2306 /* Return a memory reference like MEMREF, but with its address changed to
2307 ADDR. The caller is asserting that the actual piece of memory pointed
2308 to is the same, just the form of the address is being changed, such as
2309 by putting something into a register. INPLACE is true if any changes
2310 can be made directly to MEMREF or false if MEMREF must be treated as
2314 replace_equiv_address (rtx memref
, rtx addr
, bool inplace
)
2316 /* change_address_1 copies the memory attribute structure without change
2317 and that's exactly what we want here. */
2318 update_temp_slot_address (XEXP (memref
, 0), addr
);
2319 return change_address_1 (memref
, VOIDmode
, addr
, 1, inplace
);
2322 /* Likewise, but the reference is not required to be valid. */
2325 replace_equiv_address_nv (rtx memref
, rtx addr
, bool inplace
)
2327 return change_address_1 (memref
, VOIDmode
, addr
, 0, inplace
);
2330 /* Return a memory reference like MEMREF, but with its mode widened to
2331 MODE and offset by OFFSET. This would be used by targets that e.g.
2332 cannot issue QImode memory operations and have to use SImode memory
2333 operations plus masking logic. */
2336 widen_memory_access (rtx memref
, enum machine_mode mode
, HOST_WIDE_INT offset
)
2338 rtx new_rtx
= adjust_address_1 (memref
, mode
, offset
, 1, 1, 0, 0);
2339 struct mem_attrs attrs
;
2340 unsigned int size
= GET_MODE_SIZE (mode
);
2342 /* If there are no changes, just return the original memory reference. */
2343 if (new_rtx
== memref
)
2346 attrs
= *get_mem_attrs (new_rtx
);
2348 /* If we don't know what offset we were at within the expression, then
2349 we can't know if we've overstepped the bounds. */
2350 if (! attrs
.offset_known_p
)
2351 attrs
.expr
= NULL_TREE
;
2355 if (TREE_CODE (attrs
.expr
) == COMPONENT_REF
)
2357 tree field
= TREE_OPERAND (attrs
.expr
, 1);
2358 tree offset
= component_ref_field_offset (attrs
.expr
);
2360 if (! DECL_SIZE_UNIT (field
))
2362 attrs
.expr
= NULL_TREE
;
2366 /* Is the field at least as large as the access? If so, ok,
2367 otherwise strip back to the containing structure. */
2368 if (TREE_CODE (DECL_SIZE_UNIT (field
)) == INTEGER_CST
2369 && compare_tree_int (DECL_SIZE_UNIT (field
), size
) >= 0
2370 && attrs
.offset
>= 0)
2373 if (! tree_fits_uhwi_p (offset
))
2375 attrs
.expr
= NULL_TREE
;
2379 attrs
.expr
= TREE_OPERAND (attrs
.expr
, 0);
2380 attrs
.offset
+= tree_to_uhwi (offset
);
2381 attrs
.offset
+= (tree_to_uhwi (DECL_FIELD_BIT_OFFSET (field
))
2384 /* Similarly for the decl. */
2385 else if (DECL_P (attrs
.expr
)
2386 && DECL_SIZE_UNIT (attrs
.expr
)
2387 && TREE_CODE (DECL_SIZE_UNIT (attrs
.expr
)) == INTEGER_CST
2388 && compare_tree_int (DECL_SIZE_UNIT (attrs
.expr
), size
) >= 0
2389 && (! attrs
.offset_known_p
|| attrs
.offset
>= 0))
2393 /* The widened memory access overflows the expression, which means
2394 that it could alias another expression. Zap it. */
2395 attrs
.expr
= NULL_TREE
;
2401 attrs
.offset_known_p
= false;
2403 /* The widened memory may alias other stuff, so zap the alias set. */
2404 /* ??? Maybe use get_alias_set on any remaining expression. */
2406 attrs
.size_known_p
= true;
2408 set_mem_attrs (new_rtx
, &attrs
);
2412 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2413 static GTY(()) tree spill_slot_decl
;
2416 get_spill_slot_decl (bool force_build_p
)
2418 tree d
= spill_slot_decl
;
2420 struct mem_attrs attrs
;
2422 if (d
|| !force_build_p
)
2425 d
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
2426 VAR_DECL
, get_identifier ("%sfp"), void_type_node
);
2427 DECL_ARTIFICIAL (d
) = 1;
2428 DECL_IGNORED_P (d
) = 1;
2430 spill_slot_decl
= d
;
2432 rd
= gen_rtx_MEM (BLKmode
, frame_pointer_rtx
);
2433 MEM_NOTRAP_P (rd
) = 1;
2434 attrs
= *mode_mem_attrs
[(int) BLKmode
];
2435 attrs
.alias
= new_alias_set ();
2437 set_mem_attrs (rd
, &attrs
);
2438 SET_DECL_RTL (d
, rd
);
2443 /* Given MEM, a result from assign_stack_local, fill in the memory
2444 attributes as appropriate for a register allocator spill slot.
2445 These slots are not aliasable by other memory. We arrange for
2446 them all to use a single MEM_EXPR, so that the aliasing code can
2447 work properly in the case of shared spill slots. */
2450 set_mem_attrs_for_spill (rtx mem
)
2452 struct mem_attrs attrs
;
2455 attrs
= *get_mem_attrs (mem
);
2456 attrs
.expr
= get_spill_slot_decl (true);
2457 attrs
.alias
= MEM_ALIAS_SET (DECL_RTL (attrs
.expr
));
2458 attrs
.addrspace
= ADDR_SPACE_GENERIC
;
2460 /* We expect the incoming memory to be of the form:
2461 (mem:MODE (plus (reg sfp) (const_int offset)))
2462 with perhaps the plus missing for offset = 0. */
2463 addr
= XEXP (mem
, 0);
2464 attrs
.offset_known_p
= true;
2466 if (GET_CODE (addr
) == PLUS
2467 && CONST_INT_P (XEXP (addr
, 1)))
2468 attrs
.offset
= INTVAL (XEXP (addr
, 1));
2470 set_mem_attrs (mem
, &attrs
);
2471 MEM_NOTRAP_P (mem
) = 1;
2474 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2477 gen_label_rtx (void)
2479 return gen_rtx_CODE_LABEL (VOIDmode
, NULL_RTX
, NULL_RTX
,
2480 NULL
, label_num
++, NULL
);
2483 /* For procedure integration. */
2485 /* Install new pointers to the first and last insns in the chain.
2486 Also, set cur_insn_uid to one higher than the last in use.
2487 Used for an inline-procedure after copying the insn chain. */
2490 set_new_first_and_last_insn (rtx first
, rtx last
)
2494 set_first_insn (first
);
2495 set_last_insn (last
);
2498 if (MIN_NONDEBUG_INSN_UID
|| MAY_HAVE_DEBUG_INSNS
)
2500 int debug_count
= 0;
2502 cur_insn_uid
= MIN_NONDEBUG_INSN_UID
- 1;
2503 cur_debug_insn_uid
= 0;
2505 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
2506 if (INSN_UID (insn
) < MIN_NONDEBUG_INSN_UID
)
2507 cur_debug_insn_uid
= MAX (cur_debug_insn_uid
, INSN_UID (insn
));
2510 cur_insn_uid
= MAX (cur_insn_uid
, INSN_UID (insn
));
2511 if (DEBUG_INSN_P (insn
))
2516 cur_debug_insn_uid
= MIN_NONDEBUG_INSN_UID
+ debug_count
;
2518 cur_debug_insn_uid
++;
2521 for (insn
= first
; insn
; insn
= NEXT_INSN (insn
))
2522 cur_insn_uid
= MAX (cur_insn_uid
, INSN_UID (insn
));
2527 /* Go through all the RTL insn bodies and copy any invalid shared
2528 structure. This routine should only be called once. */
2531 unshare_all_rtl_1 (rtx insn
)
2533 /* Unshare just about everything else. */
2534 unshare_all_rtl_in_chain (insn
);
2536 /* Make sure the addresses of stack slots found outside the insn chain
2537 (such as, in DECL_RTL of a variable) are not shared
2538 with the insn chain.
2540 This special care is necessary when the stack slot MEM does not
2541 actually appear in the insn chain. If it does appear, its address
2542 is unshared from all else at that point. */
2543 stack_slot_list
= copy_rtx_if_shared (stack_slot_list
);
2546 /* Go through all the RTL insn bodies and copy any invalid shared
2547 structure, again. This is a fairly expensive thing to do so it
2548 should be done sparingly. */
2551 unshare_all_rtl_again (rtx insn
)
2556 for (p
= insn
; p
; p
= NEXT_INSN (p
))
2559 reset_used_flags (PATTERN (p
));
2560 reset_used_flags (REG_NOTES (p
));
2562 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p
));
2565 /* Make sure that virtual stack slots are not shared. */
2566 set_used_decls (DECL_INITIAL (cfun
->decl
));
2568 /* Make sure that virtual parameters are not shared. */
2569 for (decl
= DECL_ARGUMENTS (cfun
->decl
); decl
; decl
= DECL_CHAIN (decl
))
2570 set_used_flags (DECL_RTL (decl
));
2572 reset_used_flags (stack_slot_list
);
2574 unshare_all_rtl_1 (insn
);
2578 unshare_all_rtl (void)
2580 unshare_all_rtl_1 (get_insns ());
2585 /* Check that ORIG is not marked when it should not be and mark ORIG as in use,
2586 Recursively does the same for subexpressions. */
2589 verify_rtx_sharing (rtx orig
, rtx insn
)
2594 const char *format_ptr
;
2599 code
= GET_CODE (x
);
2601 /* These types may be freely shared. */
2617 /* SCRATCH must be shared because they represent distinct values. */
2620 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2621 clobbers or clobbers of hard registers that originated as pseudos.
2622 This is needed to allow safe register renaming. */
2623 if (REG_P (XEXP (x
, 0)) && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2624 && ORIGINAL_REGNO (XEXP (x
, 0)) == REGNO (XEXP (x
, 0)))
2629 if (shared_const_p (orig
))
2634 /* A MEM is allowed to be shared if its address is constant. */
2635 if (CONSTANT_ADDRESS_P (XEXP (x
, 0))
2636 || reload_completed
|| reload_in_progress
)
2645 /* This rtx may not be shared. If it has already been seen,
2646 replace it with a copy of itself. */
2647 #ifdef ENABLE_CHECKING
2648 if (RTX_FLAG (x
, used
))
2650 error ("invalid rtl sharing found in the insn");
2652 error ("shared rtx");
2654 internal_error ("internal consistency failure");
2657 gcc_assert (!RTX_FLAG (x
, used
));
2659 RTX_FLAG (x
, used
) = 1;
2661 /* Now scan the subexpressions recursively. */
2663 format_ptr
= GET_RTX_FORMAT (code
);
2665 for (i
= 0; i
< GET_RTX_LENGTH (code
); i
++)
2667 switch (*format_ptr
++)
2670 verify_rtx_sharing (XEXP (x
, i
), insn
);
2674 if (XVEC (x
, i
) != NULL
)
2677 int len
= XVECLEN (x
, i
);
2679 for (j
= 0; j
< len
; j
++)
2681 /* We allow sharing of ASM_OPERANDS inside single
2683 if (j
&& GET_CODE (XVECEXP (x
, i
, j
)) == SET
2684 && (GET_CODE (SET_SRC (XVECEXP (x
, i
, j
)))
2686 verify_rtx_sharing (SET_DEST (XVECEXP (x
, i
, j
)), insn
);
2688 verify_rtx_sharing (XVECEXP (x
, i
, j
), insn
);
2697 /* Reset used-flags for INSN. */
2700 reset_insn_used_flags (rtx insn
)
2702 gcc_assert (INSN_P (insn
));
2703 reset_used_flags (PATTERN (insn
));
2704 reset_used_flags (REG_NOTES (insn
));
2706 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn
));
2709 /* Go through all the RTL insn bodies and clear all the USED bits. */
2712 reset_all_used_flags (void)
2716 for (p
= get_insns (); p
; p
= NEXT_INSN (p
))
2719 rtx pat
= PATTERN (p
);
2720 if (GET_CODE (pat
) != SEQUENCE
)
2721 reset_insn_used_flags (p
);
2724 gcc_assert (REG_NOTES (p
) == NULL
);
2725 for (int i
= 0; i
< XVECLEN (pat
, 0); i
++)
2727 rtx insn
= XVECEXP (pat
, 0, i
);
2729 reset_insn_used_flags (insn
);
2735 /* Verify sharing in INSN. */
2738 verify_insn_sharing (rtx insn
)
2740 gcc_assert (INSN_P (insn
));
2741 reset_used_flags (PATTERN (insn
));
2742 reset_used_flags (REG_NOTES (insn
));
2744 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn
));
2747 /* Go through all the RTL insn bodies and check that there is no unexpected
2748 sharing in between the subexpressions. */
2751 verify_rtl_sharing (void)
2755 timevar_push (TV_VERIFY_RTL_SHARING
);
2757 reset_all_used_flags ();
2759 for (p
= get_insns (); p
; p
= NEXT_INSN (p
))
2762 rtx pat
= PATTERN (p
);
2763 if (GET_CODE (pat
) != SEQUENCE
)
2764 verify_insn_sharing (p
);
2766 for (int i
= 0; i
< XVECLEN (pat
, 0); i
++)
2768 rtx insn
= XVECEXP (pat
, 0, i
);
2770 verify_insn_sharing (insn
);
2774 reset_all_used_flags ();
2776 timevar_pop (TV_VERIFY_RTL_SHARING
);
2779 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2780 Assumes the mark bits are cleared at entry. */
2783 unshare_all_rtl_in_chain (rtx insn
)
2785 for (; insn
; insn
= NEXT_INSN (insn
))
2788 PATTERN (insn
) = copy_rtx_if_shared (PATTERN (insn
));
2789 REG_NOTES (insn
) = copy_rtx_if_shared (REG_NOTES (insn
));
2791 CALL_INSN_FUNCTION_USAGE (insn
)
2792 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn
));
2796 /* Go through all virtual stack slots of a function and mark them as
2797 shared. We never replace the DECL_RTLs themselves with a copy,
2798 but expressions mentioned into a DECL_RTL cannot be shared with
2799 expressions in the instruction stream.
2801 Note that reload may convert pseudo registers into memories in-place.
2802 Pseudo registers are always shared, but MEMs never are. Thus if we
2803 reset the used flags on MEMs in the instruction stream, we must set
2804 them again on MEMs that appear in DECL_RTLs. */
2807 set_used_decls (tree blk
)
2812 for (t
= BLOCK_VARS (blk
); t
; t
= DECL_CHAIN (t
))
2813 if (DECL_RTL_SET_P (t
))
2814 set_used_flags (DECL_RTL (t
));
2816 /* Now process sub-blocks. */
2817 for (t
= BLOCK_SUBBLOCKS (blk
); t
; t
= BLOCK_CHAIN (t
))
2821 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2822 Recursively does the same for subexpressions. Uses
2823 copy_rtx_if_shared_1 to reduce stack space. */
2826 copy_rtx_if_shared (rtx orig
)
2828 copy_rtx_if_shared_1 (&orig
);
2832 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2833 use. Recursively does the same for subexpressions. */
2836 copy_rtx_if_shared_1 (rtx
*orig1
)
2842 const char *format_ptr
;
2846 /* Repeat is used to turn tail-recursion into iteration. */
2853 code
= GET_CODE (x
);
2855 /* These types may be freely shared. */
2871 /* SCRATCH must be shared because they represent distinct values. */
2874 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2875 clobbers or clobbers of hard registers that originated as pseudos.
2876 This is needed to allow safe register renaming. */
2877 if (REG_P (XEXP (x
, 0)) && REGNO (XEXP (x
, 0)) < FIRST_PSEUDO_REGISTER
2878 && ORIGINAL_REGNO (XEXP (x
, 0)) == REGNO (XEXP (x
, 0)))
2883 if (shared_const_p (x
))
2893 /* The chain of insns is not being copied. */
2900 /* This rtx may not be shared. If it has already been seen,
2901 replace it with a copy of itself. */
2903 if (RTX_FLAG (x
, used
))
2905 x
= shallow_copy_rtx (x
);
2908 RTX_FLAG (x
, used
) = 1;
2910 /* Now scan the subexpressions recursively.
2911 We can store any replaced subexpressions directly into X
2912 since we know X is not shared! Any vectors in X
2913 must be copied if X was copied. */
2915 format_ptr
= GET_RTX_FORMAT (code
);
2916 length
= GET_RTX_LENGTH (code
);
2919 for (i
= 0; i
< length
; i
++)
2921 switch (*format_ptr
++)
2925 copy_rtx_if_shared_1 (last_ptr
);
2926 last_ptr
= &XEXP (x
, i
);
2930 if (XVEC (x
, i
) != NULL
)
2933 int len
= XVECLEN (x
, i
);
2935 /* Copy the vector iff I copied the rtx and the length
2937 if (copied
&& len
> 0)
2938 XVEC (x
, i
) = gen_rtvec_v (len
, XVEC (x
, i
)->elem
);
2940 /* Call recursively on all inside the vector. */
2941 for (j
= 0; j
< len
; j
++)
2944 copy_rtx_if_shared_1 (last_ptr
);
2945 last_ptr
= &XVECEXP (x
, i
, j
);
2960 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2963 mark_used_flags (rtx x
, int flag
)
2967 const char *format_ptr
;
2970 /* Repeat is used to turn tail-recursion into iteration. */
2975 code
= GET_CODE (x
);
2977 /* These types may be freely shared so we needn't do any resetting
3001 /* The chain of insns is not being copied. */
3008 RTX_FLAG (x
, used
) = flag
;
3010 format_ptr
= GET_RTX_FORMAT (code
);
3011 length
= GET_RTX_LENGTH (code
);
3013 for (i
= 0; i
< length
; i
++)
3015 switch (*format_ptr
++)
3023 mark_used_flags (XEXP (x
, i
), flag
);
3027 for (j
= 0; j
< XVECLEN (x
, i
); j
++)
3028 mark_used_flags (XVECEXP (x
, i
, j
), flag
);
3034 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
3035 to look for shared sub-parts. */
3038 reset_used_flags (rtx x
)
3040 mark_used_flags (x
, 0);
3043 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
3044 to look for shared sub-parts. */
3047 set_used_flags (rtx x
)
3049 mark_used_flags (x
, 1);
3052 /* Copy X if necessary so that it won't be altered by changes in OTHER.
3053 Return X or the rtx for the pseudo reg the value of X was copied into.
3054 OTHER must be valid as a SET_DEST. */
3057 make_safe_from (rtx x
, rtx other
)
3060 switch (GET_CODE (other
))
3063 other
= SUBREG_REG (other
);
3065 case STRICT_LOW_PART
:
3068 other
= XEXP (other
, 0);
3077 && GET_CODE (x
) != SUBREG
)
3079 && (REGNO (other
) < FIRST_PSEUDO_REGISTER
3080 || reg_mentioned_p (other
, x
))))
3082 rtx temp
= gen_reg_rtx (GET_MODE (x
));
3083 emit_move_insn (temp
, x
);
3089 /* Emission of insns (adding them to the doubly-linked list). */
3091 /* Return the last insn emitted, even if it is in a sequence now pushed. */
3094 get_last_insn_anywhere (void)
3096 struct sequence_stack
*stack
;
3097 if (get_last_insn ())
3098 return get_last_insn ();
3099 for (stack
= seq_stack
; stack
; stack
= stack
->next
)
3100 if (stack
->last
!= 0)
3105 /* Return the first nonnote insn emitted in current sequence or current
3106 function. This routine looks inside SEQUENCEs. */
3109 get_first_nonnote_insn (void)
3111 rtx insn
= get_insns ();
3116 for (insn
= next_insn (insn
);
3117 insn
&& NOTE_P (insn
);
3118 insn
= next_insn (insn
))
3122 if (NONJUMP_INSN_P (insn
)
3123 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3124 insn
= XVECEXP (PATTERN (insn
), 0, 0);
3131 /* Return the last nonnote insn emitted in current sequence or current
3132 function. This routine looks inside SEQUENCEs. */
3135 get_last_nonnote_insn (void)
3137 rtx insn
= get_last_insn ();
3142 for (insn
= previous_insn (insn
);
3143 insn
&& NOTE_P (insn
);
3144 insn
= previous_insn (insn
))
3148 if (NONJUMP_INSN_P (insn
)
3149 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3150 insn
= XVECEXP (PATTERN (insn
), 0,
3151 XVECLEN (PATTERN (insn
), 0) - 1);
3158 /* Return the number of actual (non-debug) insns emitted in this
3162 get_max_insn_count (void)
3164 int n
= cur_insn_uid
;
3166 /* The table size must be stable across -g, to avoid codegen
3167 differences due to debug insns, and not be affected by
3168 -fmin-insn-uid, to avoid excessive table size and to simplify
3169 debugging of -fcompare-debug failures. */
3170 if (cur_debug_insn_uid
> MIN_NONDEBUG_INSN_UID
)
3171 n
-= cur_debug_insn_uid
;
3173 n
-= MIN_NONDEBUG_INSN_UID
;
3179 /* Return the next insn. If it is a SEQUENCE, return the first insn
3183 next_insn (rtx insn
)
3187 insn
= NEXT_INSN (insn
);
3188 if (insn
&& NONJUMP_INSN_P (insn
)
3189 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3190 insn
= XVECEXP (PATTERN (insn
), 0, 0);
3196 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3200 previous_insn (rtx insn
)
3204 insn
= PREV_INSN (insn
);
3205 if (insn
&& NONJUMP_INSN_P (insn
)
3206 && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3207 insn
= XVECEXP (PATTERN (insn
), 0, XVECLEN (PATTERN (insn
), 0) - 1);
3213 /* Return the next insn after INSN that is not a NOTE. This routine does not
3214 look inside SEQUENCEs. */
3217 next_nonnote_insn (rtx insn
)
3221 insn
= NEXT_INSN (insn
);
3222 if (insn
== 0 || !NOTE_P (insn
))
3229 /* Return the next insn after INSN that is not a NOTE, but stop the
3230 search before we enter another basic block. This routine does not
3231 look inside SEQUENCEs. */
3234 next_nonnote_insn_bb (rtx insn
)
3238 insn
= NEXT_INSN (insn
);
3239 if (insn
== 0 || !NOTE_P (insn
))
3241 if (NOTE_INSN_BASIC_BLOCK_P (insn
))
3248 /* Return the previous insn before INSN that is not a NOTE. This routine does
3249 not look inside SEQUENCEs. */
3252 prev_nonnote_insn (rtx insn
)
3256 insn
= PREV_INSN (insn
);
3257 if (insn
== 0 || !NOTE_P (insn
))
3264 /* Return the previous insn before INSN that is not a NOTE, but stop
3265 the search before we enter another basic block. This routine does
3266 not look inside SEQUENCEs. */
3269 prev_nonnote_insn_bb (rtx insn
)
3273 insn
= PREV_INSN (insn
);
3274 if (insn
== 0 || !NOTE_P (insn
))
3276 if (NOTE_INSN_BASIC_BLOCK_P (insn
))
3283 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3284 routine does not look inside SEQUENCEs. */
3287 next_nondebug_insn (rtx insn
)
3291 insn
= NEXT_INSN (insn
);
3292 if (insn
== 0 || !DEBUG_INSN_P (insn
))
3299 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3300 This routine does not look inside SEQUENCEs. */
3303 prev_nondebug_insn (rtx insn
)
3307 insn
= PREV_INSN (insn
);
3308 if (insn
== 0 || !DEBUG_INSN_P (insn
))
3315 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3316 This routine does not look inside SEQUENCEs. */
3319 next_nonnote_nondebug_insn (rtx insn
)
3323 insn
= NEXT_INSN (insn
);
3324 if (insn
== 0 || (!NOTE_P (insn
) && !DEBUG_INSN_P (insn
)))
3331 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3332 This routine does not look inside SEQUENCEs. */
3335 prev_nonnote_nondebug_insn (rtx insn
)
3339 insn
= PREV_INSN (insn
);
3340 if (insn
== 0 || (!NOTE_P (insn
) && !DEBUG_INSN_P (insn
)))
3347 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3348 or 0, if there is none. This routine does not look inside
3352 next_real_insn (rtx insn
)
3356 insn
= NEXT_INSN (insn
);
3357 if (insn
== 0 || INSN_P (insn
))
3364 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3365 or 0, if there is none. This routine does not look inside
3369 prev_real_insn (rtx insn
)
3373 insn
= PREV_INSN (insn
);
3374 if (insn
== 0 || INSN_P (insn
))
3381 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3382 This routine does not look inside SEQUENCEs. */
3385 last_call_insn (void)
3389 for (insn
= get_last_insn ();
3390 insn
&& !CALL_P (insn
);
3391 insn
= PREV_INSN (insn
))
3397 /* Find the next insn after INSN that really does something. This routine
3398 does not look inside SEQUENCEs. After reload this also skips over
3399 standalone USE and CLOBBER insn. */
3402 active_insn_p (const_rtx insn
)
3404 return (CALL_P (insn
) || JUMP_P (insn
)
3405 || JUMP_TABLE_DATA_P (insn
) /* FIXME */
3406 || (NONJUMP_INSN_P (insn
)
3407 && (! reload_completed
3408 || (GET_CODE (PATTERN (insn
)) != USE
3409 && GET_CODE (PATTERN (insn
)) != CLOBBER
))));
3413 next_active_insn (rtx insn
)
3417 insn
= NEXT_INSN (insn
);
3418 if (insn
== 0 || active_insn_p (insn
))
3425 /* Find the last insn before INSN that really does something. This routine
3426 does not look inside SEQUENCEs. After reload this also skips over
3427 standalone USE and CLOBBER insn. */
3430 prev_active_insn (rtx insn
)
3434 insn
= PREV_INSN (insn
);
3435 if (insn
== 0 || active_insn_p (insn
))
3443 /* Return the next insn that uses CC0 after INSN, which is assumed to
3444 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3445 applied to the result of this function should yield INSN).
3447 Normally, this is simply the next insn. However, if a REG_CC_USER note
3448 is present, it contains the insn that uses CC0.
3450 Return 0 if we can't find the insn. */
3453 next_cc0_user (rtx insn
)
3455 rtx note
= find_reg_note (insn
, REG_CC_USER
, NULL_RTX
);
3458 return XEXP (note
, 0);
3460 insn
= next_nonnote_insn (insn
);
3461 if (insn
&& NONJUMP_INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3462 insn
= XVECEXP (PATTERN (insn
), 0, 0);
3464 if (insn
&& INSN_P (insn
) && reg_mentioned_p (cc0_rtx
, PATTERN (insn
)))
3470 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3471 note, it is the previous insn. */
3474 prev_cc0_setter (rtx insn
)
3476 rtx note
= find_reg_note (insn
, REG_CC_SETTER
, NULL_RTX
);
3479 return XEXP (note
, 0);
3481 insn
= prev_nonnote_insn (insn
);
3482 gcc_assert (sets_cc0_p (PATTERN (insn
)));
3489 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3492 find_auto_inc (rtx
*xp
, void *data
)
3495 rtx reg
= (rtx
) data
;
3497 if (GET_RTX_CLASS (GET_CODE (x
)) != RTX_AUTOINC
)
3500 switch (GET_CODE (x
))
3508 if (rtx_equal_p (reg
, XEXP (x
, 0)))
3519 /* Increment the label uses for all labels present in rtx. */
3522 mark_label_nuses (rtx x
)
3528 code
= GET_CODE (x
);
3529 if (code
== LABEL_REF
&& LABEL_P (XEXP (x
, 0)))
3530 LABEL_NUSES (XEXP (x
, 0))++;
3532 fmt
= GET_RTX_FORMAT (code
);
3533 for (i
= GET_RTX_LENGTH (code
) - 1; i
>= 0; i
--)
3536 mark_label_nuses (XEXP (x
, i
));
3537 else if (fmt
[i
] == 'E')
3538 for (j
= XVECLEN (x
, i
) - 1; j
>= 0; j
--)
3539 mark_label_nuses (XVECEXP (x
, i
, j
));
3544 /* Try splitting insns that can be split for better scheduling.
3545 PAT is the pattern which might split.
3546 TRIAL is the insn providing PAT.
3547 LAST is nonzero if we should return the last insn of the sequence produced.
3549 If this routine succeeds in splitting, it returns the first or last
3550 replacement insn depending on the value of LAST. Otherwise, it
3551 returns TRIAL. If the insn to be returned can be split, it will be. */
3554 try_split (rtx pat
, rtx trial
, int last
)
3556 rtx before
= PREV_INSN (trial
);
3557 rtx after
= NEXT_INSN (trial
);
3558 int has_barrier
= 0;
3561 rtx insn_last
, insn
;
3563 rtx call_insn
= NULL_RTX
;
3565 /* We're not good at redistributing frame information. */
3566 if (RTX_FRAME_RELATED_P (trial
))
3569 if (any_condjump_p (trial
)
3570 && (note
= find_reg_note (trial
, REG_BR_PROB
, 0)))
3571 split_branch_probability
= XINT (note
, 0);
3572 probability
= split_branch_probability
;
3574 seq
= split_insns (pat
, trial
);
3576 split_branch_probability
= -1;
3578 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3579 We may need to handle this specially. */
3580 if (after
&& BARRIER_P (after
))
3583 after
= NEXT_INSN (after
);
3589 /* Avoid infinite loop if any insn of the result matches
3590 the original pattern. */
3594 if (INSN_P (insn_last
)
3595 && rtx_equal_p (PATTERN (insn_last
), pat
))
3597 if (!NEXT_INSN (insn_last
))
3599 insn_last
= NEXT_INSN (insn_last
);
3602 /* We will be adding the new sequence to the function. The splitters
3603 may have introduced invalid RTL sharing, so unshare the sequence now. */
3604 unshare_all_rtl_in_chain (seq
);
3606 /* Mark labels and copy flags. */
3607 for (insn
= insn_last
; insn
; insn
= PREV_INSN (insn
))
3612 CROSSING_JUMP_P (insn
) = CROSSING_JUMP_P (trial
);
3613 mark_jump_label (PATTERN (insn
), insn
, 0);
3615 if (probability
!= -1
3616 && any_condjump_p (insn
)
3617 && !find_reg_note (insn
, REG_BR_PROB
, 0))
3619 /* We can preserve the REG_BR_PROB notes only if exactly
3620 one jump is created, otherwise the machine description
3621 is responsible for this step using
3622 split_branch_probability variable. */
3623 gcc_assert (njumps
== 1);
3624 add_int_reg_note (insn
, REG_BR_PROB
, probability
);
3629 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3630 in SEQ and copy any additional information across. */
3633 for (insn
= insn_last
; insn
; insn
= PREV_INSN (insn
))
3638 gcc_assert (call_insn
== NULL_RTX
);
3641 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3642 target may have explicitly specified. */
3643 p
= &CALL_INSN_FUNCTION_USAGE (insn
);
3646 *p
= CALL_INSN_FUNCTION_USAGE (trial
);
3648 /* If the old call was a sibling call, the new one must
3650 SIBLING_CALL_P (insn
) = SIBLING_CALL_P (trial
);
3652 /* If the new call is the last instruction in the sequence,
3653 it will effectively replace the old call in-situ. Otherwise
3654 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3655 so that it comes immediately after the new call. */
3656 if (NEXT_INSN (insn
))
3657 for (next
= NEXT_INSN (trial
);
3658 next
&& NOTE_P (next
);
3659 next
= NEXT_INSN (next
))
3660 if (NOTE_KIND (next
) == NOTE_INSN_CALL_ARG_LOCATION
)
3663 add_insn_after (next
, insn
, NULL
);
3669 /* Copy notes, particularly those related to the CFG. */
3670 for (note
= REG_NOTES (trial
); note
; note
= XEXP (note
, 1))
3672 switch (REG_NOTE_KIND (note
))
3675 copy_reg_eh_region_note_backward (note
, insn_last
, NULL
);
3681 for (insn
= insn_last
; insn
!= NULL_RTX
; insn
= PREV_INSN (insn
))
3684 add_reg_note (insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3688 case REG_NON_LOCAL_GOTO
:
3689 for (insn
= insn_last
; insn
!= NULL_RTX
; insn
= PREV_INSN (insn
))
3692 add_reg_note (insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3698 for (insn
= insn_last
; insn
!= NULL_RTX
; insn
= PREV_INSN (insn
))
3700 rtx reg
= XEXP (note
, 0);
3701 if (!FIND_REG_INC_NOTE (insn
, reg
)
3702 && for_each_rtx (&PATTERN (insn
), find_auto_inc
, reg
) > 0)
3703 add_reg_note (insn
, REG_INC
, reg
);
3709 fixup_args_size_notes (NULL_RTX
, insn_last
, INTVAL (XEXP (note
, 0)));
3713 gcc_assert (call_insn
!= NULL_RTX
);
3714 add_reg_note (call_insn
, REG_NOTE_KIND (note
), XEXP (note
, 0));
3722 /* If there are LABELS inside the split insns increment the
3723 usage count so we don't delete the label. */
3727 while (insn
!= NULL_RTX
)
3729 /* JUMP_P insns have already been "marked" above. */
3730 if (NONJUMP_INSN_P (insn
))
3731 mark_label_nuses (PATTERN (insn
));
3733 insn
= PREV_INSN (insn
);
3737 tem
= emit_insn_after_setloc (seq
, trial
, INSN_LOCATION (trial
));
3739 delete_insn (trial
);
3741 emit_barrier_after (tem
);
3743 /* Recursively call try_split for each new insn created; by the
3744 time control returns here that insn will be fully split, so
3745 set LAST and continue from the insn after the one returned.
3746 We can't use next_active_insn here since AFTER may be a note.
3747 Ignore deleted insns, which can be occur if not optimizing. */
3748 for (tem
= NEXT_INSN (before
); tem
!= after
; tem
= NEXT_INSN (tem
))
3749 if (! INSN_DELETED_P (tem
) && INSN_P (tem
))
3750 tem
= try_split (PATTERN (tem
), tem
, 1);
3752 /* Return either the first or the last insn, depending on which was
3755 ? (after
? PREV_INSN (after
) : get_last_insn ())
3756 : NEXT_INSN (before
);
3759 /* Make and return an INSN rtx, initializing all its slots.
3760 Store PATTERN in the pattern slots. */
3763 make_insn_raw (rtx pattern
)
3767 insn
= rtx_alloc (INSN
);
3769 INSN_UID (insn
) = cur_insn_uid
++;
3770 PATTERN (insn
) = pattern
;
3771 INSN_CODE (insn
) = -1;
3772 REG_NOTES (insn
) = NULL
;
3773 INSN_LOCATION (insn
) = curr_insn_location ();
3774 BLOCK_FOR_INSN (insn
) = NULL
;
3776 #ifdef ENABLE_RTL_CHECKING
3779 && (returnjump_p (insn
)
3780 || (GET_CODE (insn
) == SET
3781 && SET_DEST (insn
) == pc_rtx
)))
3783 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3791 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3794 make_debug_insn_raw (rtx pattern
)
3798 insn
= rtx_alloc (DEBUG_INSN
);
3799 INSN_UID (insn
) = cur_debug_insn_uid
++;
3800 if (cur_debug_insn_uid
> MIN_NONDEBUG_INSN_UID
)
3801 INSN_UID (insn
) = cur_insn_uid
++;
3803 PATTERN (insn
) = pattern
;
3804 INSN_CODE (insn
) = -1;
3805 REG_NOTES (insn
) = NULL
;
3806 INSN_LOCATION (insn
) = curr_insn_location ();
3807 BLOCK_FOR_INSN (insn
) = NULL
;
3812 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3815 make_jump_insn_raw (rtx pattern
)
3819 insn
= rtx_alloc (JUMP_INSN
);
3820 INSN_UID (insn
) = cur_insn_uid
++;
3822 PATTERN (insn
) = pattern
;
3823 INSN_CODE (insn
) = -1;
3824 REG_NOTES (insn
) = NULL
;
3825 JUMP_LABEL (insn
) = NULL
;
3826 INSN_LOCATION (insn
) = curr_insn_location ();
3827 BLOCK_FOR_INSN (insn
) = NULL
;
3832 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3835 make_call_insn_raw (rtx pattern
)
3839 insn
= rtx_alloc (CALL_INSN
);
3840 INSN_UID (insn
) = cur_insn_uid
++;
3842 PATTERN (insn
) = pattern
;
3843 INSN_CODE (insn
) = -1;
3844 REG_NOTES (insn
) = NULL
;
3845 CALL_INSN_FUNCTION_USAGE (insn
) = NULL
;
3846 INSN_LOCATION (insn
) = curr_insn_location ();
3847 BLOCK_FOR_INSN (insn
) = NULL
;
3852 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3855 make_note_raw (enum insn_note subtype
)
3857 /* Some notes are never created this way at all. These notes are
3858 only created by patching out insns. */
3859 gcc_assert (subtype
!= NOTE_INSN_DELETED_LABEL
3860 && subtype
!= NOTE_INSN_DELETED_DEBUG_LABEL
);
3862 rtx note
= rtx_alloc (NOTE
);
3863 INSN_UID (note
) = cur_insn_uid
++;
3864 NOTE_KIND (note
) = subtype
;
3865 BLOCK_FOR_INSN (note
) = NULL
;
3866 memset (&NOTE_DATA (note
), 0, sizeof (NOTE_DATA (note
)));
3870 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3871 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3872 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3875 link_insn_into_chain (rtx insn
, rtx prev
, rtx next
)
3877 PREV_INSN (insn
) = prev
;
3878 NEXT_INSN (insn
) = next
;
3881 NEXT_INSN (prev
) = insn
;
3882 if (NONJUMP_INSN_P (prev
) && GET_CODE (PATTERN (prev
)) == SEQUENCE
)
3884 rtx sequence
= PATTERN (prev
);
3885 NEXT_INSN (XVECEXP (sequence
, 0, XVECLEN (sequence
, 0) - 1)) = insn
;
3890 PREV_INSN (next
) = insn
;
3891 if (NONJUMP_INSN_P (next
) && GET_CODE (PATTERN (next
)) == SEQUENCE
)
3892 PREV_INSN (XVECEXP (PATTERN (next
), 0, 0)) = insn
;
3895 if (NONJUMP_INSN_P (insn
) && GET_CODE (PATTERN (insn
)) == SEQUENCE
)
3897 rtx sequence
= PATTERN (insn
);
3898 PREV_INSN (XVECEXP (sequence
, 0, 0)) = prev
;
3899 NEXT_INSN (XVECEXP (sequence
, 0, XVECLEN (sequence
, 0) - 1)) = next
;
3903 /* Add INSN to the end of the doubly-linked list.
3904 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3909 rtx prev
= get_last_insn ();
3910 link_insn_into_chain (insn
, prev
, NULL
);
3911 if (NULL
== get_insns ())
3912 set_first_insn (insn
);
3913 set_last_insn (insn
);
3916 /* Add INSN into the doubly-linked list after insn AFTER. */
3919 add_insn_after_nobb (rtx insn
, rtx after
)
3921 rtx next
= NEXT_INSN (after
);
3923 gcc_assert (!optimize
|| !INSN_DELETED_P (after
));
3925 link_insn_into_chain (insn
, after
, next
);
3929 if (get_last_insn () == after
)
3930 set_last_insn (insn
);
3933 struct sequence_stack
*stack
= seq_stack
;
3934 /* Scan all pending sequences too. */
3935 for (; stack
; stack
= stack
->next
)
3936 if (after
== stack
->last
)
3945 /* Add INSN into the doubly-linked list before insn BEFORE. */
3948 add_insn_before_nobb (rtx insn
, rtx before
)
3950 rtx prev
= PREV_INSN (before
);
3952 gcc_assert (!optimize
|| !INSN_DELETED_P (before
));
3954 link_insn_into_chain (insn
, prev
, before
);
3958 if (get_insns () == before
)
3959 set_first_insn (insn
);
3962 struct sequence_stack
*stack
= seq_stack
;
3963 /* Scan all pending sequences too. */
3964 for (; stack
; stack
= stack
->next
)
3965 if (before
== stack
->first
)
3967 stack
->first
= insn
;
3976 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
3977 If BB is NULL, an attempt is made to infer the bb from before.
3979 This and the next function should be the only functions called
3980 to insert an insn once delay slots have been filled since only
3981 they know how to update a SEQUENCE. */
3984 add_insn_after (rtx insn
, rtx after
, basic_block bb
)
3986 add_insn_after_nobb (insn
, after
);
3987 if (!BARRIER_P (after
)
3988 && !BARRIER_P (insn
)
3989 && (bb
= BLOCK_FOR_INSN (after
)))
3991 set_block_for_insn (insn
, bb
);
3993 df_insn_rescan (insn
);
3994 /* Should not happen as first in the BB is always
3995 either NOTE or LABEL. */
3996 if (BB_END (bb
) == after
3997 /* Avoid clobbering of structure when creating new BB. */
3998 && !BARRIER_P (insn
)
3999 && !NOTE_INSN_BASIC_BLOCK_P (insn
))
4004 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
4005 If BB is NULL, an attempt is made to infer the bb from before.
4007 This and the previous function should be the only functions called
4008 to insert an insn once delay slots have been filled since only
4009 they know how to update a SEQUENCE. */
4012 add_insn_before (rtx insn
, rtx before
, basic_block bb
)
4014 add_insn_before_nobb (insn
, before
);
4017 && !BARRIER_P (before
)
4018 && !BARRIER_P (insn
))
4019 bb
= BLOCK_FOR_INSN (before
);
4023 set_block_for_insn (insn
, bb
);
4025 df_insn_rescan (insn
);
4026 /* Should not happen as first in the BB is always either NOTE or
4028 gcc_assert (BB_HEAD (bb
) != insn
4029 /* Avoid clobbering of structure when creating new BB. */
4031 || NOTE_INSN_BASIC_BLOCK_P (insn
));
4035 /* Replace insn with an deleted instruction note. */
4038 set_insn_deleted (rtx insn
)
4041 df_insn_delete (insn
);
4042 PUT_CODE (insn
, NOTE
);
4043 NOTE_KIND (insn
) = NOTE_INSN_DELETED
;
4047 /* Unlink INSN from the insn chain.
4049 This function knows how to handle sequences.
4051 This function does not invalidate data flow information associated with
4052 INSN (i.e. does not call df_insn_delete). That makes this function
4053 usable for only disconnecting an insn from the chain, and re-emit it
4056 To later insert INSN elsewhere in the insn chain via add_insn and
4057 similar functions, PREV_INSN and NEXT_INSN must be nullified by
4058 the caller. Nullifying them here breaks many insn chain walks.
4060 To really delete an insn and related DF information, use delete_insn. */
4063 remove_insn (rtx insn
)
4065 rtx next
= NEXT_INSN (insn
);
4066 rtx prev
= PREV_INSN (insn
);
4071 NEXT_INSN (prev
) = next
;
4072 if (NONJUMP_INSN_P (prev
) && GET_CODE (PATTERN (prev
)) == SEQUENCE
)
4074 rtx sequence
= PATTERN (prev
);
4075 NEXT_INSN (XVECEXP (sequence
, 0, XVECLEN (sequence
, 0) - 1)) = next
;
4078 else if (get_insns () == insn
)
4081 PREV_INSN (next
) = NULL
;
4082 set_first_insn (next
);
4086 struct sequence_stack
*stack
= seq_stack
;
4087 /* Scan all pending sequences too. */
4088 for (; stack
; stack
= stack
->next
)
4089 if (insn
== stack
->first
)
4091 stack
->first
= next
;
4100 PREV_INSN (next
) = prev
;
4101 if (NONJUMP_INSN_P (next
) && GET_CODE (PATTERN (next
)) == SEQUENCE
)
4102 PREV_INSN (XVECEXP (PATTERN (next
), 0, 0)) = prev
;
4104 else if (get_last_insn () == insn
)
4105 set_last_insn (prev
);
4108 struct sequence_stack
*stack
= seq_stack
;
4109 /* Scan all pending sequences too. */
4110 for (; stack
; stack
= stack
->next
)
4111 if (insn
== stack
->last
)
4120 /* Fix up basic block boundaries, if necessary. */
4121 if (!BARRIER_P (insn
)
4122 && (bb
= BLOCK_FOR_INSN (insn
)))
4124 if (BB_HEAD (bb
) == insn
)
4126 /* Never ever delete the basic block note without deleting whole
4128 gcc_assert (!NOTE_P (insn
));
4129 BB_HEAD (bb
) = next
;
4131 if (BB_END (bb
) == insn
)
4136 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4139 add_function_usage_to (rtx call_insn
, rtx call_fusage
)
4141 gcc_assert (call_insn
&& CALL_P (call_insn
));
4143 /* Put the register usage information on the CALL. If there is already
4144 some usage information, put ours at the end. */
4145 if (CALL_INSN_FUNCTION_USAGE (call_insn
))
4149 for (link
= CALL_INSN_FUNCTION_USAGE (call_insn
); XEXP (link
, 1) != 0;
4150 link
= XEXP (link
, 1))
4153 XEXP (link
, 1) = call_fusage
;
4156 CALL_INSN_FUNCTION_USAGE (call_insn
) = call_fusage
;
4159 /* Delete all insns made since FROM.
4160 FROM becomes the new last instruction. */
4163 delete_insns_since (rtx from
)
4168 NEXT_INSN (from
) = 0;
4169 set_last_insn (from
);
4172 /* This function is deprecated, please use sequences instead.
4174 Move a consecutive bunch of insns to a different place in the chain.
4175 The insns to be moved are those between FROM and TO.
4176 They are moved to a new position after the insn AFTER.
4177 AFTER must not be FROM or TO or any insn in between.
4179 This function does not know about SEQUENCEs and hence should not be
4180 called after delay-slot filling has been done. */
4183 reorder_insns_nobb (rtx from
, rtx to
, rtx after
)
4185 #ifdef ENABLE_CHECKING
4187 for (x
= from
; x
!= to
; x
= NEXT_INSN (x
))
4188 gcc_assert (after
!= x
);
4189 gcc_assert (after
!= to
);
4192 /* Splice this bunch out of where it is now. */
4193 if (PREV_INSN (from
))
4194 NEXT_INSN (PREV_INSN (from
)) = NEXT_INSN (to
);
4196 PREV_INSN (NEXT_INSN (to
)) = PREV_INSN (from
);
4197 if (get_last_insn () == to
)
4198 set_last_insn (PREV_INSN (from
));
4199 if (get_insns () == from
)
4200 set_first_insn (NEXT_INSN (to
));
4202 /* Make the new neighbors point to it and it to them. */
4203 if (NEXT_INSN (after
))
4204 PREV_INSN (NEXT_INSN (after
)) = to
;
4206 NEXT_INSN (to
) = NEXT_INSN (after
);
4207 PREV_INSN (from
) = after
;
4208 NEXT_INSN (after
) = from
;
4209 if (after
== get_last_insn ())
4213 /* Same as function above, but take care to update BB boundaries. */
4215 reorder_insns (rtx from
, rtx to
, rtx after
)
4217 rtx prev
= PREV_INSN (from
);
4218 basic_block bb
, bb2
;
4220 reorder_insns_nobb (from
, to
, after
);
4222 if (!BARRIER_P (after
)
4223 && (bb
= BLOCK_FOR_INSN (after
)))
4226 df_set_bb_dirty (bb
);
4228 if (!BARRIER_P (from
)
4229 && (bb2
= BLOCK_FOR_INSN (from
)))
4231 if (BB_END (bb2
) == to
)
4232 BB_END (bb2
) = prev
;
4233 df_set_bb_dirty (bb2
);
4236 if (BB_END (bb
) == after
)
4239 for (x
= from
; x
!= NEXT_INSN (to
); x
= NEXT_INSN (x
))
4241 df_insn_change_bb (x
, bb
);
4246 /* Emit insn(s) of given code and pattern
4247 at a specified place within the doubly-linked list.
4249 All of the emit_foo global entry points accept an object
4250 X which is either an insn list or a PATTERN of a single
4253 There are thus a few canonical ways to generate code and
4254 emit it at a specific place in the instruction stream. For
4255 example, consider the instruction named SPOT and the fact that
4256 we would like to emit some instructions before SPOT. We might
4260 ... emit the new instructions ...
4261 insns_head = get_insns ();
4264 emit_insn_before (insns_head, SPOT);
4266 It used to be common to generate SEQUENCE rtl instead, but that
4267 is a relic of the past which no longer occurs. The reason is that
4268 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4269 generated would almost certainly die right after it was created. */
4272 emit_pattern_before_noloc (rtx x
, rtx before
, rtx last
, basic_block bb
,
4273 rtx (*make_raw
) (rtx
))
4277 gcc_assert (before
);
4282 switch (GET_CODE (x
))
4294 rtx next
= NEXT_INSN (insn
);
4295 add_insn_before (insn
, before
, bb
);
4301 #ifdef ENABLE_RTL_CHECKING
4308 last
= (*make_raw
) (x
);
4309 add_insn_before (last
, before
, bb
);
4316 /* Make X be output before the instruction BEFORE. */
4319 emit_insn_before_noloc (rtx x
, rtx before
, basic_block bb
)
4321 return emit_pattern_before_noloc (x
, before
, before
, bb
, make_insn_raw
);
4324 /* Make an instruction with body X and code JUMP_INSN
4325 and output it before the instruction BEFORE. */
4328 emit_jump_insn_before_noloc (rtx x
, rtx before
)
4330 return emit_pattern_before_noloc (x
, before
, NULL_RTX
, NULL
,
4331 make_jump_insn_raw
);
4334 /* Make an instruction with body X and code CALL_INSN
4335 and output it before the instruction BEFORE. */
4338 emit_call_insn_before_noloc (rtx x
, rtx before
)
4340 return emit_pattern_before_noloc (x
, before
, NULL_RTX
, NULL
,
4341 make_call_insn_raw
);
4344 /* Make an instruction with body X and code DEBUG_INSN
4345 and output it before the instruction BEFORE. */
4348 emit_debug_insn_before_noloc (rtx x
, rtx before
)
4350 return emit_pattern_before_noloc (x
, before
, NULL_RTX
, NULL
,
4351 make_debug_insn_raw
);
4354 /* Make an insn of code BARRIER
4355 and output it before the insn BEFORE. */
4358 emit_barrier_before (rtx before
)
4360 rtx insn
= rtx_alloc (BARRIER
);
4362 INSN_UID (insn
) = cur_insn_uid
++;
4364 add_insn_before (insn
, before
, NULL
);
4368 /* Emit the label LABEL before the insn BEFORE. */
4371 emit_label_before (rtx label
, rtx before
)
4373 gcc_checking_assert (INSN_UID (label
) == 0);
4374 INSN_UID (label
) = cur_insn_uid
++;
4375 add_insn_before (label
, before
, NULL
);
4379 /* Helper for emit_insn_after, handles lists of instructions
4383 emit_insn_after_1 (rtx first
, rtx after
, basic_block bb
)
4387 if (!bb
&& !BARRIER_P (after
))
4388 bb
= BLOCK_FOR_INSN (after
);
4392 df_set_bb_dirty (bb
);
4393 for (last
= first
; NEXT_INSN (last
); last
= NEXT_INSN (last
))
4394 if (!BARRIER_P (last
))
4396 set_block_for_insn (last
, bb
);
4397 df_insn_rescan (last
);
4399 if (!BARRIER_P (last
))
4401 set_block_for_insn (last
, bb
);
4402 df_insn_rescan (last
);
4404 if (BB_END (bb
) == after
)
4408 for (last
= first
; NEXT_INSN (last
); last
= NEXT_INSN (last
))
4411 after_after
= NEXT_INSN (after
);
4413 NEXT_INSN (after
) = first
;
4414 PREV_INSN (first
) = after
;
4415 NEXT_INSN (last
) = after_after
;
4417 PREV_INSN (after_after
) = last
;
4419 if (after
== get_last_insn ())
4420 set_last_insn (last
);
4426 emit_pattern_after_noloc (rtx x
, rtx after
, basic_block bb
,
4427 rtx (*make_raw
)(rtx
))
4436 switch (GET_CODE (x
))
4445 last
= emit_insn_after_1 (x
, after
, bb
);
4448 #ifdef ENABLE_RTL_CHECKING
4455 last
= (*make_raw
) (x
);
4456 add_insn_after (last
, after
, bb
);
4463 /* Make X be output after the insn AFTER and set the BB of insn. If
4464 BB is NULL, an attempt is made to infer the BB from AFTER. */
4467 emit_insn_after_noloc (rtx x
, rtx after
, basic_block bb
)
4469 return emit_pattern_after_noloc (x
, after
, bb
, make_insn_raw
);
4473 /* Make an insn of code JUMP_INSN with body X
4474 and output it after the insn AFTER. */
4477 emit_jump_insn_after_noloc (rtx x
, rtx after
)
4479 return emit_pattern_after_noloc (x
, after
, NULL
, make_jump_insn_raw
);
4482 /* Make an instruction with body X and code CALL_INSN
4483 and output it after the instruction AFTER. */
4486 emit_call_insn_after_noloc (rtx x
, rtx after
)
4488 return emit_pattern_after_noloc (x
, after
, NULL
, make_call_insn_raw
);
4491 /* Make an instruction with body X and code CALL_INSN
4492 and output it after the instruction AFTER. */
4495 emit_debug_insn_after_noloc (rtx x
, rtx after
)
4497 return emit_pattern_after_noloc (x
, after
, NULL
, make_debug_insn_raw
);
4500 /* Make an insn of code BARRIER
4501 and output it after the insn AFTER. */
4504 emit_barrier_after (rtx after
)
4506 rtx insn
= rtx_alloc (BARRIER
);
4508 INSN_UID (insn
) = cur_insn_uid
++;
4510 add_insn_after (insn
, after
, NULL
);
4514 /* Emit the label LABEL after the insn AFTER. */
4517 emit_label_after (rtx label
, rtx after
)
4519 gcc_checking_assert (INSN_UID (label
) == 0);
4520 INSN_UID (label
) = cur_insn_uid
++;
4521 add_insn_after (label
, after
, NULL
);
4525 /* Notes require a bit of special handling: Some notes need to have their
4526 BLOCK_FOR_INSN set, others should never have it set, and some should
4527 have it set or clear depending on the context. */
4529 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4530 that never set BLOCK_FOR_INSN on NOTE. BB_BOUNDARY is true if the
4531 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4534 note_outside_basic_block_p (enum insn_note subtype
, bool on_bb_boundary_p
)
4538 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4539 case NOTE_INSN_SWITCH_TEXT_SECTIONS
:
4542 /* Notes for var tracking and EH region markers can appear between or
4543 inside basic blocks. If the caller is emitting on the basic block
4544 boundary, do not set BLOCK_FOR_INSN on the new note. */
4545 case NOTE_INSN_VAR_LOCATION
:
4546 case NOTE_INSN_CALL_ARG_LOCATION
:
4547 case NOTE_INSN_EH_REGION_BEG
:
4548 case NOTE_INSN_EH_REGION_END
:
4549 return on_bb_boundary_p
;
4551 /* Otherwise, BLOCK_FOR_INSN must be set. */
4557 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4560 emit_note_after (enum insn_note subtype
, rtx after
)
4562 rtx note
= make_note_raw (subtype
);
4563 basic_block bb
= BARRIER_P (after
) ? NULL
: BLOCK_FOR_INSN (after
);
4564 bool on_bb_boundary_p
= (bb
!= NULL
&& BB_END (bb
) == after
);
4566 if (note_outside_basic_block_p (subtype
, on_bb_boundary_p
))
4567 add_insn_after_nobb (note
, after
);
4569 add_insn_after (note
, after
, bb
);
4573 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4576 emit_note_before (enum insn_note subtype
, rtx before
)
4578 rtx note
= make_note_raw (subtype
);
4579 basic_block bb
= BARRIER_P (before
) ? NULL
: BLOCK_FOR_INSN (before
);
4580 bool on_bb_boundary_p
= (bb
!= NULL
&& BB_HEAD (bb
) == before
);
4582 if (note_outside_basic_block_p (subtype
, on_bb_boundary_p
))
4583 add_insn_before_nobb (note
, before
);
4585 add_insn_before (note
, before
, bb
);
4589 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4590 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4593 emit_pattern_after_setloc (rtx pattern
, rtx after
, int loc
,
4594 rtx (*make_raw
) (rtx
))
4596 rtx last
= emit_pattern_after_noloc (pattern
, after
, NULL
, make_raw
);
4598 if (pattern
== NULL_RTX
|| !loc
)
4601 after
= NEXT_INSN (after
);
4604 if (active_insn_p (after
) && !INSN_LOCATION (after
))
4605 INSN_LOCATION (after
) = loc
;
4608 after
= NEXT_INSN (after
);
4613 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4614 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4618 emit_pattern_after (rtx pattern
, rtx after
, bool skip_debug_insns
,
4619 rtx (*make_raw
) (rtx
))
4623 if (skip_debug_insns
)
4624 while (DEBUG_INSN_P (prev
))
4625 prev
= PREV_INSN (prev
);
4628 return emit_pattern_after_setloc (pattern
, after
, INSN_LOCATION (prev
),
4631 return emit_pattern_after_noloc (pattern
, after
, NULL
, make_raw
);
/* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx
emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
                            rtx (*make_raw) (rtx))
{
  rtx first = PREV_INSN (before);
  rtx last = emit_pattern_before_noloc (pattern, before,
                                        insnp ? before : NULL_RTX,
                                        NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATION (first))
        INSN_LOCATION (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}
/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx
emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
                     bool insnp, rtx (*make_raw) (rtx))
{
  rtx next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
                                       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
                                      insnp ? before : NULL_RTX,
                                      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
                                     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_jump_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_jump_insn_raw);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
                              make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
                                     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATION according to BEFORE.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, false, false,
                              make_debug_insn_raw);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case JUMP_TABLE_DATA:
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}
/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
    case JUMP_TABLE_DATA:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  gcc_checking_assert (INSN_UID (label) == 0);
  INSN_UID (label) = cur_insn_uid++;
  add_insn (label);
  return label;
}
/* Make an insn of code JUMP_TABLE_DATA
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_table_data (rtx table)
{
  rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
  INSN_UID (jump_table_data) = cur_insn_uid++;
  PATTERN (jump_table_data) = table;
  BLOCK_FOR_INSN (jump_table_data) = NULL;
  add_insn (jump_table_data);
  return jump_table_data;
}
/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
  rtx note = make_note_raw (kind);
  NOTE_DATA (note) = NOTE_DATA (orig);
  add_insn (note);
  return note;
}
/* Make an insn of code NOTE of type KIND
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note = make_note_raw (kind);
  add_insn (note);
  return note;
}
/* Emit a clobber of lvalue X.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

/* Return a sequence of insns to clobber lvalue X.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Emit a use of rvalue X.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
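/* Editorial example of the emit/gen pairing above (`reg' is a
   hypothetical caller value): emit_use (reg) adds a USE directly to the
   current insn chain, whereas

        rtx seq = gen_use (reg);

   builds the same USE in a detached sequence, suitable for handing to
   emit_insn_before or emit_insn_after later.  */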
/* Notes like REG_EQUAL and REG_EQUIV refer to a set in an instruction.
   Return the set in INSN that such notes describe, or NULL if the notes
   have no meaning for INSN.  */

static rtx
set_for_reg_notes (rtx insn)
{
  rtx pat, reg;

  if (!INSN_P (insn))
    return NULL_RTX;

  pat = PATTERN (insn);
  if (GET_CODE (pat) == PARALLEL)
    {
      /* We do not use single_set because that ignores SETs of unused
         registers.  REG_EQUAL and REG_EQUIV notes really do require the
         PARALLEL to have a single SET.  */
      if (multiple_sets (insn))
        return NULL_RTX;
      pat = XVECEXP (pat, 0, 0);
    }

  if (GET_CODE (pat) != SET)
    return NULL_RTX;

  reg = SET_DEST (pat);

  /* Notes apply to the contents of a STRICT_LOW_PART.  */
  if (GET_CODE (reg) == STRICT_LOW_PART)
    reg = XEXP (reg, 0);

  /* Check that we have a register.  */
  if (!(REG_P (reg) || GET_CODE (reg) == SUBREG))
    return NULL_RTX;

  return pat;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      if (!set_for_reg_notes (insn))
        return NULL_RTX;

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;
      break;

    default:
      break;
    }

  if (note)
    XEXP (note, 0) = datum;
  else
    {
      add_reg_note (insn, kind, datum);
      note = REG_NOTES (insn);
    }

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return note;
}
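/* Editorial usage sketch (hypothetical caller values): after emitting a
   multi-insn expansion whose final insn computes DEST = SRC1 * SRC2, an
   expander typically records the overall result so later passes can
   simplify it:

        set_unique_reg_note (last_insn, REG_EQUAL,
                             gen_rtx_MULT (mode, src1, src2));

   Any pre-existing REG_EQUAL note on LAST_INSN is overwritten rather
   than duplicated.  */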
/* Like set_unique_reg_note, but don't do anything unless INSN sets DST.  */

rtx
set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
{
  rtx set = set_for_reg_notes (insn);

  if (set && SET_DEST (set) == dst)
    return set_unique_reg_note (insn, kind, datum);
  return NULL_RTX;
}
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (ANY_RETURN_P (x))
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
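/* Editorial sketch of how the dispatch above behaves (hypothetical
   operands): emit (gen_rtx_SET (VOIDmode, pc_rtx, label_ref)) classifies
   the SET of pc as a JUMP_INSN and routes it through emit_jump_insn,
   while emit (gen_rtx_SET (VOIDmode, reg, x)) becomes an ordinary INSN.
   Callers that already know the insn kind usually call emit_insn,
   emit_jump_insn, etc. directly.  */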
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc<sequence_stack> ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
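/* Editorial usage sketch of the sequence API above (hypothetical caller
   values).  The canonical pattern for building insns off to the side and
   splicing them into the main chain later is:

        rtx seq;

        start_sequence ();
        emit_move_insn (temp, src);
        seq = get_insns ();        <-- must be fetched before end_sequence
        end_sequence ();
        emit_insn_before (seq, insn);
*/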
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    CASE_CONST_ANY:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SIMPLE_RETURN:
      return orig;

    case CLOBBER:
      /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
         clobbers or clobbers of hard registers that originated as pseudos.
         This is needed to allow safe register renaming.  */
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
          && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'T':
      case 'u':
      case 'B':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
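/* Editorial usage sketch (hypothetical caller values): unlike copy_rtx,
   copy_insn keeps matching SCRATCHes and ASM_OPERANDS vectors shared
   within one pattern, so duplicating an insn's body for re-emission is
   simply:

        rtx pat = copy_insn (PATTERN (insn));
        emit_insn_after (pat, insn);
*/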
/* Return a copy of INSN that can be used in a SEQUENCE delay slot,
   on the assumption that INSN itself remains in its original place.  */

rtx
copy_delay_slot_insn (rtx insn)
{
  /* Copy INSN with its rtx_code, all its notes, location etc.  */
  insn = copy_rtx (insn);
  INSN_UID (insn) = cur_insn_uid++;
  return insn;
}
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_vec_alloc<rtx> (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
      else if (x == CONSTM1_RTX (inner))
        return CONSTM1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
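/* Editorial example (assumes the target provides V4SImode): a vector whose
   four elements are all const0_rtx collapses to the shared
   CONST0_RTX (V4SImode) rather than allocating a fresh CONST_VECTOR:

        rtvec v = rtvec_alloc (4);
        for (int k = 0; k < 4; k++)
          RTVEC_ELT (v, k) = const0_rtx;
        rtx zero = gen_rtx_CONST_VECTOR (V4SImode, v);
*/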
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_cleared_alloc<mem_attrs> ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
        {
          attrs->size_known_p = true;
          attrs->size = GET_MODE_SIZE (mode);
          if (STRICT_ALIGNMENT)
            attrs->align = GET_MODE_ALIGNMENT (mode);
        }
      mode_mem_attrs[i] = attrs;
    }
}
/* Initialize global machine_mode variables.  */

void
init_derived_machine_modes (void)
{
  byte_mode = VOIDmode;
  word_mode = VOIDmode;

  for (enum machine_mode mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
}
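/* Editorial worked example: on a typical 32-bit target with
   BITS_PER_UNIT == 8, BITS_PER_WORD == 32 and POINTER_SIZE == 32, the
   loop above yields byte_mode == QImode and word_mode == SImode, and
   ptr_mode == SImode.  ptr_mode can differ from Pmode on targets whose
   pointers are extended to a wider register mode.  */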
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_WIDE_INT, CONST_DOUBLE,
     CONST_FIXED, and memory attribute hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

#if TARGET_SUPPORTS_WIDE_INT
  const_wide_int_htab = htab_create_ggc (37, const_wide_int_htab_hash,
                                         const_wide_int_htab_eq, NULL);
#endif
  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  double_mode = mode_for_size (DOUBLE_TYPE_SIZE, MODE_FLOAT, 0);

  real_from_integer (&dconst0, double_mode, 0, SIGNED);
  real_from_integer (&dconst1, double_mode, 1, SIGNED);
  real_from_integer (&dconst2, double_mode, 2, SIGNED);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < 3; i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = MIN_MODE_PARTIAL_INT;
           mode <= MAX_MODE_PARTIAL_INT;
           mode = (enum machine_mode)((int)(mode) + 1))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = MIN_MODE_PARTIAL_INT;
       mode <= MAX_MODE_PARTIAL_INT;
       mode = (enum machine_mode)((int)(mode) + 1))
    const_tiny_rtx[3][(int) mode] = constm1_rtx;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
      const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      FCONST1 (mode).data
        = double_int_one.lshift (GET_MODE_FBIT (mode),
                                 HOST_BITS_PER_DOUBLE_INT,
                                 SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;

  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
}
/* Produce an exact duplicate of insn INSN after AFTER.
   Take care to update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      CROSSING_JUMP_P (new_rtx) = CROSSING_JUMP_P (insn);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_shallow_copy_of_reg_note (new_rtx, link);
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
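/* Editorial usage sketch (hypothetical register number): a target
   expander that must clobber a fixed hard register, say register 0 in
   SImode, can attach the cached rtx to a PARALLEL it is building:

        rtx clob = gen_hard_reg_clobber (SImode, 0);

   Repeated calls with the same mode/regno return the same GC-protected
   rtx, which is why the table above is marked GTY((deletable)).  */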
location_t prologue_location;
location_t epilogue_location;

/* Hold current location information and last location information, so the
   datastructures are built lazily only when some instructions in a given
   place are needed.  */
static location_t curr_location;
6140 insn_locations_init (void)
6142 prologue_location
= epilogue_location
= 0;
6143 curr_location
= UNKNOWN_LOCATION
;
6146 /* At the end of emit stage, clear current location. */
6148 insn_locations_finalize (void)
6150 epilogue_location
= curr_location
;
6151 curr_location
= UNKNOWN_LOCATION
;
6154 /* Set current location. */
6156 set_curr_insn_location (location_t location
)
6158 curr_location
= location
;
6161 /* Get current location. */
6163 curr_insn_location (void)
6165 return curr_location
;
/* Return the lexical scope block that INSN belongs to.  */
tree
insn_scope (const_rtx insn)
{
  return LOCATION_BLOCK (INSN_LOCATION (insn));
}

/* Return line number of the statement that produced this insn.  */
int
insn_line (const_rtx insn)
{
  return LOCATION_LINE (INSN_LOCATION (insn));
}

/* Return source file of the statement that produced this insn.  */
const char *
insn_file (const_rtx insn)
{
  return LOCATION_FILE (INSN_LOCATION (insn));
}

/* Return expanded location of the statement that produced this insn.  */
expanded_location
insn_location (const_rtx insn)
{
  return expand_location (INSN_LOCATION (insn));
}
/* Return true if memory model MODEL requires a pre-operation (release-style)
   barrier or a post-operation (acquire-style) barrier.  While not universal,
   this function matches behavior of several targets.  */

bool
need_atomic_barrier_p (enum memmodel model, bool pre)
{
  switch (model & MEMMODEL_MASK)
    {
    case MEMMODEL_RELAXED:
    case MEMMODEL_CONSUME:
      return false;
    case MEMMODEL_RELEASE:
      return pre;
    case MEMMODEL_ACQUIRE:
      return !pre;
    case MEMMODEL_ACQ_REL:
    case MEMMODEL_SEQ_CST:
      return true;
    default:
      gcc_unreachable ();
    }
}
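/* Editorial usage sketch (hypothetical expander): a target expanding an
   atomic store typically brackets the access with barriers chosen by
   this predicate:

        if (need_atomic_barrier_p (model, true))
          emit_pre_barrier ();
        emit_move_insn (mem, val);
        if (need_atomic_barrier_p (model, false))
          emit_post_barrier ();

   where emit_pre_barrier/emit_post_barrier stand in for whatever the
   target uses (e.g. expanding its memory_barrier pattern).  */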
#include "gt-emit-rtl.h"