/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "basic-block.h"
#include "ggc.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "target.h"
#include "tree-flow.h"
struct target_rtl default_target_rtl;
struct target_rtl *this_target_rtl = &default_target_rtl;

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */
/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is not
   able to deal with a length attribute nested in top level structures.  */

rtx * regno_reg_rtx;
/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;
#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)
static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);
/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}
/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */

static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}
/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...) */

static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}
/* Returns nonzero if the value represented by X (really a ...)
   is the same as that represented by Y (really a ...).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset_known_p ? p->offset : 0) * 50000)
	  ^ ((p->size_known_p ? p->size : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}
/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}
/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}
/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   MEM of mode MODE.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this point.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}
/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode)
     < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
	the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
	from copies of the sign bit, and sign of i0 and i1 are the same), then
	we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}
/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}
/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}
/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}
/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}
/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align, and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}
/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}
/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}
/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}
/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}
/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}
/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

static void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}
/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}
/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}
/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}
/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}
/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}
/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
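
/* Illustrative sketch (hypothetical example function): taking the
   low-order QImode piece of a constant.  For CONST_INTs the inner mode is
   made up as a HOST_WIDE_INT-sized integer mode, and the result is the
   numerically low byte, here 0x34, again as a shared CONST_INT.  */

static void ATTRIBUTE_UNUSED
example_gen_lowpart_common (void)
{
  rtx low = gen_lowpart_common (QImode, GEN_INT (0x1234));
  gcc_assert (low == GEN_INT (0x34));
}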
rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}
/* Like gen_highpart, but accept mode of EXP operand in case EXP can
   be VOIDmode constant.  */

rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}
/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */

unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
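
/* Illustrative sketch (hypothetical example function): for an SImode
   piece of a DImode value, the low and high parts sit at opposite ends
   of the wider value, so whatever the target's endianness the two
   offsets always add up to the size difference.  */

static void ATTRIBUTE_UNUSED
example_subreg_offsets (void)
{
  unsigned int lo = subreg_lowpart_offset (SImode, DImode);
  unsigned int hi = subreg_highpart_offset (SImode, DImode);

  gcc_assert (lo + hi
	      == (unsigned int) (GET_MODE_SIZE (DImode)
				 - GET_MODE_SIZE (SImode)));
}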
/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}
/* Return true if X is a paradoxical subreg, false otherwise.  */

bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.
 */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
		 enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}
/* Returns 1 if both MEM_EXPRs can be considered equal
   and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}
/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
     if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	 || (MAX (MEM_ALIGN (mem),
		  get_object_alignment (MEM_EXPR (mem), align))
	     < align))
       return -1;
     else
       return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
  else if (TREE_CODE (t) == MEM_REF)
    {
      tree op0 = TREE_OPERAND (t, 0);
      if (TREE_CODE (op0) == ADDR_EXPR
	  && (DECL_P (TREE_OPERAND (op0, 0))
	      || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
	{
	  if (DECL_P (TREE_OPERAND (op0, 0)))
	    attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
	  else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
	    {
	      attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
#ifdef CONSTANT_ALIGNMENT
	      attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
						attrs.align);
#endif
	    }
	  if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
	    {
	      unsigned HOST_WIDE_INT ioff
		= TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
	      unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
	      attrs.align = MIN (aoff, attrs.align);
	    }
	}
      else
	/* ??? This isn't fully correct, we can't set the alignment from the
	   type in all cases.  */
	attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
    }
  else if (TREE_CODE (t) == TARGET_MEM_REF)
    /* ??? This isn't fully correct, we can't set the alignment from the
       type in all cases.  */
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    {
      attrs.size_known_p = true;
      attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
    }

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base && DECL_P (base)
	  && TREE_READONLY (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	  && !TREE_THIS_VOLATILE (base))
	MEM_READONLY_P (ref) = 1;

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
	    {
	      attrs.size_known_p = true;
	      attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
	    }
	  else
	    attrs.size_known_p = false;
	  attrs.align = DECL_ALIGN (t);
	  align_computed = true;
	}

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
	{
	  attrs.align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
#endif
	  align_computed = true;
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      attrs.expr = t2;
	      attrs.offset_known_p = false;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  attrs.align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
		    attrs.align = aoff;
		  align_computed = true;
		  attrs.offset_known_p = true;
		  attrs.offset = ioff;
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      attrs.expr = t2;
	      attrs.offset_known_p = false;
	      if (host_integerp (off_tree, 1))
		{
		  attrs.offset_known_p = true;
		  attrs.offset = tree_low_cst (off_tree, 1);
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      if (!align_computed && !INDIRECT_REF_P (t))
	{
	  unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
	  attrs.align = MAX (attrs.align, obj_align);
	}
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
	attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = TYPE_ADDR_SPACE (type);
  set_mem_attrs (ref, &attrs);

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}
/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}
/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}
/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}
/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}
/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}
/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
/* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
   way we are changing MEMREF, so we only preserve the alias set.  */

rtx
change_address (rtx memref, enum machine_mode mode, rtx addr)
{
  rtx new_rtx = change_address_1 (memref, mode, addr, 1);
  enum machine_mode mmode = GET_MODE (new_rtx);
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  defattrs = mode_mem_attrs[(int) mmode];
  attrs.expr = NULL_TREE;
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = defattrs->align;

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    {
      if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
	return new_rtx;

      new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
      MEM_COPY_ATTRIBUTES (new_rtx, memref);
    }

  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.
   If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
   and the caller is responsible for adjusting MEMREF's base register.  */

rtx
adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
		  int validate, int adjust)
{
  rtx addr = XEXP (memref, 0);
  rtx new_rtx;
  enum machine_mode address_mode;
  int pbits;
  struct mem_attrs attrs, *defattrs;
  unsigned HOST_WIDE_INT max_align;

  attrs = *get_mem_attrs (memref);

  /* If there are no changes, just return the original memory reference.  */
  if (mode == GET_MODE (memref) && !offset
      && (!validate || memory_address_addr_space_p (mode, addr,
						    attrs.addrspace)))
    return memref;

  /* ??? Prefer to create garbage instead of creating shared rtl.
     This may happen even if offset is nonzero -- consider
     (plus (plus reg reg) const_int) -- so do this always.  */
  addr = copy_rtx (addr);

  if (adjust)
    {
      /* Convert a possibly large offset to a signed value within the
	 range of the target address space.  */
      address_mode = targetm.addr_space.address_mode (attrs.addrspace);
      pbits = GET_MODE_BITSIZE (address_mode);
      if (HOST_BITS_PER_WIDE_INT > pbits)
	{
	  int shift = HOST_BITS_PER_WIDE_INT - pbits;
	  offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
		    >> shift);
	}

      /* If MEMREF is a LO_SUM and the offset is within the alignment of the
	 object, we can merge it into the LO_SUM.  */
      if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
	  && offset >= 0
	  && (unsigned HOST_WIDE_INT) offset
	     < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
	addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
			       plus_constant (XEXP (addr, 1), offset));
      else
	addr = plus_constant (addr, offset);
    }

  new_rtx = change_address_1 (memref, mode, addr, validate);

  /* If the address is a REG, change_address_1 rightfully returns memref,
     but this would destroy memref's MEM_ATTRS.  */
  if (new_rtx == memref && offset != 0)
    new_rtx = copy_rtx (new_rtx);

  /* Compute the new values of the memory attributes due to this adjustment.
     We add the offsets and update the alignment.  */
  if (attrs.offset_known_p)
    attrs.offset += offset;

  /* Compute the new alignment by taking the MIN of the alignment and the
     lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
     is zero.  */
  if (offset != 0)
    {
      max_align = (offset & -offset) * BITS_PER_UNIT;
      attrs.align = MIN (attrs.align, max_align);
    }

  /* We can compute the size in a number of ways.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  if (defattrs->size_known_p)
    {
      attrs.size_known_p = true;
      attrs.size = defattrs->size;
    }
  else if (attrs.size_known_p)
    attrs.size -= offset;

  set_mem_attrs (new_rtx, &attrs);

  /* At some point, we should validate that this offset is within the object,
     if all the appropriate values are known.  */
  return new_rtx;
}
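/* Illustrative sketch (not from the original sources): the usual entry
   point is the adjust_address macro from expr.h, which calls
   adjust_address_1 with VALIDATE == 1 and ADJUST == 1.  MEM0 is
   hypothetical.

     rtx word1 = adjust_address (mem0, SImode, 4);

   This rereads MEM0 four bytes further on in SImode, with the recorded
   offset, size and alignment attributes updated to match.  */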
/* Return a memory reference like MEMREF, but with its mode changed
   to MODE and its address changed to ADDR, which is assumed to be
   MEMREF offset by OFFSET bytes.  If VALIDATE is
   nonzero, the memory address is forced to be valid.  */

rtx
adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
			     HOST_WIDE_INT offset, int validate)
{
  memref = change_address_1 (memref, VOIDmode, addr, validate);
  return adjust_address_1 (memref, mode, offset, validate, 0);
}
/* Return a memory reference like MEMREF, but whose address is changed by
   adding OFFSET, an RTX, to it.  POW2 is the highest power of two factor
   known to be in OFFSET (possibly 1).  */

rtx
offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
{
  rtx new_rtx, addr = XEXP (memref, 0);
  enum machine_mode address_mode;
  struct mem_attrs attrs, *defattrs;

  attrs = *get_mem_attrs (memref);
  address_mode = targetm.addr_space.address_mode (attrs.addrspace);
  new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);

  /* At this point we don't know _why_ the address is invalid.  It
     could have secondary memory references, multiplies or anything.

     However, if we did go and rearrange things, we can wind up not
     being able to recognize the magic around pic_offset_table_rtx.
     This stuff is fragile, and is yet another example of why it is
     bad to expose PIC machinery too early.  */
  if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
				     attrs.addrspace)
      && GET_CODE (addr) == PLUS
      && XEXP (addr, 0) == pic_offset_table_rtx)
    {
      addr = force_reg (GET_MODE (addr), addr);
      new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
    }

  update_temp_slot_address (XEXP (memref, 0), new_rtx);
  new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  /* Update the alignment to reflect the offset.  Reset the offset, which
     we don't know.  */
  defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
  attrs.offset_known_p = false;
  attrs.size_known_p = defattrs->size_known_p;
  attrs.size = defattrs->size;
  attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
/* Return a memory reference like MEMREF, but with its address changed to
   ADDR.  The caller is asserting that the actual piece of memory pointed
   to is the same, just the form of the address is being changed, such as
   by putting something into a register.  */

rtx
replace_equiv_address (rtx memref, rtx addr)
{
  /* change_address_1 copies the memory attribute structure without change
     and that's exactly what we want here.  */
  update_temp_slot_address (XEXP (memref, 0), addr);
  return change_address_1 (memref, VOIDmode, addr, 1);
}

/* Likewise, but the reference is not required to be valid.  */

rtx
replace_equiv_address_nv (rtx memref, rtx addr)
{
  return change_address_1 (memref, VOIDmode, addr, 0);
}
/* Return a memory reference like MEMREF, but with its mode widened to
   MODE and offset by OFFSET.  This would be used by targets that e.g.
   cannot issue QImode memory operations and have to use SImode memory
   operations plus masking logic.  */

rtx
widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
{
  rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
  struct mem_attrs attrs;
  unsigned int size = GET_MODE_SIZE (mode);

  /* If there are no changes, just return the original memory reference.  */
  if (new_rtx == memref)
    return new_rtx;

  attrs = *get_mem_attrs (new_rtx);

  /* If we don't know what offset we were at within the expression, then
     we can't know if we've overstepped the bounds.  */
  if (! attrs.offset_known_p)
    attrs.expr = NULL_TREE;

  while (attrs.expr)
    {
      if (TREE_CODE (attrs.expr) == COMPONENT_REF)
	{
	  tree field = TREE_OPERAND (attrs.expr, 1);
	  tree offset = component_ref_field_offset (attrs.expr);

	  if (! DECL_SIZE_UNIT (field))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  /* Is the field at least as large as the access?  If so, ok,
	     otherwise strip back to the containing structure.  */
	  if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
	      && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
	      && attrs.offset >= 0)
	    break;

	  if (! host_integerp (offset, 1))
	    {
	      attrs.expr = NULL_TREE;
	      break;
	    }

	  attrs.expr = TREE_OPERAND (attrs.expr, 0);
	  attrs.offset += tree_low_cst (offset, 1);
	  attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
			   / BITS_PER_UNIT);
	}
      /* Similarly for the decl.  */
      else if (DECL_P (attrs.expr)
	       && DECL_SIZE_UNIT (attrs.expr)
	       && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
	       && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
	       && (! attrs.offset_known_p || attrs.offset >= 0))
	break;
      else
	{
	  /* The widened memory access overflows the expression, which means
	     that it could alias another expression.  Zap it.  */
	  attrs.expr = NULL_TREE;
	  break;
	}
    }

  if (! attrs.expr)
    attrs.offset_known_p = false;

  /* The widened memory may alias other stuff, so zap the alias set.  */
  /* ??? Maybe use get_alias_set on any remaining expression.  */
  attrs.alias = 0;
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (new_rtx, &attrs);
  return new_rtx;
}
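/* Illustrative sketch (not from the original sources): a target that
   cannot load QImode directly might widen the access and mask, e.g.

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   BYTE_MEM is hypothetical.  The result covers more bytes than the
   original, which is why the alias set and MEM_EXPR are zapped when
   the widened reference oversteps the underlying object.  */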
/* A fake decl that is used as the MEM_EXPR of spill slots.  */
static GTY(()) tree spill_slot_decl;

tree
get_spill_slot_decl (bool force_build_p)
{
  tree d = spill_slot_decl;
  rtx rd;
  struct mem_attrs attrs;

  if (d || !force_build_p)
    return d;

  d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
		  VAR_DECL, get_identifier ("%sfp"), void_type_node);
  DECL_ARTIFICIAL (d) = 1;
  DECL_IGNORED_P (d) = 1;
  TREE_USED (d) = 1;
  spill_slot_decl = d;

  rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
  MEM_NOTRAP_P (rd) = 1;
  attrs = *mode_mem_attrs[(int) BLKmode];
  attrs.alias = new_alias_set ();
  attrs.expr = d;
  set_mem_attrs (rd, &attrs);
  SET_DECL_RTL (d, rd);

  return d;
}
/* Given MEM, a result from assign_stack_local, fill in the memory
   attributes as appropriate for a register allocator spill slot.
   These slots are not aliasable by other memory.  We arrange for
   them all to use a single MEM_EXPR, so that the aliasing code can
   work properly in the case of shared spill slots.  */

void
set_mem_attrs_for_spill (rtx mem)
{
  struct mem_attrs attrs;
  rtx addr;

  attrs = *get_mem_attrs (mem);
  attrs.expr = get_spill_slot_decl (true);
  attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
  attrs.addrspace = ADDR_SPACE_GENERIC;

  /* We expect the incoming memory to be of the form:
	(mem:MODE (plus (reg sfp) (const_int offset)))
     with perhaps the plus missing for offset = 0.  */
  addr = XEXP (mem, 0);
  attrs.offset_known_p = true;
  attrs.offset = 0;
  if (GET_CODE (addr) == PLUS
      && CONST_INT_P (XEXP (addr, 1)))
    attrs.offset = INTVAL (XEXP (addr, 1));

  set_mem_attrs (mem, &attrs);
  MEM_NOTRAP_P (mem) = 1;
}
/* Return a newly created CODE_LABEL rtx with a unique label number.  */

rtx
gen_label_rtx (void)
{
  return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
			     NULL, label_num++, NULL);
}
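/* Illustrative sketch (not from the original sources): labels made by
   gen_label_rtx are given a position only when emitted.

     rtx label = gen_label_rtx ();
     ... emit code that branches to LABEL ...
     emit_label (label);

   Until emit_label is called, INSN_UID (label) stays 0, which is how
   emit_label detects and ignores a second emission.  */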
/* For procedure integration.  */

/* Install new pointers to the first and last insns in the chain.
   Also, set cur_insn_uid to one higher than the last in use.
   Used for an inline-procedure after copying the insn chain.  */

void
set_new_first_and_last_insn (rtx first, rtx last)
{
  rtx insn;

  set_first_insn (first);
  set_last_insn (last);
  cur_insn_uid = 0;

  if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
    {
      int debug_count = 0;

      cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
      cur_debug_insn_uid = 0;

      for (insn = first; insn; insn = NEXT_INSN (insn))
	if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
	  cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
	else
	  {
	    cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
	    if (DEBUG_INSN_P (insn))
	      debug_count++;
	  }

      if (debug_count)
	cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
      else
	cur_debug_insn_uid++;
    }
  else
    for (insn = first; insn; insn = NEXT_INSN (insn))
      cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));

  cur_insn_uid++;
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure.  This routine should only be called once.  */

static void
unshare_all_rtl_1 (rtx insn)
{
  /* Unshare just about everything else.  */
  unshare_all_rtl_in_chain (insn);

  /* Make sure the addresses of stack slots found outside the insn chain
     (such as, in DECL_RTL of a variable) are not shared
     with the insn chain.

     This special care is necessary when the stack slot MEM does not
     actually appear in the insn chain.  If it does appear, its address
     is unshared from all else at that point.  */
  stack_slot_list = copy_rtx_if_shared (stack_slot_list);
}
/* Go through all the RTL insn bodies and copy any invalid shared
   structure, again.  This is a fairly expensive thing to do so it
   should be done sparingly.  */

void
unshare_all_rtl_again (rtx insn)
{
  rtx p;
  tree decl;

  for (p = insn; p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
      }

  /* Make sure that virtual stack slots are not shared.  */
  set_used_decls (DECL_INITIAL (cfun->decl));

  /* Make sure that virtual parameters are not shared.  */
  for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
    set_used_flags (DECL_RTL (decl));

  reset_used_flags (stack_slot_list);

  unshare_all_rtl_1 (insn);
}

static unsigned int
unshare_all_rtl (void)
{
  unshare_all_rtl_1 (get_insns ());
  return 0;
}

struct rtl_opt_pass pass_unshare_all_rtl =
{
 {
  RTL_PASS,
  "unshare",				/* name */
  NULL,					/* gate */
  unshare_all_rtl,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_NONE,				/* tv_id */
  0,					/* properties_required */
  0,					/* properties_provided */
  0,					/* properties_destroyed */
  0,					/* todo_flags_start */
  TODO_verify_rtl_sharing		/* todo_flags_finish */
 }
};
/* Check that ORIG is not marked when it should not be and mark ORIG as in
   use.  Recursively does the same for subexpressions.  */

static void
verify_rtx_sharing (rtx orig, rtx insn)
{
  rtx x = orig;
  int i;
  enum rtx_code code;
  const char *format_ptr;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SCRATCH:
      return;
      /* SCRATCH must be shared because they represent distinct values.  */
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return;
      break;

    case CONST:
      if (shared_const_p (orig))
	return;
      break;

    case MEM:
      /* A MEM is allowed to be shared if its address is constant.  */
      if (CONSTANT_ADDRESS_P (XEXP (x, 0))
	  || reload_completed || reload_in_progress)
	return;

      break;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */
#ifdef ENABLE_CHECKING
  if (RTX_FLAG (x, used))
    {
      error ("invalid rtl sharing found in the insn");
      debug_rtx (insn);
      error ("shared rtx");
      debug_rtx (x);
      internal_error ("internal consistency failure");
    }
#endif
  gcc_assert (!RTX_FLAG (x, used));

  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.  */

  format_ptr = GET_RTX_FORMAT (code);

  for (i = 0; i < GET_RTX_LENGTH (code); i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  verify_rtx_sharing (XEXP (x, i), insn);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      for (j = 0; j < len; j++)
		{
		  /* We allow sharing of ASM_OPERANDS inside single
		     instruction.  */
		  if (j && GET_CODE (XVECEXP (x, i, j)) == SET
		      && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
			  == ASM_OPERANDS))
		    verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
		  else
		    verify_rtx_sharing (XVECEXP (x, i, j), insn);
		}
	    }
	  break;
	}
    }
  return;
}
/* Go through all the RTL insn bodies and check that there is no unexpected
   sharing in between the subexpressions.  */

DEBUG_FUNCTION void
verify_rtl_sharing (void)
{
  rtx p;

  timevar_push (TV_VERIFY_RTL_SHARING);

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	reset_used_flags (PATTERN (p));
	reset_used_flags (REG_NOTES (p));
	if (GET_CODE (PATTERN (p)) == SEQUENCE)
	  {
	    int i;
	    rtx q, sequence = PATTERN (p);

	    for (i = 0; i < XVECLEN (sequence, 0); i++)
	      {
		q = XVECEXP (sequence, 0, i);
		gcc_assert (INSN_P (q));
		reset_used_flags (PATTERN (q));
		reset_used_flags (REG_NOTES (q));
	      }
	  }
      }

  for (p = get_insns (); p; p = NEXT_INSN (p))
    if (INSN_P (p))
      {
	verify_rtx_sharing (PATTERN (p), p);
	verify_rtx_sharing (REG_NOTES (p), p);
      }

  timevar_pop (TV_VERIFY_RTL_SHARING);
}
/* Go through all the RTL insn bodies and copy any invalid shared structure.
   Assumes the mark bits are cleared at entry.  */

void
unshare_all_rtl_in_chain (rtx insn)
{
  for (; insn; insn = NEXT_INSN (insn))
    if (INSN_P (insn))
      {
	PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
	REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
      }
}
/* Go through all virtual stack slots of a function and mark them as
   shared.  We never replace the DECL_RTLs themselves with a copy,
   but expressions mentioned into a DECL_RTL cannot be shared with
   expressions in the instruction stream.

   Note that reload may convert pseudo registers into memories in-place.
   Pseudo registers are always shared, but MEMs never are.  Thus if we
   reset the used flags on MEMs in the instruction stream, we must set
   them again on MEMs that appear in DECL_RTLs.  */

static void
set_used_decls (tree blk)
{
  tree t;

  /* Mark decls.  */
  for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
    if (DECL_RTL_SET_P (t))
      set_used_flags (DECL_RTL (t));

  /* Now process sub-blocks.  */
  for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
    set_used_decls (t);
}
/* Mark ORIG as in use, and return a copy of it if it was already in use.
   Recursively does the same for subexpressions.  Uses
   copy_rtx_if_shared_1 to reduce stack space.  */

rtx
copy_rtx_if_shared (rtx orig)
{
  copy_rtx_if_shared_1 (&orig);
  return orig;
}
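/* Illustrative sketch (not from the original sources): the standard
   unsharing idiom pairs reset_used_flags with copy_rtx_if_shared, as
   unshare_all_rtl_in_chain does for every insn.

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));

   The first pass clears the used bits; the second marks each subpart
   and copies any part it finds already marked.  */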
/* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
   use.  Recursively does the same for subexpressions.  */

static void
copy_rtx_if_shared_1 (rtx *orig1)
{
  rtx x;
  int i;
  enum rtx_code code;
  rtx *last_ptr;
  const char *format_ptr;
  int copied;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  x = *orig1;

  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case LABEL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
    case SCRATCH:
      /* SCRATCH must be shared because they represent distinct values.  */
      return;
    case CLOBBER:
      if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
	return;
      break;

    case CONST:
      if (shared_const_p (x))
	return;
      break;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  /* This rtx may not be shared.  If it has already been seen,
     replace it with a copy of itself.  */

  copied = 0;
  if (RTX_FLAG (x, used))
    {
      x = shallow_copy_rtx (x);
      copied = 1;
    }
  RTX_FLAG (x, used) = 1;

  /* Now scan the subexpressions recursively.
     We can store any replaced subexpressions directly into X
     since we know X is not shared!  Any vectors in X
     must be copied if X was copied.  */

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);
  last_ptr = NULL;

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (last_ptr)
	    copy_rtx_if_shared_1 (last_ptr);
	  last_ptr = &XEXP (x, i);
	  break;

	case 'E':
	  if (XVEC (x, i) != NULL)
	    {
	      int j;
	      int len = XVECLEN (x, i);

	      /* Copy the vector iff I copied the rtx and the length
		 is nonzero.  */
	      if (copied && len > 0)
		XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);

	      /* Call recursively on all inside the vector.  */
	      for (j = 0; j < len; j++)
		{
		  if (last_ptr)
		    copy_rtx_if_shared_1 (last_ptr);
		  last_ptr = &XVECEXP (x, i, j);
		}
	    }
	  break;
	}
    }
  *orig1 = x;
  if (last_ptr)
    {
      orig1 = last_ptr;
      goto repeat;
    }
  return;
}
/* Set the USED bit in X and its non-shareable subparts to FLAG.  */

static void
mark_used_flags (rtx x, int flag)
{
  int i, j;
  enum rtx_code code;
  const char *format_ptr;
  int length;

  /* Repeat is used to turn tail-recursion into iteration.  */
repeat:
  if (x == 0)
    return;

  code = GET_CODE (x);

  /* These types may be freely shared so we needn't do any resetting
     for them.  */

  switch (code)
    {
    case REG:
    case DEBUG_EXPR:
    case VALUE:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
    case RETURN:
      return;

    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case NOTE:
    case LABEL_REF:
    case BARRIER:
      /* The chain of insns is not being copied.  */
      return;

    default:
      break;
    }

  RTX_FLAG (x, used) = flag;

  format_ptr = GET_RTX_FORMAT (code);
  length = GET_RTX_LENGTH (code);

  for (i = 0; i < length; i++)
    {
      switch (*format_ptr++)
	{
	case 'e':
	  if (i == length - 1)
	    {
	      x = XEXP (x, i);
	      goto repeat;
	    }
	  mark_used_flags (XEXP (x, i), flag);
	  break;

	case 'E':
	  for (j = 0; j < XVECLEN (x, i); j++)
	    mark_used_flags (XVECEXP (x, i, j), flag);
	  break;
	}
    }
}

/* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
reset_used_flags (rtx x)
{
  mark_used_flags (x, 0);
}

/* Set all the USED bits in X to allow copy_rtx_if_shared to be used
   to look for shared sub-parts.  */

void
set_used_flags (rtx x)
{
  mark_used_flags (x, 1);
}
/* Copy X if necessary so that it won't be altered by changes in OTHER.
   Return X or the rtx for the pseudo reg the value of X was copied into.
   OTHER must be valid as a SET_DEST.  */

rtx
make_safe_from (rtx x, rtx other)
{
  while (1)
    switch (GET_CODE (other))
      {
      case SUBREG:
	other = SUBREG_REG (other);
	break;
      case STRICT_LOW_PART:
      case SIGN_EXTEND:
      case ZERO_EXTEND:
	other = XEXP (other, 0);
	break;
      default:
	goto done;
      }
 done:
  if ((MEM_P (other)
       && ! CONSTANT_P (x)
       && !REG_P (x)
       && GET_CODE (x) != SUBREG)
      || (REG_P (other)
	  && (REGNO (other) < FIRST_PSEUDO_REGISTER
	      || reg_mentioned_p (other, x))))
    {
      rtx temp = gen_reg_rtx (GET_MODE (x));
      emit_move_insn (temp, x);
      return temp;
    }
  return x;
}
/* Emission of insns (adding them to the doubly-linked list).  */

/* Return the last insn emitted, even if it is in a sequence now pushed.  */

rtx
get_last_insn_anywhere (void)
{
  struct sequence_stack *stack;
  if (get_last_insn ())
    return get_last_insn ();
  for (stack = seq_stack; stack; stack = stack->next)
    if (stack->last != 0)
      return stack->last;
  return 0;
}
/* Return the first nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_first_nonnote_insn (void)
{
  rtx insn = get_insns ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = next_insn (insn);
	     insn && NOTE_P (insn);
	     insn = next_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0, 0);
	}
    }

  return insn;
}
/* Return the last nonnote insn emitted in current sequence or current
   function.  This routine looks inside SEQUENCEs.  */

rtx
get_last_nonnote_insn (void)
{
  rtx insn = get_last_insn ();

  if (insn)
    {
      if (NOTE_P (insn))
	for (insn = previous_insn (insn);
	     insn && NOTE_P (insn);
	     insn = previous_insn (insn))
	  continue;
      else
	{
	  if (NONJUMP_INSN_P (insn)
	      && GET_CODE (PATTERN (insn)) == SEQUENCE)
	    insn = XVECEXP (PATTERN (insn), 0,
			    XVECLEN (PATTERN (insn), 0) - 1);
	}
    }

  return insn;
}
/* Return the number of actual (non-debug) insns emitted in this
   function.  */

int
get_max_insn_count (void)
{
  int n = cur_insn_uid;

  /* The table size must be stable across -g, to avoid codegen
     differences due to debug insns, and not be affected by
     -fmin-insn-uid, to avoid excessive table size and to simplify
     debugging of -fcompare-debug failures.  */
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    n -= cur_debug_insn_uid;
  else
    n -= MIN_NONDEBUG_INSN_UID;

  return n;
}
/* Return the next insn.  If it is a SEQUENCE, return the first insn
   of the sequence.  */

rtx
next_insn (rtx insn)
{
  if (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, 0);
    }

  return insn;
}

/* Return the previous insn.  If it is a SEQUENCE, return the last insn
   of the sequence.  */

rtx
previous_insn (rtx insn)
{
  if (insn)
    {
      insn = PREV_INSN (insn);
      if (insn && NONJUMP_INSN_P (insn)
	  && GET_CODE (PATTERN (insn)) == SEQUENCE)
	insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
    }

  return insn;
}
/* Return the next insn after INSN that is not a NOTE.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE, but stop the
   search before we enter another basic block.  This routine does not
   look inside SEQUENCEs.  */

rtx
next_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL_RTX;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE, but stop
   the search before we enter another basic block.  This routine does
   not look inside SEQUENCEs.  */

rtx
prev_nonnote_insn_bb (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !NOTE_P (insn))
	break;
      if (NOTE_INSN_BASIC_BLOCK_P (insn))
	return NULL_RTX;
    }

  return insn;
}
/* Return the next insn after INSN that is not a DEBUG_INSN.  This
   routine does not look inside SEQUENCEs.  */

rtx
next_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
prev_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || !DEBUG_INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
next_nonnote_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }

  return insn;
}

/* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
   This routine does not look inside SEQUENCEs.  */

rtx
prev_nonnote_nondebug_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
	break;
    }

  return insn;
}
/* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
next_real_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
   or 0, if there is none.  This routine does not look inside
   SEQUENCEs.  */

rtx
prev_real_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || INSN_P (insn))
	break;
    }

  return insn;
}

/* Return the last CALL_INSN in the current list, or 0 if there is none.
   This routine does not look inside SEQUENCEs.  */

rtx
last_call_insn (void)
{
  rtx insn;

  for (insn = get_last_insn ();
       insn && !CALL_P (insn);
       insn = PREV_INSN (insn))
    ;

  return insn;
}
/* Find the next insn after INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

int
active_insn_p (const_rtx insn)
{
  return (CALL_P (insn) || JUMP_P (insn)
	  || (NONJUMP_INSN_P (insn)
	      && (! reload_completed
		  || (GET_CODE (PATTERN (insn)) != USE
		      && GET_CODE (PATTERN (insn)) != CLOBBER))));
}

rtx
next_active_insn (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}

/* Find the last insn before INSN that really does something.  This routine
   does not look inside SEQUENCEs.  After reload this also skips over
   standalone USE and CLOBBER insn.  */

rtx
prev_active_insn (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || active_insn_p (insn))
	break;
    }

  return insn;
}
/* Return the next CODE_LABEL after the insn INSN, or 0 if there is none.  */

rtx
next_label (rtx insn)
{
  while (insn)
    {
      insn = NEXT_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
	break;
    }

  return insn;
}

/* Return the last CODE_LABEL before the insn INSN, or 0 if there is none.  */

rtx
prev_label (rtx insn)
{
  while (insn)
    {
      insn = PREV_INSN (insn);
      if (insn == 0 || LABEL_P (insn))
	break;
    }

  return insn;
}

/* Return the last label to mark the same position as LABEL.  Return LABEL
   itself if it is null or any return rtx.  */

rtx
skip_consecutive_labels (rtx label)
{
  rtx insn;

  if (label && ANY_RETURN_P (label))
    return label;

  for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
    if (LABEL_P (insn))
      label = insn;

  return label;
}
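/* Illustrative sketch (not from the original sources): a typical scan
   that ignores both notes and debug insns, e.g. when inspecting the
   instructions that will actually be emitted.  PROCESS is a
   hypothetical callback.

     rtx insn;
     for (insn = get_insns (); insn;
	  insn = next_nonnote_nondebug_insn (insn))
       process (insn);
*/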
#ifdef HAVE_cc0
/* INSN uses CC0 and is being moved into a delay slot.  Set up REG_CC_SETTER
   and REG_CC_USER notes so we can find it.  */

void
link_cc0_insns (rtx insn)
{
  rtx user = next_nonnote_insn (insn);

  if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
    user = XVECEXP (PATTERN (user), 0, 0);

  add_reg_note (user, REG_CC_SETTER, insn);
  add_reg_note (insn, REG_CC_USER, user);
}

/* Return the next insn that uses CC0 after INSN, which is assumed to
   set it.  This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
   applied to the result of this function should yield INSN).

   Normally, this is simply the next insn.  However, if a REG_CC_USER note
   is present, it contains the insn that uses CC0.

   Return 0 if we can't find the insn.  */

rtx
next_cc0_user (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = next_nonnote_insn (insn);
  if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
    insn = XVECEXP (PATTERN (insn), 0, 0);

  if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
    return insn;

  return 0;
}

/* Find the insn that set CC0 for INSN.  Unless INSN has a REG_CC_SETTER
   note, it is the previous insn.  */

rtx
prev_cc0_setter (rtx insn)
{
  rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);

  if (note)
    return XEXP (note, 0);

  insn = prev_nonnote_insn (insn);
  gcc_assert (sets_cc0_p (PATTERN (insn)));

  return insn;
}
#endif
#ifdef AUTO_INC_DEC
/* Find a RTX_AUTOINC class rtx which matches DATA.  */

static int
find_auto_inc (rtx *xp, void *data)
{
  rtx x = *xp;
  rtx reg = (rtx) data;

  if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
    return 0;

  switch (GET_CODE (x))
    {
    case PRE_DEC:
    case PRE_INC:
    case POST_DEC:
    case POST_INC:
    case PRE_MODIFY:
    case POST_MODIFY:
      if (rtx_equal_p (reg, XEXP (x, 0)))
	return 1;
      break;

    default:
      gcc_unreachable ();
    }
  return -1;
}
#endif
/* Increment the label uses for all labels present in rtx.  */

static void
mark_label_nuses (rtx x)
{
  enum rtx_code code;
  int i, j;
  const char *fmt;

  code = GET_CODE (x);
  if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
    LABEL_NUSES (XEXP (x, 0))++;

  fmt = GET_RTX_FORMAT (code);
  for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
    {
      if (fmt[i] == 'e')
	mark_label_nuses (XEXP (x, i));
      else if (fmt[i] == 'E')
	for (j = XVECLEN (x, i) - 1; j >= 0; j--)
	  mark_label_nuses (XVECEXP (x, i, j));
    }
}
/* Try splitting insns that can be split for better scheduling.
   PAT is the pattern which might split.
   TRIAL is the insn providing PAT.
   LAST is nonzero if we should return the last insn of the sequence produced.

   If this routine succeeds in splitting, it returns the first or last
   replacement insn depending on the value of LAST.  Otherwise, it
   returns TRIAL.  If the insn to be returned can be split, it will be.  */

rtx
try_split (rtx pat, rtx trial, int last)
{
  rtx before = PREV_INSN (trial);
  rtx after = NEXT_INSN (trial);
  int has_barrier = 0;
  rtx note, seq, tem;
  int probability;
  rtx insn_last, insn;
  int njumps = 0;

  /* We're not good at redistributing frame information.  */
  if (RTX_FRAME_RELATED_P (trial))
    return trial;

  if (any_condjump_p (trial)
      && (note = find_reg_note (trial, REG_BR_PROB, 0)))
    split_branch_probability = INTVAL (XEXP (note, 0));
  probability = split_branch_probability;

  seq = split_insns (pat, trial);

  split_branch_probability = -1;

  /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
     We may need to handle this specially.  */
  if (after && BARRIER_P (after))
    {
      has_barrier = 1;
      after = NEXT_INSN (after);
    }

  if (!seq)
    return trial;

  /* Avoid infinite loop if any insn of the result matches
     the original pattern.  */
  insn_last = seq;
  while (1)
    {
      if (INSN_P (insn_last)
	  && rtx_equal_p (PATTERN (insn_last), pat))
	return trial;
      if (!NEXT_INSN (insn_last))
	break;
      insn_last = NEXT_INSN (insn_last);
    }

  /* We will be adding the new sequence to the function.  The splitters
     may have introduced invalid RTL sharing, so unshare the sequence now.  */
  unshare_all_rtl_in_chain (seq);

  /* Mark labels.  */
  for (insn = insn_last; insn ; insn = PREV_INSN (insn))
    {
      if (JUMP_P (insn))
	{
	  mark_jump_label (PATTERN (insn), insn, 0);
	  njumps++;
	  if (probability != -1
	      && any_condjump_p (insn)
	      && !find_reg_note (insn, REG_BR_PROB, 0))
	    {
	      /* We can preserve the REG_BR_PROB notes only if exactly
		 one jump is created, otherwise the machine description
		 is responsible for this step using
		 split_branch_probability variable.  */
	      gcc_assert (njumps == 1);
	      add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
	    }
	}
    }

  /* If we are splitting a CALL_INSN, look for the CALL_INSN
     in SEQ and copy any additional information across.  */
  if (CALL_P (trial))
    {
      for (insn = insn_last; insn ; insn = PREV_INSN (insn))
	if (CALL_P (insn))
	  {
	    rtx next, *p;

	    /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
	       target may have explicitly specified.  */
	    p = &CALL_INSN_FUNCTION_USAGE (insn);
	    while (*p)
	      p = &XEXP (*p, 1);
	    *p = CALL_INSN_FUNCTION_USAGE (trial);

	    /* If the old call was a sibling call, the new one must
	       be too.  */
	    SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);

	    /* If the new call is the last instruction in the sequence,
	       it will effectively replace the old call in-situ.  Otherwise
	       we must move any following NOTE_INSN_CALL_ARG_LOCATION note
	       so that it comes immediately after the new call.  */
	    if (NEXT_INSN (insn))
	      for (next = NEXT_INSN (trial);
		   next && NOTE_P (next);
		   next = NEXT_INSN (next))
		if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
		  {
		    remove_insn (next);
		    add_insn_after (next, insn, NULL);
		    break;
		  }
	  }
    }

  /* Copy notes, particularly those related to the CFG.  */
  for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
    {
      switch (REG_NOTE_KIND (note))
	{
	case REG_EH_REGION:
	  copy_reg_eh_region_note_backward (note, insn_last, NULL);
	  break;

	case REG_NORETURN:
	case REG_SETJMP:
	case REG_TM:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (CALL_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

	case REG_NON_LOCAL_GOTO:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      if (JUMP_P (insn))
		add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
	    }
	  break;

#ifdef AUTO_INC_DEC
	case REG_INC:
	  for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
	    {
	      rtx reg = XEXP (note, 0);
	      if (!FIND_REG_INC_NOTE (insn, reg)
		  && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
		add_reg_note (insn, REG_INC, reg);
	    }
	  break;
#endif

	case REG_ARGS_SIZE:
	  fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
	  break;

	default:
	  break;
	}
    }

  /* If there are LABELS inside the split insns increment the
     usage count so we don't delete the label.  */
  if (INSN_P (trial))
    {
      insn = insn_last;
      while (insn != NULL_RTX)
	{
	  /* JUMP_P insns have already been "marked" above.  */
	  if (NONJUMP_INSN_P (insn))
	    mark_label_nuses (PATTERN (insn));

	  insn = PREV_INSN (insn);
	}
    }

  tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));

  delete_insn (trial);
  if (has_barrier)
    emit_barrier_after (tem);

  /* Recursively call try_split for each new insn created; by the
     time control returns here that insn will be fully split, so
     set LAST and continue from the insn after the one returned.
     We can't use next_active_insn here since AFTER may be a note.
     Ignore deleted insns, which can occur if not optimizing.  */
  for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
    if (! INSN_DELETED_P (tem) && INSN_P (tem))
      tem = try_split (PATTERN (tem), tem, 1);

  /* Return either the first or the last insn, depending on which was
     requested.  */
  return last
    ? (after ? PREV_INSN (after) : get_last_insn ())
    : NEXT_INSN (before);
}
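/* Illustrative sketch (not from the original sources): passes split an
   insn in place roughly like this, keeping the returned insn so the
   walk can continue after the replacement sequence.

     rtx last = try_split (PATTERN (insn), insn, 1);

   If nothing matched a define_split, LAST is simply INSN again.  */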
/* Make and return an INSN rtx, initializing all its slots.
   Store PATTERN in the pattern slots.  */

rtx
make_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (INSN);

  INSN_UID (insn) = cur_insn_uid++;
  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

#ifdef ENABLE_RTL_CHECKING
  if (insn
      && INSN_P (insn)
      && (returnjump_p (insn)
	  || (GET_CODE (insn) == SET
	      && SET_DEST (insn) == pc_rtx)))
    {
      warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
      debug_rtx (insn);
    }
#endif

  return insn;
}

/* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn.  */

static rtx
make_debug_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (DEBUG_INSN);
  INSN_UID (insn) = cur_debug_insn_uid++;
  if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
    INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a JUMP_INSN instead of an insn.  */

static rtx
make_jump_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (JUMP_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  JUMP_LABEL (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}

/* Like `make_insn_raw' but make a CALL_INSN instead of an insn.  */

static rtx
make_call_insn_raw (rtx pattern)
{
  rtx insn;

  insn = rtx_alloc (CALL_INSN);
  INSN_UID (insn) = cur_insn_uid++;

  PATTERN (insn) = pattern;
  INSN_CODE (insn) = -1;
  REG_NOTES (insn) = NULL;
  CALL_INSN_FUNCTION_USAGE (insn) = NULL;
  INSN_LOCATOR (insn) = curr_insn_locator ();
  BLOCK_FOR_INSN (insn) = NULL;

  return insn;
}
/* Add INSN to the end of the doubly-linked list.
   INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE.  */

void
add_insn (rtx insn)
{
  PREV_INSN (insn) = get_last_insn();
  NEXT_INSN (insn) = 0;

  if (NULL != get_last_insn())
    NEXT_INSN (get_last_insn ()) = insn;

  if (NULL == get_insns ())
    set_first_insn (insn);

  set_last_insn (insn);
}
/* Add INSN into the doubly-linked list after insn AFTER.  This and
   the next should be the only functions called to insert an insn once
   delay slots have been filled since only they know how to update a
   SEQUENCE.  */

void
add_insn_after (rtx insn, rtx after, basic_block bb)
{
  rtx next = NEXT_INSN (after);

  gcc_assert (!optimize || !INSN_DELETED_P (after));

  NEXT_INSN (insn) = next;
  PREV_INSN (insn) = after;

  if (next)
    {
      PREV_INSN (next) = insn;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
    }
  else if (get_last_insn () == after)
    set_last_insn (insn);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (after == stack->last)
	  {
	    stack->last = insn;
	    break;
	  }

      gcc_assert (stack);
    }

  if (!BARRIER_P (after)
      && !BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always
	 either NOTE or LABEL.  */
      if (BB_END (bb) == after
	  /* Avoid clobbering of structure when creating new BB.  */
	  && !BARRIER_P (insn)
	  && !NOTE_INSN_BASIC_BLOCK_P (insn))
	BB_END (bb) = insn;
    }

  NEXT_INSN (after) = insn;
  if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
    {
      rtx sequence = PATTERN (after);
      NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
    }
}
/* Add INSN into the doubly-linked list before insn BEFORE.  This and
   the previous should be the only functions called to insert an insn
   once delay slots have been filled since only they know how to
   update a SEQUENCE.  If BB is NULL, an attempt is made to infer the
   basic block from BEFORE.  */

void
add_insn_before (rtx insn, rtx before, basic_block bb)
{
  rtx prev = PREV_INSN (before);

  gcc_assert (!optimize || !INSN_DELETED_P (before));

  PREV_INSN (insn) = prev;
  NEXT_INSN (insn) = before;

  if (prev)
    {
      NEXT_INSN (prev) = insn;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
	}
    }
  else if (get_insns () == before)
    set_first_insn (insn);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (before == stack->first)
	  {
	    stack->first = insn;
	    break;
	  }

      gcc_assert (stack);
    }

  if (!bb
      && !BARRIER_P (before)
      && !BARRIER_P (insn))
    bb = BLOCK_FOR_INSN (before);

  if (bb)
    {
      set_block_for_insn (insn, bb);
      if (INSN_P (insn))
	df_insn_rescan (insn);
      /* Should not happen as first in the BB is always either NOTE or
	 LABEL.  */
      gcc_assert (BB_HEAD (bb) != insn
		  /* Avoid clobbering of structure when creating new BB.  */
		  || BARRIER_P (insn)
		  || NOTE_INSN_BASIC_BLOCK_P (insn));
    }

  PREV_INSN (before) = insn;
  if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
    PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
}
/* Replace insn with a deleted instruction note.  */

void
set_insn_deleted (rtx insn)
{
  df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
  PUT_CODE (insn, NOTE);
  NOTE_KIND (insn) = NOTE_INSN_DELETED;
}
/* Remove an insn from its doubly-linked list.  This function knows how
   to handle sequences.  */
void
remove_insn (rtx insn)
{
  rtx next = NEXT_INSN (insn);
  rtx prev = PREV_INSN (insn);
  basic_block bb;

  /* Later in the code, the block will be marked dirty.  */
  df_insn_delete (NULL, INSN_UID (insn));

  if (prev)
    {
      NEXT_INSN (prev) = next;
      if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
	{
	  rtx sequence = PATTERN (prev);
	  NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
	}
    }
  else if (get_insns () == insn)
    {
      if (next)
	PREV_INSN (next) = NULL;
      set_first_insn (next);
    }
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->first)
	  {
	    stack->first = next;
	    break;
	  }

      gcc_assert (stack);
    }

  if (next)
    {
      PREV_INSN (next) = prev;
      if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
	PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
    }
  else if (get_last_insn () == insn)
    set_last_insn (prev);
  else
    {
      struct sequence_stack *stack = seq_stack;
      /* Scan all pending sequences too.  */
      for (; stack; stack = stack->next)
	if (insn == stack->last)
	  {
	    stack->last = prev;
	    break;
	  }

      gcc_assert (stack);
    }
  if (!BARRIER_P (insn)
      && (bb = BLOCK_FOR_INSN (insn)))
    {
      if (NONDEBUG_INSN_P (insn))
	df_set_bb_dirty (bb);
      if (BB_HEAD (bb) == insn)
	{
	  /* Never ever delete the basic block note without deleting whole
	     basic block.  */
	  gcc_assert (!NOTE_P (insn));
	  BB_HEAD (bb) = next;
	}
      if (BB_END (bb) == insn)
	BB_END (bb) = prev;
    }
}
/* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN.  */

void
add_function_usage_to (rtx call_insn, rtx call_fusage)
{
  gcc_assert (call_insn && CALL_P (call_insn));

  /* Put the register usage information on the CALL.  If there is already
     some usage information, put ours at the end.  */
  if (CALL_INSN_FUNCTION_USAGE (call_insn))
    {
      rtx link;

      for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
	   link = XEXP (link, 1))
	;

      XEXP (link, 1) = call_fusage;
    }
  else
    CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
}

/* Delete all insns made since FROM.
   FROM becomes the new last instruction.  */

void
delete_insns_since (rtx from)
{
  if (from == 0)
    set_first_insn (0);
  else
    NEXT_INSN (from) = 0;
  set_last_insn (from);
}
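/* Illustrative sketch (not from the original sources): the classic
   rollback idiom built on delete_insns_since, used when emitted code
   turns out to be unusable (cf. the optabs machinery).  FAILED is a
   hypothetical condition.

     rtx last = get_last_insn ();
     ... emit a tentative instruction sequence ...
     if (failed)
       delete_insns_since (last);
*/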
/* This function is deprecated, please use sequences instead.

   Move a consecutive bunch of insns to a different place in the chain.
   The insns to be moved are those between FROM and TO.
   They are moved to a new position after the insn AFTER.
   AFTER must not be FROM or TO or any insn in between.

   This function does not know about SEQUENCEs and hence should not be
   called after delay-slot filling has been done.  */

void
reorder_insns_nobb (rtx from, rtx to, rtx after)
{
#ifdef ENABLE_CHECKING
  rtx x;
  for (x = from; x != to; x = NEXT_INSN (x))
    gcc_assert (after != x);
  gcc_assert (after != to);
#endif

  /* Splice this bunch out of where it is now.  */
  if (PREV_INSN (from))
    NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
  if (NEXT_INSN (to))
    PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
  if (get_last_insn () == to)
    set_last_insn (PREV_INSN (from));
  if (get_insns () == from)
    set_first_insn (NEXT_INSN (to));

  /* Make the new neighbors point to it and it to them.  */
  if (NEXT_INSN (after))
    PREV_INSN (NEXT_INSN (after)) = to;

  NEXT_INSN (to) = NEXT_INSN (after);
  PREV_INSN (from) = after;
  NEXT_INSN (after) = from;
  if (after == get_last_insn())
    set_last_insn (to);
}

/* Same as function above, but take care to update BB boundaries.  */
void
reorder_insns (rtx from, rtx to, rtx after)
{
  rtx prev = PREV_INSN (from);
  basic_block bb, bb2;

  reorder_insns_nobb (from, to, after);

  if (!BARRIER_P (after)
      && (bb = BLOCK_FOR_INSN (after)))
    {
      rtx x;
      df_set_bb_dirty (bb);

      if (!BARRIER_P (from)
	  && (bb2 = BLOCK_FOR_INSN (from)))
	{
	  if (BB_END (bb2) == to)
	    BB_END (bb2) = prev;
	  df_set_bb_dirty (bb2);
	}

      if (BB_END (bb) == after)
	BB_END (bb) = to;

      for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
	if (!BARRIER_P (x))
	  df_insn_change_bb (x, bb);
    }
}
/* Emit insn(s) of given code and pattern
   at a specified place within the doubly-linked list.

   All of the emit_foo global entry points accept an object
   X which is either an insn list or a PATTERN of a single
   instruction.

   There are thus a few canonical ways to generate code and
   emit it at a specific place in the instruction stream.  For
   example, consider the instruction named SPOT and the fact that
   we would like to emit some instructions before SPOT.  We might
   do it like this:

	start_sequence ();
	... emit the new instructions ...
	insns_head = get_insns ();
	end_sequence ();

	emit_insn_before (insns_head, SPOT);

   It used to be common to generate SEQUENCE rtl instead, but that
   is a relic of the past which no longer occurs.  The reason is that
   SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
   generated would almost certainly die right after it was created.  */

static rtx
emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
			   rtx (*make_raw) (rtx))
{
  rtx insn;

  gcc_assert (before);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn_before (insn, before, bb);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_before (last, before, bb);
      break;
    }

  return last;
}
/* Make X be output before the instruction BEFORE.  */

rtx
emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
{
  return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
}

/* Make an instruction with body X and code JUMP_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_jump_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_call_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it before the instruction BEFORE.  */

rtx
emit_debug_insn_before_noloc (rtx x, rtx before)
{
  return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
				    make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it before the insn BEFORE.  */

rtx
emit_barrier_before (rtx before)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_before (insn, before, NULL);
  return insn;
}

/* Emit the label LABEL before the insn BEFORE.  */

rtx
emit_label_before (rtx label, rtx before)
{
  /* This can be called twice for the same label as a result of the
     confusion that follows a syntax error!  So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_before (label, before, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE before the insn BEFORE.  */

rtx
emit_note_before (enum insn_note subtype, rtx before)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));

  add_insn_before (note, before, NULL);
  return note;
}
/* Helper for emit_insn_after, handles lists of instructions
   efficiently.  */

static rtx
emit_insn_after_1 (rtx first, rtx after, basic_block bb)
{
  rtx last;
  rtx after_after;
  if (!bb && !BARRIER_P (after))
    bb = BLOCK_FOR_INSN (after);

  if (bb)
    {
      df_set_bb_dirty (bb);
      for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
	if (!BARRIER_P (last))
	  {
	    set_block_for_insn (last, bb);
	    df_insn_rescan (last);
	  }
      if (!BARRIER_P (last))
	{
	  set_block_for_insn (last, bb);
	  df_insn_rescan (last);
	}
      if (BB_END (bb) == after)
	BB_END (bb) = last;
    }
  else
    for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
      continue;

  after_after = NEXT_INSN (after);

  NEXT_INSN (after) = first;
  PREV_INSN (first) = after;
  NEXT_INSN (last) = after_after;
  if (after_after)
    PREV_INSN (after_after) = last;

  if (after == get_last_insn())
    set_last_insn (last);

  return last;
}

static rtx
emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
			  rtx (*make_raw)(rtx))
{
  rtx last = after;

  gcc_assert (after);

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, bb);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = (*make_raw) (x);
      add_insn_after (last, after, bb);
      break;
    }

  return last;
}
/* Make X be output after the insn AFTER and set the BB of insn.  If
   BB is NULL, an attempt is made to infer the BB from AFTER.  */

rtx
emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
{
  return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
}

/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (enum insn_note subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  add_insn_after (note, after, NULL);
  return note;
}
/* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  */

static rtx
emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
			   rtx (*make_raw) (rtx))
{
  rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
	INSN_LOCATOR (after) = loc;
      if (after == last)
	break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Insert PATTERN after AFTER.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert after
   any DEBUG_INSNs.  */

static rtx
emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
		    rtx (*make_raw) (rtx))
{
  rtx prev = after;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (prev))
      prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
				      make_raw);
  else
    return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
}
/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, true, make_call_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
}
/* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
   MAKE_RAW indicates how to turn PATTERN into a real insn.  INSNP
   indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
   CALL_INSN, etc.  */

static rtx
emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
			    rtx (*make_raw) (rtx))
{
  rtx first = PREV_INSN (before);
  rtx last = emit_pattern_before_noloc (pattern, before,
					insnp ? before : NULL_RTX,
					NULL, make_raw);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
	INSN_LOCATOR (first) = loc;
      if (first == last)
	break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Insert PATTERN before BEFORE.  MAKE_RAW indicates how to turn PATTERN
   into a real insn.  SKIP_DEBUG_INSNS indicates whether to insert
   before any DEBUG_INSNs.  INSNP indicates if PATTERN is meant for an
   INSN as opposed to a JUMP_INSN, CALL_INSN, etc.  */

static rtx
emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
		     bool insnp, rtx (*make_raw) (rtx))
{
  rtx next = before;

  if (skip_debug_insns)
    while (DEBUG_INSN_P (next))
      next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
				       insnp, make_raw);
  else
    return emit_pattern_before_noloc (pattern, before,
				      insnp ? before : NULL_RTX,
				      NULL, make_raw);
}
/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, true,
				     make_insn_raw);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, true, make_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_jump_insn_raw);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_jump_insn_raw);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_call_insn_raw);
}

/* Like emit_call_insn_before_noloc,
   but set insn_locator according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, true, false,
			      make_call_insn_raw);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  return emit_pattern_before_setloc (pattern, before, loc, false,
				     make_debug_insn_raw);
}

/* Like emit_debug_insn_before_noloc,
   but set insn_locator according to BEFORE.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  return emit_pattern_before (pattern, before, false, false,
			      make_debug_insn_raw);
}
/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = get_last_insn();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = get_last_insn();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
	{
	  rtx next = NEXT_INSN (insn);
	  add_insn (insn);
	  last = insn;
	  insn = next;
	}
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}
/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}
4842 /* Emit a copy of note ORIG. */
4845 emit_note_copy (rtx orig
)
4849 note
= rtx_alloc (NOTE
);
4851 INSN_UID (note
) = cur_insn_uid
++;
4852 NOTE_DATA (note
) = NOTE_DATA (orig
);
4853 NOTE_KIND (note
) = NOTE_KIND (orig
);
4854 BLOCK_FOR_INSN (note
) = NULL
;
/* Make an insn of code NOTE or type NOTE_NO
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = kind;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}
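/* Usage sketch (illustrative, not part of the original file): a pass that
   wants a harmless placeholder in the insn stream can write

     rtx note = emit_note (NOTE_INSN_DELETED);

   and later recognize it via NOTE_P (note) and NOTE_KIND (note).  */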
/* Emit a clobber of lvalue X.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}
/* Return a sequence of insns to clobber lvalue X.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Emit a use of rvalue X.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}
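/* Usage sketch (illustrative, not part of the original file): these
   helpers wrap the raw constructors, so

     emit_clobber (gen_rtx_REG (SImode, 0));   [emits (clobber (reg:SI 0))]
     emit_use (gen_rtx_REG (SImode, 0));       [emits (use (reg:SI 0))]

   append the corresponding insns to the current sequence; SImode and
   register number 0 are arbitrary choices here.  */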
/* Return a sequence of insns to use rvalue X.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}
/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
  last_location = -1;
}
/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one * useful * set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      if (note)
        {
          XEXP (note, 0) = datum;
          df_notes_rescan (insn);
          return note;
        }
      break;

    default:
      if (note)
        {
          XEXP (note, 0) = datum;
          return note;
        }
      break;
    }

  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}
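/* Usage sketch (illustrative, not part of the original file): after
   expanding a computation as several insns, callers often record the
   overall value on the final insn, e.g.

     set_unique_reg_note (last_insn, REG_EQUAL,
                          gen_rtx_MULT (SImode, op0, op1));

   where last_insn/op0/op1 stand for the caller's insn and operands.  */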
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}
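/* Usage sketch (illustrative, not part of the original file): emit ()
   lets a caller that builds raw patterns stay agnostic about the insn
   class, e.g.

     emit (gen_rtx_SET (VOIDmode, pc_rtx, label_ref));

   is classified as a jump; label_ref stands for a caller-built
   LABEL_REF, and an unconditional jump is followed by a barrier.  */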
/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc_sequence_stack ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}
/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
    ;

  set_first_insn (first);
  set_last_insn (last);
}
/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}
/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}
/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}
/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}
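/* Usage sketch (illustrative, not part of the original file): the
   canonical way to build a detached insn chain with this machinery is

     rtx seq;
     start_sequence ();
     ... emit insns here ...
     seq = get_insns ();    [must precede end_sequence, as noted above]
     end_sequence ();
     emit_insn_before (seq, where);

   with "where" standing for a caller-chosen insertion point.  */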
/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
  regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
    = virtual_preferred_stack_boundary_rtx;
}
/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;
/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}
/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */
rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}
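/* Usage sketch (illustrative, not part of the original file): duplicating
   an existing insn's body before re-emitting it elsewhere, e.g.

     rtx pat = copy_insn (PATTERN (insn));
     emit_insn_after (pat, after);

   keeps SCRATCH pairing and shared ASM_OPERANDS vectors consistent,
   which a plain copy_rtx would not.  */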
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}
/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}
/* Generate a vector like gen_rtx_raw_CONST_VEC, but use the zero vector when
   all elements are zero, and the one vector when all elements are one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}
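/* Usage sketch (illustrative, not part of the original file): building a
   four-element integer vector constant by hand:

     rtvec v = rtvec_alloc (4);
     int i;
     for (i = 0; i < 4; i++)
       RTVEC_ELT (v, i) = const1_rtx;
     ... gen_rtx_CONST_VECTOR (V4SImode, v) ...   [folds to CONST1_RTX]

   V4SImode is assumed to be supported by the target.  */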
/* Initialise global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;
  enum machine_mode mode;
  mem_attrs *attrs;

  /* Reset register attributes */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
  ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
  cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
  virtual_preferred_stack_boundary_rtx =
    gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;

  for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
    {
      mode = (enum machine_mode) i;
      attrs = ggc_alloc_cleared_mem_attrs ();
      attrs->align = BITS_PER_UNIT;
      attrs->addrspace = ADDR_SPACE_GENERIC;
      if (mode != BLKmode)
        {
          attrs->size_known_p = true;
          attrs->size = GET_MODE_SIZE (mode);
          if (STRICT_ALIGNMENT)
            attrs->align = GET_MODE_ALIGNMENT (mode);
        }
      mode_mem_attrs[i] = attrs;
    }
}
/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
                                    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}
/* Produce exact duplicate of insn INSN after AFTER.
   Care updating of libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}
static GTY((deletable)) rtx
hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}
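/* Usage sketch (illustrative, not part of the original file): a backend
   that needs to say a pattern clobbers the flags can reuse the cache:

     rtx clob = gen_hard_reg_clobber (CCmode, FLAGS_REG);

   FLAGS_REG is an assumed target macro (i386 defines one); repeated
   calls return the same shared CLOBBER rtx.  */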
#include "gt-emit-rtl.h"