/* Emit RTL for the GCC expander.
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
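
/* Illustrative sketch (not part of the original file): a typical insn
   pattern built by those generated routines is a SET whose operands are
   nested rtx's, e.g. for a 32-bit add on a hypothetical target

     (set (reg:SI 60)
          (plus:SI (reg:SI 61) (reg:SI 62)))

   where gen_rtx_fmt_ee builds the two-operand PLUS and SET nodes; the
   pseudo register numbers here are invented for the example.  */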

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "df.h"
#include "params.h"
#include "target.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)

/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into emit_status struct, but gengtype is not able to deal
   with length attribute nested in top level structures.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;

/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx and constm1_rtx.  CONSTM1_RTX
   is set only for MODE_INT and MODE_VECTOR_INT modes.  */

rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record fixed-point constant 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* Standard pieces of rtx, to be substituted directly into things.  */
rtx pc_rtx;
rtx ret_rtx;
rtx simple_return_rtx;
rtx cc0_rtx;

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define first_label_num (crtl->emit.x_first_label_num)

static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently processed by try_split.
   Set to -1 otherwise.  */
int split_branch_probability = -1;

/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}

/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx)x, b = (const_rtx)y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}

/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}

/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset_known_p ? p->offset : 0) * 50000)
	  ^ ((p->size_known_p ? p->size : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Return true if the given memory attributes are equal.  */

static bool
mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
{
  return (p->alias == q->alias
	  && p->offset_known_p == q->offset_known_p
	  && (!p->offset_known_p || p->offset == q->offset)
	  && p->size_known_p == q->size_known_p
	  && (!p->size_known_p || p->size == q->size)
	  && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
}

/* Set MEM's memory attributes so that they are the same as ATTRS.  */

static void
set_mem_attrs (rtx mem, mem_attrs *attrs)
{
  void **slot;

  /* If everything is the default, we can just clear the attributes.  */
  if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
    {
      MEM_ATTRS (mem) = 0;
      return;
    }

  slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, attrs, sizeof (mem_attrs));
    }

  MEM_ATTRS (mem) = (mem_attrs *) *slot;
}

/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}

/* Allocate a new reg_attrs structure for a register with decl DECL and
   offset OFFSET, and insert it into the hash table if one identical to
   it is not already in the table.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}

#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule,
   and to prevent register equivalences from being recognized across
   this insn.  */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif

/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}

/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
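
/* Usage sketch (not part of the original file): the cache above makes
   small constants unique, so pointer equality holds for them:

     rtx a = gen_rtx_CONST_INT (VOIDmode, 0);
     rtx b = GEN_INT (0);
     gcc_assert (a == b && a == const0_rtx);

   for any value in [-MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT].  */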

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
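
/* Illustrative note (not part of the original file): because
   trunc_int_for_mode sign-extends from the precision of MODE,
   gen_int_mode (0xff, QImode) returns constm1_rtx, the shared
   (const_int -1), rather than a fresh (const_int 255), assuming the
   usual 8-bit QImode.  */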

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}

/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */

rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}

/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}

/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = double_int::from_shwi (INTVAL (cst));
  else if (CONST_DOUBLE_AS_INT_P (cst))
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   For values that are larger than HOST_BITS_PER_DOUBLE_INT, the
   implied upper bits are copies of the high bit of i1.  The value
   itself is neither signed nor unsigned.  Do not use this routine for
   non-integer modes; convert to REAL_VALUE_TYPE and use
   CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < HOST_BITS_PER_DOUBLE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If the value of the integer fits into HOST_WIDE_INT anyway
	(i.e., i1 consists only of copies of the sign bit, and the sign
	of i0 and i1 are the same), then we return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
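
/* Worked sketch (not part of the original file), assuming a 64-bit
   HOST_WIDE_INT and 32-bit SImode:

     immed_double_const (5, 0, SImode)     -- case 1: gen_int_mode,
                                              yields (const_int 5)
     immed_double_const (-1, -1, VOIDmode) -- case 2: fits a word,
                                              yields GEN_INT (-1)
     immed_double_const (0, 1, TImode)     -- case 3: a VOIDmode
                                              CONST_DOUBLE holding the
                                              128-bit value 2**64.  */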

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress && !lra_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}
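
/* Illustrative note (not part of the original file): outside of reload,
   gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) therefore returns the shared
   stack_pointer_rtx node itself, so callers may compare such references
   with pointer equality rather than rtx_equal_p.  */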

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (! (isize == osize
	     /* LRA can use subreg to store a floating point value in
		an integer mode.  Although the floating point and the
		integer modes need the same number of hard registers,
		the size of the floating point mode can be less than the
		integer mode.  LRA also uses subregs for a register that
		must be used in different modes in one insn.  */
	     || lra_in_progress))
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be the
     lowpart.  If the register is larger than a word, the subreg must
     be the lowpart of a subword.  A subreg does *not* perform arbitrary
     bit extraction.  Given that we've already checked mode/offset
     alignment, we only have to check subword subregs here.  */
  if (osize < UNITS_PER_WORD
      && ! (lra_in_progress && (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))))
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
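
/* Illustrative sketch (not part of the original file), assuming a 64-bit
   little-endian target where word_mode is DImode:

     (subreg:SI (reg:DI P) 0)  -- valid lowpart of a pseudo;
     (subreg:SI (reg:DI P) 4)  -- rejected for a pseudo: not the lowpart;
     (subreg:SI (reg:DF P) 0)  -- rejected: float modes may not change
                                  size (unless lra_in_progress);
     (subreg:DI (reg:DF P) 0)  -- accepted only by the word_mode escape
                                  hatch flagged ??? above.  */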

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than mode of REG, otherwise paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
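
/* Usage sketch (not part of the original file): rtvecs mostly feed the
   vector operands of other rtx's, e.g. building a two-element PARALLEL
   from existing SETs set1 and set2 (hypothetical variables):

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set1, set2));  */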

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
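
/* Illustrative sketch (not part of the original file): with
   generating_concat_p set, gen_reg_rtx (DCmode) yields something like

     (concat:DC (reg:DF 100) (reg:DF 101))

   (pseudo numbers invented), while gen_reg_rtx (DFmode) yields a single
   fresh (reg:DF 102).  */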

/* Return TRUE if REG's REG_EXPR is a PARM_DECL, FALSE otherwise.  */

bool
reg_is_parm_p (rtx reg)
{
  tree decl;

  gcc_assert (REG_P (reg));
  decl = REG_EXPR (reg);
  return (decl && TREE_CODE (decl) == PARM_DECL);
}

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;
  bool can_be_reg_pointer = true;

  /* Don't call mark_reg_pointer for incompatible pointer sign
     extension.  */
  while (GET_CODE (x) == SIGN_EXTEND
	 || GET_CODE (x) == ZERO_EXTEND
	 || GET_CODE (x) == TRUNCATE
	 || (GET_CODE (x) == SUBREG && subreg_lowpart_p (x)))
    {
#if defined(POINTERS_EXTEND_UNSIGNED) && !defined(HAVE_ptr_extend)
      if ((GET_CODE (x) == SIGN_EXTEND && POINTERS_EXTEND_UNSIGNED)
	  || (GET_CODE (x) != SIGN_EXTEND && ! POINTERS_EXTEND_UNSIGNED))
	can_be_reg_pointer = false;
#endif
      x = XEXP (x, 0);
    }

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET_KNOWN_P (x))
	REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
					 MEM_OFFSET (x) + offset);
      if (can_be_reg_pointer && MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (can_be_reg_pointer && REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus largest pseudo reg number used in the current function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_DOUBLE_INT, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || CONST_DOUBLE_AS_FLOAT_P (x) || CONST_SCALAR_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
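
/* Illustrative sketch (not part of the original file): for a QImode
   pseudo R (hypothetical), gen_lowpart_common (QImode,
   (zero_extend:SI R)) returns R itself, while asking for HImode of the
   same extension rebuilds the narrower (zero_extend:HI R).  */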

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of the EXP operand in case EXP
   can be a VOIDmode constant.  */

rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
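
/* Worked sketch (not part of the original file): for
   subreg_lowpart_offset (SImode, DImode) the size difference is 4, so
   on a little-endian target the result is 0, while on a fully
   big-endian target it is 4: the low-order 32 bits sit in the later
   bytes of the 8-byte value.  */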

/* Return offset in bytes to get OUTERMODE high part
   of the value in mode INNERMODE stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
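
/* Mirror sketch (not part of the original file):
   subreg_highpart_offset (SImode, DImode) is 4 on little-endian and 0
   on fully big-endian targets, the exact complement of the lowpart
   offset above.  */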

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return true if X is a paradoxical subreg, false otherwise.  */
bool
paradoxical_subreg_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return false;
  return (GET_MODE_PRECISION (GET_MODE (x))
	  > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
}
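
/* Example (not part of the original file): (subreg:DI (reg:SI 70) 0) is
   paradoxical, since the outer mode is wider than the inner one;
   (subreg:SI (reg:DI 70) 0) is not.  */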

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address; validating
   it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
       moment is that all callers of this thing provide no 'goal mode' to
       tell us to work with.  This exists because all callers were written
       in a word based SUBREG world.
       Now use of this function can be deprecated by simplify_subreg in
       most cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* Rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
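
/* Illustrative sketch (not part of the original file): on a 32-bit
   little-endian target, operand_subword (R, 1, 0, DImode) for a DImode
   pseudo R reduces via simplify_gen_subreg to (subreg:SI R 4), the
   high-order word.  */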

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered
   equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
	   || (MAX (MEM_ALIGN (mem),
		    MAX (align, get_object_alignment (MEM_EXPR (mem))))
	       < align))
	 return -1;
       else
	 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs; even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
    return -1;

  offset = MEM_OFFSET (mem);
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}

/* Given REF (a MEM) and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;
  struct mem_attrs attrs, *defattrs, *refattrs;
  addr_space_t as;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  memset (&attrs, 0, sizeof (attrs));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  attrs.alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* Default values from pre-existing memory attributes if present.  */
  refattrs = MEM_ATTRS (ref);
  if (refattrs)
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      attrs.expr = refattrs->expr;
      attrs.offset_known_p = refattrs->offset_known_p;
      attrs.offset = refattrs->offset;
      attrs.size_known_p = refattrs->size_known_p;
      attrs.size = refattrs->size;
      attrs.align = refattrs->align;
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else
    {
      defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
      gcc_assert (!defattrs->expr);
      gcc_assert (!defattrs->offset_known_p);

      /* Respect mode size.  */
      attrs.size_known_p = defattrs->size_known_p;
      attrs.size = defattrs->size;
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
	 if T is an object, always compute the object alignment below.  */
      if (TYPE_P (t))
	attrs.align = defattrs->align;
      else
	attrs.align = BITS_PER_UNIT;
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    attrs.align = MAX (attrs.align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  tree new_size = TYPE_SIZE_UNIT (type);

  /* The address-space is that of the type.  */
  as = TYPE_ADDR_SPACE (type);

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base)
	{
	  if (DECL_P (base)
	      && TREE_READONLY (base)
	      && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	      && !TREE_THIS_VOLATILE (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Mark static const strings readonly as well.  */
	  if (TREE_CODE (base) == STRING_CST
	      && TREE_READONLY (base)
	      && TREE_STATIC (base))
	    MEM_READONLY_P (ref) = 1;

	  /* Address-space information is on the base object.  */
	  if (TREE_CODE (base) == MEM_REF
	      || TREE_CODE (base) == TARGET_MEM_REF)
	    as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
								      0))));
	  else
	    as = TYPE_ADDR_SPACE (TREE_TYPE (base));
	}

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  new_size = DECL_SIZE_UNIT (t);
	}

      /* ??? If we end up with a constant here do record a MEM_EXPR.  */
      else if (CONSTANT_CLASS_P (t))
	;

      /* If this is a field reference, record it.  */
      else if (TREE_CODE (t) == COMPONENT_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	  if (DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	    new_size = DECL_SIZE_UNIT (TREE_OPERAND (t, 1));
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2)
	      || TREE_CODE (t2) == COMPONENT_REF)
	    {
	      attrs.expr = t2;
	      attrs.offset_known_p = false;
	      if (host_integerp (off_tree, 1))
		{
		  attrs.offset_known_p = true;
		  attrs.offset = tree_low_cst (off_tree, 1);
		  apply_bitpos = bitpos;
		}
	    }
	  /* Else do not record a MEM_EXPR.  */
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  attrs.expr = t;
	  attrs.offset_known_p = true;
	  attrs.offset = 0;
	  apply_bitpos = bitpos;
	}

      /* Compute the alignment.  */
      unsigned int obj_align;
      unsigned HOST_WIDE_INT obj_bitpos;
      get_object_alignment_1 (t, &obj_align, &obj_bitpos);
      obj_bitpos = (obj_bitpos - bitpos) & (obj_align - 1);
      if (obj_bitpos != 0)
	obj_align = (obj_bitpos & -obj_bitpos);
      attrs.align = MAX (attrs.align, obj_align);
    }

  if (host_integerp (new_size, 1))
    {
      attrs.size_known_p = true;
      attrs.size = tree_low_cst (new_size, 1);
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      gcc_assert (attrs.offset_known_p);
      attrs.offset -= apply_bitpos / BITS_PER_UNIT;
      if (attrs.size_known_p)
	attrs.size += apply_bitpos / BITS_PER_UNIT;
    }

  /* Now set the attributes we computed above.  */
  attrs.addrspace = as;
  set_mem_attrs (ref, &attrs);
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
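
/* Illustrative note (not part of the original file): expansion code that
   creates a fresh MEM for a whole decl DECL typically calls
   set_mem_attributes (mem, decl, 1); the minus_bitpos variant above
   exists for callers, such as bit-field stores, that still have a bit
   offset left to apply.  */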

/* Set the alias set of MEM to SET.  */

void
set_mem_alias_set (rtx mem, alias_set_type set)
{
  struct mem_attrs attrs;

  /* If the new and old alias sets don't conflict, something is wrong.  */
  gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
  attrs = *get_mem_attrs (mem);
  attrs.alias = set;
  set_mem_attrs (mem, &attrs);
}

/* Set the address space of MEM to ADDRSPACE (target-defined).  */

void
set_mem_addr_space (rtx mem, addr_space_t addrspace)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.addrspace = addrspace;
  set_mem_attrs (mem, &attrs);
}

/* Set the alignment of MEM to ALIGN bits.  */

void
set_mem_align (rtx mem, unsigned int align)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.align = align;
  set_mem_attrs (mem, &attrs);
}

/* Set the expr for MEM to EXPR.  */

void
set_mem_expr (rtx mem, tree expr)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.expr = expr;
  set_mem_attrs (mem, &attrs);
}

/* Set the offset of MEM to OFFSET.  */

void
set_mem_offset (rtx mem, HOST_WIDE_INT offset)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = true;
  attrs.offset = offset;
  set_mem_attrs (mem, &attrs);
}

/* Clear the offset of MEM.  */

void
clear_mem_offset (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.offset_known_p = false;
  set_mem_attrs (mem, &attrs);
}

/* Set the size of MEM to SIZE.  */

void
set_mem_size (rtx mem, HOST_WIDE_INT size)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = true;
  attrs.size = size;
  set_mem_attrs (mem, &attrs);
}

/* Clear the size of MEM.  */

void
clear_mem_size (rtx mem)
{
  struct mem_attrs attrs;

  attrs = *get_mem_attrs (mem);
  attrs.size_known_p = false;
  set_mem_attrs (mem, &attrs);
}
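
/* Note (not part of the original file): all of the setters above share
   one copy-modify-store pattern:

     attrs = *get_mem_attrs (mem);   -- copy the current attributes
     attrs.align = align;            -- tweak one field
     set_mem_attrs (mem, &attrs);    -- re-canonicalize via the hash table

   so distinct MEMs carrying identical attributes end up sharing a single
   mem_attrs record.  */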

/* Return a memory reference like MEMREF, but with its mode changed to MODE
   and its address changed to ADDR.  (VOIDmode means don't change the mode.
   NULL for ADDR means don't change the address.)  VALIDATE is nonzero if the
   returned memory location is required to be valid.  The memory
   attributes are not changed.  */

static rtx
change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
{
  addr_space_t as;
  rtx new_rtx;

  gcc_assert (MEM_P (memref));
  as = MEM_ADDR_SPACE (memref);
  if (mode == VOIDmode)
    mode = GET_MODE (memref);
  if (addr == 0)
    addr = XEXP (memref, 0);
  if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
      && (!validate || memory_address_addr_space_p (mode, addr, as)))
    return memref;

  if (validate)
    {
      if (reload_in_progress || reload_completed)
	gcc_assert (memory_address_addr_space_p (mode, addr, as));
      else
	addr = memory_address_addr_space (mode, addr, as);
    }

  if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
    return memref;

  new_rtx = gen_rtx_MEM (mode, addr);
  MEM_COPY_ATTRIBUTES (new_rtx, memref);
  return new_rtx;
}
1968 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1969 way we are changing MEMREF, so we only preserve the alias set. */
1972 change_address (rtx memref, enum machine_mode mode, rtx addr)
1974 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
1975 enum machine_mode mmode = GET_MODE (new_rtx);
1976 struct mem_attrs attrs, *defattrs;
1978 attrs = *get_mem_attrs (memref);
1979 defattrs = mode_mem_attrs[(int) mmode];
1980 attrs.expr = NULL_TREE;
1981 attrs.offset_known_p = false;
1982 attrs.size_known_p = defattrs->size_known_p;
1983 attrs.size = defattrs->size;
1984 attrs.align = defattrs->align;
1986 /* If there are no changes, just return the original memory reference. */
1987 if (new_rtx == memref)
1989 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
1990 return new_rtx;
1992 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1993 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1996 set_mem_attrs (new_rtx, &attrs);
1997 return new_rtx;
2000 /* Return a memory reference like MEMREF, but with its mode changed
2001 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2002 nonzero, the memory address is forced to be valid.
2003 If ADJUST_ADDRESS is zero, OFFSET is only used to update MEM_ATTRS
2004 and the caller is responsible for adjusting MEMREF base register.
2005 If ADJUST_OBJECT is zero, the underlying object associated with the
2006 memory reference is left unchanged and the caller is responsible for
2007 dealing with it. Otherwise, if the new memory reference is outside
2008 the underlying object, even partially, then the object is dropped.
2009 SIZE, if nonzero, is the size of an access in cases where MODE
2010 has no inherent size. */
2013 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2014 int validate, int adjust_address, int adjust_object,
2015 HOST_WIDE_INT size)
2017 rtx addr = XEXP (memref, 0);
2018 rtx new_rtx;
2019 enum machine_mode address_mode;
2020 int pbits;
2021 struct mem_attrs attrs = *get_mem_attrs (memref), *defattrs;
2022 unsigned HOST_WIDE_INT max_align;
2023 #ifdef POINTERS_EXTEND_UNSIGNED
2024 enum machine_mode pointer_mode
2025 = targetm.addr_space.pointer_mode (attrs.addrspace);
2026 #endif
2028 /* VOIDmode means no mode change for change_address_1. */
2029 if (mode == VOIDmode)
2030 mode = GET_MODE (memref);
2032 /* Take the size of non-BLKmode accesses from the mode. */
2033 defattrs = mode_mem_attrs[(int) mode];
2034 if (defattrs->size_known_p)
2035 size = defattrs->size;
2037 /* If there are no changes, just return the original memory reference. */
2038 if (mode == GET_MODE (memref) && !offset
2039 && (size == 0 || (attrs.size_known_p && attrs.size == size))
2040 && (!validate || memory_address_addr_space_p (mode, addr,
2041 attrs.addrspace)))
2042 return memref;
2044 /* ??? Prefer to create garbage instead of creating shared rtl.
2045 This may happen even if offset is nonzero -- consider
2046 (plus (plus reg reg) const_int) -- so do this always. */
2047 addr = copy_rtx (addr);
2049 /* Convert a possibly large offset to a signed value within the
2050 range of the target address space. */
2051 address_mode = get_address_mode (memref);
2052 pbits = GET_MODE_BITSIZE (address_mode);
2053 if (HOST_BITS_PER_WIDE_INT > pbits)
2055 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2056 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2057 >> shift);
2060 if (adjust_address)
2062 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2063 object, we can merge it into the LO_SUM. */
2064 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2065 && offset >= 0
2066 && (unsigned HOST_WIDE_INT) offset
2067 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2068 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2069 plus_constant (address_mode,
2070 XEXP (addr, 1), offset));
2071 #ifdef POINTERS_EXTEND_UNSIGNED
2072 /* If MEMREF is a ZERO_EXTEND from pointer_mode and the offset is valid
2073 in that mode, we merge it into the ZERO_EXTEND. We take advantage of
2074 the fact that pointers are not allowed to overflow. */
2075 else if (POINTERS_EXTEND_UNSIGNED > 0
2076 && GET_CODE (addr) == ZERO_EXTEND
2077 && GET_MODE (XEXP (addr, 0)) == pointer_mode
2078 && trunc_int_for_mode (offset, pointer_mode) == offset)
2079 addr = gen_rtx_ZERO_EXTEND (address_mode,
2080 plus_constant (pointer_mode,
2081 XEXP (addr, 0), offset));
2082 #endif
2083 else
2084 addr = plus_constant (address_mode, addr, offset);
2087 new_rtx = change_address_1 (memref, mode, addr, validate);
2089 /* If the address is a REG, change_address_1 rightfully returns memref,
2090 but this would destroy memref's MEM_ATTRS. */
2091 if (new_rtx == memref && offset != 0)
2092 new_rtx = copy_rtx (new_rtx);
2094 /* Conservatively drop the object if we don't know where we start from. */
2095 if (adjust_object && (!attrs.offset_known_p || !attrs.size_known_p))
2097 attrs.expr = NULL_TREE;
2098 attrs.alias = 0;
2101 /* Compute the new values of the memory attributes due to this adjustment.
2102 We add the offsets and update the alignment. */
2103 if (attrs.offset_known_p)
2105 attrs.offset += offset;
2107 /* Drop the object if the new left end is not within its bounds. */
2108 if (adjust_object && attrs.offset < 0)
2110 attrs.expr = NULL_TREE;
2111 attrs.alias = 0;
2115 /* Compute the new alignment by taking the MIN of the alignment and the
2116 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2117 is zero. */
2118 if (offset != 0)
2120 max_align = (offset & -offset) * BITS_PER_UNIT;
2121 attrs.align = MIN (attrs.align, max_align);
2124 if (size)
2126 /* Drop the object if the new right end is not within its bounds. */
2127 if (adjust_object && (offset + size) > attrs.size)
2129 attrs.expr = NULL_TREE;
2130 attrs.alias = 0;
2132 attrs.size_known_p = true;
2133 attrs.size = size;
2135 else if (attrs.size_known_p)
2137 gcc_assert (!adjust_object);
2138 attrs.size -= offset;
2139 /* ??? The store_by_pieces machinery generates negative sizes,
2140 so don't assert for that here. */
2143 set_mem_attrs (new_rtx, &attrs);
2145 return new_rtx;
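/* Callers normally reach this through the adjust_address and
   adjust_address_nv macros in expr.h, which fill in the flag arguments.
   A sketch splitting a double-word MEM into word-sized halves:

       rtx lo = adjust_address (mem, SImode, 0);
       rtx hi = adjust_address (mem, SImode, GET_MODE_SIZE (SImode));

   The offset and size attributes track the adjustment, and the alignment
   shrinks to match the low bits of the new offset.  */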
2148 /* Return a memory reference like MEMREF, but with its mode changed
2149 to MODE and its address changed to ADDR, which is assumed to be
2150 MEMREF offset by OFFSET bytes. If VALIDATE is
2151 nonzero, the memory address is forced to be valid. */
2154 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2155 HOST_WIDE_INT offset, int validate)
2157 memref = change_address_1 (memref, VOIDmode, addr, validate);
2158 return adjust_address_1 (memref, mode, offset, validate, 0, 0, 0);
2161 /* Return a memory reference like MEMREF, but whose address is changed by
2162 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2163 known to be in OFFSET (possibly 1). */
2166 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2168 rtx new_rtx, addr = XEXP (memref, 0);
2169 enum machine_mode address_mode;
2170 struct mem_attrs attrs, *defattrs;
2172 attrs = *get_mem_attrs (memref);
2173 address_mode = get_address_mode (memref);
2174 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2176 /* At this point we don't know _why_ the address is invalid. It
2177 could have secondary memory references, multiplies or anything.
2179 However, if we did go and rearrange things, we can wind up not
2180 being able to recognize the magic around pic_offset_table_rtx.
2181 This stuff is fragile, and is yet another example of why it is
2182 bad to expose PIC machinery too early. */
2183 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2184 attrs.addrspace)
2185 && GET_CODE (addr) == PLUS
2186 && XEXP (addr, 0) == pic_offset_table_rtx)
2188 addr = force_reg (GET_MODE (addr), addr);
2189 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2192 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2193 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2195 /* If there are no changes, just return the original memory reference. */
2196 if (new_rtx == memref)
2197 return new_rtx;
2199 /* Update the alignment to reflect the offset. Reset the offset, which
2200 we don't know. */
2201 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2202 attrs.offset_known_p = false;
2203 attrs.size_known_p = defattrs->size_known_p;
2204 attrs.size = defattrs->size;
2205 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2206 set_mem_attrs (new_rtx, &attrs);
2207 return new_rtx;
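/* A sketch with a run-time offset, assuming IDX is a pseudo holding a
   byte index known to be a multiple of 4:

       rtx elt = offset_address (mem, idx, 4);

   Passing POW2 = 4 lets the result keep 32-bit alignment even though the
   offset's value is unknown at compile time.  */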
2210 /* Return a memory reference like MEMREF, but with its address changed to
2211 ADDR. The caller is asserting that the actual piece of memory pointed
2212 to is the same, just the form of the address is being changed, such as
2213 by putting something into a register. */
2216 replace_equiv_address (rtx memref, rtx addr)
2218 /* change_address_1 copies the memory attribute structure without change
2219 and that's exactly what we want here. */
2220 update_temp_slot_address (XEXP (memref, 0), addr);
2221 return change_address_1 (memref, VOIDmode, addr, 1);
2224 /* Likewise, but the reference is not required to be valid. */
2227 replace_equiv_address_nv (rtx memref, rtx addr)
2229 return change_address_1 (memref, VOIDmode, addr, 0);
2232 /* Return a memory reference like MEMREF, but with its mode widened to
2233 MODE and offset by OFFSET. This would be used by targets that e.g.
2234 cannot issue QImode memory operations and have to use SImode memory
2235 operations plus masking logic. */
2238 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2240 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1, 0, 0);
2241 struct mem_attrs attrs;
2242 unsigned int size = GET_MODE_SIZE (mode);
2244 /* If there are no changes, just return the original memory reference. */
2245 if (new_rtx == memref)
2246 return new_rtx;
2248 attrs = *get_mem_attrs (new_rtx);
2250 /* If we don't know what offset we were at within the expression, then
2251 we can't know if we've overstepped the bounds. */
2252 if (! attrs.offset_known_p)
2253 attrs.expr = NULL_TREE;
2255 while (attrs.expr)
2257 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2259 tree field = TREE_OPERAND (attrs.expr, 1);
2260 tree offset = component_ref_field_offset (attrs.expr);
2262 if (! DECL_SIZE_UNIT (field))
2264 attrs.expr = NULL_TREE;
2265 break;
2268 /* Is the field at least as large as the access? If so, we are done;
2269 otherwise strip back to the containing structure. */
2270 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2271 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2272 && attrs.offset >= 0)
2273 break;
2275 if (! host_integerp (offset, 1))
2277 attrs.expr = NULL_TREE;
2278 break;
2281 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2282 attrs.offset += tree_low_cst (offset, 1);
2283 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2284 / BITS_PER_UNIT);
2286 /* Similarly for the decl. */
2287 else if (DECL_P (attrs.expr)
2288 && DECL_SIZE_UNIT (attrs.expr)
2289 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2290 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2291 && (! attrs.offset_known_p || attrs.offset >= 0))
2292 break;
2293 else
2295 /* The widened memory access overflows the expression, which means
2296 that it could alias another expression. Zap it. */
2297 attrs.expr = NULL_TREE;
2298 break;
2302 if (! attrs.expr)
2303 attrs.offset_known_p = false;
2305 /* The widened memory may alias other stuff, so zap the alias set. */
2306 /* ??? Maybe use get_alias_set on any remaining expression. */
2307 attrs.alias = 0;
2308 attrs.size_known_p = true;
2309 attrs.size = size;
2310 set_mem_attrs (new_rtx, &attrs);
2311 return new_rtx;
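/* A sketch for a target lacking byte loads, assuming BYTE_MEM is a
   QImode reference the expander must emulate:

       rtx word = widen_memory_access (byte_mem, SImode, 0);

   The byte can then be extracted from WORD with shifts and masks; the
   alias set was cleared because the wide access may touch neighbors.  */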
2314 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2315 static GTY(()) tree spill_slot_decl;
2317 tree
2318 get_spill_slot_decl (bool force_build_p)
2320 tree d = spill_slot_decl;
2321 rtx rd;
2322 struct mem_attrs attrs;
2324 if (d || !force_build_p)
2325 return d;
2327 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2328 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2329 DECL_ARTIFICIAL (d) = 1;
2330 DECL_IGNORED_P (d) = 1;
2331 TREE_USED (d) = 1;
2332 spill_slot_decl = d;
2334 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2335 MEM_NOTRAP_P (rd) = 1;
2336 attrs = *mode_mem_attrs[(int) BLKmode];
2337 attrs.alias = new_alias_set ();
2338 attrs.expr = d;
2339 set_mem_attrs (rd, &attrs);
2340 SET_DECL_RTL (d, rd);
2342 return d;
2345 /* Given MEM, a result from assign_stack_local, fill in the memory
2346 attributes as appropriate for a register allocator spill slot.
2347 These slots are not aliasable by other memory. We arrange for
2348 them all to use a single MEM_EXPR, so that the aliasing code can
2349 work properly in the case of shared spill slots. */
2351 void
2352 set_mem_attrs_for_spill (rtx mem)
2354 struct mem_attrs attrs;
2355 rtx addr;
2357 attrs = *get_mem_attrs (mem);
2358 attrs.expr = get_spill_slot_decl (true);
2359 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2360 attrs.addrspace = ADDR_SPACE_GENERIC;
2362 /* We expect the incoming memory to be of the form:
2363 (mem:MODE (plus (reg sfp) (const_int offset)))
2364 with perhaps the plus missing for offset = 0. */
2365 addr = XEXP (mem, 0);
2366 attrs.offset_known_p = true;
2367 attrs.offset = 0;
2368 if (GET_CODE (addr) == PLUS
2369 && CONST_INT_P (XEXP (addr, 1)))
2370 attrs.offset = INTVAL (XEXP (addr, 1));
2372 set_mem_attrs (mem, &attrs);
2373 MEM_NOTRAP_P (mem) = 1;
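/* A sketch of the expected call sequence, assuming MODE is the mode
   being spilled:

       rtx slot = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
       set_mem_attrs_for_spill (slot);

   assign_stack_local hands back (mem (plus (reg sfp) (const_int N))),
   the shape the offset extraction above expects.  */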
2376 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2379 gen_label_rtx (void)
2381 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2382 NULL, label_num++, NULL);
2385 /* For procedure integration. */
2387 /* Install new pointers to the first and last insns in the chain.
2388 Also, set cur_insn_uid to one higher than the last in use.
2389 Used for an inline-procedure after copying the insn chain. */
2391 void
2392 set_new_first_and_last_insn (rtx first, rtx last)
2394 rtx insn;
2396 set_first_insn (first);
2397 set_last_insn (last);
2398 cur_insn_uid = 0;
2400 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2402 int debug_count = 0;
2404 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2405 cur_debug_insn_uid = 0;
2407 for (insn = first; insn; insn = NEXT_INSN (insn))
2408 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2409 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2410 else
2412 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2413 if (DEBUG_INSN_P (insn))
2414 debug_count++;
2417 if (debug_count)
2418 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2419 else
2420 cur_debug_insn_uid++;
2422 else
2423 for (insn = first; insn; insn = NEXT_INSN (insn))
2424 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2426 cur_insn_uid++;
2429 /* Go through all the RTL insn bodies and copy any invalid shared
2430 structure. This routine should only be called once. */
2432 static void
2433 unshare_all_rtl_1 (rtx insn)
2435 /* Unshare just about everything else. */
2436 unshare_all_rtl_in_chain (insn);
2438 /* Make sure the addresses of stack slots found outside the insn chain
2439 (such as, in DECL_RTL of a variable) are not shared
2440 with the insn chain.
2442 This special care is necessary when the stack slot MEM does not
2443 actually appear in the insn chain. If it does appear, its address
2444 is unshared from all else at that point. */
2445 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2448 /* Go through all the RTL insn bodies and copy any invalid shared
2449 structure, again. This is a fairly expensive thing to do so it
2450 should be done sparingly. */
2452 void
2453 unshare_all_rtl_again (rtx insn)
2455 rtx p;
2456 tree decl;
2458 for (p = insn; p; p = NEXT_INSN (p))
2459 if (INSN_P (p))
2461 reset_used_flags (PATTERN (p));
2462 reset_used_flags (REG_NOTES (p));
2463 if (CALL_P (p))
2464 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2467 /* Make sure that virtual stack slots are not shared. */
2468 set_used_decls (DECL_INITIAL (cfun->decl));
2470 /* Make sure that virtual parameters are not shared. */
2471 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2472 set_used_flags (DECL_RTL (decl));
2474 reset_used_flags (stack_slot_list);
2476 unshare_all_rtl_1 (insn);
2479 unsigned int
2480 unshare_all_rtl (void)
2482 unshare_all_rtl_1 (get_insns ());
2483 return 0;
2487 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2488 Recursively does the same for subexpressions. */
2490 static void
2491 verify_rtx_sharing (rtx orig, rtx insn)
2493 rtx x = orig;
2494 int i;
2495 enum rtx_code code;
2496 const char *format_ptr;
2498 if (x == 0)
2499 return;
2501 code = GET_CODE (x);
2503 /* These types may be freely shared. */
2505 switch (code)
2507 case REG:
2508 case DEBUG_EXPR:
2509 case VALUE:
2510 CASE_CONST_ANY:
2511 case SYMBOL_REF:
2512 case LABEL_REF:
2513 case CODE_LABEL:
2514 case PC:
2515 case CC0:
2516 case RETURN:
2517 case SIMPLE_RETURN:
2518 case SCRATCH:
2519 /* SCRATCH must be shared because each one represents a distinct value. */
2520 return;
2521 case CLOBBER:
2522 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2523 clobbers or clobbers of hard registers that originated as pseudos.
2524 This is needed to allow safe register renaming. */
2525 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2526 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2527 return;
2528 break;
2530 case CONST:
2531 if (shared_const_p (orig))
2532 return;
2533 break;
2535 case MEM:
2536 /* A MEM is allowed to be shared if its address is constant. */
2537 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2538 || reload_completed || reload_in_progress)
2539 return;
2541 break;
2543 default:
2544 break;
2547 /* This rtx may not be shared. If it has already been seen,
2548 replace it with a copy of itself. */
2549 #ifdef ENABLE_CHECKING
2550 if (RTX_FLAG (x, used))
2552 error ("invalid rtl sharing found in the insn");
2553 debug_rtx (insn);
2554 error ("shared rtx");
2555 debug_rtx (x);
2556 internal_error ("internal consistency failure");
2558 #endif
2559 gcc_assert (!RTX_FLAG (x, used));
2561 RTX_FLAG (x, used) = 1;
2563 /* Now scan the subexpressions recursively. */
2565 format_ptr = GET_RTX_FORMAT (code);
2567 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2569 switch (*format_ptr++)
2571 case 'e':
2572 verify_rtx_sharing (XEXP (x, i), insn);
2573 break;
2575 case 'E':
2576 if (XVEC (x, i) != NULL)
2578 int j;
2579 int len = XVECLEN (x, i);
2581 for (j = 0; j < len; j++)
2583 /* We allow sharing of ASM_OPERANDS inside a single
2584 instruction. */
2585 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2586 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2587 == ASM_OPERANDS))
2588 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2589 else
2590 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2593 break;
2596 return;
2599 /* Reset used-flags for INSN. */
2601 static void
2602 reset_insn_used_flags (rtx insn)
2604 gcc_assert (INSN_P (insn));
2605 reset_used_flags (PATTERN (insn));
2606 reset_used_flags (REG_NOTES (insn));
2607 if (CALL_P (insn))
2608 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2611 /* Go through all the RTL insn bodies and clear all the USED bits. */
2613 static void
2614 reset_all_used_flags (void)
2616 rtx p;
2618 for (p = get_insns (); p; p = NEXT_INSN (p))
2619 if (INSN_P (p))
2621 rtx pat = PATTERN (p);
2622 if (GET_CODE (pat) != SEQUENCE)
2623 reset_insn_used_flags (p);
2624 else
2626 gcc_assert (REG_NOTES (p) == NULL);
2627 for (int i = 0; i < XVECLEN (pat, 0); i++)
2628 reset_insn_used_flags (XVECEXP (pat, 0, i));
2633 /* Verify sharing in INSN. */
2635 static void
2636 verify_insn_sharing (rtx insn)
2638 gcc_assert (INSN_P (insn));
2639 reset_used_flags (PATTERN (insn));
2640 reset_used_flags (REG_NOTES (insn));
2641 if (CALL_P (insn))
2642 reset_used_flags (CALL_INSN_FUNCTION_USAGE (insn));
2645 /* Go through all the RTL insn bodies and check that there is no unexpected
2646 sharing between the subexpressions. */
2648 DEBUG_FUNCTION void
2649 verify_rtl_sharing (void)
2651 rtx p;
2653 timevar_push (TV_VERIFY_RTL_SHARING);
2655 reset_all_used_flags ();
2657 for (p = get_insns (); p; p = NEXT_INSN (p))
2658 if (INSN_P (p))
2660 rtx pat = PATTERN (p);
2661 if (GET_CODE (pat) != SEQUENCE)
2662 verify_insn_sharing (p);
2663 else
2664 for (int i = 0; i < XVECLEN (pat, 0); i++)
2665 verify_insn_sharing (XVECEXP (pat, 0, i));
2668 reset_all_used_flags ();
2670 timevar_pop (TV_VERIFY_RTL_SHARING);
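/* A typical checking-build use is a bare call after a pass that rewrites
   patterns:

       #ifdef ENABLE_CHECKING
       verify_rtl_sharing ();
       #endif

   Any violation lands in the internal_error path of verify_rtx_sharing
   above.  */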
2673 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2674 Assumes the mark bits are cleared at entry. */
2676 void
2677 unshare_all_rtl_in_chain (rtx insn)
2679 for (; insn; insn = NEXT_INSN (insn))
2680 if (INSN_P (insn))
2682 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2683 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2684 if (CALL_P (insn))
2685 CALL_INSN_FUNCTION_USAGE (insn)
2686 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2690 /* Go through all virtual stack slots of a function and mark them as
2691 shared. We never replace the DECL_RTLs themselves with a copy,
2692 but expressions mentioned into a DECL_RTL cannot be shared with
2693 expressions in the instruction stream.
2695 Note that reload may convert pseudo registers into memories in-place.
2696 Pseudo registers are always shared, but MEMs never are. Thus if we
2697 reset the used flags on MEMs in the instruction stream, we must set
2698 them again on MEMs that appear in DECL_RTLs. */
2700 static void
2701 set_used_decls (tree blk)
2703 tree t;
2705 /* Mark decls. */
2706 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2707 if (DECL_RTL_SET_P (t))
2708 set_used_flags (DECL_RTL (t));
2710 /* Now process sub-blocks. */
2711 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2712 set_used_decls (t);
2715 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2716 Recursively does the same for subexpressions. Uses
2717 copy_rtx_if_shared_1 to reduce stack space. */
2720 copy_rtx_if_shared (rtx orig)
2722 copy_rtx_if_shared_1 (&orig);
2723 return orig;
2726 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2727 use. Recursively does the same for subexpressions. */
2729 static void
2730 copy_rtx_if_shared_1 (rtx *orig1)
2732 rtx x;
2733 int i;
2734 enum rtx_code code;
2735 rtx *last_ptr;
2736 const char *format_ptr;
2737 int copied = 0;
2738 int length;
2740 /* Repeat is used to turn tail-recursion into iteration. */
2741 repeat:
2742 x = *orig1;
2744 if (x == 0)
2745 return;
2747 code = GET_CODE (x);
2749 /* These types may be freely shared. */
2751 switch (code)
2753 case REG:
2754 case DEBUG_EXPR:
2755 case VALUE:
2756 CASE_CONST_ANY:
2757 case SYMBOL_REF:
2758 case LABEL_REF:
2759 case CODE_LABEL:
2760 case PC:
2761 case CC0:
2762 case RETURN:
2763 case SIMPLE_RETURN:
2764 case SCRATCH:
2765 /* SCRATCH must be shared because each one represents a distinct value. */
2766 return;
2767 case CLOBBER:
2768 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
2769 clobbers or clobbers of hard registers that originated as pseudos.
2770 This is needed to allow safe register renaming. */
2771 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER
2772 && ORIGINAL_REGNO (XEXP (x, 0)) == REGNO (XEXP (x, 0)))
2773 return;
2774 break;
2776 case CONST:
2777 if (shared_const_p (x))
2778 return;
2779 break;
2781 case DEBUG_INSN:
2782 case INSN:
2783 case JUMP_INSN:
2784 case CALL_INSN:
2785 case NOTE:
2786 case BARRIER:
2787 /* The chain of insns is not being copied. */
2788 return;
2790 default:
2791 break;
2794 /* This rtx may not be shared. If it has already been seen,
2795 replace it with a copy of itself. */
2797 if (RTX_FLAG (x, used))
2799 x = shallow_copy_rtx (x);
2800 copied = 1;
2802 RTX_FLAG (x, used) = 1;
2804 /* Now scan the subexpressions recursively.
2805 We can store any replaced subexpressions directly into X
2806 since we know X is not shared! Any vectors in X
2807 must be copied if X was copied. */
2809 format_ptr = GET_RTX_FORMAT (code);
2810 length = GET_RTX_LENGTH (code);
2811 last_ptr = NULL;
2813 for (i = 0; i < length; i++)
2815 switch (*format_ptr++)
2817 case 'e':
2818 if (last_ptr)
2819 copy_rtx_if_shared_1 (last_ptr);
2820 last_ptr = &XEXP (x, i);
2821 break;
2823 case 'E':
2824 if (XVEC (x, i) != NULL)
2826 int j;
2827 int len = XVECLEN (x, i);
2829 /* Copy the vector iff we copied the rtx and the length
2830 is nonzero. */
2831 if (copied && len > 0)
2832 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2834 /* Call recursively on all inside the vector. */
2835 for (j = 0; j < len; j++)
2837 if (last_ptr)
2838 copy_rtx_if_shared_1 (last_ptr);
2839 last_ptr = &XVECEXP (x, i, j);
2842 break;
2845 *orig1 = x;
2846 if (last_ptr)
2848 orig1 = last_ptr;
2849 goto repeat;
2851 return;
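/* The last_ptr/repeat idiom above bounds stack depth: recurse on every
   child except the most recently seen one, then loop on that one.  The
   same shape on a toy tree type, as an illustrative sketch:

       struct node { struct node *kid[2]; };

       static void
       walk (struct node *n)
       {
       repeat:
         if (n == NULL)
           return;
         walk (n->kid[0]);   (recurse on all but the last child)
         n = n->kid[1];      (the tail call becomes iteration)
         goto repeat;
       }
*/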
2854 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2856 static void
2857 mark_used_flags (rtx x, int flag)
2859 int i, j;
2860 enum rtx_code code;
2861 const char *format_ptr;
2862 int length;
2864 /* Repeat is used to turn tail-recursion into iteration. */
2865 repeat:
2866 if (x == 0)
2867 return;
2869 code = GET_CODE (x);
2871 /* These types may be freely shared so we needn't do any resetting
2872 for them. */
2874 switch (code)
2876 case REG:
2877 case DEBUG_EXPR:
2878 case VALUE:
2879 CASE_CONST_ANY:
2880 case SYMBOL_REF:
2881 case CODE_LABEL:
2882 case PC:
2883 case CC0:
2884 case RETURN:
2885 case SIMPLE_RETURN:
2886 return;
2888 case DEBUG_INSN:
2889 case INSN:
2890 case JUMP_INSN:
2891 case CALL_INSN:
2892 case NOTE:
2893 case LABEL_REF:
2894 case BARRIER:
2895 /* The chain of insns is not being copied. */
2896 return;
2898 default:
2899 break;
2902 RTX_FLAG (x, used) = flag;
2904 format_ptr = GET_RTX_FORMAT (code);
2905 length = GET_RTX_LENGTH (code);
2907 for (i = 0; i < length; i++)
2909 switch (*format_ptr++)
2911 case 'e':
2912 if (i == length-1)
2914 x = XEXP (x, i);
2915 goto repeat;
2917 mark_used_flags (XEXP (x, i), flag);
2918 break;
2920 case 'E':
2921 for (j = 0; j < XVECLEN (x, i); j++)
2922 mark_used_flags (XVECEXP (x, i, j), flag);
2923 break;
2928 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2929 to look for shared sub-parts. */
2931 void
2932 reset_used_flags (rtx x)
2934 mark_used_flags (x, 0);
2937 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2938 to look for shared sub-parts. */
2940 void
2941 set_used_flags (rtx x)
2943 mark_used_flags (x, 1);
2946 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2947 Return X or the rtx for the pseudo reg the value of X was copied into.
2948 OTHER must be valid as a SET_DEST. */
2951 make_safe_from (rtx x, rtx other)
2953 while (1)
2954 switch (GET_CODE (other))
2956 case SUBREG:
2957 other = SUBREG_REG (other);
2958 break;
2959 case STRICT_LOW_PART:
2960 case SIGN_EXTEND:
2961 case ZERO_EXTEND:
2962 other = XEXP (other, 0);
2963 break;
2964 default:
2965 goto done;
2967 done:
2968 if ((MEM_P (other)
2969 && ! CONSTANT_P (x)
2970 && !REG_P (x)
2971 && GET_CODE (x) != SUBREG)
2972 || (REG_P (other)
2973 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2974 || reg_mentioned_p (other, x))))
2976 rtx temp = gen_reg_rtx (GET_MODE (x));
2977 emit_move_insn (temp, x);
2978 return temp;
2980 return x;
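/* A sketch, with SOMETHING standing in for whatever value is about to be
   stored (a hypothetical placeholder):

       x = make_safe_from (x, other);
       emit_move_insn (other, something);

   After the store, X still yields its pre-store value: it was copied
   into a fresh pseudo whenever OTHER could have clobbered it.  */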
2983 /* Emission of insns (adding them to the doubly-linked list). */
2985 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2988 get_last_insn_anywhere (void)
2990 struct sequence_stack *stack;
2991 if (get_last_insn ())
2992 return get_last_insn ();
2993 for (stack = seq_stack; stack; stack = stack->next)
2994 if (stack->last != 0)
2995 return stack->last;
2996 return 0;
2999 /* Return the first nonnote insn emitted in the current sequence or current
3000 function. This routine looks inside SEQUENCEs. */
3003 get_first_nonnote_insn (void)
3005 rtx insn = get_insns ();
3007 if (insn)
3009 if (NOTE_P (insn))
3010 for (insn = next_insn (insn);
3011 insn && NOTE_P (insn);
3012 insn = next_insn (insn))
3013 continue;
3014 else
3016 if (NONJUMP_INSN_P (insn)
3017 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3018 insn = XVECEXP (PATTERN (insn), 0, 0);
3022 return insn;
3025 /* Return the last nonnote insn emitted in the current sequence or current
3026 function. This routine looks inside SEQUENCEs. */
3029 get_last_nonnote_insn (void)
3031 rtx insn = get_last_insn ();
3033 if (insn)
3035 if (NOTE_P (insn))
3036 for (insn = previous_insn (insn);
3037 insn && NOTE_P (insn);
3038 insn = previous_insn (insn))
3039 continue;
3040 else
3042 if (NONJUMP_INSN_P (insn)
3043 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3044 insn = XVECEXP (PATTERN (insn), 0,
3045 XVECLEN (PATTERN (insn), 0) - 1);
3049 return insn;
3052 /* Return the number of actual (non-debug) insns emitted in this
3053 function. */
3056 get_max_insn_count (void)
3058 int n = cur_insn_uid;
3060 /* The table size must be stable across -g, to avoid codegen
3061 differences due to debug insns, and not be affected by
3062 -fmin-insn-uid, to avoid excessive table size and to simplify
3063 debugging of -fcompare-debug failures. */
3064 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3065 n -= cur_debug_insn_uid;
3066 else
3067 n -= MIN_NONDEBUG_INSN_UID;
3069 return n;
3073 /* Return the next insn. If it is a SEQUENCE, return the first insn
3074 of the sequence. */
3077 next_insn (rtx insn)
3079 if (insn)
3081 insn = NEXT_INSN (insn);
3082 if (insn && NONJUMP_INSN_P (insn)
3083 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3084 insn = XVECEXP (PATTERN (insn), 0, 0);
3087 return insn;
3090 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3091 of the sequence. */
3094 previous_insn (rtx insn)
3096 if (insn)
3098 insn = PREV_INSN (insn);
3099 if (insn && NONJUMP_INSN_P (insn)
3100 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3101 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3104 return insn;
3107 /* Return the next insn after INSN that is not a NOTE. This routine does not
3108 look inside SEQUENCEs. */
3111 next_nonnote_insn (rtx insn)
3113 while (insn)
3115 insn = NEXT_INSN (insn);
3116 if (insn == 0 || !NOTE_P (insn))
3117 break;
3120 return insn;
3123 /* Return the next insn after INSN that is not a NOTE, but stop the
3124 search before we enter another basic block. This routine does not
3125 look inside SEQUENCEs. */
3128 next_nonnote_insn_bb (rtx insn)
3130 while (insn)
3132 insn = NEXT_INSN (insn);
3133 if (insn == 0 || !NOTE_P (insn))
3134 break;
3135 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3136 return NULL_RTX;
3139 return insn;
3142 /* Return the previous insn before INSN that is not a NOTE. This routine does
3143 not look inside SEQUENCEs. */
3146 prev_nonnote_insn (rtx insn)
3148 while (insn)
3150 insn = PREV_INSN (insn);
3151 if (insn == 0 || !NOTE_P (insn))
3152 break;
3155 return insn;
3158 /* Return the previous insn before INSN that is not a NOTE, but stop
3159 the search before we enter another basic block. This routine does
3160 not look inside SEQUENCEs. */
3163 prev_nonnote_insn_bb (rtx insn)
3165 while (insn)
3167 insn = PREV_INSN (insn);
3168 if (insn == 0 || !NOTE_P (insn))
3169 break;
3170 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3171 return NULL_RTX;
3174 return insn;
3177 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3178 routine does not look inside SEQUENCEs. */
3181 next_nondebug_insn (rtx insn)
3183 while (insn)
3185 insn = NEXT_INSN (insn);
3186 if (insn == 0 || !DEBUG_INSN_P (insn))
3187 break;
3190 return insn;
3193 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3194 This routine does not look inside SEQUENCEs. */
3197 prev_nondebug_insn (rtx insn)
3199 while (insn)
3201 insn = PREV_INSN (insn);
3202 if (insn == 0 || !DEBUG_INSN_P (insn))
3203 break;
3206 return insn;
3209 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3210 This routine does not look inside SEQUENCEs. */
3213 next_nonnote_nondebug_insn (rtx insn)
3215 while (insn)
3217 insn = NEXT_INSN (insn);
3218 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3219 break;
3222 return insn;
3225 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3226 This routine does not look inside SEQUENCEs. */
3229 prev_nonnote_nondebug_insn (rtx insn)
3231 while (insn)
3233 insn = PREV_INSN (insn);
3234 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3235 break;
3238 return insn;
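/* These walkers compose into the usual scan over "real" insns; a sketch
   that visits every non-note, non-debug insn of the current function:

       rtx insn;
       for (insn = get_insns (); insn;
            insn = next_nonnote_nondebug_insn (insn))
         if (INSN_P (insn))
           ...process insn...;
*/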
3241 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3242 or 0, if there is none. This routine does not look inside
3243 SEQUENCEs. */
3246 next_real_insn (rtx insn)
3248 while (insn)
3250 insn = NEXT_INSN (insn);
3251 if (insn == 0 || INSN_P (insn))
3252 break;
3255 return insn;
3258 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3259 or 0, if there is none. This routine does not look inside
3260 SEQUENCEs. */
3263 prev_real_insn (rtx insn)
3265 while (insn)
3267 insn = PREV_INSN (insn);
3268 if (insn == 0 || INSN_P (insn))
3269 break;
3272 return insn;
3275 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3276 This routine does not look inside SEQUENCEs. */
3279 last_call_insn (void)
3281 rtx insn;
3283 for (insn = get_last_insn ();
3284 insn && !CALL_P (insn);
3285 insn = PREV_INSN (insn))
3288 return insn;
3291 /* Find the next insn after INSN that really does something. This routine
3292 does not look inside SEQUENCEs. After reload this also skips over
3293 standalone USE and CLOBBER insns. */
3296 active_insn_p (const_rtx insn)
3298 return (CALL_P (insn) || JUMP_P (insn)
3299 || JUMP_TABLE_DATA_P (insn) /* FIXME */
3300 || (NONJUMP_INSN_P (insn)
3301 && (! reload_completed
3302 || (GET_CODE (PATTERN (insn)) != USE
3303 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3307 next_active_insn (rtx insn)
3309 while (insn)
3311 insn = NEXT_INSN (insn);
3312 if (insn == 0 || active_insn_p (insn))
3313 break;
3316 return insn;
3319 /* Find the last insn before INSN that really does something. This routine
3320 does not look inside SEQUENCEs. After reload this also skips over
3321 standalone USE and CLOBBER insns. */
3324 prev_active_insn (rtx insn)
3326 while (insn)
3328 insn = PREV_INSN (insn);
3329 if (insn == 0 || active_insn_p (insn))
3330 break;
3333 return insn;
3336 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3339 next_label (rtx insn)
3341 while (insn)
3343 insn = NEXT_INSN (insn);
3344 if (insn == 0 || LABEL_P (insn))
3345 break;
3348 return insn;
3351 /* Return the last label to mark the same position as LABEL. Return LABEL
3352 itself if it is null or any return rtx. */
3355 skip_consecutive_labels (rtx label)
3357 rtx insn;
3359 if (label && ANY_RETURN_P (label))
3360 return label;
3362 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3363 if (LABEL_P (insn))
3364 label = insn;
3366 return label;
3369 #ifdef HAVE_cc0
3370 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3371 and REG_CC_USER notes so we can find it. */
3373 void
3374 link_cc0_insns (rtx insn)
3376 rtx user = next_nonnote_insn (insn);
3378 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3379 user = XVECEXP (PATTERN (user), 0, 0);
3381 add_reg_note (user, REG_CC_SETTER, insn);
3382 add_reg_note (insn, REG_CC_USER, user);
3385 /* Return the next insn that uses CC0 after INSN, which is assumed to
3386 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3387 applied to the result of this function should yield INSN).
3389 Normally, this is simply the next insn. However, if a REG_CC_USER note
3390 is present, it contains the insn that uses CC0.
3392 Return 0 if we can't find the insn. */
3395 next_cc0_user (rtx insn)
3397 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3399 if (note)
3400 return XEXP (note, 0);
3402 insn = next_nonnote_insn (insn);
3403 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3404 insn = XVECEXP (PATTERN (insn), 0, 0);
3406 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3407 return insn;
3409 return 0;
3412 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3413 note, it is the previous insn. */
3416 prev_cc0_setter (rtx insn)
3418 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3420 if (note)
3421 return XEXP (note, 0);
3423 insn = prev_nonnote_insn (insn);
3424 gcc_assert (sets_cc0_p (PATTERN (insn)));
3426 return insn;
3428 #endif
3430 #ifdef AUTO_INC_DEC
3431 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3433 static int
3434 find_auto_inc (rtx *xp, void *data)
3436 rtx x = *xp;
3437 rtx reg = (rtx) data;
3439 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3440 return 0;
3442 switch (GET_CODE (x))
3444 case PRE_DEC:
3445 case PRE_INC:
3446 case POST_DEC:
3447 case POST_INC:
3448 case PRE_MODIFY:
3449 case POST_MODIFY:
3450 if (rtx_equal_p (reg, XEXP (x, 0)))
3451 return 1;
3452 break;
3454 default:
3455 gcc_unreachable ();
3457 return -1;
3459 #endif
3461 /* Increment the label uses for all labels present in X. */
3463 static void
3464 mark_label_nuses (rtx x)
3466 enum rtx_code code;
3467 int i, j;
3468 const char *fmt;
3470 code = GET_CODE (x);
3471 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3472 LABEL_NUSES (XEXP (x, 0))++;
3474 fmt = GET_RTX_FORMAT (code);
3475 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3477 if (fmt[i] == 'e')
3478 mark_label_nuses (XEXP (x, i));
3479 else if (fmt[i] == 'E')
3480 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3481 mark_label_nuses (XVECEXP (x, i, j));
3486 /* Try splitting insns that can be split for better scheduling.
3487 PAT is the pattern which might split.
3488 TRIAL is the insn providing PAT.
3489 LAST is nonzero if we should return the last insn of the sequence produced.
3491 If this routine succeeds in splitting, it returns the first or last
3492 replacement insn depending on the value of LAST. Otherwise, it
3493 returns TRIAL. If the insn to be returned can be split, it will be. */
3496 try_split (rtx pat, rtx trial, int last)
3498 rtx before = PREV_INSN (trial);
3499 rtx after = NEXT_INSN (trial);
3500 int has_barrier = 0;
3501 rtx note, seq, tem;
3502 int probability;
3503 rtx insn_last, insn;
3504 int njumps = 0;
3506 /* We're not good at redistributing frame information. */
3507 if (RTX_FRAME_RELATED_P (trial))
3508 return trial;
3510 if (any_condjump_p (trial)
3511 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3512 split_branch_probability = INTVAL (XEXP (note, 0));
3513 probability = split_branch_probability;
3515 seq = split_insns (pat, trial);
3517 split_branch_probability = -1;
3519 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3520 We may need to handle this specially. */
3521 if (after && BARRIER_P (after))
3523 has_barrier = 1;
3524 after = NEXT_INSN (after);
3527 if (!seq)
3528 return trial;
3530 /* Avoid infinite loop if any insn of the result matches
3531 the original pattern. */
3532 insn_last = seq;
3533 while (1)
3535 if (INSN_P (insn_last)
3536 && rtx_equal_p (PATTERN (insn_last), pat))
3537 return trial;
3538 if (!NEXT_INSN (insn_last))
3539 break;
3540 insn_last = NEXT_INSN (insn_last);
3543 /* We will be adding the new sequence to the function. The splitters
3544 may have introduced invalid RTL sharing, so unshare the sequence now. */
3545 unshare_all_rtl_in_chain (seq);
3547 /* Mark labels. */
3548 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3550 if (JUMP_P (insn))
3552 mark_jump_label (PATTERN (insn), insn, 0);
3553 njumps++;
3554 if (probability != -1
3555 && any_condjump_p (insn)
3556 && !find_reg_note (insn, REG_BR_PROB, 0))
3558 /* We can preserve the REG_BR_PROB notes only if exactly
3559 one jump is created, otherwise the machine description
3560 is responsible for this step using the
3561 split_branch_probability variable. */
3562 gcc_assert (njumps == 1);
3563 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3568 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3569 in SEQ and copy any additional information across. */
3570 if (CALL_P (trial))
3572 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3573 if (CALL_P (insn))
3575 rtx next, *p;
3577 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3578 target may have explicitly specified. */
3579 p = &CALL_INSN_FUNCTION_USAGE (insn);
3580 while (*p)
3581 p = &XEXP (*p, 1);
3582 *p = CALL_INSN_FUNCTION_USAGE (trial);
3584 /* If the old call was a sibling call, the new one must
3585 be too. */
3586 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3588 /* If the new call is the last instruction in the sequence,
3589 it will effectively replace the old call in-situ. Otherwise
3590 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3591 so that it comes immediately after the new call. */
3592 if (NEXT_INSN (insn))
3593 for (next = NEXT_INSN (trial);
3594 next && NOTE_P (next);
3595 next = NEXT_INSN (next))
3596 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3598 remove_insn (next);
3599 add_insn_after (next, insn, NULL);
3600 break;
3605 /* Copy notes, particularly those related to the CFG. */
3606 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3608 switch (REG_NOTE_KIND (note))
3610 case REG_EH_REGION:
3611 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3612 break;
3614 case REG_NORETURN:
3615 case REG_SETJMP:
3616 case REG_TM:
3617 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3619 if (CALL_P (insn))
3620 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3622 break;
3624 case REG_NON_LOCAL_GOTO:
3625 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3627 if (JUMP_P (insn))
3628 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3630 break;
3632 #ifdef AUTO_INC_DEC
3633 case REG_INC:
3634 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3636 rtx reg = XEXP (note, 0);
3637 if (!FIND_REG_INC_NOTE (insn, reg)
3638 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3639 add_reg_note (insn, REG_INC, reg);
3641 break;
3642 #endif
3644 case REG_ARGS_SIZE:
3645 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3646 break;
3648 default:
3649 break;
3653 /* If there are LABELS inside the split insns increment the
3654 usage count so we don't delete the label. */
3655 if (INSN_P (trial))
3657 insn = insn_last;
3658 while (insn != NULL_RTX)
3660 /* JUMP_P insns have already been "marked" above. */
3661 if (NONJUMP_INSN_P (insn))
3662 mark_label_nuses (PATTERN (insn));
3664 insn = PREV_INSN (insn);
3668 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATION (trial));
3670 delete_insn (trial);
3671 if (has_barrier)
3672 emit_barrier_after (tem);
3674 /* Recursively call try_split for each new insn created; by the
3675 time control returns here that insn will be fully split, so
3676 set LAST and continue from the insn after the one returned.
3677 We can't use next_active_insn here since AFTER may be a note.
3678 Ignore deleted insns, which can occur if not optimizing. */
3679 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3680 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3681 tem = try_split (PATTERN (tem), tem, 1);
3683 /* Return either the first or the last insn, depending on which was
3684 requested. */
3685 return last
3686 ? (after ? PREV_INSN (after) : get_last_insn ())
3687 : NEXT_INSN (before);
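/* The canonical use mirrors the recursion above: hand an insn its own
   pattern and ask for the last insn of the replacement:

       insn = try_split (PATTERN (insn), insn, 1);

   If the target's splitters decline, the original insn comes back
   unchanged.  */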
3690 /* Make and return an INSN rtx, initializing all its slots.
3691 Store PATTERN in the pattern slots. */
3694 make_insn_raw (rtx pattern)
3696 rtx insn;
3698 insn = rtx_alloc (INSN);
3700 INSN_UID (insn) = cur_insn_uid++;
3701 PATTERN (insn) = pattern;
3702 INSN_CODE (insn) = -1;
3703 REG_NOTES (insn) = NULL;
3704 INSN_LOCATION (insn) = curr_insn_location ();
3705 BLOCK_FOR_INSN (insn) = NULL;
3707 #ifdef ENABLE_RTL_CHECKING
3708 if (insn
3709 && INSN_P (insn)
3710 && (returnjump_p (insn)
3711 || (GET_CODE (insn) == SET
3712 && SET_DEST (insn) == pc_rtx)))
3714 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3715 debug_rtx (insn);
3717 #endif
3719 return insn;
3722 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3724 static rtx
3725 make_debug_insn_raw (rtx pattern)
3727 rtx insn;
3729 insn = rtx_alloc (DEBUG_INSN);
3730 INSN_UID (insn) = cur_debug_insn_uid++;
3731 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3732 INSN_UID (insn) = cur_insn_uid++;
3734 PATTERN (insn) = pattern;
3735 INSN_CODE (insn) = -1;
3736 REG_NOTES (insn) = NULL;
3737 INSN_LOCATION (insn) = curr_insn_location ();
3738 BLOCK_FOR_INSN (insn) = NULL;
3740 return insn;
3743 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3745 static rtx
3746 make_jump_insn_raw (rtx pattern)
3748 rtx insn;
3750 insn = rtx_alloc (JUMP_INSN);
3751 INSN_UID (insn) = cur_insn_uid++;
3753 PATTERN (insn) = pattern;
3754 INSN_CODE (insn) = -1;
3755 REG_NOTES (insn) = NULL;
3756 JUMP_LABEL (insn) = NULL;
3757 INSN_LOCATION (insn) = curr_insn_location ();
3758 BLOCK_FOR_INSN (insn) = NULL;
3760 return insn;
3763 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3765 static rtx
3766 make_call_insn_raw (rtx pattern)
3768 rtx insn;
3770 insn = rtx_alloc (CALL_INSN);
3771 INSN_UID (insn) = cur_insn_uid++;
3773 PATTERN (insn) = pattern;
3774 INSN_CODE (insn) = -1;
3775 REG_NOTES (insn) = NULL;
3776 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3777 INSN_LOCATION (insn) = curr_insn_location ();
3778 BLOCK_FOR_INSN (insn) = NULL;
3780 return insn;
3783 /* Like `make_insn_raw' but make a NOTE instead of an insn. */
3785 static rtx
3786 make_note_raw (enum insn_note subtype)
3788 /* Some notes are never created this way at all. These notes are
3789 only created by patching out insns. */
3790 gcc_assert (subtype != NOTE_INSN_DELETED_LABEL
3791 && subtype != NOTE_INSN_DELETED_DEBUG_LABEL);
3793 rtx note = rtx_alloc (NOTE);
3794 INSN_UID (note) = cur_insn_uid++;
3795 NOTE_KIND (note) = subtype;
3796 BLOCK_FOR_INSN (note) = NULL;
3797 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3798 return note;
3801 /* Add INSN to the end of the doubly-linked list, between PREV and NEXT.
3802 INSN may be any object that can appear in the chain: INSN_P and NOTE_P objects,
3803 but also BARRIERs and JUMP_TABLE_DATAs. PREV and NEXT may be NULL. */
3805 static inline void
3806 link_insn_into_chain (rtx insn, rtx prev, rtx next)
3808 PREV_INSN (insn) = prev;
3809 NEXT_INSN (insn) = next;
3810 if (prev != NULL)
3812 NEXT_INSN (prev) = insn;
3813 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3815 rtx sequence = PATTERN (prev);
3816 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3819 if (next != NULL)
3821 PREV_INSN (next) = insn;
3822 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3823 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3826 if (NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3828 rtx sequence = PATTERN (insn);
3829 PREV_INSN (XVECEXP (sequence, 0, 0)) = prev;
3830 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3834 /* Add INSN to the end of the doubly-linked list.
3835 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3837 void
3838 add_insn (rtx insn)
3840 rtx prev = get_last_insn ();
3841 link_insn_into_chain (insn, prev, NULL);
3842 if (NULL == get_insns ())
3843 set_first_insn (insn);
3844 set_last_insn (insn);
3847 /* Add INSN into the doubly-linked list after insn AFTER. */
3849 static void
3850 add_insn_after_nobb (rtx insn, rtx after)
3852 rtx next = NEXT_INSN (after);
3854 gcc_assert (!optimize || !INSN_DELETED_P (after));
3856 link_insn_into_chain (insn, after, next);
3858 if (next == NULL)
3860 if (get_last_insn () == after)
3861 set_last_insn (insn);
3862 else
3864 struct sequence_stack *stack = seq_stack;
3865 /* Scan all pending sequences too. */
3866 for (; stack; stack = stack->next)
3867 if (after == stack->last)
3869 stack->last = insn;
3870 break;
3876 /* Add INSN into the doubly-linked list before insn BEFORE. */
3878 static void
3879 add_insn_before_nobb (rtx insn, rtx before)
3881 rtx prev = PREV_INSN (before);
3883 gcc_assert (!optimize || !INSN_DELETED_P (before));
3885 link_insn_into_chain (insn, prev, before);
3887 if (prev == NULL)
3889 if (get_insns () == before)
3890 set_first_insn (insn);
3891 else
3893 struct sequence_stack *stack = seq_stack;
3894 /* Scan all pending sequences too. */
3895 for (; stack; stack = stack->next)
3896 if (before == stack->first)
3898 stack->first = insn;
3899 break;
3902 gcc_assert (stack);
3907 /* Like add_insn_after_nobb, but try to set BLOCK_FOR_INSN.
3908 If BB is NULL, an attempt is made to infer the bb from AFTER.
3910 This and the next function should be the only functions called
3911 to insert an insn once delay slots have been filled since only
3912 they know how to update a SEQUENCE. */
3914 void
3915 add_insn_after (rtx insn, rtx after, basic_block bb)
3917 add_insn_after_nobb (insn, after);
3918 if (!BARRIER_P (after)
3919 && !BARRIER_P (insn)
3920 && (bb = BLOCK_FOR_INSN (after)))
3922 set_block_for_insn (insn, bb);
3923 if (INSN_P (insn))
3924 df_insn_rescan (insn);
3925 /* Should not happen as first in the BB is always
3926 either NOTE or LABEL. */
3927 if (BB_END (bb) == after
3928 /* Avoid clobbering of structure when creating new BB. */
3929 && !BARRIER_P (insn)
3930 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3931 BB_END (bb) = insn;
3935 /* Like add_insn_before_nobb, but try to set BLOCK_FOR_INSN.
3936 If BB is NULL, an attempt is made to infer the bb from BEFORE.
3938 This and the previous function should be the only functions called
3939 to insert an insn once delay slots have been filled since only
3940 they know how to update a SEQUENCE. */
3942 void
3943 add_insn_before (rtx insn, rtx before, basic_block bb)
3945 add_insn_before_nobb (insn, before);
3947 if (!bb
3948 && !BARRIER_P (before)
3949 && !BARRIER_P (insn))
3950 bb = BLOCK_FOR_INSN (before);
3952 if (bb)
3954 set_block_for_insn (insn, bb);
3955 if (INSN_P (insn))
3956 df_insn_rescan (insn);
3957 /* Should not happen as first in the BB is always either NOTE or
3958 LABEL. */
3959 gcc_assert (BB_HEAD (bb) != insn
3960 /* Avoid clobbering of structure when creating new BB. */
3961 || BARRIER_P (insn)
3962 || NOTE_INSN_BASIC_BLOCK_P (insn));
3966 /* Replace INSN with a deleted instruction note. */
3968 void
3969 set_insn_deleted (rtx insn)
3971 if (INSN_P (insn))
3972 df_insn_delete (insn);
3973 PUT_CODE (insn, NOTE);
3974 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3978 /* Unlink INSN from the insn chain.
3980 This function knows how to handle sequences.
3982 This function does not invalidate data flow information associated with
3983 INSN (i.e. does not call df_insn_delete). That makes this function
3984 usable for only disconnecting an insn from the chain, and re-emit it
3985 elsewhere later.
3987 To later insert INSN elsewhere in the insn chain via add_insn and
3988 similar functions, PREV_INSN and NEXT_INSN must be nullified by
3989 the caller. Nullifying them here breaks many insn chain walks.
3991 To really delete an insn and related DF information, use delete_insn. */
3993 void
3994 remove_insn (rtx insn)
3996 rtx next = NEXT_INSN (insn);
3997 rtx prev = PREV_INSN (insn);
3998 basic_block bb;
4000 if (prev)
4002 NEXT_INSN (prev) = next;
4003 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
4005 rtx sequence = PATTERN (prev);
4006 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
4009 else if (get_insns () == insn)
4011 if (next)
4012 PREV_INSN (next) = NULL;
4013 set_first_insn (next);
4015 else
4017 struct sequence_stack *stack = seq_stack;
4018 /* Scan all pending sequences too. */
4019 for (; stack; stack = stack->next)
4020 if (insn == stack->first)
4022 stack->first = next;
4023 break;
4026 gcc_assert (stack);
4029 if (next)
4031 PREV_INSN (next) = prev;
4032 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
4033 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
4035 else if (get_last_insn () == insn)
4036 set_last_insn (prev);
4037 else
4039 struct sequence_stack *stack = seq_stack;
4040 /* Scan all pending sequences too. */
4041 for (; stack; stack = stack->next)
4042 if (insn == stack->last)
4044 stack->last = prev;
4045 break;
4048 gcc_assert (stack);
4051 /* Fix up basic block boundaries, if necessary. */
4052 if (!BARRIER_P (insn)
4053 && (bb = BLOCK_FOR_INSN (insn)))
4055 if (BB_HEAD (bb) == insn)
4057 /* Never ever delete the basic block note without deleting whole
4058 basic block. */
4059 gcc_assert (!NOTE_P (insn));
4060 BB_HEAD (bb) = next;
4062 if (BB_END (bb) == insn)
4063 BB_END (bb) = prev;
4067 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4069 void
4070 add_function_usage_to (rtx call_insn, rtx call_fusage)
4072 gcc_assert (call_insn && CALL_P (call_insn));
4074 /* Put the register usage information on the CALL. If there is already
4075 some usage information, put ours at the end. */
4076 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4078 rtx link;
4080 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4081 link = XEXP (link, 1))
4084 XEXP (link, 1) = call_fusage;
4086 else
4087 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4090 /* Delete all insns made since FROM.
4091 FROM becomes the new last instruction. */
4093 void
4094 delete_insns_since (rtx from)
4096 if (from == 0)
4097 set_first_insn (0);
4098 else
4099 NEXT_INSN (from) = 0;
4100 set_last_insn (from);
4103 /* This function is deprecated; please use sequences instead.
4105 Move a consecutive bunch of insns to a different place in the chain.
4106 The insns to be moved are those between FROM and TO.
4107 They are moved to a new position after the insn AFTER.
4108 AFTER must not be FROM or TO or any insn in between.
4110 This function does not know about SEQUENCEs and hence should not be
4111 called after delay-slot filling has been done. */
4113 void
4114 reorder_insns_nobb (rtx from, rtx to, rtx after)
4116 #ifdef ENABLE_CHECKING
4117 rtx x;
4118 for (x = from; x != to; x = NEXT_INSN (x))
4119 gcc_assert (after != x);
4120 gcc_assert (after != to);
4121 #endif
4123 /* Splice this bunch out of where it is now. */
4124 if (PREV_INSN (from))
4125 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4126 if (NEXT_INSN (to))
4127 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4128 if (get_last_insn () == to)
4129 set_last_insn (PREV_INSN (from));
4130 if (get_insns () == from)
4131 set_first_insn (NEXT_INSN (to));
4133 /* Make the new neighbors point to it and it to them. */
4134 if (NEXT_INSN (after))
4135 PREV_INSN (NEXT_INSN (after)) = to;
4137 NEXT_INSN (to) = NEXT_INSN (after);
4138 PREV_INSN (from) = after;
4139 NEXT_INSN (after) = from;
4140 if (after == get_last_insn())
4141 set_last_insn (to);
4144 /* Same as function above, but take care to update BB boundaries. */
4145 void
4146 reorder_insns (rtx from, rtx to, rtx after)
4148 rtx prev = PREV_INSN (from);
4149 basic_block bb, bb2;
4151 reorder_insns_nobb (from, to, after);
4153 if (!BARRIER_P (after)
4154 && (bb = BLOCK_FOR_INSN (after)))
4156 rtx x;
4157 df_set_bb_dirty (bb);
4159 if (!BARRIER_P (from)
4160 && (bb2 = BLOCK_FOR_INSN (from)))
4162 if (BB_END (bb2) == to)
4163 BB_END (bb2) = prev;
4164 df_set_bb_dirty (bb2);
4167 if (BB_END (bb) == after)
4168 BB_END (bb) = to;
4170 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4171 if (!BARRIER_P (x))
4172 df_insn_change_bb (x, bb);
4177 /* Emit insn(s) of given code and pattern
4178 at a specified place within the doubly-linked list.
4180 All of the emit_foo global entry points accept an object
4181 X which is either an insn list or a PATTERN of a single
4182 instruction.
4184 There are thus a few canonical ways to generate code and
4185 emit it at a specific place in the instruction stream. For
4186 example, consider the instruction named SPOT and the fact that
4187 we would like to emit some instructions before SPOT. We might
4188 do it like this:
4190 start_sequence ();
4191 ... emit the new instructions ...
4192 insns_head = get_insns ();
4193 end_sequence ();
4195 emit_insn_before (insns_head, SPOT);
4197 It used to be common to generate SEQUENCE rtl instead, but that
4198 is a relic of the past which no longer occurs. The reason is that
4199 SEQUENCE rtl results in badly fragmented RTL memory, since the SEQUENCE
4200 generated would almost certainly die right after it was created. */
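/* Editorial sketch, not part of the original file: the canonical pattern
   from the comment above, made concrete.  Emit a clearing move of DEST
   just before the insn SPOT; the `example_' name is hypothetical.  */
static void ATTRIBUTE_UNUSED
example_emit_clear_before (rtx dest, rtx spot)
{
  rtx insns_head;

  start_sequence ();
  emit_move_insn (dest, const0_rtx);	/* ... emit the new instructions ... */
  insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}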
4202 static rtx
4203 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4204 rtx (*make_raw) (rtx))
4206 rtx insn;
4208 gcc_assert (before);
4210 if (x == NULL_RTX)
4211 return last;
4213 switch (GET_CODE (x))
4215 case DEBUG_INSN:
4216 case INSN:
4217 case JUMP_INSN:
4218 case CALL_INSN:
4219 case CODE_LABEL:
4220 case BARRIER:
4221 case NOTE:
4222 insn = x;
4223 while (insn)
4225 rtx next = NEXT_INSN (insn);
4226 add_insn_before (insn, before, bb);
4227 last = insn;
4228 insn = next;
4230 break;
4232 #ifdef ENABLE_RTL_CHECKING
4233 case SEQUENCE:
4234 gcc_unreachable ();
4235 break;
4236 #endif
4238 default:
4239 last = (*make_raw) (x);
4240 add_insn_before (last, before, bb);
4241 break;
4244 return last;
4247 /* Make X be output before the instruction BEFORE. */
4250 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4252 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4255 /* Make an instruction with body X and code JUMP_INSN
4256 and output it before the instruction BEFORE. */
4259 emit_jump_insn_before_noloc (rtx x, rtx before)
4261 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4262 make_jump_insn_raw);
4265 /* Make an instruction with body X and code CALL_INSN
4266 and output it before the instruction BEFORE. */
4269 emit_call_insn_before_noloc (rtx x, rtx before)
4271 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4272 make_call_insn_raw);
4275 /* Make an instruction with body X and code DEBUG_INSN
4276 and output it before the instruction BEFORE. */
4279 emit_debug_insn_before_noloc (rtx x, rtx before)
4281 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4282 make_debug_insn_raw);
4285 /* Make an insn of code BARRIER
4286 and output it before the insn BEFORE. */
4289 emit_barrier_before (rtx before)
4291 rtx insn = rtx_alloc (BARRIER);
4293 INSN_UID (insn) = cur_insn_uid++;
4295 add_insn_before (insn, before, NULL);
4296 return insn;
4299 /* Emit the label LABEL before the insn BEFORE. */
4302 emit_label_before (rtx label, rtx before)
4304 gcc_checking_assert (INSN_UID (label) == 0);
4305 INSN_UID (label) = cur_insn_uid++;
4306 add_insn_before (label, before, NULL);
4307 return label;
4310 /* Helper for emit_insn_after; handles lists of instructions
4311 efficiently. */
4313 static rtx
4314 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4316 rtx last;
4317 rtx after_after;
4318 if (!bb && !BARRIER_P (after))
4319 bb = BLOCK_FOR_INSN (after);
4321 if (bb)
4323 df_set_bb_dirty (bb);
4324 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4325 if (!BARRIER_P (last))
4327 set_block_for_insn (last, bb);
4328 df_insn_rescan (last);
4330 if (!BARRIER_P (last))
4332 set_block_for_insn (last, bb);
4333 df_insn_rescan (last);
4335 if (BB_END (bb) == after)
4336 BB_END (bb) = last;
4338 else
4339 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4340 continue;
4342 after_after = NEXT_INSN (after);
4344 NEXT_INSN (after) = first;
4345 PREV_INSN (first) = after;
4346 NEXT_INSN (last) = after_after;
4347 if (after_after)
4348 PREV_INSN (after_after) = last;
4350 if (after == get_last_insn())
4351 set_last_insn (last);
4353 return last;
4356 static rtx
4357 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4358 rtx (*make_raw)(rtx))
4360 rtx last = after;
4362 gcc_assert (after);
4364 if (x == NULL_RTX)
4365 return last;
4367 switch (GET_CODE (x))
4369 case DEBUG_INSN:
4370 case INSN:
4371 case JUMP_INSN:
4372 case CALL_INSN:
4373 case CODE_LABEL:
4374 case BARRIER:
4375 case NOTE:
4376 last = emit_insn_after_1 (x, after, bb);
4377 break;
4379 #ifdef ENABLE_RTL_CHECKING
4380 case SEQUENCE:
4381 gcc_unreachable ();
4382 break;
4383 #endif
4385 default:
4386 last = (*make_raw) (x);
4387 add_insn_after (last, after, bb);
4388 break;
4391 return last;
4394 /* Make X be output after the insn AFTER and set its basic block to BB. If
4395 BB is NULL, an attempt is made to infer the BB from AFTER. */
4398 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4400 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4404 /* Make an insn of code JUMP_INSN with body X
4405 and output it after the insn AFTER. */
4408 emit_jump_insn_after_noloc (rtx x, rtx after)
4410 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4413 /* Make an instruction with body X and code CALL_INSN
4414 and output it after the instruction AFTER. */
4417 emit_call_insn_after_noloc (rtx x, rtx after)
4419 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4422 /* Make an instruction with body X and code DEBUG_INSN
4423 and output it after the instruction AFTER. */
4426 emit_debug_insn_after_noloc (rtx x, rtx after)
4428 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4431 /* Make an insn of code BARRIER
4432 and output it after the insn AFTER. */
4435 emit_barrier_after (rtx after)
4437 rtx insn = rtx_alloc (BARRIER);
4439 INSN_UID (insn) = cur_insn_uid++;
4441 add_insn_after (insn, after, NULL);
4442 return insn;
4445 /* Emit the label LABEL after the insn AFTER. */
4448 emit_label_after (rtx label, rtx after)
4450 gcc_checking_assert (INSN_UID (label) == 0);
4451 INSN_UID (label) = cur_insn_uid++;
4452 add_insn_after (label, after, NULL);
4453 return label;
4456 /* Notes require a bit of special handling: Some notes need to have their
4457 BLOCK_FOR_INSN set, others should never have it set, and some should
4458 have it set or clear depending on the context. */
4460 /* Return true iff a note of kind SUBTYPE should be emitted with routines
4461 that never set BLOCK_FOR_INSN on NOTE. ON_BB_BOUNDARY_P is true if the
4462 caller is asked to emit a note before BB_HEAD, or after BB_END. */
4464 static bool
4465 note_outside_basic_block_p (enum insn_note subtype, bool on_bb_boundary_p)
4467 switch (subtype)
4469 /* NOTE_INSN_SWITCH_TEXT_SECTIONS only appears between basic blocks. */
4470 case NOTE_INSN_SWITCH_TEXT_SECTIONS:
4471 return true;
4473 /* Notes for var tracking and EH region markers can appear between or
4474 inside basic blocks. If the caller is emitting on the basic block
4475 boundary, do not set BLOCK_FOR_INSN on the new note. */
4476 case NOTE_INSN_VAR_LOCATION:
4477 case NOTE_INSN_CALL_ARG_LOCATION:
4478 case NOTE_INSN_EH_REGION_BEG:
4479 case NOTE_INSN_EH_REGION_END:
4480 return on_bb_boundary_p;
4482 /* Otherwise, BLOCK_FOR_INSN must be set. */
4483 default:
4484 return false;
4488 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4491 emit_note_after (enum insn_note subtype, rtx after)
4493 rtx note = make_note_raw (subtype);
4494 basic_block bb = BARRIER_P (after) ? NULL : BLOCK_FOR_INSN (after);
4495 bool on_bb_boundary_p = (bb != NULL && BB_END (bb) == after);
4497 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4498 add_insn_after_nobb (note, after);
4499 else
4500 add_insn_after (note, after, bb);
4501 return note;
4504 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4507 emit_note_before (enum insn_note subtype, rtx before)
4509 rtx note = make_note_raw (subtype);
4510 basic_block bb = BARRIER_P (before) ? NULL : BLOCK_FOR_INSN (before);
4511 bool on_bb_boundary_p = (bb != NULL && BB_HEAD (bb) == before);
4513 if (note_outside_basic_block_p (subtype, on_bb_boundary_p))
4514 add_insn_before_nobb (note, before);
4515 else
4516 add_insn_before (note, before, bb);
4517 return note;
4520 /* Insert PATTERN after AFTER, setting its INSN_LOCATION to LOC.
4521 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4523 static rtx
4524 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4525 rtx (*make_raw) (rtx))
4527 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4529 if (pattern == NULL_RTX || !loc)
4530 return last;
4532 after = NEXT_INSN (after);
4533 while (1)
4535 if (active_insn_p (after) && !INSN_LOCATION (after))
4536 INSN_LOCATION (after) = loc;
4537 if (after == last)
4538 break;
4539 after = NEXT_INSN (after);
4541 return last;
4544 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4545 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4546 any DEBUG_INSNs. */
4548 static rtx
4549 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4550 rtx (*make_raw) (rtx))
4552 rtx prev = after;
4554 if (skip_debug_insns)
4555 while (DEBUG_INSN_P (prev))
4556 prev = PREV_INSN (prev);
4558 if (INSN_P (prev))
4559 return emit_pattern_after_setloc (pattern, after, INSN_LOCATION (prev),
4560 make_raw);
4561 else
4562 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4565 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4567 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4569 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4572 /* Like emit_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4574 emit_insn_after (rtx pattern, rtx after)
4576 return emit_pattern_after (pattern, after, true, make_insn_raw);
4579 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4581 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4583 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4586 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4588 emit_jump_insn_after (rtx pattern, rtx after)
4590 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4593 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4595 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4597 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4600 /* Like emit_call_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4602 emit_call_insn_after (rtx pattern, rtx after)
4604 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4607 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to LOC. */
4609 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4611 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4614 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATION according to AFTER. */
4616 emit_debug_insn_after (rtx pattern, rtx after)
4618 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4621 /* Insert PATTERN before BEFORE, setting its INSN_LOCATION to LOC.
4622 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4623 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4624 CALL_INSN, etc. */
4626 static rtx
4627 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4628 rtx (*make_raw) (rtx))
4630 rtx first = PREV_INSN (before);
4631 rtx last = emit_pattern_before_noloc (pattern, before,
4632 insnp ? before : NULL_RTX,
4633 NULL, make_raw);
4635 if (pattern == NULL_RTX || !loc)
4636 return last;
4638 if (!first)
4639 first = get_insns ();
4640 else
4641 first = NEXT_INSN (first);
4642 while (1)
4644 if (active_insn_p (first) && !INSN_LOCATION (first))
4645 INSN_LOCATION (first) = loc;
4646 if (first == last)
4647 break;
4648 first = NEXT_INSN (first);
4650 return last;
4653 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4654 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4655 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4656 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4658 static rtx
4659 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4660 bool insnp, rtx (*make_raw) (rtx))
4662 rtx next = before;
4664 if (skip_debug_insns)
4665 while (DEBUG_INSN_P (next))
4666 next = PREV_INSN (next);
4668 if (INSN_P (next))
4669 return emit_pattern_before_setloc (pattern, before, INSN_LOCATION (next),
4670 insnp, make_raw);
4671 else
4672 return emit_pattern_before_noloc (pattern, before,
4673 insnp ? before : NULL_RTX,
4674 NULL, make_raw);
4677 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4679 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4681 return emit_pattern_before_setloc (pattern, before, loc, true,
4682 make_insn_raw);
4685 /* Like emit_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4687 emit_insn_before (rtx pattern, rtx before)
4689 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4692 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4694 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4696 return emit_pattern_before_setloc (pattern, before, loc, false,
4697 make_jump_insn_raw);
4700 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATION according to BEFORE. */
4702 emit_jump_insn_before (rtx pattern, rtx before)
4704 return emit_pattern_before (pattern, before, true, false,
4705 make_jump_insn_raw);
4708 /* Like emit_call_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4710 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4712 return emit_pattern_before_setloc (pattern, before, loc, false,
4713 make_call_insn_raw);
4716 /* Like emit_call_insn_before_noloc,
4717 but set INSN_LOCATION according to BEFORE. */
4719 emit_call_insn_before (rtx pattern, rtx before)
4721 return emit_pattern_before (pattern, before, true, false,
4722 make_call_insn_raw);
4725 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATION according to LOC. */
4727 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4729 return emit_pattern_before_setloc (pattern, before, loc, false,
4730 make_debug_insn_raw);
4733 /* Like emit_debug_insn_before_noloc,
4734 but set INSN_LOCATION according to BEFORE. */
4736 emit_debug_insn_before (rtx pattern, rtx before)
4738 return emit_pattern_before (pattern, before, false, false,
4739 make_debug_insn_raw);
4742 /* Take X and emit it at the end of the doubly-linked
4743 INSN list.
4745 Returns the last insn emitted. */
4748 emit_insn (rtx x)
4750 rtx last = get_last_insn();
4751 rtx insn;
4753 if (x == NULL_RTX)
4754 return last;
4756 switch (GET_CODE (x))
4758 case DEBUG_INSN:
4759 case INSN:
4760 case JUMP_INSN:
4761 case CALL_INSN:
4762 case CODE_LABEL:
4763 case BARRIER:
4764 case NOTE:
4765 insn = x;
4766 while (insn)
4768 rtx next = NEXT_INSN (insn);
4769 add_insn (insn);
4770 last = insn;
4771 insn = next;
4773 break;
4775 #ifdef ENABLE_RTL_CHECKING
4776 case JUMP_TABLE_DATA:
4777 case SEQUENCE:
4778 gcc_unreachable ();
4779 break;
4780 #endif
4782 default:
4783 last = make_insn_raw (x);
4784 add_insn (last);
4785 break;
4788 return last;
4791 /* Make an insn of code DEBUG_INSN with pattern X
4792 and add it to the end of the doubly-linked list. */
4795 emit_debug_insn (rtx x)
4797 rtx last = get_last_insn();
4798 rtx insn;
4800 if (x == NULL_RTX)
4801 return last;
4803 switch (GET_CODE (x))
4805 case DEBUG_INSN:
4806 case INSN:
4807 case JUMP_INSN:
4808 case CALL_INSN:
4809 case CODE_LABEL:
4810 case BARRIER:
4811 case NOTE:
4812 insn = x;
4813 while (insn)
4815 rtx next = NEXT_INSN (insn);
4816 add_insn (insn);
4817 last = insn;
4818 insn = next;
4820 break;
4822 #ifdef ENABLE_RTL_CHECKING
4823 case JUMP_TABLE_DATA:
4824 case SEQUENCE:
4825 gcc_unreachable ();
4826 break;
4827 #endif
4829 default:
4830 last = make_debug_insn_raw (x);
4831 add_insn (last);
4832 break;
4835 return last;
4838 /* Make an insn of code JUMP_INSN with pattern X
4839 and add it to the end of the doubly-linked list. */
4842 emit_jump_insn (rtx x)
4844 rtx last = NULL_RTX, insn;
4846 switch (GET_CODE (x))
4848 case DEBUG_INSN:
4849 case INSN:
4850 case JUMP_INSN:
4851 case CALL_INSN:
4852 case CODE_LABEL:
4853 case BARRIER:
4854 case NOTE:
4855 insn = x;
4856 while (insn)
4858 rtx next = NEXT_INSN (insn);
4859 add_insn (insn);
4860 last = insn;
4861 insn = next;
4863 break;
4865 #ifdef ENABLE_RTL_CHECKING
4866 case JUMP_TABLE_DATA:
4867 case SEQUENCE:
4868 gcc_unreachable ();
4869 break;
4870 #endif
4872 default:
4873 last = make_jump_insn_raw (x);
4874 add_insn (last);
4875 break;
4878 return last;
4881 /* Make an insn of code CALL_INSN with pattern X
4882 and add it to the end of the doubly-linked list. */
4885 emit_call_insn (rtx x)
4887 rtx insn;
4889 switch (GET_CODE (x))
4891 case DEBUG_INSN:
4892 case INSN:
4893 case JUMP_INSN:
4894 case CALL_INSN:
4895 case CODE_LABEL:
4896 case BARRIER:
4897 case NOTE:
4898 insn = emit_insn (x);
4899 break;
4901 #ifdef ENABLE_RTL_CHECKING
4902 case SEQUENCE:
4903 case JUMP_TABLE_DATA:
4904 gcc_unreachable ();
4905 break;
4906 #endif
4908 default:
4909 insn = make_call_insn_raw (x);
4910 add_insn (insn);
4911 break;
4914 return insn;
4917 /* Add the label LABEL to the end of the doubly-linked list. */
4920 emit_label (rtx label)
4922 gcc_checking_assert (INSN_UID (label) == 0);
4923 INSN_UID (label) = cur_insn_uid++;
4924 add_insn (label);
4925 return label;
4928 /* Make an insn of code JUMP_TABLE_DATA
4929 and add it to the end of the doubly-linked list. */
4932 emit_jump_table_data (rtx table)
4934 rtx jump_table_data = rtx_alloc (JUMP_TABLE_DATA);
4935 INSN_UID (jump_table_data) = cur_insn_uid++;
4936 PATTERN (jump_table_data) = table;
4937 BLOCK_FOR_INSN (jump_table_data) = NULL;
4938 add_insn (jump_table_data);
4939 return jump_table_data;
4942 /* Make an insn of code BARRIER
4943 and add it to the end of the doubly-linked list. */
4946 emit_barrier (void)
4948 rtx barrier = rtx_alloc (BARRIER);
4949 INSN_UID (barrier) = cur_insn_uid++;
4950 add_insn (barrier);
4951 return barrier;
4954 /* Emit a copy of note ORIG. */
4957 emit_note_copy (rtx orig)
4959 enum insn_note kind = (enum insn_note) NOTE_KIND (orig);
4960 rtx note = make_note_raw (kind);
4961 NOTE_DATA (note) = NOTE_DATA (orig);
4962 add_insn (note);
4963 return note;
4966 /* Make an insn of code NOTE with kind KIND
4967 and add it to the end of the doubly-linked list. */
4970 emit_note (enum insn_note kind)
4972 rtx note = make_note_raw (kind);
4973 add_insn (note);
4974 return note;
4977 /* Emit a clobber of lvalue X. */
4980 emit_clobber (rtx x)
4982 /* CONCATs should not appear in the insn stream. */
4983 if (GET_CODE (x) == CONCAT)
4985 emit_clobber (XEXP (x, 0));
4986 return emit_clobber (XEXP (x, 1));
4988 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4991 /* Return a sequence of insns to clobber lvalue X. */
4994 gen_clobber (rtx x)
4996 rtx seq;
4998 start_sequence ();
4999 emit_clobber (x);
5000 seq = get_insns ();
5001 end_sequence ();
5002 return seq;
5005 /* Emit a use of rvalue X. */
5008 emit_use (rtx x)
5010 /* CONCATs should not appear in the insn stream. */
5011 if (GET_CODE (x) == CONCAT)
5013 emit_use (XEXP (x, 0));
5014 return emit_use (XEXP (x, 1));
5016 return emit_insn (gen_rtx_USE (VOIDmode, x));
5019 /* Return a sequence of insns to use rvalue X. */
5022 gen_use (rtx x)
5024 rtx seq;
5026 start_sequence ();
5027 emit_use (x);
5028 seq = get_insns ();
5029 end_sequence ();
5030 return seq;
5033 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5034 note of this type already exists, its datum is replaced. */
5037 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5039 rtx note = find_reg_note (insn, kind, NULL_RTX);
5041 switch (kind)
5043 case REG_EQUAL:
5044 case REG_EQUIV:
5045 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5046 has multiple sets (some callers assume single_set
5047 means the insn only has one set, when in fact it
5048 means the insn only has one *useful* set). */
5049 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5051 gcc_assert (!note);
5052 return NULL_RTX;
5055 /* Don't add ASM_OPERANDS REG_EQUAL/REG_EQUIV notes.
5056 It serves no useful purpose and breaks eliminate_regs. */
5057 if (GET_CODE (datum) == ASM_OPERANDS)
5058 return NULL_RTX;
5060 if (note)
5062 XEXP (note, 0) = datum;
5063 df_notes_rescan (insn);
5064 return note;
5066 break;
5068 default:
5069 if (note)
5071 XEXP (note, 0) = datum;
5072 return note;
5074 break;
5077 add_reg_note (insn, kind, datum);
5079 switch (kind)
5081 case REG_EQUAL:
5082 case REG_EQUIV:
5083 df_notes_rescan (insn);
5084 break;
5085 default:
5086 break;
5089 return REG_NOTES (insn);
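/* Editorial sketch, not part of the original file: record that INSN's
   single useful set computes the constant 42, replacing the datum of any
   existing REG_EQUAL note.  The `example_' name is hypothetical.  */
static void ATTRIBUTE_UNUSED
example_note_constant_result (rtx insn)
{
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
}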
5092 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5094 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5096 rtx set = single_set (insn);
5098 if (set && SET_DEST (set) == dst)
5099 return set_unique_reg_note (insn, kind, datum);
5100 return NULL_RTX;
5103 /* Return an indication of which type of insn should have X as a body.
5104 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5106 static enum rtx_code
5107 classify_insn (rtx x)
5109 if (LABEL_P (x))
5110 return CODE_LABEL;
5111 if (GET_CODE (x) == CALL)
5112 return CALL_INSN;
5113 if (ANY_RETURN_P (x))
5114 return JUMP_INSN;
5115 if (GET_CODE (x) == SET)
5117 if (SET_DEST (x) == pc_rtx)
5118 return JUMP_INSN;
5119 else if (GET_CODE (SET_SRC (x)) == CALL)
5120 return CALL_INSN;
5121 else
5122 return INSN;
5124 if (GET_CODE (x) == PARALLEL)
5126 int j;
5127 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5128 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5129 return CALL_INSN;
5130 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5131 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5132 return JUMP_INSN;
5133 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5134 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5135 return CALL_INSN;
5137 return INSN;
5140 /* Emit the rtl pattern X as an appropriate kind of insn.
5141 If X is a label, it is simply added into the insn chain. */
5144 emit (rtx x)
5146 enum rtx_code code = classify_insn (x);
5148 switch (code)
5150 case CODE_LABEL:
5151 return emit_label (x);
5152 case INSN:
5153 return emit_insn (x);
5154 case JUMP_INSN:
5156 rtx insn = emit_jump_insn (x);
5157 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5158 return emit_barrier ();
5159 return insn;
5161 case CALL_INSN:
5162 return emit_call_insn (x);
5163 case DEBUG_INSN:
5164 return emit_debug_insn (x);
5165 default:
5166 gcc_unreachable ();
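/* Editorial sketch, not part of the original file: classify_insn lets
   emit pick the right insn code for a bare pattern; a plain SET of a
   non-pc destination comes out as an ordinary INSN.  DEST and SRC are
   assumed to be valid operands; the `example_' name is hypothetical.  */
static rtx ATTRIBUTE_UNUSED
example_emit_set (rtx dest, rtx src)
{
  return emit (gen_rtx_SET (VOIDmode, dest, src));
}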
5170 /* Space for free sequence stack entries. */
5171 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5173 /* Begin emitting insns to a sequence. If this sequence will contain
5174 something that might cause the compiler to pop arguments to function
5175 calls (because those pops have previously been deferred; see
5176 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5177 before calling this function. That will ensure that the deferred
5178 pops are not accidentally emitted in the middle of this sequence. */
5180 void
5181 start_sequence (void)
5183 struct sequence_stack *tem;
5185 if (free_sequence_stack != NULL)
5187 tem = free_sequence_stack;
5188 free_sequence_stack = tem->next;
5190 else
5191 tem = ggc_alloc_sequence_stack ();
5193 tem->next = seq_stack;
5194 tem->first = get_insns ();
5195 tem->last = get_last_insn ();
5197 seq_stack = tem;
5199 set_first_insn (0);
5200 set_last_insn (0);
5203 /* Set up the insn chain starting with FIRST as the current sequence,
5204 saving the previously current one. See the documentation for
5205 start_sequence for more information about how to use this function. */
5207 void
5208 push_to_sequence (rtx first)
5210 rtx last;
5212 start_sequence ();
5214 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5217 set_first_insn (first);
5218 set_last_insn (last);
5221 /* Like push_to_sequence, but take the last insn as an argument to avoid
5222 looping through the list. */
5224 void
5225 push_to_sequence2 (rtx first, rtx last)
5227 start_sequence ();
5229 set_first_insn (first);
5230 set_last_insn (last);
5233 /* Set up the outer-level insn chain
5234 as the current sequence, saving the previously current one. */
5236 void
5237 push_topmost_sequence (void)
5239 struct sequence_stack *stack, *top = NULL;
5241 start_sequence ();
5243 for (stack = seq_stack; stack; stack = stack->next)
5244 top = stack;
5246 set_first_insn (top->first);
5247 set_last_insn (top->last);
5250 /* After emitting to the outer-level insn chain, update the outer-level
5251 insn chain, and restore the previous saved state. */
5253 void
5254 pop_topmost_sequence (void)
5256 struct sequence_stack *stack, *top = NULL;
5258 for (stack = seq_stack; stack; stack = stack->next)
5259 top = stack;
5261 top->first = get_insns ();
5262 top->last = get_last_insn ();
5264 end_sequence ();
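/* Editorial sketch, not part of the original file: emit a USE of REG at
   the end of the function's outer-level insn chain even while nested
   inside one or more sequences.  The `example_' name is hypothetical.  */
static void ATTRIBUTE_UNUSED
example_emit_use_at_function_end (rtx reg)
{
  push_topmost_sequence ();
  emit_use (reg);
  pop_topmost_sequence ();
}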
5267 /* After emitting to a sequence, restore previous saved state.
5269 To get the contents of the sequence just made, you must call
5270 `get_insns' *before* calling here.
5272 If the compiler might have deferred popping arguments while
5273 generating this sequence, and this sequence will not be immediately
5274 inserted into the instruction stream, use do_pending_stack_adjust
5275 before calling get_insns. That will ensure that the deferred
5276 pops are inserted into this sequence, and not into some random
5277 location in the instruction stream. See INHIBIT_DEFER_POP for more
5278 information about deferred popping of arguments. */
5280 void
5281 end_sequence (void)
5283 struct sequence_stack *tem = seq_stack;
5285 set_first_insn (tem->first);
5286 set_last_insn (tem->last);
5287 seq_stack = tem->next;
5289 memset (tem, 0, sizeof (*tem));
5290 tem->next = free_sequence_stack;
5291 free_sequence_stack = tem;
5294 /* Return 1 if currently emitting into a sequence. */
5297 in_sequence_p (void)
5299 return seq_stack != 0;
5302 /* Put the various virtual registers into REGNO_REG_RTX. */
5304 static void
5305 init_virtual_regs (void)
5307 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5308 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5309 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5310 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5311 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5312 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5313 = virtual_preferred_stack_boundary_rtx;
5317 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5318 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5319 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5320 static int copy_insn_n_scratches;
5322 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5323 copied an ASM_OPERANDS.
5324 In that case, it is the original input-operand vector. */
5325 static rtvec orig_asm_operands_vector;
5327 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5328 copied an ASM_OPERANDS.
5329 In that case, it is the copied input-operand vector. */
5330 static rtvec copy_asm_operands_vector;
5332 /* Likewise for the constraints vector. */
5333 static rtvec orig_asm_constraints_vector;
5334 static rtvec copy_asm_constraints_vector;
5336 /* Recursively create a new copy of an rtx for copy_insn.
5337 This function differs from copy_rtx in that it handles SCRATCHes and
5338 ASM_OPERANDs properly.
5339 Normally, this function is not used directly; use copy_insn as the front end.
5340 However, you could first copy an insn pattern with copy_insn and then use
5341 this function afterwards to properly copy any REG_NOTEs containing
5342 SCRATCHes. */
5345 copy_insn_1 (rtx orig)
5347 rtx copy;
5348 int i, j;
5349 RTX_CODE code;
5350 const char *format_ptr;
5352 if (orig == NULL)
5353 return NULL;
5355 code = GET_CODE (orig);
5357 switch (code)
5359 case REG:
5360 case DEBUG_EXPR:
5361 CASE_CONST_ANY:
5362 case SYMBOL_REF:
5363 case CODE_LABEL:
5364 case PC:
5365 case CC0:
5366 case RETURN:
5367 case SIMPLE_RETURN:
5368 return orig;
5369 case CLOBBER:
5370 /* Share clobbers of hard registers (like cc0), but do not share pseudo reg
5371 clobbers or clobbers of hard registers that originated as pseudos.
5372 This is needed to allow safe register renaming. */
5373 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER
5374 && ORIGINAL_REGNO (XEXP (orig, 0)) == REGNO (XEXP (orig, 0)))
5375 return orig;
5376 break;
5378 case SCRATCH:
5379 for (i = 0; i < copy_insn_n_scratches; i++)
5380 if (copy_insn_scratch_in[i] == orig)
5381 return copy_insn_scratch_out[i];
5382 break;
5384 case CONST:
5385 if (shared_const_p (orig))
5386 return orig;
5387 break;
5389 /* A MEM with a constant address is not sharable. The problem is that
5390 the constant address may need to be reloaded. If the mem is shared,
5391 then reloading one copy of this mem will cause all copies to appear
5392 to have been reloaded. */
5394 default:
5395 break;
5398 /* Copy the various flags, fields, and other information. We assume
5399 that all fields need copying, and then clear the fields that should
5400 not be copied. That is the sensible default behavior, and forces
5401 us to explicitly document why we are *not* copying a flag. */
5402 copy = shallow_copy_rtx (orig);
5404 /* We do not copy the USED flag, which is used as a mark bit during
5405 walks over the RTL. */
5406 RTX_FLAG (copy, used) = 0;
5408 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5409 if (INSN_P (orig))
5411 RTX_FLAG (copy, jump) = 0;
5412 RTX_FLAG (copy, call) = 0;
5413 RTX_FLAG (copy, frame_related) = 0;
5416 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5418 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5419 switch (*format_ptr++)
5421 case 'e':
5422 if (XEXP (orig, i) != NULL)
5423 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5424 break;
5426 case 'E':
5427 case 'V':
5428 if (XVEC (orig, i) == orig_asm_constraints_vector)
5429 XVEC (copy, i) = copy_asm_constraints_vector;
5430 else if (XVEC (orig, i) == orig_asm_operands_vector)
5431 XVEC (copy, i) = copy_asm_operands_vector;
5432 else if (XVEC (orig, i) != NULL)
5434 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5435 for (j = 0; j < XVECLEN (copy, i); j++)
5436 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5438 break;
5440 case 't':
5441 case 'w':
5442 case 'i':
5443 case 's':
5444 case 'S':
5445 case 'u':
5446 case '0':
5447 /* These are left unchanged. */
5448 break;
5450 default:
5451 gcc_unreachable ();
5454 if (code == SCRATCH)
5456 i = copy_insn_n_scratches++;
5457 gcc_assert (i < MAX_RECOG_OPERANDS);
5458 copy_insn_scratch_in[i] = orig;
5459 copy_insn_scratch_out[i] = copy;
5461 else if (code == ASM_OPERANDS)
5463 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5464 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5465 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5466 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5469 return copy;
5472 /* Create a new copy of an rtx.
5473 This function differs from copy_rtx in that it handles SCRATCHes and
5474 ASM_OPERANDs properly.
5475 INSN doesn't really have to be a full INSN; it could be just the
5476 pattern. */
5478 copy_insn (rtx insn)
5480 copy_insn_n_scratches = 0;
5481 orig_asm_operands_vector = 0;
5482 orig_asm_constraints_vector = 0;
5483 copy_asm_operands_vector = 0;
5484 copy_asm_constraints_vector = 0;
5485 return copy_insn_1 (insn);
5488 /* Return a copy of INSN that can be used in a SEQUENCE delay slot,
5489 on the assumption that INSN itself remains in its original place. */
5492 copy_delay_slot_insn (rtx insn)
5494 /* Copy INSN with its rtx_code, all its notes, location etc. */
5495 insn = copy_rtx (insn);
5496 INSN_UID (insn) = cur_insn_uid++;
5497 return insn;
5500 /* Initialize data structures and variables in this file
5501 before generating rtl for each function. */
5503 void
5504 init_emit (void)
5506 set_first_insn (NULL);
5507 set_last_insn (NULL);
5508 if (MIN_NONDEBUG_INSN_UID)
5509 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5510 else
5511 cur_insn_uid = 1;
5512 cur_debug_insn_uid = 1;
5513 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5514 first_label_num = label_num;
5515 seq_stack = NULL;
5517 /* Init the tables that describe all the pseudo regs. */
5519 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5521 crtl->emit.regno_pointer_align
5522 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5524 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5526 /* Put copies of all the hard registers into regno_reg_rtx. */
5527 memcpy (regno_reg_rtx,
5528 initial_regno_reg_rtx,
5529 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5531 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5532 init_virtual_regs ();
5534 /* Indicate that the virtual registers and stack locations are
5535 all pointers. */
5536 REG_POINTER (stack_pointer_rtx) = 1;
5537 REG_POINTER (frame_pointer_rtx) = 1;
5538 REG_POINTER (hard_frame_pointer_rtx) = 1;
5539 REG_POINTER (arg_pointer_rtx) = 1;
5541 REG_POINTER (virtual_incoming_args_rtx) = 1;
5542 REG_POINTER (virtual_stack_vars_rtx) = 1;
5543 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5544 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5545 REG_POINTER (virtual_cfa_rtx) = 1;
5547 #ifdef STACK_BOUNDARY
5548 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5549 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5550 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5551 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5553 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5554 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5555 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5556 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5557 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5558 #endif
5560 #ifdef INIT_EXPANDERS
5561 INIT_EXPANDERS;
5562 #endif
5565 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5567 static rtx
5568 gen_const_vector (enum machine_mode mode, int constant)
5570 rtx tem;
5571 rtvec v;
5572 int units, i;
5573 enum machine_mode inner;
5575 units = GET_MODE_NUNITS (mode);
5576 inner = GET_MODE_INNER (mode);
5578 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5580 v = rtvec_alloc (units);
5582 /* We need to call this function after we set the scalar const_tiny_rtx
5583 entries. */
5584 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5586 for (i = 0; i < units; ++i)
5587 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5589 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5590 return tem;
5593 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5594 all elements are zero, and the one vector when all elements are one. */
5596 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5598 enum machine_mode inner = GET_MODE_INNER (mode);
5599 int nunits = GET_MODE_NUNITS (mode);
5600 rtx x;
5601 int i;
5603 /* Check to see if all of the elements have the same value. */
5604 x = RTVEC_ELT (v, nunits - 1);
5605 for (i = nunits - 2; i >= 0; i--)
5606 if (RTVEC_ELT (v, i) != x)
5607 break;
5609 /* If the values are all the same, check to see if we can use one of the
5610 standard constant vectors. */
5611 if (i == -1)
5613 if (x == CONST0_RTX (inner))
5614 return CONST0_RTX (mode);
5615 else if (x == CONST1_RTX (inner))
5616 return CONST1_RTX (mode);
5617 else if (x == CONSTM1_RTX (inner))
5618 return CONSTM1_RTX (mode);
5621 return gen_rtx_raw_CONST_VECTOR (mode, v);
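/* Editorial sketch, not part of the original file: on a target that
   provides V4SImode, building a vector of four zeros folds to the shared
   CONST0_RTX (V4SImode) rather than allocating a fresh rtx.  The
   `example_' name is hypothetical.  */
static rtx ATTRIBUTE_UNUSED
example_zero_v4si (void)
{
  rtvec v = rtvec_alloc (4);
  int i;

  for (i = 0; i < 4; i++)
    RTVEC_ELT (v, i) = const0_rtx;
  return gen_rtx_CONST_VECTOR (V4SImode, v);
}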
5624 /* Initialize global register information required by all functions. */
5626 void
5627 init_emit_regs (void)
5629 int i;
5630 enum machine_mode mode;
5631 mem_attrs *attrs;
5633 /* Reset register attributes. */
5634 htab_empty (reg_attrs_htab);
5636 /* We need reg_raw_mode, so initialize the modes now. */
5637 init_reg_modes_target ();
5639 /* Assign register numbers to the globally defined register rtx. */
5640 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5641 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5642 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5643 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5644 virtual_incoming_args_rtx =
5645 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5646 virtual_stack_vars_rtx =
5647 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5648 virtual_stack_dynamic_rtx =
5649 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5650 virtual_outgoing_args_rtx =
5651 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5652 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5653 virtual_preferred_stack_boundary_rtx =
5654 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5656 /* Initialize RTL for commonly used hard registers. These are
5657 copied into regno_reg_rtx as we begin to compile each function. */
5658 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5659 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5661 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5662 return_address_pointer_rtx
5663 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5664 #endif
5666 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5667 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5668 else
5669 pic_offset_table_rtx = NULL_RTX;
5671 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5673 mode = (enum machine_mode) i;
5674 attrs = ggc_alloc_cleared_mem_attrs ();
5675 attrs->align = BITS_PER_UNIT;
5676 attrs->addrspace = ADDR_SPACE_GENERIC;
5677 if (mode != BLKmode)
5679 attrs->size_known_p = true;
5680 attrs->size = GET_MODE_SIZE (mode);
5681 if (STRICT_ALIGNMENT)
5682 attrs->align = GET_MODE_ALIGNMENT (mode);
5684 mode_mem_attrs[i] = attrs;
5688 /* Create some permanent unique rtl objects shared between all functions. */
5690 void
5691 init_emit_once (void)
5693 int i;
5694 enum machine_mode mode;
5695 enum machine_mode double_mode;
5697 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5698 hash tables. */
5699 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5700 const_int_htab_eq, NULL);
5702 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5703 const_double_htab_eq, NULL);
5705 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5706 const_fixed_htab_eq, NULL);
5708 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5709 mem_attrs_htab_eq, NULL);
5710 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5711 reg_attrs_htab_eq, NULL);
5713 /* Compute the word and byte modes. */
5715 byte_mode = VOIDmode;
5716 word_mode = VOIDmode;
5717 double_mode = VOIDmode;
5719 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5720 mode != VOIDmode;
5721 mode = GET_MODE_WIDER_MODE (mode))
5723 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5724 && byte_mode == VOIDmode)
5725 byte_mode = mode;
5727 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5728 && word_mode == VOIDmode)
5729 word_mode = mode;
5732 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5733 mode != VOIDmode;
5734 mode = GET_MODE_WIDER_MODE (mode))
5736 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5737 && double_mode == VOIDmode)
5738 double_mode = mode;
5741 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5743 #ifdef INIT_EXPANDERS
5744 /* This is to initialize {init|mark|free}_machine_status before the first
5745 call to push_function_context_to. This is needed by the Chill front
5746 end which calls push_function_context_to before the first call to
5747 init_function_start. */
5748 INIT_EXPANDERS;
5749 #endif
5751 /* Create the unique rtx's for certain rtx codes and operand values. */
5753 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5754 tries to use these variables. */
5755 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5756 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5757 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5759 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5760 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5761 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5762 else
5763 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5765 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5766 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5767 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5769 dconstm1 = dconst1;
5770 dconstm1.sign = 1;
5772 dconsthalf = dconst1;
5773 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5775 for (i = 0; i < 3; i++)
5777 const REAL_VALUE_TYPE *const r =
5778 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5780 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5781 mode != VOIDmode;
5782 mode = GET_MODE_WIDER_MODE (mode))
5783 const_tiny_rtx[i][(int) mode] =
5784 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5786 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5787 mode != VOIDmode;
5788 mode = GET_MODE_WIDER_MODE (mode))
5789 const_tiny_rtx[i][(int) mode] =
5790 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5792 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5794 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5795 mode != VOIDmode;
5796 mode = GET_MODE_WIDER_MODE (mode))
5797 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5799 for (mode = MIN_MODE_PARTIAL_INT;
5800 mode <= MAX_MODE_PARTIAL_INT;
5801 mode = (enum machine_mode)((int)(mode) + 1))
5802 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5805 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5807 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5808 mode != VOIDmode;
5809 mode = GET_MODE_WIDER_MODE (mode))
5810 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5812 for (mode = MIN_MODE_PARTIAL_INT;
5813 mode <= MAX_MODE_PARTIAL_INT;
5814 mode = (enum machine_mode)((int)(mode) + 1))
5815 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5817 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5818 mode != VOIDmode;
5819 mode = GET_MODE_WIDER_MODE (mode))
5821 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5822 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5825 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5826 mode != VOIDmode;
5827 mode = GET_MODE_WIDER_MODE (mode))
5829 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5830 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5833 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5834 mode != VOIDmode;
5835 mode = GET_MODE_WIDER_MODE (mode))
5837 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5838 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5839 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5842 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5843 mode != VOIDmode;
5844 mode = GET_MODE_WIDER_MODE (mode))
5846 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5847 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5850 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5851 mode != VOIDmode;
5852 mode = GET_MODE_WIDER_MODE (mode))
5854 FCONST0(mode).data.high = 0;
5855 FCONST0(mode).data.low = 0;
5856 FCONST0(mode).mode = mode;
5857 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5858 FCONST0 (mode), mode);
5861 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5862 mode != VOIDmode;
5863 mode = GET_MODE_WIDER_MODE (mode))
5865 FCONST0(mode).data.high = 0;
5866 FCONST0(mode).data.low = 0;
5867 FCONST0(mode).mode = mode;
5868 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5869 FCONST0 (mode), mode);
5872 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5873 mode != VOIDmode;
5874 mode = GET_MODE_WIDER_MODE (mode))
5876 FCONST0(mode).data.high = 0;
5877 FCONST0(mode).data.low = 0;
5878 FCONST0(mode).mode = mode;
5879 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5880 FCONST0 (mode), mode);
5882 /* We store the value 1. */
5883 FCONST1(mode).data.high = 0;
5884 FCONST1(mode).data.low = 0;
5885 FCONST1(mode).mode = mode;
5886 FCONST1(mode).data
5887 = double_int_one.lshift (GET_MODE_FBIT (mode),
5888 HOST_BITS_PER_DOUBLE_INT,
5889 SIGNED_FIXED_POINT_MODE_P (mode));
5890 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5891 FCONST1 (mode), mode);
5894 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5895 mode != VOIDmode;
5896 mode = GET_MODE_WIDER_MODE (mode))
5898 FCONST0(mode).data.high = 0;
5899 FCONST0(mode).data.low = 0;
5900 FCONST0(mode).mode = mode;
5901 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5902 FCONST0 (mode), mode);
5904 /* We store the value 1. */
5905 FCONST1(mode).data.high = 0;
5906 FCONST1(mode).data.low = 0;
5907 FCONST1(mode).mode = mode;
5908 FCONST1(mode).data
5909 = double_int_one.lshift (GET_MODE_FBIT (mode),
5910 HOST_BITS_PER_DOUBLE_INT,
5911 SIGNED_FIXED_POINT_MODE_P (mode));
5912 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5913 FCONST1 (mode), mode);
5916 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5917 mode != VOIDmode;
5918 mode = GET_MODE_WIDER_MODE (mode))
5920 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5923 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5924 mode != VOIDmode;
5925 mode = GET_MODE_WIDER_MODE (mode))
5927 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5930 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5931 mode != VOIDmode;
5932 mode = GET_MODE_WIDER_MODE (mode))
5934 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5935 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5938 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5939 mode != VOIDmode;
5940 mode = GET_MODE_WIDER_MODE (mode))
5942 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5943 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5946 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5947 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5948 const_tiny_rtx[0][i] = const0_rtx;
5950 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5951 if (STORE_FLAG_VALUE == 1)
5952 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5954 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5955 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5956 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
5957 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
5960 /* Produce an exact duplicate of insn INSN after AFTER.
5961 Take care to update libcall regions if present. */
5964 emit_copy_of_insn_after (rtx insn, rtx after)
5966 rtx new_rtx, link;
5968 switch (GET_CODE (insn))
5970 case INSN:
5971 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5972 break;
5974 case JUMP_INSN:
5975 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5976 break;
5978 case DEBUG_INSN:
5979 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5980 break;
5982 case CALL_INSN:
5983 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5984 if (CALL_INSN_FUNCTION_USAGE (insn))
5985 CALL_INSN_FUNCTION_USAGE (new_rtx)
5986 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5987 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5988 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5989 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5990 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5991 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5992 break;
5994 default:
5995 gcc_unreachable ();
5998 /* Update LABEL_NUSES. */
5999 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6001 INSN_LOCATION (new_rtx) = INSN_LOCATION (insn);
6003 /* If the old insn is frame related, then so is the new one. This is
6004 primarily needed for IA-64 unwind info which marks epilogue insns,
6005 which may be duplicated by the basic block reordering code. */
6006 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6008 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6009 will make them. REG_LABEL_TARGETs are created there too, but are
6010 supposed to be sticky, so we copy them. */
6011 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6012 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6014 if (GET_CODE (link) == EXPR_LIST)
6015 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6016 copy_insn_1 (XEXP (link, 0)));
6017 else
6018 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
6021 INSN_CODE (new_rtx) = INSN_CODE (insn);
6022 return new_rtx;
6025 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6027 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6029 if (hard_reg_clobbers[mode][regno])
6030 return hard_reg_clobbers[mode][regno];
6031 else
6032 return (hard_reg_clobbers[mode][regno] =
6033 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6036 location_t prologue_location;
6037 location_t epilogue_location;
6039 /* Hold the current and the last location information, so that the
6040 data structures are built lazily, and only when instructions at a
6041 given place are needed. */
6042 static location_t curr_location;
6044 /* Allocate the insn location data structure. */
6045 void
6046 insn_locations_init (void)
6048 prologue_location = epilogue_location = 0;
6049 curr_location = UNKNOWN_LOCATION;
6052 /* At the end of the emit stage, clear the current location. */
6053 void
6054 insn_locations_finalize (void)
6056 epilogue_location = curr_location;
6057 curr_location = UNKNOWN_LOCATION;
6060 /* Set current location. */
6061 void
6062 set_curr_insn_location (location_t location)
6064 curr_location = location;
6067 /* Get current location. */
6068 location_t
6069 curr_insn_location (void)
6071 return curr_location;
6074 /* Return the lexical scope block that INSN belongs to. */
6075 tree
6076 insn_scope (const_rtx insn)
6078 return LOCATION_BLOCK (INSN_LOCATION (insn));
6081 /* Return the line number of the statement that produced this insn. */
6083 insn_line (const_rtx insn)
6085 return LOCATION_LINE (INSN_LOCATION (insn));
6088 /* Return the source file of the statement that produced this insn. */
6089 const char *
6090 insn_file (const_rtx insn)
6092 return LOCATION_FILE (INSN_LOCATION (insn));
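/* Editorial sketch, not part of the original file: a typical debugging
   use of the accessors above, printing where INSN originated.  FILE is
   assumed to be an open stream; the `example_' name is hypothetical.  */
static void ATTRIBUTE_UNUSED
example_dump_insn_origin (FILE *file, const_rtx insn)
{
  fprintf (file, "%s:%d\n", insn_file (insn), insn_line (insn));
}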
6095 /* Return true if memory model MODEL requires a pre-operation (release-style)
6096 barrier when PRE is true, or a post-operation (acquire-style) barrier
6097 when PRE is false. While not universal, this matches several targets. */
6099 bool
6100 need_atomic_barrier_p (enum memmodel model, bool pre)
6102 switch (model & MEMMODEL_MASK)
6104 case MEMMODEL_RELAXED:
6105 case MEMMODEL_CONSUME:
6106 return false;
6107 case MEMMODEL_RELEASE:
6108 return pre;
6109 case MEMMODEL_ACQUIRE:
6110 return !pre;
6111 case MEMMODEL_ACQ_REL:
6112 case MEMMODEL_SEQ_CST:
6113 return true;
6114 default:
6115 gcc_unreachable ();
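/* Editorial sketch, not part of the original file: the shape of a target
   atomic expander consulting need_atomic_barrier_p.  The fence-emitting
   bodies are left as comments, since real fence insns are
   target-specific; the `example_' name is hypothetical.  */
static void ATTRIBUTE_UNUSED
example_pre_post_fences (enum memmodel model)
{
  if (need_atomic_barrier_p (model, true))
    {
      /* Emit the pre-operation (release-style) fence here.  */
    }
  /* ... the atomic operation itself ... */
  if (need_atomic_barrier_p (model, false))
    {
      /* Emit the post-operation (acquire-style) fence here.  */
    }
}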
6119 #include "gt-emit-rtl.h"