1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010, 2011
5 Free Software Foundation, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* Middle-to-low level generation of rtx code and insns.
26 This file contains support functions for creating rtl expressions
27 and manipulating them in the doubly-linked chain of insns.
29 The patterns of the insns are created by machine-dependent
30 routines in insn-emit.c, which is generated automatically from
31 the machine description. These routines make the individual rtx's
32 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
33 which are automatically generated from rtl.def; what is machine
34 dependent is the kind of rtx's they make and what arguments they
35 use. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "diagnostic-core.h"
42 #include "rtl.h"
43 #include "tree.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "function.h"
47 #include "expr.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "hashtab.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "bitmap.h"
54 #include "basic-block.h"
55 #include "ggc.h"
56 #include "debug.h"
57 #include "langhooks.h"
58 #include "tree-pass.h"
59 #include "df.h"
60 #include "params.h"
61 #include "target.h"
62 #include "tree-flow.h"
64 struct target_rtl default_target_rtl;
65 #if SWITCHABLE_TARGET
66 struct target_rtl *this_target_rtl = &default_target_rtl;
67 #endif
69 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
71 /* Commonly used modes. */
73 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
74 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
75 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
76 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
78 /* Data structures maintained for the currently processed function in RTL form. */
80 struct rtl_data x_rtl;
82 /* Indexed by pseudo register number, gives the rtx for that pseudo.
83 Allocated in parallel with regno_pointer_align.
84 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
85 with a length attribute nested in top-level structures. */
87 rtx * regno_reg_rtx;
89 /* This is *not* reset after each function. It gives each CODE_LABEL
90 in the entire compilation a unique label number. */
92 static GTY(()) int label_num = 1;
94 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
95 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
96 record a copy of const[012]_rtx and constm1_rtx. CONSTM1_RTX
97 is set only for MODE_INT and MODE_VECTOR_INT modes. */
99 rtx const_tiny_rtx[4][(int) MAX_MACHINE_MODE];
101 rtx const_true_rtx;
103 REAL_VALUE_TYPE dconst0;
104 REAL_VALUE_TYPE dconst1;
105 REAL_VALUE_TYPE dconst2;
106 REAL_VALUE_TYPE dconstm1;
107 REAL_VALUE_TYPE dconsthalf;
109 /* Record the fixed-point constants 0 and 1. */
110 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
111 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
113 /* We make one copy of (const_int C) where C is in
114 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
115 to save space during the compilation and simplify comparisons of
116 integers. */
118 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
120 /* Standard pieces of rtx, to be substituted directly into things. */
121 rtx pc_rtx;
122 rtx ret_rtx;
123 rtx simple_return_rtx;
124 rtx cc0_rtx;
126 /* A hash table storing CONST_INTs whose absolute value is greater
127 than MAX_SAVED_CONST_INT. */
129 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
130 htab_t const_int_htab;
132 /* A hash table storing memory attribute structures. */
133 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
134 htab_t mem_attrs_htab;
136 /* A hash table storing register attribute structures. */
137 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
138 htab_t reg_attrs_htab;
140 /* A hash table storing all CONST_DOUBLEs. */
141 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
142 htab_t const_double_htab;
144 /* A hash table storing all CONST_FIXEDs. */
145 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
146 htab_t const_fixed_htab;
148 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
149 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
150 #define last_location (crtl->emit.x_last_location)
151 #define first_label_num (crtl->emit.x_first_label_num)
153 static rtx make_call_insn_raw (rtx);
154 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
155 static void set_used_decls (tree);
156 static void mark_label_nuses (rtx);
157 static hashval_t const_int_htab_hash (const void *);
158 static int const_int_htab_eq (const void *, const void *);
159 static hashval_t const_double_htab_hash (const void *);
160 static int const_double_htab_eq (const void *, const void *);
161 static rtx lookup_const_double (rtx);
162 static hashval_t const_fixed_htab_hash (const void *);
163 static int const_fixed_htab_eq (const void *, const void *);
164 static rtx lookup_const_fixed (rtx);
165 static hashval_t mem_attrs_htab_hash (const void *);
166 static int mem_attrs_htab_eq (const void *, const void *);
167 static hashval_t reg_attrs_htab_hash (const void *);
168 static int reg_attrs_htab_eq (const void *, const void *);
169 static reg_attrs *get_reg_attrs (tree, int);
170 static rtx gen_const_vector (enum machine_mode, int);
171 static void copy_rtx_if_shared_1 (rtx *orig);
173 /* Probability of the conditional branch currently being processed by try_split.
174 Set to -1 otherwise. */
175 int split_branch_probability = -1;
177 /* Returns a hash code for X (which is really a CONST_INT). */
179 static hashval_t
180 const_int_htab_hash (const void *x)
182 return (hashval_t) INTVAL ((const_rtx) x);
185 /* Returns nonzero if the value represented by X (which is really a
186 CONST_INT) is the same as that given by Y (which is really a
187 HOST_WIDE_INT *). */
189 static int
190 const_int_htab_eq (const void *x, const void *y)
192 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
195 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
196 static hashval_t
197 const_double_htab_hash (const void *x)
199 const_rtx const value = (const_rtx) x;
200 hashval_t h;
202 if (GET_MODE (value) == VOIDmode)
203 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
204 else
206 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
207 /* MODE is used in the comparison, so it should be in the hash. */
208 h ^= GET_MODE (value);
210 return h;
213 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
214 is the same as that represented by Y (really a CONST_DOUBLE). */
215 static int
216 const_double_htab_eq (const void *x, const void *y)
218 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
220 if (GET_MODE (a) != GET_MODE (b))
221 return 0;
222 if (GET_MODE (a) == VOIDmode)
223 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
224 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
225 else
226 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
227 CONST_DOUBLE_REAL_VALUE (b));
230 /* Returns a hash code for X (which is really a CONST_FIXED). */
232 static hashval_t
233 const_fixed_htab_hash (const void *x)
235 const_rtx const value = (const_rtx) x;
236 hashval_t h;
238 h = fixed_hash (CONST_FIXED_VALUE (value));
239 /* MODE is used in the comparison, so it should be in the hash. */
240 h ^= GET_MODE (value);
241 return h;
244 /* Returns nonzero if the value represented by X (really a CONST_FIXED)
245 is the same as that represented by Y (really a CONST_FIXED). */
247 static int
248 const_fixed_htab_eq (const void *x, const void *y)
250 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
252 if (GET_MODE (a) != GET_MODE (b))
253 return 0;
254 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
257 /* Returns a hash code for X (which is really a mem_attrs *). */
259 static hashval_t
260 mem_attrs_htab_hash (const void *x)
262 const mem_attrs *const p = (const mem_attrs *) x;
264 return (p->alias ^ (p->align * 1000)
265 ^ (p->addrspace * 4000)
266 ^ ((p->offset_known_p ? p->offset : 0) * 50000)
267 ^ ((p->size_known_p ? p->size : 0) * 2500000)
268 ^ (size_t) iterative_hash_expr (p->expr, 0));
271 /* Return true if the given memory attributes are equal. */
273 static bool
274 mem_attrs_eq_p (const struct mem_attrs *p, const struct mem_attrs *q)
276 return (p->alias == q->alias
277 && p->offset_known_p == q->offset_known_p
278 && (!p->offset_known_p || p->offset == q->offset)
279 && p->size_known_p == q->size_known_p
280 && (!p->size_known_p || p->size == q->size)
281 && p->align == q->align
282 && p->addrspace == q->addrspace
283 && (p->expr == q->expr
284 || (p->expr != NULL_TREE && q->expr != NULL_TREE
285 && operand_equal_p (p->expr, q->expr, 0))));
288 /* Returns nonzero if the value represented by X (which is really a
289 mem_attrs *) is the same as that given by Y (which is also really a
290 mem_attrs *). */
292 static int
293 mem_attrs_htab_eq (const void *x, const void *y)
295 return mem_attrs_eq_p ((const mem_attrs *) x, (const mem_attrs *) y);
298 /* Set MEM's memory attributes so that they are the same as ATTRS. */
300 static void
301 set_mem_attrs (rtx mem, mem_attrs *attrs)
303 void **slot;
305 /* If everything is the default, we can just clear the attributes. */
306 if (mem_attrs_eq_p (attrs, mode_mem_attrs[(int) GET_MODE (mem)]))
308 MEM_ATTRS (mem) = 0;
309 return;
312 slot = htab_find_slot (mem_attrs_htab, attrs, INSERT);
313 if (*slot == 0)
315 *slot = ggc_alloc_mem_attrs ();
316 memcpy (*slot, attrs, sizeof (mem_attrs));
319 MEM_ATTRS (mem) = (mem_attrs *) *slot;
322 /* Returns a hash code for X (which is really a reg_attrs *). */
324 static hashval_t
325 reg_attrs_htab_hash (const void *x)
327 const reg_attrs *const p = (const reg_attrs *) x;
329 return ((p->offset * 1000) ^ (intptr_t) p->decl);
332 /* Returns nonzero if the value represented by X (which is really a
333 reg_attrs *) is the same as that given by Y (which is also really a
334 reg_attrs *). */
336 static int
337 reg_attrs_htab_eq (const void *x, const void *y)
339 const reg_attrs *const p = (const reg_attrs *) x;
340 const reg_attrs *const q = (const reg_attrs *) y;
342 return (p->decl == q->decl && p->offset == q->offset);
344 /* Allocate a new reg_attrs structure and insert it into the hash table if
345 one identical to it is not already in the table. We are doing this for
346 a REG with decl DECL and byte offset OFFSET within that decl. */
348 static reg_attrs *
349 get_reg_attrs (tree decl, int offset)
351 reg_attrs attrs;
352 void **slot;
354 /* If everything is the default, we can just return zero. */
355 if (decl == 0 && offset == 0)
356 return 0;
358 attrs.decl = decl;
359 attrs.offset = offset;
361 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
362 if (*slot == 0)
364 *slot = ggc_alloc_reg_attrs ();
365 memcpy (*slot, &attrs, sizeof (reg_attrs));
368 return (reg_attrs *) *slot;
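/* Illustrative sketch, kept disabled: the hash-consing pattern used by
   reg_attrs_htab (and, analogously, by mem_attrs_htab above).  Identical
   attribute requests return the same shared structure, so pointer
   comparison suffices, and the all-default case maps to a null pointer.
   DECL stands for any decl tree available in context.  */
#if 0
static void
reg_attrs_sharing_example (tree decl)
{
  reg_attrs *a = get_reg_attrs (decl, 4);
  reg_attrs *b = get_reg_attrs (decl, 4);
  gcc_assert (a == b);                              /* Same hash table slot.  */
  gcc_assert (get_reg_attrs (NULL_TREE, 0) == 0);   /* Default attributes.  */
}
#endif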
372 #if !HAVE_blockage
373 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
374 across this insn. */
377 gen_blockage (void)
379 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
380 MEM_VOLATILE_P (x) = true;
381 return x;
383 #endif
386 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
387 don't attempt to share with the various global pieces of rtl (such as
388 frame_pointer_rtx). */
391 gen_raw_REG (enum machine_mode mode, int regno)
393 rtx x = gen_rtx_raw_REG (mode, regno);
394 ORIGINAL_REGNO (x) = regno;
395 return x;
398 /* There are some RTL codes that require special attention; the generation
399 functions do the raw handling. If you add to this list, modify
400 special_rtx in gengenrtl.c as well. */
403 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
405 void **slot;
407 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
408 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
410 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
411 if (const_true_rtx && arg == STORE_FLAG_VALUE)
412 return const_true_rtx;
413 #endif
415 /* Look up the CONST_INT in the hash table. */
416 slot = htab_find_slot_with_hash (const_int_htab, &arg,
417 (hashval_t) arg, INSERT);
418 if (*slot == 0)
419 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
421 return (rtx) *slot;
425 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
427 return GEN_INT (trunc_int_for_mode (c, mode));
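/* Illustrative sketch, kept disabled: how the CONST_INT cache and
   gen_int_mode interact.  Small values come back as the shared
   constants, and gen_int_mode first truncates C to MODE, so 0xff in
   QImode is the shared constant -1.  Assumes the usual GCC internal
   environment provided by the includes above.  */
#if 0
static void
const_int_sharing_example (void)
{
  rtx zero = GEN_INT (0);                    /* The cached const0_rtx.  */
  rtx mone = gen_int_mode (0xff, QImode);    /* 0xff sign-truncated to -1.  */
  gcc_assert (zero == const0_rtx);
  gcc_assert (mone == constm1_rtx);
}
#endif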
430 /* CONST_DOUBLEs might be created from pairs of integers, or from
431 REAL_VALUE_TYPEs. Also, their length is known only at run time,
432 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
434 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
435 hash table. If so, return its counterpart; otherwise add it
436 to the hash table and return it. */
437 static rtx
438 lookup_const_double (rtx real)
440 void **slot = htab_find_slot (const_double_htab, real, INSERT);
441 if (*slot == 0)
442 *slot = real;
444 return (rtx) *slot;
447 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
448 VALUE in mode MODE. */
450 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
452 rtx real = rtx_alloc (CONST_DOUBLE);
453 PUT_MODE (real, mode);
455 real->u.rv = value;
457 return lookup_const_double (real);
460 /* Determine whether FIXED, a CONST_FIXED, already exists in the
461 hash table. If so, return its counterpart; otherwise add it
462 to the hash table and return it. */
464 static rtx
465 lookup_const_fixed (rtx fixed)
467 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
468 if (*slot == 0)
469 *slot = fixed;
471 return (rtx) *slot;
474 /* Return a CONST_FIXED rtx for a fixed-point value specified by
475 VALUE in mode MODE. */
478 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
480 rtx fixed = rtx_alloc (CONST_FIXED);
481 PUT_MODE (fixed, mode);
483 fixed->u.fv = value;
485 return lookup_const_fixed (fixed);
488 /* Construct a double_int from rtx CST. */
490 double_int
491 rtx_to_double_int (const_rtx cst)
493 double_int r;
495 if (CONST_INT_P (cst))
496 r = shwi_to_double_int (INTVAL (cst));
497 else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
499 r.low = CONST_DOUBLE_LOW (cst);
500 r.high = CONST_DOUBLE_HIGH (cst);
502 else
503 gcc_unreachable ();
505 return r;
509 /* Return a CONST_DOUBLE or CONST_INT for a value specified as
510 a double_int. */
513 immed_double_int_const (double_int i, enum machine_mode mode)
515 return immed_double_const (i.low, i.high, mode);
518 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
519 of ints: I0 is the low-order word and I1 is the high-order word.
520 Do not use this routine for non-integer modes; convert to
521 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
524 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
526 rtx value;
527 unsigned int i;
529 /* There are the following cases (note that there are no modes with
530 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
532 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
533 gen_int_mode.
534 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
535 the integer fits into a HOST_WIDE_INT anyway (i.e., i1 consists only
536 of copies of the sign bit, and the signs of i0 and i1 are the same), then
537 we return a CONST_INT for i0.
538 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
539 if (mode != VOIDmode)
541 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
542 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
543 /* We can get a 0 for an error mark. */
544 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
545 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
547 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
548 return gen_int_mode (i0, mode);
550 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
553 /* If this integer fits in one word, return a CONST_INT. */
554 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
555 return GEN_INT (i0);
557 /* We use VOIDmode for integers. */
558 value = rtx_alloc (CONST_DOUBLE);
559 PUT_MODE (value, VOIDmode);
561 CONST_DOUBLE_LOW (value) = i0;
562 CONST_DOUBLE_HIGH (value) = i1;
564 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
565 XWINT (value, i) = 0;
567 return lookup_const_double (value);
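/* Illustrative sketch, kept disabled: the three cases enumerated in the
   comment above, assuming a 64-bit HOST_WIDE_INT host and a target that
   defines TImode (assumptions of the example, not of the code).  */
#if 0
static void
immed_double_const_example (void)
{
  /* Case 1: mode no wider than HOST_WIDE_INT -> gen_int_mode.  */
  rtx a = immed_double_const (-1, -1, SImode);   /* CONST_INT -1.  */
  /* Case 2: double-width mode, but the value fits a HOST_WIDE_INT.  */
  rtx b = immed_double_const (42, 0, TImode);    /* CONST_INT 42.  */
  /* Case 3: a genuinely double-width value -> VOIDmode CONST_DOUBLE.  */
  rtx c = immed_double_const (0, 1, TImode);
  gcc_assert (CONST_INT_P (a) && CONST_INT_P (b) && CONST_DOUBLE_P (c));
}
#endif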
571 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
573 /* In case the MD file explicitly references the frame pointer, have
574 all such references point to the same frame pointer. This is
575 used during frame pointer elimination to distinguish the explicit
576 references to these registers from pseudos that happened to be
577 assigned to them.
579 If we have eliminated the frame pointer or arg pointer, we will
580 be using it as a normal register, for example as a spill
581 register. In such cases, we might be accessing it in a mode that
582 is not Pmode and therefore cannot use the pre-allocated rtx.
584 Also don't do this when we are making new REGs in reload, since
585 we don't want to get confused with the real pointers. */
587 if (mode == Pmode && !reload_in_progress)
589 if (regno == FRAME_POINTER_REGNUM
590 && (!reload_completed || frame_pointer_needed))
591 return frame_pointer_rtx;
592 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
593 if (regno == HARD_FRAME_POINTER_REGNUM
594 && (!reload_completed || frame_pointer_needed))
595 return hard_frame_pointer_rtx;
596 #endif
597 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
598 if (regno == ARG_POINTER_REGNUM)
599 return arg_pointer_rtx;
600 #endif
601 #ifdef RETURN_ADDRESS_POINTER_REGNUM
602 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
603 return return_address_pointer_rtx;
604 #endif
605 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
606 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
607 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
608 return pic_offset_table_rtx;
609 if (regno == STACK_POINTER_REGNUM)
610 return stack_pointer_rtx;
613 #if 0
614 /* If the per-function register table has been set up, try to re-use
615 an existing entry in that table to avoid useless generation of RTL.
617 This code is disabled for now until we can fix the various backends
618 which depend on having non-shared hard registers in some cases. Long
619 term we want to re-enable this code as it can significantly cut down
620 on the amount of useless RTL that gets generated.
622 We'll also need to fix some code that runs after reload that wants to
623 set ORIGINAL_REGNO. */
625 if (cfun
626 && cfun->emit
627 && regno_reg_rtx
628 && regno < FIRST_PSEUDO_REGISTER
629 && reg_raw_mode[regno] == mode)
630 return regno_reg_rtx[regno];
631 #endif
633 return gen_raw_REG (mode, regno);
637 gen_rtx_MEM (enum machine_mode mode, rtx addr)
639 rtx rt = gen_rtx_raw_MEM (mode, addr);
641 /* This field is not cleared by the mere allocation of the rtx, so
642 we clear it here. */
643 MEM_ATTRS (rt) = 0;
645 return rt;
648 /* Generate a MEM referring to non-trapping constant memory. */
651 gen_const_mem (enum machine_mode mode, rtx addr)
653 rtx mem = gen_rtx_MEM (mode, addr);
654 MEM_READONLY_P (mem) = 1;
655 MEM_NOTRAP_P (mem) = 1;
656 return mem;
659 /* Generate a MEM referring to fixed portions of the frame, e.g., register
660 save areas. */
663 gen_frame_mem (enum machine_mode mode, rtx addr)
665 rtx mem = gen_rtx_MEM (mode, addr);
666 MEM_NOTRAP_P (mem) = 1;
667 set_mem_alias_set (mem, get_frame_alias_set ());
668 return mem;
671 /* Generate a MEM referring to a temporary use of the stack, not part
672 of the fixed stack frame. For example, something which is pushed
673 by a target splitter. */
675 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
677 rtx mem = gen_rtx_MEM (mode, addr);
678 MEM_NOTRAP_P (mem) = 1;
679 if (!cfun->calls_alloca)
680 set_mem_alias_set (mem, get_frame_alias_set ());
681 return mem;
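/* Illustrative sketch, kept disabled: the flags the helpers above set on
   the new MEM.  ADDR stands for any valid Pmode address rtx.  */
#if 0
static void
mem_helper_example (rtx addr)
{
  rtx c = gen_const_mem (SImode, addr);   /* Read-only and cannot trap.  */
  rtx f = gen_frame_mem (Pmode, addr);    /* Frame alias set, cannot trap.  */
  gcc_assert (MEM_READONLY_P (c) && MEM_NOTRAP_P (c));
  gcc_assert (MEM_NOTRAP_P (f)
              && MEM_ALIAS_SET (f) == get_frame_alias_set ());
}
#endif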
684 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
685 this construct would be valid, and false otherwise. */
687 bool
688 validate_subreg (enum machine_mode omode, enum machine_mode imode,
689 const_rtx reg, unsigned int offset)
691 unsigned int isize = GET_MODE_SIZE (imode);
692 unsigned int osize = GET_MODE_SIZE (omode);
694 /* All subregs must be aligned. */
695 if (offset % osize != 0)
696 return false;
698 /* The subreg offset cannot be outside the inner object. */
699 if (offset >= isize)
700 return false;
702 /* ??? This should not be here. Temporarily continue to allow word_mode
703 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
704 Generally, backends are doing something sketchy but it'll take time to
705 fix them all. */
706 if (omode == word_mode)
708 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
709 is the culprit here, and not the backends. */
710 else if (osize >= UNITS_PER_WORD && isize >= osize)
712 /* Allow component subregs of complex and vector. Though given the below
713 extraction rules, it's not always clear what that means. */
714 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
715 && GET_MODE_INNER (imode) == omode)
717 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
718 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
719 represent this. It's questionable if this ought to be represented at
720 all -- why can't this all be hidden in post-reload splitters that make
721 arbitrary mode changes to the registers themselves? */
722 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
724 /* Subregs involving floating point modes are not allowed to
725 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
726 (subreg:SI (reg:DF) 0) isn't. */
727 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
729 if (isize != osize)
730 return false;
733 /* Paradoxical subregs must have offset zero. */
734 if (osize > isize)
735 return offset == 0;
737 /* This is a normal subreg. Verify that the offset is representable. */
739 /* For hard registers, we already have most of these rules collected in
740 subreg_offset_representable_p. */
741 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
743 unsigned int regno = REGNO (reg);
745 #ifdef CANNOT_CHANGE_MODE_CLASS
746 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
747 && GET_MODE_INNER (imode) == omode)
749 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
750 return false;
751 #endif
753 return subreg_offset_representable_p (regno, imode, offset, omode);
756 /* For pseudo registers, we want most of the same checks. Namely:
757 If the register is no larger than a word, the subreg must be the lowpart.
758 If the register is larger than a word, the subreg must be the lowpart
759 of a subword. A subreg does *not* perform arbitrary bit extraction.
760 Given that we've already checked mode/offset alignment, we only have
761 to check subword subregs here. */
762 if (osize < UNITS_PER_WORD)
764 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
765 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
766 if (offset % UNITS_PER_WORD != low_off)
767 return false;
769 return true;
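/* Illustrative sketch, kept disabled: two of the rules above.  The
   negative case assumes word_mode is wider than SImode (e.g. a 64-bit
   target), so the word_mode escape hatch does not apply.  Passing
   NULL_RTX for REG restricts the check to the mode/offset rules.  */
#if 0
static void
validate_subreg_example (void)
{
  /* An SImode lowpart of a DImode value is acceptable.  */
  gcc_assert (validate_subreg (SImode, DImode, NULL_RTX,
                               subreg_lowpart_offset (SImode, DImode)));
  /* Float modes may not change size, so (subreg:SI (reg:DF) 0) is not.  */
  gcc_assert (!validate_subreg (SImode, DFmode, NULL_RTX, 0));
}
#endif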
773 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
775 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
776 return gen_rtx_raw_SUBREG (mode, reg, offset);
779 /* Generate a SUBREG representing the least-significant part of REG if MODE
780 is smaller than the mode of REG, otherwise a paradoxical SUBREG. */
783 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
785 enum machine_mode inmode;
787 inmode = GET_MODE (reg);
788 if (inmode == VOIDmode)
789 inmode = mode;
790 return gen_rtx_SUBREG (mode, reg,
791 subreg_lowpart_offset (mode, inmode));
795 /* Create an rtvec and store within it the RTXen passed as arguments. */
797 rtvec
798 gen_rtvec (int n, ...)
800 int i;
801 rtvec rt_val;
802 va_list p;
804 va_start (p, n);
806 /* Don't allocate an empty rtvec... */
807 if (n == 0)
809 va_end (p);
810 return NULL_RTVEC;
813 rt_val = rtvec_alloc (n);
815 for (i = 0; i < n; i++)
816 rt_val->elem[i] = va_arg (p, rtx);
818 va_end (p);
819 return rt_val;
822 rtvec
823 gen_rtvec_v (int n, rtx *argp)
825 int i;
826 rtvec rt_val;
828 /* Don't allocate an empty rtvec... */
829 if (n == 0)
830 return NULL_RTVEC;
832 rt_val = rtvec_alloc (n);
834 for (i = 0; i < n; i++)
835 rt_val->elem[i] = *argp++;
837 return rt_val;
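/* Illustrative sketch, kept disabled: building a two-element PARALLEL
   from a varargs rtvec, as a generator or expander typically would.  */
#if 0
static rtx
rtvec_example (rtx a, rtx b)
{
  rtvec v = gen_rtvec (2, a, b);
  gcc_assert (GET_NUM_ELEM (v) == 2 && RTVEC_ELT (v, 0) == a);
  return gen_rtx_PARALLEL (VOIDmode, v);
}
#endif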
840 /* Return the number of bytes between the start of an OUTER_MODE
841 in-memory value and the start of an INNER_MODE in-memory value,
842 given that the former is a lowpart of the latter. It may be a
843 paradoxical lowpart, in which case the offset will be negative
844 on big-endian targets. */
847 byte_lowpart_offset (enum machine_mode outer_mode,
848 enum machine_mode inner_mode)
850 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
851 return subreg_lowpart_offset (outer_mode, inner_mode);
852 else
853 return -subreg_lowpart_offset (inner_mode, outer_mode);
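/* Worked example (illustrative): on a 32-bit big-endian target,
   byte_lowpart_offset (SImode, DImode) is 4, because the low-order
   SImode word of an in-memory DImode value starts 4 bytes in, while the
   paradoxical byte_lowpart_offset (DImode, SImode) is -4.  On a
   little-endian target both offsets are 0.  */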
856 /* Generate a REG rtx for a new pseudo register of mode MODE.
857 This pseudo is assigned the next sequential register number. */
860 gen_reg_rtx (enum machine_mode mode)
862 rtx val;
863 unsigned int align = GET_MODE_ALIGNMENT (mode);
865 gcc_assert (can_create_pseudo_p ());
867 /* If a virtual register with bigger mode alignment is generated,
868 increase the estimated stack alignment, because it might be spilled
869 to the stack later. */
870 if (SUPPORTS_STACK_ALIGNMENT
871 && crtl->stack_alignment_estimated < align
872 && !crtl->stack_realign_processed)
874 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
875 if (crtl->stack_alignment_estimated < min_align)
876 crtl->stack_alignment_estimated = min_align;
879 if (generating_concat_p
880 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
881 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
883 /* For complex modes, don't make a single pseudo.
884 Instead, make a CONCAT of two pseudos.
885 This allows noncontiguous allocation of the real and imaginary parts,
886 which makes much better code. Besides, allocating DCmode
887 pseudos overstrains reload on some machines like the 386. */
888 rtx realpart, imagpart;
889 enum machine_mode partmode = GET_MODE_INNER (mode);
891 realpart = gen_reg_rtx (partmode);
892 imagpart = gen_reg_rtx (partmode);
893 return gen_rtx_CONCAT (mode, realpart, imagpart);
896 /* Make sure regno_pointer_align and regno_reg_rtx are large
897 enough to have an element for this pseudo reg number. */
899 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
901 int old_size = crtl->emit.regno_pointer_align_length;
902 char *tmp;
903 rtx *new1;
905 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
906 memset (tmp + old_size, 0, old_size);
907 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
909 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
910 memset (new1 + old_size, 0, old_size * sizeof (rtx));
911 regno_reg_rtx = new1;
913 crtl->emit.regno_pointer_align_length = old_size * 2;
916 val = gen_raw_REG (mode, reg_rtx_no);
917 regno_reg_rtx[reg_rtx_no++] = val;
918 return val;
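/* Illustrative sketch, kept disabled: expansion-time use of gen_reg_rtx.
   Pseudo numbers are handed out sequentially, and complex modes come
   back as a CONCAT of two pseudos, assuming generating_concat_p is set
   as it is during initial RTL expansion.  */
#if 0
static void
gen_reg_rtx_example (void)
{
  rtx t1 = gen_reg_rtx (SImode);
  rtx t2 = gen_reg_rtx (SImode);
  gcc_assert (REGNO (t2) == REGNO (t1) + 1);
  gcc_assert (GET_CODE (gen_reg_rtx (DCmode)) == CONCAT);
}
#endif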
921 /* Update NEW with the same attributes as REG, but with OFFSET added
922 to the REG_OFFSET. */
924 static void
925 update_reg_offset (rtx new_rtx, rtx reg, int offset)
927 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
928 REG_OFFSET (reg) + offset);
931 /* Generate a register with same attributes as REG, but with OFFSET
932 added to the REG_OFFSET. */
935 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
936 int offset)
938 rtx new_rtx = gen_rtx_REG (mode, regno);
940 update_reg_offset (new_rtx, reg, offset);
941 return new_rtx;
944 /* Generate a new pseudo-register with the same attributes as REG, but
945 with OFFSET added to the REG_OFFSET. */
948 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
950 rtx new_rtx = gen_reg_rtx (mode);
952 update_reg_offset (new_rtx, reg, offset);
953 return new_rtx;
956 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
957 new register is a (possibly paradoxical) lowpart of the old one. */
959 void
960 adjust_reg_mode (rtx reg, enum machine_mode mode)
962 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
963 PUT_MODE (reg, mode);
966 /* Copy REG's attributes from X, if X has any attributes. If REG and X
967 have different modes, REG is a (possibly paradoxical) lowpart of X. */
969 void
970 set_reg_attrs_from_value (rtx reg, rtx x)
972 int offset;
974 /* Hard registers can be reused for multiple purposes within the same
975 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
976 on them is wrong. */
977 if (HARD_REGISTER_P (reg))
978 return;
980 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
981 if (MEM_P (x))
983 if (MEM_OFFSET_KNOWN_P (x))
984 REG_ATTRS (reg) = get_reg_attrs (MEM_EXPR (x),
985 MEM_OFFSET (x) + offset);
986 if (MEM_POINTER (x))
987 mark_reg_pointer (reg, 0);
989 else if (REG_P (x))
991 if (REG_ATTRS (x))
992 update_reg_offset (reg, x, offset);
993 if (REG_POINTER (x))
994 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
998 /* Generate a REG rtx for a new pseudo register, copying the mode
999 and attributes from X. */
1002 gen_reg_rtx_and_attrs (rtx x)
1004 rtx reg = gen_reg_rtx (GET_MODE (x));
1005 set_reg_attrs_from_value (reg, x);
1006 return reg;
1009 /* Set the register attributes for registers contained in PARM_RTX.
1010 Use needed values from memory attributes of MEM. */
1012 void
1013 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1015 if (REG_P (parm_rtx))
1016 set_reg_attrs_from_value (parm_rtx, mem);
1017 else if (GET_CODE (parm_rtx) == PARALLEL)
1019 /* Check for a NULL entry in the first slot, used to indicate that the
1020 parameter goes both on the stack and in registers. */
1021 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1022 for (; i < XVECLEN (parm_rtx, 0); i++)
1024 rtx x = XVECEXP (parm_rtx, 0, i);
1025 if (REG_P (XEXP (x, 0)))
1026 REG_ATTRS (XEXP (x, 0))
1027 = get_reg_attrs (MEM_EXPR (mem),
1028 INTVAL (XEXP (x, 1)));
1033 /* Set the REG_ATTRS for registers in value X, given that X represents
1034 decl T. */
1036 void
1037 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1039 if (GET_CODE (x) == SUBREG)
1041 gcc_assert (subreg_lowpart_p (x));
1042 x = SUBREG_REG (x);
1044 if (REG_P (x))
1045 REG_ATTRS (x)
1046 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1047 DECL_MODE (t)));
1048 if (GET_CODE (x) == CONCAT)
1050 if (REG_P (XEXP (x, 0)))
1051 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1052 if (REG_P (XEXP (x, 1)))
1053 REG_ATTRS (XEXP (x, 1))
1054 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1056 if (GET_CODE (x) == PARALLEL)
1058 int i, start;
1060 /* Check for a NULL entry, used to indicate that the parameter goes
1061 both on the stack and in registers. */
1062 if (XEXP (XVECEXP (x, 0, 0), 0))
1063 start = 0;
1064 else
1065 start = 1;
1067 for (i = start; i < XVECLEN (x, 0); i++)
1069 rtx y = XVECEXP (x, 0, i);
1070 if (REG_P (XEXP (y, 0)))
1071 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1076 /* Assign the RTX X to declaration T. */
1078 void
1079 set_decl_rtl (tree t, rtx x)
1081 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1082 if (x)
1083 set_reg_attrs_for_decl_rtl (t, x);
1086 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1087 if the ABI requires the parameter to be passed by reference. */
1089 void
1090 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1092 DECL_INCOMING_RTL (t) = x;
1093 if (x && !by_reference_p)
1094 set_reg_attrs_for_decl_rtl (t, x);
1097 /* Identify REG (which may be a CONCAT) as a user register. */
1099 void
1100 mark_user_reg (rtx reg)
1102 if (GET_CODE (reg) == CONCAT)
1104 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1105 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1107 else
1109 gcc_assert (REG_P (reg));
1110 REG_USERVAR_P (reg) = 1;
1114 /* Identify REG as a probable pointer register and show its alignment
1115 as ALIGN, if nonzero. */
1117 void
1118 mark_reg_pointer (rtx reg, int align)
1120 if (! REG_POINTER (reg))
1122 REG_POINTER (reg) = 1;
1124 if (align)
1125 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1127 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1128 /* We can no longer be sure just how aligned this pointer is. */
1129 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1132 /* Return 1 plus largest pseudo reg number used in the current function. */
1135 max_reg_num (void)
1137 return reg_rtx_no;
1140 /* Return 1 + the largest label number used so far in the current function. */
1143 max_label_num (void)
1145 return label_num;
1148 /* Return first label number used in this function (if any were used). */
1151 get_first_label_num (void)
1153 return first_label_num;
1156 /* If the rtx for label was created during the expansion of a nested
1157 function, then first_label_num won't include this label number.
1158 Fix this now so that array indices work later. */
1160 void
1161 maybe_set_first_label_num (rtx x)
1163 if (CODE_LABEL_NUMBER (x) < first_label_num)
1164 first_label_num = CODE_LABEL_NUMBER (x);
1167 /* Return a value representing some low-order bits of X, where the number
1168 of low-order bits is given by MODE. Note that no conversion is done
1169 between floating-point and fixed-point values, rather, the bit
1170 representation is returned.
1172 This function handles the cases in common between gen_lowpart, below,
1173 and two variants in cse.c and combine.c. These are the cases that can
1174 be safely handled at all points in the compilation.
1176 If this is not a case we can handle, return 0. */
1179 gen_lowpart_common (enum machine_mode mode, rtx x)
1181 int msize = GET_MODE_SIZE (mode);
1182 int xsize;
1183 int offset = 0;
1184 enum machine_mode innermode;
1186 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1187 so we have to make one up. Yuk. */
1188 innermode = GET_MODE (x);
1189 if (CONST_INT_P (x)
1190 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1191 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1192 else if (innermode == VOIDmode)
1193 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1195 xsize = GET_MODE_SIZE (innermode);
1197 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1199 if (innermode == mode)
1200 return x;
1202 /* MODE must occupy no more words than the mode of X. */
1203 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1204 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1205 return 0;
1207 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1208 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1209 return 0;
1211 offset = subreg_lowpart_offset (mode, innermode);
1213 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1214 && (GET_MODE_CLASS (mode) == MODE_INT
1215 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1217 /* If we are getting the low-order part of something that has been
1218 sign- or zero-extended, we can either just use the object being
1219 extended or make a narrower extension. If we want an even smaller
1220 piece than the size of the object being extended, call ourselves
1221 recursively.
1223 This case is used mostly by combine and cse. */
1225 if (GET_MODE (XEXP (x, 0)) == mode)
1226 return XEXP (x, 0);
1227 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1228 return gen_lowpart_common (mode, XEXP (x, 0));
1229 else if (msize < xsize)
1230 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1232 else if (GET_CODE (x) == SUBREG || REG_P (x)
1233 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1234 || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
1235 return simplify_gen_subreg (mode, x, innermode, offset);
1237 /* Otherwise, we can't do this. */
1238 return 0;
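/* Illustrative sketch, kept disabled: the extension shortcut described
   above.  P is a fresh SImode pseudo wrapped in a DImode zero-extension;
   asking for its SImode low part just returns P itself.  */
#if 0
static void
gen_lowpart_common_example (void)
{
  rtx p = gen_reg_rtx (SImode);
  rtx x = gen_rtx_ZERO_EXTEND (DImode, p);
  gcc_assert (gen_lowpart_common (SImode, x) == p);
}
#endif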
1242 gen_highpart (enum machine_mode mode, rtx x)
1244 unsigned int msize = GET_MODE_SIZE (mode);
1245 rtx result;
1247 /* This case loses if X is a subreg. To catch bugs early,
1248 complain if an invalid MODE is used even in other cases. */
1249 gcc_assert (msize <= UNITS_PER_WORD
1250 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1252 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1253 subreg_highpart_offset (mode, GET_MODE (x)));
1254 gcc_assert (result);
1256 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1257 the target if we have a MEM. gen_highpart must return a valid operand,
1258 emitting code if necessary to do so. */
1259 if (MEM_P (result))
1261 result = validize_mem (result);
1262 gcc_assert (result);
1265 return result;
1268 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1269 can be a VOIDmode constant. */
1271 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1273 if (GET_MODE (exp) != VOIDmode)
1275 gcc_assert (GET_MODE (exp) == innermode);
1276 return gen_highpart (outermode, exp);
1278 return simplify_gen_subreg (outermode, exp, innermode,
1279 subreg_highpart_offset (outermode, innermode));
1282 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1284 unsigned int
1285 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1287 unsigned int offset = 0;
1288 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1290 if (difference > 0)
1292 if (WORDS_BIG_ENDIAN)
1293 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1294 if (BYTES_BIG_ENDIAN)
1295 offset += difference % UNITS_PER_WORD;
1298 return offset;
1301 /* Return the offset in bytes of the OUTERMODE high part
1302 of a value of mode INNERMODE stored in memory in target format. */
1303 unsigned int
1304 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1306 unsigned int offset = 0;
1307 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1309 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1311 if (difference > 0)
1313 if (! WORDS_BIG_ENDIAN)
1314 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1315 if (! BYTES_BIG_ENDIAN)
1316 offset += difference % UNITS_PER_WORD;
1319 return offset;
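/* Worked example (illustrative): for SImode parts of a DImode value on a
   32-bit target, subreg_lowpart_offset (SImode, DImode) is 0 and
   subreg_highpart_offset (SImode, DImode) is 4 on a little-endian
   target; the two results swap on a big-endian target, matching the
   in-memory layout of the two words.  */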
1322 /* Return 1 iff X, assumed to be a SUBREG,
1323 refers to the least significant part of its containing reg.
1324 If X is not a SUBREG, always return 1 (it is its own low part!). */
1327 subreg_lowpart_p (const_rtx x)
1329 if (GET_CODE (x) != SUBREG)
1330 return 1;
1331 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1332 return 0;
1334 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1335 == SUBREG_BYTE (x));
1338 /* Return true if X is a paradoxical subreg, false otherwise. */
1339 bool
1340 paradoxical_subreg_p (const_rtx x)
1342 if (GET_CODE (x) != SUBREG)
1343 return false;
1344 return (GET_MODE_PRECISION (GET_MODE (x))
1345 > GET_MODE_PRECISION (GET_MODE (SUBREG_REG (x))));
1348 /* Return subword OFFSET of operand OP.
1349 The word number, OFFSET, is interpreted as the word number starting
1350 at the low-order address. OFFSET 0 is the low-order word if not
1351 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1353 If we cannot extract the required word, we return zero. Otherwise,
1354 an rtx corresponding to the requested word will be returned.
1356 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1357 reload has completed, a valid address will always be returned. After
1358 reload, if a valid address cannot be returned, we return zero.
1360 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1361 it is the responsibility of the caller.
1363 MODE is the mode of OP in case it is a CONST_INT.
1365 ??? This is still rather broken for some cases. The problem for the
1366 moment is that all callers of this thing provide no 'goal mode' to
1367 tell us to work with. This exists because all callers were written
1368 in a word based SUBREG world.
1369 Nowadays most uses of this function can be replaced by calls to
1370 simplify_subreg.
1374 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1376 if (mode == VOIDmode)
1377 mode = GET_MODE (op);
1379 gcc_assert (mode != VOIDmode);
1381 /* If OP is narrower than a word, fail. */
1382 if (mode != BLKmode
1383 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1384 return 0;
1386 /* If we want a word outside OP, return zero. */
1387 if (mode != BLKmode
1388 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1389 return const0_rtx;
1391 /* Form a new MEM at the requested address. */
1392 if (MEM_P (op))
1394 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1396 if (! validate_address)
1397 return new_rtx;
1399 else if (reload_completed)
1401 if (! strict_memory_address_addr_space_p (word_mode,
1402 XEXP (new_rtx, 0),
1403 MEM_ADDR_SPACE (op)))
1404 return 0;
1406 else
1407 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1410 /* Rest can be handled by simplify_subreg. */
1411 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1414 /* Similar to `operand_subword', but never return 0. If we can't
1415 extract the required subword, put OP into a register and try again.
1416 The second attempt must succeed. We always validate the address in
1417 this case.
1419 MODE is the mode of OP, in case it is CONST_INT. */
1422 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1424 rtx result = operand_subword (op, offset, 1, mode);
1426 if (result)
1427 return result;
1429 if (mode != BLKmode && mode != VOIDmode)
1431 /* If this is a register which cannot be accessed by words, copy it
1432 to a pseudo register. */
1433 if (REG_P (op))
1434 op = copy_to_reg (op);
1435 else
1436 op = force_reg (mode, op);
1439 result = operand_subword (op, offset, 1, mode);
1440 gcc_assert (result);
1442 return result;
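/* Illustrative sketch, kept disabled: splitting a DImode register or
   memory operand into its two word-sized halves on a target where
   UNITS_PER_WORD is 4 (an assumption of the example).  Which half is
   the low-order one depends on WORDS_BIG_ENDIAN, as described above.  */
#if 0
static void
operand_subword_example (rtx op)        /* OP is a DImode REG or MEM.  */
{
  rtx w0 = operand_subword_force (op, 0, DImode);
  rtx w1 = operand_subword_force (op, 1, DImode);
  gcc_assert (GET_MODE (w0) == word_mode && GET_MODE (w1) == word_mode);
}
#endif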
1445 /* Return 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
1446 and 0 otherwise. */
1449 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1451 if (expr1 == expr2)
1452 return 1;
1454 if (! expr1 || ! expr2)
1455 return 0;
1457 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1458 return 0;
1460 return operand_equal_p (expr1, expr2, 0);
1463 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1464 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1465 -1 if not known. */
1468 get_mem_align_offset (rtx mem, unsigned int align)
1470 tree expr;
1471 unsigned HOST_WIDE_INT offset;
1473 /* This function can't use
1474 if (!MEM_EXPR (mem) || !MEM_OFFSET_KNOWN_P (mem)
1475 || (MAX (MEM_ALIGN (mem),
1476 MAX (align, get_object_alignment (MEM_EXPR (mem))))
1477 < align))
1478 return -1;
1479 else
1480 return (- MEM_OFFSET (mem)) & (align / BITS_PER_UNIT - 1);
1481 for two reasons:
1482 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1483 for <variable>. get_inner_reference doesn't handle it and
1484 even if it did, the alignment in that case needs to be determined
1485 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1486 - it would do a suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1487 isn't sufficiently aligned, the object it is in might be. */
1488 gcc_assert (MEM_P (mem));
1489 expr = MEM_EXPR (mem);
1490 if (expr == NULL_TREE || !MEM_OFFSET_KNOWN_P (mem))
1491 return -1;
1493 offset = MEM_OFFSET (mem);
1494 if (DECL_P (expr))
1496 if (DECL_ALIGN (expr) < align)
1497 return -1;
1499 else if (INDIRECT_REF_P (expr))
1501 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1502 return -1;
1504 else if (TREE_CODE (expr) == COMPONENT_REF)
1506 while (1)
1508 tree inner = TREE_OPERAND (expr, 0);
1509 tree field = TREE_OPERAND (expr, 1);
1510 tree byte_offset = component_ref_field_offset (expr);
1511 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1513 if (!byte_offset
1514 || !host_integerp (byte_offset, 1)
1515 || !host_integerp (bit_offset, 1))
1516 return -1;
1518 offset += tree_low_cst (byte_offset, 1);
1519 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1521 if (inner == NULL_TREE)
1523 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1524 < (unsigned int) align)
1525 return -1;
1526 break;
1528 else if (DECL_P (inner))
1530 if (DECL_ALIGN (inner) < align)
1531 return -1;
1532 break;
1534 else if (TREE_CODE (inner) != COMPONENT_REF)
1535 return -1;
1536 expr = inner;
1539 else
1540 return -1;
1542 return offset & ((align / BITS_PER_UNIT) - 1);
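/* Worked example (illustrative): if MEM_EXPR is a decl whose DECL_ALIGN
   is 64 bits and MEM_OFFSET is 6, then get_mem_align_offset (mem, 32)
   returns 6 & 3 == 2, i.e. the address is 2 bytes past a 32-bit
   boundary, whereas get_mem_align_offset (mem, 128) returns -1 because
   the decl's alignment is too small to tell.  */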
1545 /* Given REF (a MEM) and T, either the type of REF or the expression
1546 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1547 if we are making a new object of this type. BITPOS is nonzero if
1548 there is an offset outstanding on T that will be applied later. */
1550 void
1551 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1552 HOST_WIDE_INT bitpos)
1554 HOST_WIDE_INT apply_bitpos = 0;
1555 tree type;
1556 struct mem_attrs attrs, *defattrs, *refattrs;
1557 addr_space_t as;
1559 /* It can happen that type_for_mode was given a mode for which there
1560 is no language-level type, in which case it returns NULL, which
1561 we can see here. */
1562 if (t == NULL_TREE)
1563 return;
1565 type = TYPE_P (t) ? t : TREE_TYPE (t);
1566 if (type == error_mark_node)
1567 return;
1569 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1570 wrong answer, as it assumes that DECL_RTL already has the right alias
1571 info. Callers should not set DECL_RTL until after the call to
1572 set_mem_attributes. */
1573 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1575 memset (&attrs, 0, sizeof (attrs));
1577 /* Get the alias set from the expression or type (perhaps using a
1578 front-end routine) and use it. */
1579 attrs.alias = get_alias_set (t);
1581 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1582 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1584 /* Default values from pre-existing memory attributes if present. */
1585 refattrs = MEM_ATTRS (ref);
1586 if (refattrs)
1588 /* ??? Can this ever happen? Calling this routine on a MEM that
1589 already carries memory attributes should probably be invalid. */
1590 attrs.expr = refattrs->expr;
1591 attrs.offset_known_p = refattrs->offset_known_p;
1592 attrs.offset = refattrs->offset;
1593 attrs.size_known_p = refattrs->size_known_p;
1594 attrs.size = refattrs->size;
1595 attrs.align = refattrs->align;
1598 /* Otherwise, default values from the mode of the MEM reference. */
1599 else
1601 defattrs = mode_mem_attrs[(int) GET_MODE (ref)];
1602 gcc_assert (!defattrs->expr);
1603 gcc_assert (!defattrs->offset_known_p);
1605 /* Respect mode size. */
1606 attrs.size_known_p = defattrs->size_known_p;
1607 attrs.size = defattrs->size;
1608 /* ??? Is this really necessary? We probably should always get
1609 the size from the type below. */
1611 /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
1612 if T is an object, always compute the object alignment below. */
1613 if (TYPE_P (t))
1614 attrs.align = defattrs->align;
1615 else
1616 attrs.align = BITS_PER_UNIT;
1617 /* ??? If T is a type, respecting mode alignment may *also* be wrong
1618 e.g. if the type carries an alignment attribute. Should we be
1619 able to simply always use TYPE_ALIGN? */
1622 /* We can set the alignment from the type if we are making an object,
1623 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1624 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1625 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1627 else if (TREE_CODE (t) == MEM_REF)
1629 tree op0 = TREE_OPERAND (t, 0);
1630 if (TREE_CODE (op0) == ADDR_EXPR
1631 && (DECL_P (TREE_OPERAND (op0, 0))
1632 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
1634 if (DECL_P (TREE_OPERAND (op0, 0)))
1635 attrs.align = DECL_ALIGN (TREE_OPERAND (op0, 0));
1636 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1638 attrs.align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
1639 #ifdef CONSTANT_ALIGNMENT
1640 attrs.align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0),
1641 attrs.align);
1642 #endif
1644 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1646 unsigned HOST_WIDE_INT ioff
1647 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1648 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1649 attrs.align = MIN (aoff, attrs.align);
1652 else
1653 /* ??? This isn't fully correct, we can't set the alignment from the
1654 type in all cases. */
1655 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1658 else if (TREE_CODE (t) == TARGET_MEM_REF)
1659 /* ??? This isn't fully correct, we can't set the alignment from the
1660 type in all cases. */
1661 attrs.align = MAX (attrs.align, TYPE_ALIGN (type));
1663 /* If the size is known, we can set that. */
1664 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1666 attrs.size_known_p = true;
1667 attrs.size = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1670 /* If T is not a type, we may be able to deduce some more information about
1671 the expression. */
1672 if (! TYPE_P (t))
1674 tree base;
1675 bool align_computed = false;
1677 if (TREE_THIS_VOLATILE (t))
1678 MEM_VOLATILE_P (ref) = 1;
1680 /* Now remove any conversions: they don't change what the underlying
1681 object is. Likewise for SAVE_EXPR. */
1682 while (CONVERT_EXPR_P (t)
1683 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1684 || TREE_CODE (t) == SAVE_EXPR)
1685 t = TREE_OPERAND (t, 0);
1687 /* Note whether this expression can trap. */
1688 MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);
1690 base = get_base_address (t);
1691 if (base)
1693 if (DECL_P (base)
1694 && TREE_READONLY (base)
1695 && (TREE_STATIC (base) || DECL_EXTERNAL (base))
1696 && !TREE_THIS_VOLATILE (base))
1697 MEM_READONLY_P (ref) = 1;
1699 /* Mark static const strings readonly as well. */
1700 if (TREE_CODE (base) == STRING_CST
1701 && TREE_READONLY (base)
1702 && TREE_STATIC (base))
1703 MEM_READONLY_P (ref) = 1;
1705 if (TREE_CODE (base) == MEM_REF
1706 || TREE_CODE (base) == TARGET_MEM_REF)
1707 as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (base,
1708 0))));
1709 else
1710 as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1712 else
1713 as = TYPE_ADDR_SPACE (type);
1715 /* If this expression uses its parent's alias set, mark it such
1716 that we won't change it. */
1717 if (component_uses_parent_alias_set (t))
1718 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1720 /* If this is a decl, set the attributes of the MEM from it. */
1721 if (DECL_P (t))
1723 attrs.expr = t;
1724 attrs.offset_known_p = true;
1725 attrs.offset = 0;
1726 apply_bitpos = bitpos;
1727 if (DECL_SIZE_UNIT (t) && host_integerp (DECL_SIZE_UNIT (t), 1))
1729 attrs.size_known_p = true;
1730 attrs.size = tree_low_cst (DECL_SIZE_UNIT (t), 1);
1732 else
1733 attrs.size_known_p = false;
1734 attrs.align = DECL_ALIGN (t);
1735 align_computed = true;
1738 /* If this is a constant, we know the alignment. */
1739 else if (CONSTANT_CLASS_P (t))
1741 attrs.align = TYPE_ALIGN (type);
1742 #ifdef CONSTANT_ALIGNMENT
1743 attrs.align = CONSTANT_ALIGNMENT (t, attrs.align);
1744 #endif
1745 align_computed = true;
1748 /* If this is a field reference and not a bit-field, record it. */
1749 /* ??? There is some information that can be gleaned from bit-fields,
1750 such as the word offset in the structure that might be modified.
1751 But skip it for now. */
1752 else if (TREE_CODE (t) == COMPONENT_REF
1753 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1755 attrs.expr = t;
1756 attrs.offset_known_p = true;
1757 attrs.offset = 0;
1758 apply_bitpos = bitpos;
1759 /* ??? Any reason the field size would be different than
1760 the size we got from the type? */
1763 /* If this is an array reference, look for an outer field reference. */
1764 else if (TREE_CODE (t) == ARRAY_REF)
1766 tree off_tree = size_zero_node;
1767 /* We can't modify t, because we use it at the end of the
1768 function. */
1769 tree t2 = t;
1773 tree index = TREE_OPERAND (t2, 1);
1774 tree low_bound = array_ref_low_bound (t2);
1775 tree unit_size = array_ref_element_size (t2);
1777 /* We assume all arrays have sizes that are a multiple of a byte.
1778 First subtract the lower bound, if any, in the type of the
1779 index, then convert to sizetype and multiply by the size of
1780 the array element. */
1781 if (! integer_zerop (low_bound))
1782 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1783 index, low_bound);
1785 off_tree = size_binop (PLUS_EXPR,
1786 size_binop (MULT_EXPR,
1787 fold_convert (sizetype,
1788 index),
1789 unit_size),
1790 off_tree);
1791 t2 = TREE_OPERAND (t2, 0);
1793 while (TREE_CODE (t2) == ARRAY_REF);
1795 if (DECL_P (t2))
1797 attrs.expr = t2;
1798 attrs.offset_known_p = false;
1799 if (host_integerp (off_tree, 1))
1801 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1802 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1803 attrs.align = DECL_ALIGN (t2);
1804 if (aoff && (unsigned HOST_WIDE_INT) aoff < attrs.align)
1805 attrs.align = aoff;
1806 align_computed = true;
1807 attrs.offset_known_p = true;
1808 attrs.offset = ioff;
1809 apply_bitpos = bitpos;
1812 else if (TREE_CODE (t2) == COMPONENT_REF)
1814 attrs.expr = t2;
1815 attrs.offset_known_p = false;
1816 if (host_integerp (off_tree, 1))
1818 attrs.offset_known_p = true;
1819 attrs.offset = tree_low_cst (off_tree, 1);
1820 apply_bitpos = bitpos;
1822 /* ??? Any reason the field size would be different than
1823 the size we got from the type? */
1826 /* If this is an indirect reference, record it. */
1827 else if (TREE_CODE (t) == MEM_REF)
1829 attrs.expr = t;
1830 attrs.offset_known_p = true;
1831 attrs.offset = 0;
1832 apply_bitpos = bitpos;
1836 /* If this is an indirect reference, record it. */
1837 else if (TREE_CODE (t) == MEM_REF
1838 || TREE_CODE (t) == TARGET_MEM_REF)
1840 attrs.expr = t;
1841 attrs.offset_known_p = true;
1842 attrs.offset = 0;
1843 apply_bitpos = bitpos;
1846 if (!align_computed)
1848 unsigned int obj_align = get_object_alignment (t);
1849 attrs.align = MAX (attrs.align, obj_align);
1852 else
1853 as = TYPE_ADDR_SPACE (type);
1855 /* If we modified OFFSET based on T, then subtract the outstanding
1856 bit position offset. Similarly, increase the size of the accessed
1857 object to contain the negative offset. */
1858 if (apply_bitpos)
1860 gcc_assert (attrs.offset_known_p);
1861 attrs.offset -= apply_bitpos / BITS_PER_UNIT;
1862 if (attrs.size_known_p)
1863 attrs.size += apply_bitpos / BITS_PER_UNIT;
1866 /* Now set the attributes we computed above. */
1867 attrs.addrspace = as;
1868 set_mem_attrs (ref, &attrs);
1871 void
1872 set_mem_attributes (rtx ref, tree t, int objectp)
1874 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
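/* Illustrative sketch, kept disabled: typical use when expanding a
   decl.  DECL stands for a VAR_DECL whose DECL_RTL has not been set
   yet, and ADDR for its address; note the ordering constraint from the
   assertion in set_mem_attributes_minus_bitpos.  */
#if 0
static void
set_mem_attributes_example (tree decl, rtx addr)
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
  set_mem_attributes (mem, decl, 1);    /* Before DECL_RTL is set.  */
  gcc_assert (MEM_EXPR (mem) == decl && MEM_OFFSET_KNOWN_P (mem));
  set_decl_rtl (decl, mem);
}
#endif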
1877 /* Set the alias set of MEM to SET. */
1879 void
1880 set_mem_alias_set (rtx mem, alias_set_type set)
1882 struct mem_attrs attrs;
1884 /* If the new and old alias sets don't conflict, something is wrong. */
1885 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1886 attrs = *get_mem_attrs (mem);
1887 attrs.alias = set;
1888 set_mem_attrs (mem, &attrs);
1891 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1893 void
1894 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1896 struct mem_attrs attrs;
1898 attrs = *get_mem_attrs (mem);
1899 attrs.addrspace = addrspace;
1900 set_mem_attrs (mem, &attrs);
1903 /* Set the alignment of MEM to ALIGN bits. */
1905 void
1906 set_mem_align (rtx mem, unsigned int align)
1908 struct mem_attrs attrs;
1910 attrs = *get_mem_attrs (mem);
1911 attrs.align = align;
1912 set_mem_attrs (mem, &attrs);
1915 /* Set the expr for MEM to EXPR. */
1917 void
1918 set_mem_expr (rtx mem, tree expr)
1920 struct mem_attrs attrs;
1922 attrs = *get_mem_attrs (mem);
1923 attrs.expr = expr;
1924 set_mem_attrs (mem, &attrs);
1927 /* Set the offset of MEM to OFFSET. */
1929 void
1930 set_mem_offset (rtx mem, HOST_WIDE_INT offset)
1932 struct mem_attrs attrs;
1934 attrs = *get_mem_attrs (mem);
1935 attrs.offset_known_p = true;
1936 attrs.offset = offset;
1937 set_mem_attrs (mem, &attrs);
1940 /* Clear the offset of MEM. */
1942 void
1943 clear_mem_offset (rtx mem)
1945 struct mem_attrs attrs;
1947 attrs = *get_mem_attrs (mem);
1948 attrs.offset_known_p = false;
1949 set_mem_attrs (mem, &attrs);
1952 /* Set the size of MEM to SIZE. */
1954 void
1955 set_mem_size (rtx mem, HOST_WIDE_INT size)
1957 struct mem_attrs attrs;
1959 attrs = *get_mem_attrs (mem);
1960 attrs.size_known_p = true;
1961 attrs.size = size;
1962 set_mem_attrs (mem, &attrs);
1965 /* Clear the size of MEM. */
1967 void
1968 clear_mem_size (rtx mem)
1970 struct mem_attrs attrs;
1972 attrs = *get_mem_attrs (mem);
1973 attrs.size_known_p = false;
1974 set_mem_attrs (mem, &attrs);
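/* The setters above all follow the same copy-modify-store pattern on the
   MEM's shared mem_attrs block, so adjusting several attributes is just a
   series of calls.  An illustrative sketch (MEM is a placeholder):

       set_mem_align (mem, 32);
       set_mem_size (mem, 4);
       clear_mem_offset (mem);

   Each helper copies *get_mem_attrs (mem), changes one field and stores
   the whole block back with set_mem_attrs.  */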
1977 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1978 and its address changed to ADDR. (VOIDmode means don't change the mode.
1979 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1980 returned memory location is required to be valid. The memory
1981 attributes are not changed. */
1983 static rtx
1984 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1986 addr_space_t as;
1987 rtx new_rtx;
1989 gcc_assert (MEM_P (memref));
1990 as = MEM_ADDR_SPACE (memref);
1991 if (mode == VOIDmode)
1992 mode = GET_MODE (memref);
1993 if (addr == 0)
1994 addr = XEXP (memref, 0);
1995 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1996 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1997 return memref;
1999 if (validate)
2001 if (reload_in_progress || reload_completed)
2002 gcc_assert (memory_address_addr_space_p (mode, addr, as));
2003 else
2004 addr = memory_address_addr_space (mode, addr, as);
2007 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
2008 return memref;
2010 new_rtx = gen_rtx_MEM (mode, addr);
2011 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2012 return new_rtx;
2015 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
2016 way we are changing MEMREF, so we only preserve the alias set. */
2019 change_address (rtx memref, enum machine_mode mode, rtx addr)
2021 rtx new_rtx = change_address_1 (memref, mode, addr, 1);
2022 enum machine_mode mmode = GET_MODE (new_rtx);
2023 struct mem_attrs attrs, *defattrs;
2025 attrs = *get_mem_attrs (memref);
2026 defattrs = mode_mem_attrs[(int) mmode];
2027 attrs.expr = NULL_TREE;
2028 attrs.offset_known_p = false;
2029 attrs.size_known_p = defattrs->size_known_p;
2030 attrs.size = defattrs->size;
2031 attrs.align = defattrs->align;
2033 /* If there are no changes, just return the original memory reference. */
2034 if (new_rtx == memref)
2036 if (mem_attrs_eq_p (get_mem_attrs (memref), &attrs))
2037 return new_rtx;
2039 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
2040 MEM_COPY_ATTRIBUTES (new_rtx, memref);
2043 set_mem_attrs (new_rtx, &attrs);
2044 return new_rtx;
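/* An illustrative sketch of change_address (MEM and NEW_ADDR are
   placeholders): reinterpret an existing memory reference in a different
   mode at a different address, keeping only the alias set:

       rtx word_mem = change_address (mem, SImode, new_addr);

   Since the caller does not say how the new reference relates to the old
   one, the expr and offset attributes are dropped as shown above.  */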
2047 /* Return a memory reference like MEMREF, but with its mode changed
2048 to MODE and its address offset by OFFSET bytes. If VALIDATE is
2049 nonzero, the memory address is forced to be valid.
2050 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
2051 and caller is responsible for adjusting MEMREF base register. */
2054 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
2055 int validate, int adjust)
2057 rtx addr = XEXP (memref, 0);
2058 rtx new_rtx;
2059 enum machine_mode address_mode;
2060 int pbits;
2061 struct mem_attrs attrs, *defattrs;
2062 unsigned HOST_WIDE_INT max_align;
2064 attrs = *get_mem_attrs (memref);
2066 /* If there are no changes, just return the original memory reference. */
2067 if (mode == GET_MODE (memref) && !offset
2068 && (!validate || memory_address_addr_space_p (mode, addr,
2069 attrs.addrspace)))
2070 return memref;
2072 /* ??? Prefer to create garbage instead of creating shared rtl.
2073 This may happen even if offset is nonzero -- consider
2074 (plus (plus reg reg) const_int) -- so do this always. */
2075 addr = copy_rtx (addr);
2077 /* Convert a possibly large offset to a signed value within the
2078 range of the target address space. */
2079 address_mode = targetm.addr_space.address_mode (attrs.addrspace);
2080 pbits = GET_MODE_BITSIZE (address_mode);
2081 if (HOST_BITS_PER_WIDE_INT > pbits)
2083 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2084 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2085 >> shift);
2088 if (adjust)
2090 /* If MEMREF's address is a LO_SUM and the offset is within the alignment of
2091 the object, we can merge the offset into the LO_SUM. */

2092 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2093 && offset >= 0
2094 && (unsigned HOST_WIDE_INT) offset
2095 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2096 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2097 plus_constant (XEXP (addr, 1), offset));
2098 else
2099 addr = plus_constant (addr, offset);
2102 new_rtx = change_address_1 (memref, mode, addr, validate);
2104 /* If the address is a REG, change_address_1 rightfully returns memref,
2105 but this would destroy memref's MEM_ATTRS. */
2106 if (new_rtx == memref && offset != 0)
2107 new_rtx = copy_rtx (new_rtx);
2109 /* Compute the new values of the memory attributes due to this adjustment.
2110 We add the offsets and update the alignment. */
2111 if (attrs.offset_known_p)
2112 attrs.offset += offset;
2114 /* Compute the new alignment by taking the MIN of the alignment and the
2115 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2116 is zero. */
2117 if (offset != 0)
2119 max_align = (offset & -offset) * BITS_PER_UNIT;
2120 attrs.align = MIN (attrs.align, max_align);
2123 /* We can compute the size in a number of ways. */
2124 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2125 if (defattrs->size_known_p)
2127 attrs.size_known_p = true;
2128 attrs.size = defattrs->size;
2130 else if (attrs.size_known_p)
2131 attrs.size -= offset;
2133 set_mem_attrs (new_rtx, &attrs);
2135 /* At some point, we should validate that this offset is within the object,
2136 if all the appropriate values are known. */
2137 return new_rtx;
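/* Callers normally reach this through the adjust_address and
   adjust_address_nv macros in expr.h, which pass ADJUST as 1 and VALIDATE
   as 1 or 0 respectively.  A sketch, assuming MEM is a DImode memory
   reference and a 4-byte word:

       rtx lo = adjust_address (mem, SImode, 0);
       rtx hi = adjust_address (mem, SImode, 4);

   Both halves inherit MEM's attributes with the offset, size and
   alignment updated as described above.  */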
2140 /* Return a memory reference like MEMREF, but with its mode changed
2141 to MODE and its address changed to ADDR, which is assumed to be
2142 MEMREF offset by OFFSET bytes. If VALIDATE is
2143 nonzero, the memory address is forced to be valid. */
2146 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2147 HOST_WIDE_INT offset, int validate)
2149 memref = change_address_1 (memref, VOIDmode, addr, validate);
2150 return adjust_address_1 (memref, mode, offset, validate, 0);
2153 /* Return a memory reference like MEMREF, but whose address is changed by
2154 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2155 known to be in OFFSET (possibly 1). */
2158 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2160 rtx new_rtx, addr = XEXP (memref, 0);
2161 enum machine_mode address_mode;
2162 struct mem_attrs attrs, *defattrs;
2164 attrs = *get_mem_attrs (memref);
2165 address_mode = targetm.addr_space.address_mode (attrs.addrspace);
2166 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2168 /* At this point we don't know _why_ the address is invalid. It
2169 could have secondary memory references, multiplies or anything.
2171 However, if we did go and rearrange things, we can wind up not
2172 being able to recognize the magic around pic_offset_table_rtx.
2173 This stuff is fragile, and is yet another example of why it is
2174 bad to expose PIC machinery too early. */
2175 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx,
2176 attrs.addrspace)
2177 && GET_CODE (addr) == PLUS
2178 && XEXP (addr, 0) == pic_offset_table_rtx)
2180 addr = force_reg (GET_MODE (addr), addr);
2181 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2184 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2185 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2187 /* If there are no changes, just return the original memory reference. */
2188 if (new_rtx == memref)
2189 return new_rtx;
2191 /* Update the alignment to reflect the offset. Reset the offset, which
2192 we don't know. */
2193 defattrs = mode_mem_attrs[(int) GET_MODE (new_rtx)];
2194 attrs.offset_known_p = false;
2195 attrs.size_known_p = defattrs->size_known_p;
2196 attrs.size = defattrs->size;
2197 attrs.align = MIN (attrs.align, pow2 * BITS_PER_UNIT);
2198 set_mem_attrs (new_rtx, &attrs);
2199 return new_rtx;
2202 /* Return a memory reference like MEMREF, but with its address changed to
2203 ADDR. The caller is asserting that the actual piece of memory pointed
2204 to is the same, just the form of the address is being changed, such as
2205 by putting something into a register. */
2208 replace_equiv_address (rtx memref, rtx addr)
2210 /* change_address_1 copies the memory attribute structure without change
2211 and that's exactly what we want here. */
2212 update_temp_slot_address (XEXP (memref, 0), addr);
2213 return change_address_1 (memref, VOIDmode, addr, 1);
2216 /* Likewise, but the reference is not required to be valid. */
2219 replace_equiv_address_nv (rtx memref, rtx addr)
2221 return change_address_1 (memref, VOIDmode, addr, 0);
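/* A common pattern, sketched here with MEM as a placeholder: when an
   address is not valid for the target, force it into a register while
   keeping all of the memory attributes:

       rtx valid_mem
         = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));

   The _nv variant above is used when the caller does not need the
   resulting address to be validated.  */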
2224 /* Return a memory reference like MEMREF, but with its mode widened to
2225 MODE and offset by OFFSET. This would be used by targets that e.g.
2226 cannot issue QImode memory operations and have to use SImode memory
2227 operations plus masking logic. */
2230 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2232 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2233 struct mem_attrs attrs;
2234 unsigned int size = GET_MODE_SIZE (mode);
2236 /* If there are no changes, just return the original memory reference. */
2237 if (new_rtx == memref)
2238 return new_rtx;
2240 attrs = *get_mem_attrs (new_rtx);
2242 /* If we don't know what offset we were at within the expression, then
2243 we can't know if we've overstepped the bounds. */
2244 if (! attrs.offset_known_p)
2245 attrs.expr = NULL_TREE;
2247 while (attrs.expr)
2249 if (TREE_CODE (attrs.expr) == COMPONENT_REF)
2251 tree field = TREE_OPERAND (attrs.expr, 1);
2252 tree offset = component_ref_field_offset (attrs.expr);
2254 if (! DECL_SIZE_UNIT (field))
2256 attrs.expr = NULL_TREE;
2257 break;
2260 /* Is the field at least as large as the access? If so, ok,
2261 otherwise strip back to the containing structure. */
2262 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2263 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2264 && attrs.offset >= 0)
2265 break;
2267 if (! host_integerp (offset, 1))
2269 attrs.expr = NULL_TREE;
2270 break;
2273 attrs.expr = TREE_OPERAND (attrs.expr, 0);
2274 attrs.offset += tree_low_cst (offset, 1);
2275 attrs.offset += (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2276 / BITS_PER_UNIT);
2278 /* Similarly for the decl. */
2279 else if (DECL_P (attrs.expr)
2280 && DECL_SIZE_UNIT (attrs.expr)
2281 && TREE_CODE (DECL_SIZE_UNIT (attrs.expr)) == INTEGER_CST
2282 && compare_tree_int (DECL_SIZE_UNIT (attrs.expr), size) >= 0
2283 && (! attrs.offset_known_p || attrs.offset >= 0))
2284 break;
2285 else
2287 /* The widened memory access overflows the expression, which means
2288 that it could alias another expression. Zap it. */
2289 attrs.expr = NULL_TREE;
2290 break;
2294 if (! attrs.expr)
2295 attrs.offset_known_p = false;
2297 /* The widened memory may alias other stuff, so zap the alias set. */
2298 /* ??? Maybe use get_alias_set on any remaining expression. */
2299 attrs.alias = 0;
2300 attrs.size_known_p = true;
2301 attrs.size = size;
2302 set_mem_attrs (new_rtx, &attrs);
2303 return new_rtx;
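/* An illustrative sketch (BYTE_MEM is a placeholder QImode memory
   reference): a target without byte loads could widen the access and do
   the masking separately:

       rtx word_mem = widen_memory_access (byte_mem, SImode, 0);

   The result keeps the underlying decl or field only if the wider access
   provably stays within it, as checked in the loop above.  */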
2306 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2307 static GTY(()) tree spill_slot_decl;
2309 tree
2310 get_spill_slot_decl (bool force_build_p)
2312 tree d = spill_slot_decl;
2313 rtx rd;
2314 struct mem_attrs attrs;
2316 if (d || !force_build_p)
2317 return d;
2319 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2320 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2321 DECL_ARTIFICIAL (d) = 1;
2322 DECL_IGNORED_P (d) = 1;
2323 TREE_USED (d) = 1;
2324 spill_slot_decl = d;
2326 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2327 MEM_NOTRAP_P (rd) = 1;
2328 attrs = *mode_mem_attrs[(int) BLKmode];
2329 attrs.alias = new_alias_set ();
2330 attrs.expr = d;
2331 set_mem_attrs (rd, &attrs);
2332 SET_DECL_RTL (d, rd);
2334 return d;
2337 /* Given MEM, a result from assign_stack_local, fill in the memory
2338 attributes as appropriate for a register allocator spill slot.
2339 These slots are not aliasable by other memory. We arrange for
2340 them all to use a single MEM_EXPR, so that the aliasing code can
2341 work properly in the case of shared spill slots. */
2343 void
2344 set_mem_attrs_for_spill (rtx mem)
2346 struct mem_attrs attrs;
2347 rtx addr;
2349 attrs = *get_mem_attrs (mem);
2350 attrs.expr = get_spill_slot_decl (true);
2351 attrs.alias = MEM_ALIAS_SET (DECL_RTL (attrs.expr));
2352 attrs.addrspace = ADDR_SPACE_GENERIC;
2354 /* We expect the incoming memory to be of the form:
2355 (mem:MODE (plus (reg sfp) (const_int offset)))
2356 with perhaps the plus missing for offset = 0. */
2357 addr = XEXP (mem, 0);
2358 attrs.offset_known_p = true;
2359 attrs.offset = 0;
2360 if (GET_CODE (addr) == PLUS
2361 && CONST_INT_P (XEXP (addr, 1)))
2362 attrs.offset = INTVAL (XEXP (addr, 1));
2364 set_mem_attrs (mem, &attrs);
2365 MEM_NOTRAP_P (mem) = 1;
2368 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2371 gen_label_rtx (void)
2373 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2374 NULL, label_num++, NULL);
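/* A sketch of the usual pattern (emit_label and emit_jump are defined
   elsewhere; the control flow is only an example):

       rtx label = gen_label_rtx ();
       emit_jump (label);
       ...
       emit_label (label);  */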
2377 /* For procedure integration. */
2379 /* Install new pointers to the first and last insns in the chain.
2380 Also, set cur_insn_uid to one higher than the last in use.
2381 Used for an inline-procedure after copying the insn chain. */
2383 void
2384 set_new_first_and_last_insn (rtx first, rtx last)
2386 rtx insn;
2388 set_first_insn (first);
2389 set_last_insn (last);
2390 cur_insn_uid = 0;
2392 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2394 int debug_count = 0;
2396 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2397 cur_debug_insn_uid = 0;
2399 for (insn = first; insn; insn = NEXT_INSN (insn))
2400 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2401 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2402 else
2404 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2405 if (DEBUG_INSN_P (insn))
2406 debug_count++;
2409 if (debug_count)
2410 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2411 else
2412 cur_debug_insn_uid++;
2414 else
2415 for (insn = first; insn; insn = NEXT_INSN (insn))
2416 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2418 cur_insn_uid++;
2421 /* Go through all the RTL insn bodies and copy any invalid shared
2422 structure. This routine should only be called once. */
2424 static void
2425 unshare_all_rtl_1 (rtx insn)
2427 /* Unshare just about everything else. */
2428 unshare_all_rtl_in_chain (insn);
2430 /* Make sure the addresses of stack slots found outside the insn chain
2431 (such as, in DECL_RTL of a variable) are not shared
2432 with the insn chain.
2434 This special care is necessary when the stack slot MEM does not
2435 actually appear in the insn chain. If it does appear, its address
2436 is unshared from all else at that point. */
2437 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2440 /* Go through all the RTL insn bodies and copy any invalid shared
2441 structure, again. This is a fairly expensive thing to do so it
2442 should be done sparingly. */
2444 void
2445 unshare_all_rtl_again (rtx insn)
2447 rtx p;
2448 tree decl;
2450 for (p = insn; p; p = NEXT_INSN (p))
2451 if (INSN_P (p))
2453 reset_used_flags (PATTERN (p));
2454 reset_used_flags (REG_NOTES (p));
2455 if (CALL_P (p))
2456 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2459 /* Make sure that virtual stack slots are not shared. */
2460 set_used_decls (DECL_INITIAL (cfun->decl));
2462 /* Make sure that virtual parameters are not shared. */
2463 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2464 set_used_flags (DECL_RTL (decl));
2466 reset_used_flags (stack_slot_list);
2468 unshare_all_rtl_1 (insn);
2471 unsigned int
2472 unshare_all_rtl (void)
2474 unshare_all_rtl_1 (get_insns ());
2475 return 0;
2478 struct rtl_opt_pass pass_unshare_all_rtl =
2481 RTL_PASS,
2482 "unshare", /* name */
2483 NULL, /* gate */
2484 unshare_all_rtl, /* execute */
2485 NULL, /* sub */
2486 NULL, /* next */
2487 0, /* static_pass_number */
2488 TV_NONE, /* tv_id */
2489 0, /* properties_required */
2490 0, /* properties_provided */
2491 0, /* properties_destroyed */
2492 0, /* todo_flags_start */
2493 TODO_verify_rtl_sharing /* todo_flags_finish */
2498 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2499 Recursively does the same for subexpressions. */
2501 static void
2502 verify_rtx_sharing (rtx orig, rtx insn)
2504 rtx x = orig;
2505 int i;
2506 enum rtx_code code;
2507 const char *format_ptr;
2509 if (x == 0)
2510 return;
2512 code = GET_CODE (x);
2514 /* These types may be freely shared. */
2516 switch (code)
2518 case REG:
2519 case DEBUG_EXPR:
2520 case VALUE:
2521 case CONST_INT:
2522 case CONST_DOUBLE:
2523 case CONST_FIXED:
2524 case CONST_VECTOR:
2525 case SYMBOL_REF:
2526 case LABEL_REF:
2527 case CODE_LABEL:
2528 case PC:
2529 case CC0:
2530 case RETURN:
2531 case SIMPLE_RETURN:
2532 case SCRATCH:
2533 return;
2534 /* SCRATCHes must be shared because each one represents a distinct value. */
2535 case CLOBBER:
2536 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2537 return;
2538 break;
2540 case CONST:
2541 if (shared_const_p (orig))
2542 return;
2543 break;
2545 case MEM:
2546 /* A MEM is allowed to be shared if its address is constant. */
2547 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2548 || reload_completed || reload_in_progress)
2549 return;
2551 break;
2553 default:
2554 break;
2557 /* This rtx may not be shared. If it has already been seen,
2558 report the invalid sharing. */
2559 #ifdef ENABLE_CHECKING
2560 if (RTX_FLAG (x, used))
2562 error ("invalid rtl sharing found in the insn");
2563 debug_rtx (insn);
2564 error ("shared rtx");
2565 debug_rtx (x);
2566 internal_error ("internal consistency failure");
2568 #endif
2569 gcc_assert (!RTX_FLAG (x, used));
2571 RTX_FLAG (x, used) = 1;
2573 /* Now scan the subexpressions recursively. */
2575 format_ptr = GET_RTX_FORMAT (code);
2577 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2579 switch (*format_ptr++)
2581 case 'e':
2582 verify_rtx_sharing (XEXP (x, i), insn);
2583 break;
2585 case 'E':
2586 if (XVEC (x, i) != NULL)
2588 int j;
2589 int len = XVECLEN (x, i);
2591 for (j = 0; j < len; j++)
2593 /* We allow sharing of ASM_OPERANDS inside a single
2594 instruction. */
2595 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2596 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2597 == ASM_OPERANDS))
2598 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2599 else
2600 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2603 break;
2606 return;
2609 /* Go through all the RTL insn bodies and check that there is no unexpected
2610 sharing in between the subexpressions. */
2612 DEBUG_FUNCTION void
2613 verify_rtl_sharing (void)
2615 rtx p;
2617 timevar_push (TV_VERIFY_RTL_SHARING);
2619 for (p = get_insns (); p; p = NEXT_INSN (p))
2620 if (INSN_P (p))
2622 reset_used_flags (PATTERN (p));
2623 reset_used_flags (REG_NOTES (p));
2624 if (CALL_P (p))
2625 reset_used_flags (CALL_INSN_FUNCTION_USAGE (p));
2626 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2628 int i;
2629 rtx q, sequence = PATTERN (p);
2631 for (i = 0; i < XVECLEN (sequence, 0); i++)
2633 q = XVECEXP (sequence, 0, i);
2634 gcc_assert (INSN_P (q));
2635 reset_used_flags (PATTERN (q));
2636 reset_used_flags (REG_NOTES (q));
2637 if (CALL_P (q))
2638 reset_used_flags (CALL_INSN_FUNCTION_USAGE (q));
2643 for (p = get_insns (); p; p = NEXT_INSN (p))
2644 if (INSN_P (p))
2646 verify_rtx_sharing (PATTERN (p), p);
2647 verify_rtx_sharing (REG_NOTES (p), p);
2648 if (CALL_P (p))
2649 verify_rtx_sharing (CALL_INSN_FUNCTION_USAGE (p), p);
2652 timevar_pop (TV_VERIFY_RTL_SHARING);
2655 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2656 Assumes the mark bits are cleared at entry. */
2658 void
2659 unshare_all_rtl_in_chain (rtx insn)
2661 for (; insn; insn = NEXT_INSN (insn))
2662 if (INSN_P (insn))
2664 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2665 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2666 if (CALL_P (insn))
2667 CALL_INSN_FUNCTION_USAGE (insn)
2668 = copy_rtx_if_shared (CALL_INSN_FUNCTION_USAGE (insn));
2672 /* Go through all virtual stack slots of a function and mark them as
2673 shared. We never replace the DECL_RTLs themselves with a copy,
2674 but expressions mentioned in a DECL_RTL cannot be shared with
2675 expressions in the instruction stream.
2677 Note that reload may convert pseudo registers into memories in-place.
2678 Pseudo registers are always shared, but MEMs never are. Thus if we
2679 reset the used flags on MEMs in the instruction stream, we must set
2680 them again on MEMs that appear in DECL_RTLs. */
2682 static void
2683 set_used_decls (tree blk)
2685 tree t;
2687 /* Mark decls. */
2688 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2689 if (DECL_RTL_SET_P (t))
2690 set_used_flags (DECL_RTL (t));
2692 /* Now process sub-blocks. */
2693 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2694 set_used_decls (t);
2697 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2698 Recursively does the same for subexpressions. Uses
2699 copy_rtx_if_shared_1 to reduce stack space. */
2702 copy_rtx_if_shared (rtx orig)
2704 copy_rtx_if_shared_1 (&orig);
2705 return orig;
2708 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2709 use. Recursively does the same for subexpressions. */
2711 static void
2712 copy_rtx_if_shared_1 (rtx *orig1)
2714 rtx x;
2715 int i;
2716 enum rtx_code code;
2717 rtx *last_ptr;
2718 const char *format_ptr;
2719 int copied = 0;
2720 int length;
2722 /* Repeat is used to turn tail-recursion into iteration. */
2723 repeat:
2724 x = *orig1;
2726 if (x == 0)
2727 return;
2729 code = GET_CODE (x);
2731 /* These types may be freely shared. */
2733 switch (code)
2735 case REG:
2736 case DEBUG_EXPR:
2737 case VALUE:
2738 case CONST_INT:
2739 case CONST_DOUBLE:
2740 case CONST_FIXED:
2741 case CONST_VECTOR:
2742 case SYMBOL_REF:
2743 case LABEL_REF:
2744 case CODE_LABEL:
2745 case PC:
2746 case CC0:
2747 case RETURN:
2748 case SIMPLE_RETURN:
2749 case SCRATCH:
2750 /* SCRATCHes must be shared because each one represents a distinct value. */
2751 return;
2752 case CLOBBER:
2753 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2754 return;
2755 break;
2757 case CONST:
2758 if (shared_const_p (x))
2759 return;
2760 break;
2762 case DEBUG_INSN:
2763 case INSN:
2764 case JUMP_INSN:
2765 case CALL_INSN:
2766 case NOTE:
2767 case BARRIER:
2768 /* The chain of insns is not being copied. */
2769 return;
2771 default:
2772 break;
2775 /* This rtx may not be shared. If it has already been seen,
2776 replace it with a copy of itself. */
2778 if (RTX_FLAG (x, used))
2780 x = shallow_copy_rtx (x);
2781 copied = 1;
2783 RTX_FLAG (x, used) = 1;
2785 /* Now scan the subexpressions recursively.
2786 We can store any replaced subexpressions directly into X
2787 since we know X is not shared! Any vectors in X
2788 must be copied if X was copied. */
2790 format_ptr = GET_RTX_FORMAT (code);
2791 length = GET_RTX_LENGTH (code);
2792 last_ptr = NULL;
2794 for (i = 0; i < length; i++)
2796 switch (*format_ptr++)
2798 case 'e':
2799 if (last_ptr)
2800 copy_rtx_if_shared_1 (last_ptr);
2801 last_ptr = &XEXP (x, i);
2802 break;
2804 case 'E':
2805 if (XVEC (x, i) != NULL)
2807 int j;
2808 int len = XVECLEN (x, i);
2810 /* Copy the vector iff I copied the rtx and the length
2811 is nonzero. */
2812 if (copied && len > 0)
2813 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2815 /* Call recursively on all inside the vector. */
2816 for (j = 0; j < len; j++)
2818 if (last_ptr)
2819 copy_rtx_if_shared_1 (last_ptr);
2820 last_ptr = &XVECEXP (x, i, j);
2823 break;
2826 *orig1 = x;
2827 if (last_ptr)
2829 orig1 = last_ptr;
2830 goto repeat;
2832 return;
2835 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2837 static void
2838 mark_used_flags (rtx x, int flag)
2840 int i, j;
2841 enum rtx_code code;
2842 const char *format_ptr;
2843 int length;
2845 /* Repeat is used to turn tail-recursion into iteration. */
2846 repeat:
2847 if (x == 0)
2848 return;
2850 code = GET_CODE (x);
2852 /* These types may be freely shared so we needn't do any resetting
2853 for them. */
2855 switch (code)
2857 case REG:
2858 case DEBUG_EXPR:
2859 case VALUE:
2860 case CONST_INT:
2861 case CONST_DOUBLE:
2862 case CONST_FIXED:
2863 case CONST_VECTOR:
2864 case SYMBOL_REF:
2865 case CODE_LABEL:
2866 case PC:
2867 case CC0:
2868 case RETURN:
2869 case SIMPLE_RETURN:
2870 return;
2872 case DEBUG_INSN:
2873 case INSN:
2874 case JUMP_INSN:
2875 case CALL_INSN:
2876 case NOTE:
2877 case LABEL_REF:
2878 case BARRIER:
2879 /* The chain of insns is not being copied. */
2880 return;
2882 default:
2883 break;
2886 RTX_FLAG (x, used) = flag;
2888 format_ptr = GET_RTX_FORMAT (code);
2889 length = GET_RTX_LENGTH (code);
2891 for (i = 0; i < length; i++)
2893 switch (*format_ptr++)
2895 case 'e':
2896 if (i == length-1)
2898 x = XEXP (x, i);
2899 goto repeat;
2901 mark_used_flags (XEXP (x, i), flag);
2902 break;
2904 case 'E':
2905 for (j = 0; j < XVECLEN (x, i); j++)
2906 mark_used_flags (XVECEXP (x, i, j), flag);
2907 break;
2912 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2913 to look for shared sub-parts. */
2915 void
2916 reset_used_flags (rtx x)
2918 mark_used_flags (x, 0);
2921 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2922 to look for shared sub-parts. */
2924 void
2925 set_used_flags (rtx x)
2927 mark_used_flags (x, 1);
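/* The used flag drives the unsharing machinery: clear it over a chain and
   copy_rtx_if_shared will then replace an rtx with a copy whenever it is
   seen a second time.  A sketch of the idiom used by unshare_all_rtl_again
   above (INSN is a placeholder):

       reset_used_flags (PATTERN (insn));
       ...
       PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */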
2930 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2931 Return X or the rtx for the pseudo reg the value of X was copied into.
2932 OTHER must be valid as a SET_DEST. */
2935 make_safe_from (rtx x, rtx other)
2937 while (1)
2938 switch (GET_CODE (other))
2940 case SUBREG:
2941 other = SUBREG_REG (other);
2942 break;
2943 case STRICT_LOW_PART:
2944 case SIGN_EXTEND:
2945 case ZERO_EXTEND:
2946 other = XEXP (other, 0);
2947 break;
2948 default:
2949 goto done;
2951 done:
2952 if ((MEM_P (other)
2953 && ! CONSTANT_P (x)
2954 && !REG_P (x)
2955 && GET_CODE (x) != SUBREG)
2956 || (REG_P (other)
2957 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2958 || reg_mentioned_p (other, x))))
2960 rtx temp = gen_reg_rtx (GET_MODE (x));
2961 emit_move_insn (temp, x);
2962 return temp;
2964 return x;
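/* A sketch of the intended use (X, TARGET and VAL are placeholders):
   before emitting code that stores into TARGET, make sure X does not
   live in TARGET, copying it to a fresh pseudo if it might:

       x = make_safe_from (x, target);
       emit_move_insn (target, val);
       ... X still holds its original value here ...  */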
2967 /* Emission of insns (adding them to the doubly-linked list). */
2969 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2972 get_last_insn_anywhere (void)
2974 struct sequence_stack *stack;
2975 if (get_last_insn ())
2976 return get_last_insn ();
2977 for (stack = seq_stack; stack; stack = stack->next)
2978 if (stack->last != 0)
2979 return stack->last;
2980 return 0;
2983 /* Return the first nonnote insn emitted in the current sequence or current
2984 function. This routine looks inside SEQUENCEs. */
2987 get_first_nonnote_insn (void)
2989 rtx insn = get_insns ();
2991 if (insn)
2993 if (NOTE_P (insn))
2994 for (insn = next_insn (insn);
2995 insn && NOTE_P (insn);
2996 insn = next_insn (insn))
2997 continue;
2998 else
3000 if (NONJUMP_INSN_P (insn)
3001 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3002 insn = XVECEXP (PATTERN (insn), 0, 0);
3006 return insn;
3009 /* Return the last nonnote insn emitted in the current sequence or current
3010 function. This routine looks inside SEQUENCEs. */
3013 get_last_nonnote_insn (void)
3015 rtx insn = get_last_insn ();
3017 if (insn)
3019 if (NOTE_P (insn))
3020 for (insn = previous_insn (insn);
3021 insn && NOTE_P (insn);
3022 insn = previous_insn (insn))
3023 continue;
3024 else
3026 if (NONJUMP_INSN_P (insn)
3027 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3028 insn = XVECEXP (PATTERN (insn), 0,
3029 XVECLEN (PATTERN (insn), 0) - 1);
3033 return insn;
3036 /* Return the number of actual (non-debug) insns emitted in this
3037 function. */
3040 get_max_insn_count (void)
3042 int n = cur_insn_uid;
3044 /* The table size must be stable across -g, to avoid codegen
3045 differences due to debug insns, and not be affected by
3046 -fmin-insn-uid, to avoid excessive table size and to simplify
3047 debugging of -fcompare-debug failures. */
3048 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3049 n -= cur_debug_insn_uid;
3050 else
3051 n -= MIN_NONDEBUG_INSN_UID;
3053 return n;
3057 /* Return the next insn. If it is a SEQUENCE, return the first insn
3058 of the sequence. */
3061 next_insn (rtx insn)
3063 if (insn)
3065 insn = NEXT_INSN (insn);
3066 if (insn && NONJUMP_INSN_P (insn)
3067 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3068 insn = XVECEXP (PATTERN (insn), 0, 0);
3071 return insn;
3074 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3075 of the sequence. */
3078 previous_insn (rtx insn)
3080 if (insn)
3082 insn = PREV_INSN (insn);
3083 if (insn && NONJUMP_INSN_P (insn)
3084 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3085 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3088 return insn;
3091 /* Return the next insn after INSN that is not a NOTE. This routine does not
3092 look inside SEQUENCEs. */
3095 next_nonnote_insn (rtx insn)
3097 while (insn)
3099 insn = NEXT_INSN (insn);
3100 if (insn == 0 || !NOTE_P (insn))
3101 break;
3104 return insn;
3107 /* Return the next insn after INSN that is not a NOTE, but stop the
3108 search before we enter another basic block. This routine does not
3109 look inside SEQUENCEs. */
3112 next_nonnote_insn_bb (rtx insn)
3114 while (insn)
3116 insn = NEXT_INSN (insn);
3117 if (insn == 0 || !NOTE_P (insn))
3118 break;
3119 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3120 return NULL_RTX;
3123 return insn;
3126 /* Return the previous insn before INSN that is not a NOTE. This routine does
3127 not look inside SEQUENCEs. */
3130 prev_nonnote_insn (rtx insn)
3132 while (insn)
3134 insn = PREV_INSN (insn);
3135 if (insn == 0 || !NOTE_P (insn))
3136 break;
3139 return insn;
3142 /* Return the previous insn before INSN that is not a NOTE, but stop
3143 the search before we enter another basic block. This routine does
3144 not look inside SEQUENCEs. */
3147 prev_nonnote_insn_bb (rtx insn)
3149 while (insn)
3151 insn = PREV_INSN (insn);
3152 if (insn == 0 || !NOTE_P (insn))
3153 break;
3154 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3155 return NULL_RTX;
3158 return insn;
3161 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3162 routine does not look inside SEQUENCEs. */
3165 next_nondebug_insn (rtx insn)
3167 while (insn)
3169 insn = NEXT_INSN (insn);
3170 if (insn == 0 || !DEBUG_INSN_P (insn))
3171 break;
3174 return insn;
3177 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3178 This routine does not look inside SEQUENCEs. */
3181 prev_nondebug_insn (rtx insn)
3183 while (insn)
3185 insn = PREV_INSN (insn);
3186 if (insn == 0 || !DEBUG_INSN_P (insn))
3187 break;
3190 return insn;
3193 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3194 This routine does not look inside SEQUENCEs. */
3197 next_nonnote_nondebug_insn (rtx insn)
3199 while (insn)
3201 insn = NEXT_INSN (insn);
3202 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3203 break;
3206 return insn;
3209 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3210 This routine does not look inside SEQUENCEs. */
3213 prev_nonnote_nondebug_insn (rtx insn)
3215 while (insn)
3217 insn = PREV_INSN (insn);
3218 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3219 break;
3222 return insn;
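/* These walkers advance first and then test, so they never return their
   argument.  A sketch of typical use (INSN is a placeholder): find the
   real instruction that follows INSN, ignoring notes and debug insns:

       rtx next = next_nonnote_nondebug_insn (insn);
       if (next != NULL_RTX && INSN_P (next))
         ... operate on NEXT ...  */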
3225 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3226 or 0, if there is none. This routine does not look inside
3227 SEQUENCEs. */
3230 next_real_insn (rtx insn)
3232 while (insn)
3234 insn = NEXT_INSN (insn);
3235 if (insn == 0 || INSN_P (insn))
3236 break;
3239 return insn;
3242 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3243 or 0, if there is none. This routine does not look inside
3244 SEQUENCEs. */
3247 prev_real_insn (rtx insn)
3249 while (insn)
3251 insn = PREV_INSN (insn);
3252 if (insn == 0 || INSN_P (insn))
3253 break;
3256 return insn;
3259 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3260 This routine does not look inside SEQUENCEs. */
3263 last_call_insn (void)
3265 rtx insn;
3267 for (insn = get_last_insn ();
3268 insn && !CALL_P (insn);
3269 insn = PREV_INSN (insn))
3272 return insn;
3275 /* Find the next insn after INSN that really does something. This routine
3276 does not look inside SEQUENCEs. After reload this also skips over
3277 standalone USE and CLOBBER insns. */
3280 active_insn_p (const_rtx insn)
3282 return (CALL_P (insn) || JUMP_P (insn)
3283 || (NONJUMP_INSN_P (insn)
3284 && (! reload_completed
3285 || (GET_CODE (PATTERN (insn)) != USE
3286 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3290 next_active_insn (rtx insn)
3292 while (insn)
3294 insn = NEXT_INSN (insn);
3295 if (insn == 0 || active_insn_p (insn))
3296 break;
3299 return insn;
3302 /* Find the last insn before INSN that really does something. This routine
3303 does not look inside SEQUENCEs. After reload this also skips over
3304 standalone USE and CLOBBER insns. */
3307 prev_active_insn (rtx insn)
3309 while (insn)
3311 insn = PREV_INSN (insn);
3312 if (insn == 0 || active_insn_p (insn))
3313 break;
3316 return insn;
3319 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3322 next_label (rtx insn)
3324 while (insn)
3326 insn = NEXT_INSN (insn);
3327 if (insn == 0 || LABEL_P (insn))
3328 break;
3331 return insn;
3334 /* Return the last label to mark the same position as LABEL. Return LABEL
3335 itself if it is null or any return rtx. */
3338 skip_consecutive_labels (rtx label)
3340 rtx insn;
3342 if (label && ANY_RETURN_P (label))
3343 return label;
3345 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3346 if (LABEL_P (insn))
3347 label = insn;
3349 return label;
3352 #ifdef HAVE_cc0
3353 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3354 and REG_CC_USER notes so we can find it. */
3356 void
3357 link_cc0_insns (rtx insn)
3359 rtx user = next_nonnote_insn (insn);
3361 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3362 user = XVECEXP (PATTERN (user), 0, 0);
3364 add_reg_note (user, REG_CC_SETTER, insn);
3365 add_reg_note (insn, REG_CC_USER, user);
3368 /* Return the next insn that uses CC0 after INSN, which is assumed to
3369 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3370 applied to the result of this function should yield INSN).
3372 Normally, this is simply the next insn. However, if a REG_CC_USER note
3373 is present, it contains the insn that uses CC0.
3375 Return 0 if we can't find the insn. */
3378 next_cc0_user (rtx insn)
3380 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3382 if (note)
3383 return XEXP (note, 0);
3385 insn = next_nonnote_insn (insn);
3386 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3387 insn = XVECEXP (PATTERN (insn), 0, 0);
3389 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3390 return insn;
3392 return 0;
3395 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3396 note, it is the previous insn. */
3399 prev_cc0_setter (rtx insn)
3401 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3403 if (note)
3404 return XEXP (note, 0);
3406 insn = prev_nonnote_insn (insn);
3407 gcc_assert (sets_cc0_p (PATTERN (insn)));
3409 return insn;
3411 #endif
3413 #ifdef AUTO_INC_DEC
3414 /* Find an RTX_AUTOINC class rtx which matches DATA. */
3416 static int
3417 find_auto_inc (rtx *xp, void *data)
3419 rtx x = *xp;
3420 rtx reg = (rtx) data;
3422 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3423 return 0;
3425 switch (GET_CODE (x))
3427 case PRE_DEC:
3428 case PRE_INC:
3429 case POST_DEC:
3430 case POST_INC:
3431 case PRE_MODIFY:
3432 case POST_MODIFY:
3433 if (rtx_equal_p (reg, XEXP (x, 0)))
3434 return 1;
3435 break;
3437 default:
3438 gcc_unreachable ();
3440 return -1;
3442 #endif
3444 /* Increment the label uses for all labels present in X. */
3446 static void
3447 mark_label_nuses (rtx x)
3449 enum rtx_code code;
3450 int i, j;
3451 const char *fmt;
3453 code = GET_CODE (x);
3454 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3455 LABEL_NUSES (XEXP (x, 0))++;
3457 fmt = GET_RTX_FORMAT (code);
3458 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3460 if (fmt[i] == 'e')
3461 mark_label_nuses (XEXP (x, i));
3462 else if (fmt[i] == 'E')
3463 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3464 mark_label_nuses (XVECEXP (x, i, j));
3469 /* Try splitting insns that can be split for better scheduling.
3470 PAT is the pattern which might split.
3471 TRIAL is the insn providing PAT.
3472 LAST is nonzero if we should return the last insn of the sequence produced.
3474 If this routine succeeds in splitting, it returns the first or last
3475 replacement insn depending on the value of LAST. Otherwise, it
3476 returns TRIAL. If the insn to be returned can be split, it will be. */
3479 try_split (rtx pat, rtx trial, int last)
3481 rtx before = PREV_INSN (trial);
3482 rtx after = NEXT_INSN (trial);
3483 int has_barrier = 0;
3484 rtx note, seq, tem;
3485 int probability;
3486 rtx insn_last, insn;
3487 int njumps = 0;
3489 /* We're not good at redistributing frame information. */
3490 if (RTX_FRAME_RELATED_P (trial))
3491 return trial;
3493 if (any_condjump_p (trial)
3494 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3495 split_branch_probability = INTVAL (XEXP (note, 0));
3496 probability = split_branch_probability;
3498 seq = split_insns (pat, trial);
3500 split_branch_probability = -1;
3502 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3503 We may need to handle this specially. */
3504 if (after && BARRIER_P (after))
3506 has_barrier = 1;
3507 after = NEXT_INSN (after);
3510 if (!seq)
3511 return trial;
3513 /* Avoid infinite loop if any insn of the result matches
3514 the original pattern. */
3515 insn_last = seq;
3516 while (1)
3518 if (INSN_P (insn_last)
3519 && rtx_equal_p (PATTERN (insn_last), pat))
3520 return trial;
3521 if (!NEXT_INSN (insn_last))
3522 break;
3523 insn_last = NEXT_INSN (insn_last);
3526 /* We will be adding the new sequence to the function. The splitters
3527 may have introduced invalid RTL sharing, so unshare the sequence now. */
3528 unshare_all_rtl_in_chain (seq);
3530 /* Mark labels. */
3531 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3533 if (JUMP_P (insn))
3535 mark_jump_label (PATTERN (insn), insn, 0);
3536 njumps++;
3537 if (probability != -1
3538 && any_condjump_p (insn)
3539 && !find_reg_note (insn, REG_BR_PROB, 0))
3541 /* We can preserve the REG_BR_PROB notes only if exactly
3542 one jump is created; otherwise the machine description
3543 is responsible for this step using the
3544 split_branch_probability variable. */
3545 gcc_assert (njumps == 1);
3546 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3551 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3552 in SEQ and copy any additional information across. */
3553 if (CALL_P (trial))
3555 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3556 if (CALL_P (insn))
3558 rtx next, *p;
3560 /* Add the old CALL_INSN_FUNCTION_USAGE to whatever the
3561 target may have explicitly specified. */
3562 p = &CALL_INSN_FUNCTION_USAGE (insn);
3563 while (*p)
3564 p = &XEXP (*p, 1);
3565 *p = CALL_INSN_FUNCTION_USAGE (trial);
3567 /* If the old call was a sibling call, the new one must
3568 be too. */
3569 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3571 /* If the new call is the last instruction in the sequence,
3572 it will effectively replace the old call in-situ. Otherwise
3573 we must move any following NOTE_INSN_CALL_ARG_LOCATION note
3574 so that it comes immediately after the new call. */
3575 if (NEXT_INSN (insn))
3576 for (next = NEXT_INSN (trial);
3577 next && NOTE_P (next);
3578 next = NEXT_INSN (next))
3579 if (NOTE_KIND (next) == NOTE_INSN_CALL_ARG_LOCATION)
3581 remove_insn (next);
3582 add_insn_after (next, insn, NULL);
3583 break;
3588 /* Copy notes, particularly those related to the CFG. */
3589 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3591 switch (REG_NOTE_KIND (note))
3593 case REG_EH_REGION:
3594 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3595 break;
3597 case REG_NORETURN:
3598 case REG_SETJMP:
3599 case REG_TM:
3600 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3602 if (CALL_P (insn))
3603 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3605 break;
3607 case REG_NON_LOCAL_GOTO:
3608 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3610 if (JUMP_P (insn))
3611 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3613 break;
3615 #ifdef AUTO_INC_DEC
3616 case REG_INC:
3617 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3619 rtx reg = XEXP (note, 0);
3620 if (!FIND_REG_INC_NOTE (insn, reg)
3621 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3622 add_reg_note (insn, REG_INC, reg);
3624 break;
3625 #endif
3627 case REG_ARGS_SIZE:
3628 fixup_args_size_notes (NULL_RTX, insn_last, INTVAL (XEXP (note, 0)));
3629 break;
3631 default:
3632 break;
3636 /* If there are LABELS inside the split insns, increment the
3637 usage count so we don't delete the label. */
3638 if (INSN_P (trial))
3640 insn = insn_last;
3641 while (insn != NULL_RTX)
3643 /* JUMP_P insns have already been "marked" above. */
3644 if (NONJUMP_INSN_P (insn))
3645 mark_label_nuses (PATTERN (insn));
3647 insn = PREV_INSN (insn);
3651 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3653 delete_insn (trial);
3654 if (has_barrier)
3655 emit_barrier_after (tem);
3657 /* Recursively call try_split for each new insn created; by the
3658 time control returns here that insn will be fully split, so
3659 set LAST and continue from the insn after the one returned.
3660 We can't use next_active_insn here since AFTER may be a note.
3661 Ignore deleted insns, which can occur if not optimizing. */
3662 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3663 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3664 tem = try_split (PATTERN (tem), tem, 1);
3666 /* Return either the first or the last insn, depending on which was
3667 requested. */
3668 return last
3669 ? (after ? PREV_INSN (after) : get_last_insn ())
3670 : NEXT_INSN (before);
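/* A sketch of how a caller typically uses this (INSN is a placeholder):

       rtx last = try_split (PATTERN (insn), insn, 1);
       if (last != insn)
         ... INSN was replaced by a split sequence ending at LAST ...

   If no splitter matched, the original insn comes back unchanged.  */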
3673 /* Make and return an INSN rtx, initializing all its slots.
3674 Store PATTERN in the pattern slot. */
3677 make_insn_raw (rtx pattern)
3679 rtx insn;
3681 insn = rtx_alloc (INSN);
3683 INSN_UID (insn) = cur_insn_uid++;
3684 PATTERN (insn) = pattern;
3685 INSN_CODE (insn) = -1;
3686 REG_NOTES (insn) = NULL;
3687 INSN_LOCATOR (insn) = curr_insn_locator ();
3688 BLOCK_FOR_INSN (insn) = NULL;
3690 #ifdef ENABLE_RTL_CHECKING
3691 if (insn
3692 && INSN_P (insn)
3693 && (returnjump_p (insn)
3694 || (GET_CODE (insn) == SET
3695 && SET_DEST (insn) == pc_rtx)))
3697 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3698 debug_rtx (insn);
3700 #endif
3702 return insn;
3705 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3708 make_debug_insn_raw (rtx pattern)
3710 rtx insn;
3712 insn = rtx_alloc (DEBUG_INSN);
3713 INSN_UID (insn) = cur_debug_insn_uid++;
3714 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3715 INSN_UID (insn) = cur_insn_uid++;
3717 PATTERN (insn) = pattern;
3718 INSN_CODE (insn) = -1;
3719 REG_NOTES (insn) = NULL;
3720 INSN_LOCATOR (insn) = curr_insn_locator ();
3721 BLOCK_FOR_INSN (insn) = NULL;
3723 return insn;
3726 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3729 make_jump_insn_raw (rtx pattern)
3731 rtx insn;
3733 insn = rtx_alloc (JUMP_INSN);
3734 INSN_UID (insn) = cur_insn_uid++;
3736 PATTERN (insn) = pattern;
3737 INSN_CODE (insn) = -1;
3738 REG_NOTES (insn) = NULL;
3739 JUMP_LABEL (insn) = NULL;
3740 INSN_LOCATOR (insn) = curr_insn_locator ();
3741 BLOCK_FOR_INSN (insn) = NULL;
3743 return insn;
3746 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3748 static rtx
3749 make_call_insn_raw (rtx pattern)
3751 rtx insn;
3753 insn = rtx_alloc (CALL_INSN);
3754 INSN_UID (insn) = cur_insn_uid++;
3756 PATTERN (insn) = pattern;
3757 INSN_CODE (insn) = -1;
3758 REG_NOTES (insn) = NULL;
3759 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3760 INSN_LOCATOR (insn) = curr_insn_locator ();
3761 BLOCK_FOR_INSN (insn) = NULL;
3763 return insn;
3766 /* Add INSN to the end of the doubly-linked list.
3767 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3769 void
3770 add_insn (rtx insn)
3772 PREV_INSN (insn) = get_last_insn();
3773 NEXT_INSN (insn) = 0;
3775 if (NULL != get_last_insn())
3776 NEXT_INSN (get_last_insn ()) = insn;
3778 if (NULL == get_insns ())
3779 set_first_insn (insn);
3781 set_last_insn (insn);
3784 /* Add INSN into the doubly-linked list after insn AFTER. This and
3785 the next should be the only functions called to insert an insn once
3786 delay slots have been filled since only they know how to update a
3787 SEQUENCE. */
3789 void
3790 add_insn_after (rtx insn, rtx after, basic_block bb)
3792 rtx next = NEXT_INSN (after);
3794 gcc_assert (!optimize || !INSN_DELETED_P (after));
3796 NEXT_INSN (insn) = next;
3797 PREV_INSN (insn) = after;
3799 if (next)
3801 PREV_INSN (next) = insn;
3802 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3803 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3805 else if (get_last_insn () == after)
3806 set_last_insn (insn);
3807 else
3809 struct sequence_stack *stack = seq_stack;
3810 /* Scan all pending sequences too. */
3811 for (; stack; stack = stack->next)
3812 if (after == stack->last)
3814 stack->last = insn;
3815 break;
3818 gcc_assert (stack);
3821 if (!BARRIER_P (after)
3822 && !BARRIER_P (insn)
3823 && (bb = BLOCK_FOR_INSN (after)))
3825 set_block_for_insn (insn, bb);
3826 if (INSN_P (insn))
3827 df_insn_rescan (insn);
3828 /* Should not happen as the first insn in the BB is always
3829 either a NOTE or a LABEL. */
3830 if (BB_END (bb) == after
3831 /* Avoid clobbering of structure when creating new BB. */
3832 && !BARRIER_P (insn)
3833 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3834 BB_END (bb) = insn;
3837 NEXT_INSN (after) = insn;
3838 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3840 rtx sequence = PATTERN (after);
3841 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3845 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3846 the previous should be the only functions called to insert an insn
3847 once delay slots have been filled since only they know how to
3848 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3849 bb from before. */
3851 void
3852 add_insn_before (rtx insn, rtx before, basic_block bb)
3854 rtx prev = PREV_INSN (before);
3856 gcc_assert (!optimize || !INSN_DELETED_P (before));
3858 PREV_INSN (insn) = prev;
3859 NEXT_INSN (insn) = before;
3861 if (prev)
3863 NEXT_INSN (prev) = insn;
3864 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3866 rtx sequence = PATTERN (prev);
3867 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3870 else if (get_insns () == before)
3871 set_first_insn (insn);
3872 else
3874 struct sequence_stack *stack = seq_stack;
3875 /* Scan all pending sequences too. */
3876 for (; stack; stack = stack->next)
3877 if (before == stack->first)
3879 stack->first = insn;
3880 break;
3883 gcc_assert (stack);
3886 if (!bb
3887 && !BARRIER_P (before)
3888 && !BARRIER_P (insn))
3889 bb = BLOCK_FOR_INSN (before);
3891 if (bb)
3893 set_block_for_insn (insn, bb);
3894 if (INSN_P (insn))
3895 df_insn_rescan (insn);
3896 /* Should not happen as the first insn in the BB is always either a NOTE
3897 or a LABEL. */
3898 gcc_assert (BB_HEAD (bb) != insn
3899 /* Avoid clobbering of structure when creating new BB. */
3900 || BARRIER_P (insn)
3901 || NOTE_INSN_BASIC_BLOCK_P (insn));
3904 PREV_INSN (before) = insn;
3905 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3906 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3910 /* Replace INSN with a deleted instruction note. */
3912 void
3913 set_insn_deleted (rtx insn)
3915 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3916 PUT_CODE (insn, NOTE);
3917 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3921 /* Remove an insn from its doubly-linked list. This function knows how
3922 to handle sequences. */
3923 void
3924 remove_insn (rtx insn)
3926 rtx next = NEXT_INSN (insn);
3927 rtx prev = PREV_INSN (insn);
3928 basic_block bb;
3930 /* Later in the code, the block will be marked dirty. */
3931 df_insn_delete (NULL, INSN_UID (insn));
3933 if (prev)
3935 NEXT_INSN (prev) = next;
3936 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3938 rtx sequence = PATTERN (prev);
3939 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3942 else if (get_insns () == insn)
3944 if (next)
3945 PREV_INSN (next) = NULL;
3946 set_first_insn (next);
3948 else
3950 struct sequence_stack *stack = seq_stack;
3951 /* Scan all pending sequences too. */
3952 for (; stack; stack = stack->next)
3953 if (insn == stack->first)
3955 stack->first = next;
3956 break;
3959 gcc_assert (stack);
3962 if (next)
3964 PREV_INSN (next) = prev;
3965 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3966 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3968 else if (get_last_insn () == insn)
3969 set_last_insn (prev);
3970 else
3972 struct sequence_stack *stack = seq_stack;
3973 /* Scan all pending sequences too. */
3974 for (; stack; stack = stack->next)
3975 if (insn == stack->last)
3977 stack->last = prev;
3978 break;
3981 gcc_assert (stack);
3983 if (!BARRIER_P (insn)
3984 && (bb = BLOCK_FOR_INSN (insn)))
3986 if (NONDEBUG_INSN_P (insn))
3987 df_set_bb_dirty (bb);
3988 if (BB_HEAD (bb) == insn)
3990 /* Never ever delete the basic block note without deleting the whole
3991 basic block. */
3992 gcc_assert (!NOTE_P (insn));
3993 BB_HEAD (bb) = next;
3995 if (BB_END (bb) == insn)
3996 BB_END (bb) = prev;
4000 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
4002 void
4003 add_function_usage_to (rtx call_insn, rtx call_fusage)
4005 gcc_assert (call_insn && CALL_P (call_insn));
4007 /* Put the register usage information on the CALL. If there is already
4008 some usage information, put ours at the end. */
4009 if (CALL_INSN_FUNCTION_USAGE (call_insn))
4011 rtx link;
4013 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
4014 link = XEXP (link, 1))
4017 XEXP (link, 1) = call_fusage;
4019 else
4020 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
4023 /* Delete all insns made since FROM.
4024 FROM becomes the new last instruction. */
4026 void
4027 delete_insns_since (rtx from)
4029 if (from == 0)
4030 set_first_insn (0);
4031 else
4032 NEXT_INSN (from) = 0;
4033 set_last_insn (from);
4036 /* This function is deprecated; please use sequences instead.
4038 Move a consecutive bunch of insns to a different place in the chain.
4039 The insns to be moved are those between FROM and TO.
4040 They are moved to a new position after the insn AFTER.
4041 AFTER must not be FROM or TO or any insn in between.
4043 This function does not know about SEQUENCEs and hence should not be
4044 called after delay-slot filling has been done. */
4046 void
4047 reorder_insns_nobb (rtx from, rtx to, rtx after)
4049 #ifdef ENABLE_CHECKING
4050 rtx x;
4051 for (x = from; x != to; x = NEXT_INSN (x))
4052 gcc_assert (after != x);
4053 gcc_assert (after != to);
4054 #endif
4056 /* Splice this bunch out of where it is now. */
4057 if (PREV_INSN (from))
4058 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
4059 if (NEXT_INSN (to))
4060 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
4061 if (get_last_insn () == to)
4062 set_last_insn (PREV_INSN (from));
4063 if (get_insns () == from)
4064 set_first_insn (NEXT_INSN (to));
4066 /* Make the new neighbors point to it and it to them. */
4067 if (NEXT_INSN (after))
4068 PREV_INSN (NEXT_INSN (after)) = to;
4070 NEXT_INSN (to) = NEXT_INSN (after);
4071 PREV_INSN (from) = after;
4072 NEXT_INSN (after) = from;
4073 if (after == get_last_insn())
4074 set_last_insn (to);
4077 /* Same as function above, but take care to update BB boundaries. */
4078 void
4079 reorder_insns (rtx from, rtx to, rtx after)
4081 rtx prev = PREV_INSN (from);
4082 basic_block bb, bb2;
4084 reorder_insns_nobb (from, to, after);
4086 if (!BARRIER_P (after)
4087 && (bb = BLOCK_FOR_INSN (after)))
4089 rtx x;
4090 df_set_bb_dirty (bb);
4092 if (!BARRIER_P (from)
4093 && (bb2 = BLOCK_FOR_INSN (from)))
4095 if (BB_END (bb2) == to)
4096 BB_END (bb2) = prev;
4097 df_set_bb_dirty (bb2);
4100 if (BB_END (bb) == after)
4101 BB_END (bb) = to;
4103 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4104 if (!BARRIER_P (x))
4105 df_insn_change_bb (x, bb);
4110 /* Emit insn(s) of given code and pattern
4111 at a specified place within the doubly-linked list.
4113 All of the emit_foo global entry points accept an object
4114 X which is either an insn list or a PATTERN of a single
4115 instruction.
4117 There are thus a few canonical ways to generate code and
4118 emit it at a specific place in the instruction stream. For
4119 example, consider the instruction named SPOT and the fact that
4120 we would like to emit some instructions before SPOT. We might
4121 do it like this:
4123 start_sequence ();
4124 ... emit the new instructions ...
4125 insns_head = get_insns ();
4126 end_sequence ();
4128 emit_insn_before (insns_head, SPOT);
4130 It used to be common to generate SEQUENCE rtl instead, but that
4131 is a relic of the past which no longer occurs. The reason is that
4132 SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
4133 generated would almost certainly die right after it was created. */
4135 static rtx
4136 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4137 rtx (*make_raw) (rtx))
4139 rtx insn;
4141 gcc_assert (before);
4143 if (x == NULL_RTX)
4144 return last;
4146 switch (GET_CODE (x))
4148 case DEBUG_INSN:
4149 case INSN:
4150 case JUMP_INSN:
4151 case CALL_INSN:
4152 case CODE_LABEL:
4153 case BARRIER:
4154 case NOTE:
4155 insn = x;
4156 while (insn)
4158 rtx next = NEXT_INSN (insn);
4159 add_insn_before (insn, before, bb);
4160 last = insn;
4161 insn = next;
4163 break;
4165 #ifdef ENABLE_RTL_CHECKING
4166 case SEQUENCE:
4167 gcc_unreachable ();
4168 break;
4169 #endif
4171 default:
4172 last = (*make_raw) (x);
4173 add_insn_before (last, before, bb);
4174 break;
4177 return last;
4180 /* Make X be output before the instruction BEFORE. */
4183 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4185 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4188 /* Make an instruction with body X and code JUMP_INSN
4189 and output it before the instruction BEFORE. */
4192 emit_jump_insn_before_noloc (rtx x, rtx before)
4194 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4195 make_jump_insn_raw);
4198 /* Make an instruction with body X and code CALL_INSN
4199 and output it before the instruction BEFORE. */
4202 emit_call_insn_before_noloc (rtx x, rtx before)
4204 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4205 make_call_insn_raw);
4208 /* Make an instruction with body X and code DEBUG_INSN
4209 and output it before the instruction BEFORE. */
4212 emit_debug_insn_before_noloc (rtx x, rtx before)
4214 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4215 make_debug_insn_raw);
4218 /* Make an insn of code BARRIER
4219 and output it before the insn BEFORE. */
4222 emit_barrier_before (rtx before)
4224 rtx insn = rtx_alloc (BARRIER);
4226 INSN_UID (insn) = cur_insn_uid++;
4228 add_insn_before (insn, before, NULL);
4229 return insn;
4232 /* Emit the label LABEL before the insn BEFORE. */
4235 emit_label_before (rtx label, rtx before)
4237 /* This can be called twice for the same label as a result of the
4238 confusion that follows a syntax error! So make it harmless. */
4239 if (INSN_UID (label) == 0)
4241 INSN_UID (label) = cur_insn_uid++;
4242 add_insn_before (label, before, NULL);
4245 return label;
4248 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4251 emit_note_before (enum insn_note subtype, rtx before)
4253 rtx note = rtx_alloc (NOTE);
4254 INSN_UID (note) = cur_insn_uid++;
4255 NOTE_KIND (note) = subtype;
4256 BLOCK_FOR_INSN (note) = NULL;
4257 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4259 add_insn_before (note, before, NULL);
4260 return note;
4263 /* Helper for emit_insn_after, handles lists of instructions
4264 efficiently. */
4266 static rtx
4267 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4269 rtx last;
4270 rtx after_after;
4271 if (!bb && !BARRIER_P (after))
4272 bb = BLOCK_FOR_INSN (after);
4274 if (bb)
4276 df_set_bb_dirty (bb);
4277 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4278 if (!BARRIER_P (last))
4280 set_block_for_insn (last, bb);
4281 df_insn_rescan (last);
4283 if (!BARRIER_P (last))
4285 set_block_for_insn (last, bb);
4286 df_insn_rescan (last);
4288 if (BB_END (bb) == after)
4289 BB_END (bb) = last;
4291 else
4292 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4293 continue;
4295 after_after = NEXT_INSN (after);
4297 NEXT_INSN (after) = first;
4298 PREV_INSN (first) = after;
4299 NEXT_INSN (last) = after_after;
4300 if (after_after)
4301 PREV_INSN (after_after) = last;
4303 if (after == get_last_insn())
4304 set_last_insn (last);
4306 return last;
4309 static rtx
4310 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4311 rtx (*make_raw)(rtx))
4313 rtx last = after;
4315 gcc_assert (after);
4317 if (x == NULL_RTX)
4318 return last;
4320 switch (GET_CODE (x))
4322 case DEBUG_INSN:
4323 case INSN:
4324 case JUMP_INSN:
4325 case CALL_INSN:
4326 case CODE_LABEL:
4327 case BARRIER:
4328 case NOTE:
4329 last = emit_insn_after_1 (x, after, bb);
4330 break;
4332 #ifdef ENABLE_RTL_CHECKING
4333 case SEQUENCE:
4334 gcc_unreachable ();
4335 break;
4336 #endif
4338 default:
4339 last = (*make_raw) (x);
4340 add_insn_after (last, after, bb);
4341 break;
4344 return last;
4347 /* Make X be output after the insn AFTER and set its basic block to BB.
4348 If BB is NULL, an attempt is made to infer the BB from AFTER. */
4351 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4353 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4357 /* Make an insn of code JUMP_INSN with body X
4358 and output it after the insn AFTER. */
4361 emit_jump_insn_after_noloc (rtx x, rtx after)
4363 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4366 /* Make an instruction with body X and code CALL_INSN
4367 and output it after the instruction AFTER. */
4370 emit_call_insn_after_noloc (rtx x, rtx after)
4372 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4375 /* Make an instruction with body X and code DEBUG_INSN
4376 and output it after the instruction AFTER. */
4379 emit_debug_insn_after_noloc (rtx x, rtx after)
4381 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4384 /* Make an insn of code BARRIER
4385 and output it after the insn AFTER. */
4388 emit_barrier_after (rtx after)
4390 rtx insn = rtx_alloc (BARRIER);
4392 INSN_UID (insn) = cur_insn_uid++;
4394 add_insn_after (insn, after, NULL);
4395 return insn;
4398 /* Emit the label LABEL after the insn AFTER. */
4401 emit_label_after (rtx label, rtx after)
4403 /* This can be called twice for the same label
4404 as a result of the confusion that follows a syntax error!
4405 So make it harmless. */
4406 if (INSN_UID (label) == 0)
4408 INSN_UID (label) = cur_insn_uid++;
4409 add_insn_after (label, after, NULL);
4412 return label;
4415 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4418 emit_note_after (enum insn_note subtype, rtx after)
4420 rtx note = rtx_alloc (NOTE);
4421 INSN_UID (note) = cur_insn_uid++;
4422 NOTE_KIND (note) = subtype;
4423 BLOCK_FOR_INSN (note) = NULL;
4424 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4425 add_insn_after (note, after, NULL);
4426 return note;
4429 /* Insert PATTERN after AFTER, setting its INSN_LOCATOR to LOC.
4430 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4432 static rtx
4433 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4434 rtx (*make_raw) (rtx))
4436 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4438 if (pattern == NULL_RTX || !loc)
4439 return last;
4441 after = NEXT_INSN (after);
4442 while (1)
4444 if (active_insn_p (after) && !INSN_LOCATOR (after))
4445 INSN_LOCATOR (after) = loc;
4446 if (after == last)
4447 break;
4448 after = NEXT_INSN (after);
4450 return last;
4453 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4454 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4455 any DEBUG_INSNs. */
4457 static rtx
4458 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4459 rtx (*make_raw) (rtx))
4461 rtx prev = after;
4463 if (skip_debug_insns)
4464 while (DEBUG_INSN_P (prev))
4465 prev = PREV_INSN (prev);
4467 if (INSN_P (prev))
4468 return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4469 make_raw);
4470 else
4471 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4474 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4476 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4478 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4481 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4483 emit_insn_after (rtx pattern, rtx after)
4485 return emit_pattern_after (pattern, after, true, make_insn_raw);
4488 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4490 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4492 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4495 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4497 emit_jump_insn_after (rtx pattern, rtx after)
4499 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4502 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4504 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4506 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4509 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4511 emit_call_insn_after (rtx pattern, rtx after)
4513 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4516 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4518 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4520 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4523 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4525 emit_debug_insn_after (rtx pattern, rtx after)
4527 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
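/* Editorial sketch (OLD and SEQ are hypothetical names): the _setloc
   variants above are what a pass typically uses when replacing an
   insn, so that the replacement inherits the original source
   location, e.g.

     emit_insn_after_setloc (seq, old, INSN_LOCATOR (old));

   whereas plain emit_insn_after picks the locator up from AFTER
   itself (or from the previous non-debug insn when AFTER is a
   DEBUG_INSN).  */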
4530 /* Insert PATTERN before BEFORE, setting its INSN_LOCATOR to LOC.
4531 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4532 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4533 CALL_INSN, etc. */
4535 static rtx
4536 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4537 rtx (*make_raw) (rtx))
4539 rtx first = PREV_INSN (before);
4540 rtx last = emit_pattern_before_noloc (pattern, before,
4541 insnp ? before : NULL_RTX,
4542 NULL, make_raw);
4544 if (pattern == NULL_RTX || !loc)
4545 return last;
4547 if (!first)
4548 first = get_insns ();
4549 else
4550 first = NEXT_INSN (first);
4551 while (1)
4553 if (active_insn_p (first) && !INSN_LOCATOR (first))
4554 INSN_LOCATOR (first) = loc;
4555 if (first == last)
4556 break;
4557 first = NEXT_INSN (first);
4559 return last;
4562 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4563 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4564 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4565 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4567 static rtx
4568 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4569 bool insnp, rtx (*make_raw) (rtx))
4571 rtx next = before;
4573 if (skip_debug_insns)
4574 while (DEBUG_INSN_P (next))
4575 next = PREV_INSN (next);
4577 if (INSN_P (next))
4578 return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4579 insnp, make_raw);
4580 else
4581 return emit_pattern_before_noloc (pattern, before,
4582 insnp ? before : NULL_RTX,
4583 NULL, make_raw);
4586 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4588 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4590 return emit_pattern_before_setloc (pattern, before, loc, true,
4591 make_insn_raw);
4594 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4596 emit_insn_before (rtx pattern, rtx before)
4598 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4601 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4603 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4605 return emit_pattern_before_setloc (pattern, before, loc, false,
4606 make_jump_insn_raw);
4609 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4611 emit_jump_insn_before (rtx pattern, rtx before)
4613 return emit_pattern_before (pattern, before, true, false,
4614 make_jump_insn_raw);
4617 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4619 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4621 return emit_pattern_before_setloc (pattern, before, loc, false,
4622 make_call_insn_raw);
4625 /* Like emit_call_insn_before_noloc,
4626 but set INSN_LOCATOR according to BEFORE. */
4628 emit_call_insn_before (rtx pattern, rtx before)
4630 return emit_pattern_before (pattern, before, true, false,
4631 make_call_insn_raw);
4634 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4636 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4638 return emit_pattern_before_setloc (pattern, before, loc, false,
4639 make_debug_insn_raw);
4642 /* Like emit_debug_insn_before_noloc,
4643 but set INSN_LOCATOR according to BEFORE. */
4645 emit_debug_insn_before (rtx pattern, rtx before)
4647 return emit_pattern_before (pattern, before, false, false,
4648 make_debug_insn_raw);
4651 /* Take X and emit it at the end of the doubly-linked
4652 INSN list.
4654 Returns the last insn emitted. */
4657 emit_insn (rtx x)
4659 rtx last = get_last_insn();
4660 rtx insn;
4662 if (x == NULL_RTX)
4663 return last;
4665 switch (GET_CODE (x))
4667 case DEBUG_INSN:
4668 case INSN:
4669 case JUMP_INSN:
4670 case CALL_INSN:
4671 case CODE_LABEL:
4672 case BARRIER:
4673 case NOTE:
4674 insn = x;
4675 while (insn)
4677 rtx next = NEXT_INSN (insn);
4678 add_insn (insn);
4679 last = insn;
4680 insn = next;
4682 break;
4684 #ifdef ENABLE_RTL_CHECKING
4685 case SEQUENCE:
4686 gcc_unreachable ();
4687 break;
4688 #endif
4690 default:
4691 last = make_insn_raw (x);
4692 add_insn (last);
4693 break;
4696 return last;
4699 /* Make an insn of code DEBUG_INSN with pattern X
4700 and add it to the end of the doubly-linked list. */
4703 emit_debug_insn (rtx x)
4705 rtx last = get_last_insn();
4706 rtx insn;
4708 if (x == NULL_RTX)
4709 return last;
4711 switch (GET_CODE (x))
4713 case DEBUG_INSN:
4714 case INSN:
4715 case JUMP_INSN:
4716 case CALL_INSN:
4717 case CODE_LABEL:
4718 case BARRIER:
4719 case NOTE:
4720 insn = x;
4721 while (insn)
4723 rtx next = NEXT_INSN (insn);
4724 add_insn (insn);
4725 last = insn;
4726 insn = next;
4728 break;
4730 #ifdef ENABLE_RTL_CHECKING
4731 case SEQUENCE:
4732 gcc_unreachable ();
4733 break;
4734 #endif
4736 default:
4737 last = make_debug_insn_raw (x);
4738 add_insn (last);
4739 break;
4742 return last;
4745 /* Make an insn of code JUMP_INSN with pattern X
4746 and add it to the end of the doubly-linked list. */
4749 emit_jump_insn (rtx x)
4751 rtx last = NULL_RTX, insn;
4753 switch (GET_CODE (x))
4755 case DEBUG_INSN:
4756 case INSN:
4757 case JUMP_INSN:
4758 case CALL_INSN:
4759 case CODE_LABEL:
4760 case BARRIER:
4761 case NOTE:
4762 insn = x;
4763 while (insn)
4765 rtx next = NEXT_INSN (insn);
4766 add_insn (insn);
4767 last = insn;
4768 insn = next;
4770 break;
4772 #ifdef ENABLE_RTL_CHECKING
4773 case SEQUENCE:
4774 gcc_unreachable ();
4775 break;
4776 #endif
4778 default:
4779 last = make_jump_insn_raw (x);
4780 add_insn (last);
4781 break;
4784 return last;
4787 /* Make an insn of code CALL_INSN with pattern X
4788 and add it to the end of the doubly-linked list. */
4791 emit_call_insn (rtx x)
4793 rtx insn;
4795 switch (GET_CODE (x))
4797 case DEBUG_INSN:
4798 case INSN:
4799 case JUMP_INSN:
4800 case CALL_INSN:
4801 case CODE_LABEL:
4802 case BARRIER:
4803 case NOTE:
4804 insn = emit_insn (x);
4805 break;
4807 #ifdef ENABLE_RTL_CHECKING
4808 case SEQUENCE:
4809 gcc_unreachable ();
4810 break;
4811 #endif
4813 default:
4814 insn = make_call_insn_raw (x);
4815 add_insn (insn);
4816 break;
4819 return insn;
4822 /* Add the label LABEL to the end of the doubly-linked list. */
4825 emit_label (rtx label)
4827 /* This can be called twice for the same label
4828 as a result of the confusion that follows a syntax error!
4829 So make it harmless. */
4830 if (INSN_UID (label) == 0)
4832 INSN_UID (label) = cur_insn_uid++;
4833 add_insn (label);
4835 return label;
4838 /* Make an insn of code BARRIER
4839 and add it to the end of the doubly-linked list. */
4842 emit_barrier (void)
4844 rtx barrier = rtx_alloc (BARRIER);
4845 INSN_UID (barrier) = cur_insn_uid++;
4846 add_insn (barrier);
4847 return barrier;
4850 /* Emit a copy of note ORIG. */
4853 emit_note_copy (rtx orig)
4855 rtx note;
4857 note = rtx_alloc (NOTE);
4859 INSN_UID (note) = cur_insn_uid++;
4860 NOTE_DATA (note) = NOTE_DATA (orig);
4861 NOTE_KIND (note) = NOTE_KIND (orig);
4862 BLOCK_FOR_INSN (note) = NULL;
4863 add_insn (note);
4865 return note;
4868 /* Make an insn of code NOTE with kind KIND
4869 and add it to the end of the doubly-linked list. */
4872 emit_note (enum insn_note kind)
4874 rtx note;
4876 note = rtx_alloc (NOTE);
4877 INSN_UID (note) = cur_insn_uid++;
4878 NOTE_KIND (note) = kind;
4879 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4880 BLOCK_FOR_INSN (note) = NULL;
4881 add_insn (note);
4882 return note;
4885 /* Emit a clobber of lvalue X. */
4888 emit_clobber (rtx x)
4890 /* CONCATs should not appear in the insn stream. */
4891 if (GET_CODE (x) == CONCAT)
4893 emit_clobber (XEXP (x, 0));
4894 return emit_clobber (XEXP (x, 1));
4896 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4899 /* Return a sequence of insns to clobber lvalue X. */
4902 gen_clobber (rtx x)
4904 rtx seq;
4906 start_sequence ();
4907 emit_clobber (x);
4908 seq = get_insns ();
4909 end_sequence ();
4910 return seq;
4913 /* Emit a use of rvalue X. */
4916 emit_use (rtx x)
4918 /* CONCATs should not appear in the insn stream. */
4919 if (GET_CODE (x) == CONCAT)
4921 emit_use (XEXP (x, 0));
4922 return emit_use (XEXP (x, 1));
4924 return emit_insn (gen_rtx_USE (VOIDmode, x));
4927 /* Return a sequence of insns to use rvalue X. */
4930 gen_use (rtx x)
4932 rtx seq;
4934 start_sequence ();
4935 emit_use (x);
4936 seq = get_insns ();
4937 end_sequence ();
4938 return seq;
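/* Editorial sketch (REG is a hypothetical register rtx): these
   helpers let a pass state liveness facts explicitly.  Emitting

     emit_clobber (reg);

   tells later passes that the previous value of REG is dead at this
   point, while

     emit_use (reg);

   keeps REG live up to this point.  gen_clobber and gen_use build
   the same insns in a detached sequence instead of emitting them
   into the current chain.  */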
4941 /* Cause next statement to emit a line note even if the line number
4942 has not changed. */
4944 void
4945 force_next_line_note (void)
4947 last_location = -1;
4950 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4951 note of this type already exists, remove it first. */
4954 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4956 rtx note = find_reg_note (insn, kind, NULL_RTX);
4958 switch (kind)
4960 case REG_EQUAL:
4961 case REG_EQUIV:
4962 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4963 has multiple sets (some callers assume single_set
4964 means the insn only has one set, when in fact it
4965 means the insn only has one *useful* set). */
4966 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4968 gcc_assert (!note);
4969 return NULL_RTX;
4972 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4973 It serves no useful purpose and breaks eliminate_regs. */
4974 if (GET_CODE (datum) == ASM_OPERANDS)
4975 return NULL_RTX;
4977 if (note)
4979 XEXP (note, 0) = datum;
4980 df_notes_rescan (insn);
4981 return note;
4983 break;
4985 default:
4986 if (note)
4988 XEXP (note, 0) = datum;
4989 return note;
4991 break;
4994 add_reg_note (insn, kind, datum);
4996 switch (kind)
4998 case REG_EQUAL:
4999 case REG_EQUIV:
5000 df_notes_rescan (insn);
5001 break;
5002 default:
5003 break;
5006 return REG_NOTES (insn);
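/* Editorial sketch (the constant is arbitrary): a typical caller
   records the value computed by a complicated insn so that later
   passes can simplify uses of it, e.g.

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   which either rewrites an existing REG_EQUAL note on INSN or adds a
   fresh one, subject to the multiple-set and ASM_OPERANDS checks
   above.  */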
5009 /* Like set_unique_reg_note, but don't do anything unless INSN sets DST. */
5011 set_dst_reg_note (rtx insn, enum reg_note kind, rtx datum, rtx dst)
5013 rtx set = single_set (insn);
5015 if (set && SET_DEST (set) == dst)
5016 return set_unique_reg_note (insn, kind, datum);
5017 return NULL_RTX;
5020 /* Return an indication of which type of insn should have X as a body.
5021 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5023 static enum rtx_code
5024 classify_insn (rtx x)
5026 if (LABEL_P (x))
5027 return CODE_LABEL;
5028 if (GET_CODE (x) == CALL)
5029 return CALL_INSN;
5030 if (ANY_RETURN_P (x))
5031 return JUMP_INSN;
5032 if (GET_CODE (x) == SET)
5034 if (SET_DEST (x) == pc_rtx)
5035 return JUMP_INSN;
5036 else if (GET_CODE (SET_SRC (x)) == CALL)
5037 return CALL_INSN;
5038 else
5039 return INSN;
5041 if (GET_CODE (x) == PARALLEL)
5043 int j;
5044 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5045 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5046 return CALL_INSN;
5047 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5048 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5049 return JUMP_INSN;
5050 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5051 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5052 return CALL_INSN;
5054 return INSN;
5057 /* Emit the rtl pattern X as an appropriate kind of insn.
5058 If X is a label, it is simply added into the insn chain. */
5061 emit (rtx x)
5063 enum rtx_code code = classify_insn (x);
5065 switch (code)
5067 case CODE_LABEL:
5068 return emit_label (x);
5069 case INSN:
5070 return emit_insn (x);
5071 case JUMP_INSN:
5073 rtx insn = emit_jump_insn (x);
5074 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5075 return emit_barrier ();
5076 return insn;
5078 case CALL_INSN:
5079 return emit_call_insn (x);
5080 case DEBUG_INSN:
5081 return emit_debug_insn (x);
5082 default:
5083 gcc_unreachable ();
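/* Editorial sketch (LABEL is a hypothetical code_label): emit is the
   generic entry point for a caller that does not know statically
   what kind of insn a pattern will become.  For example,

     emit (gen_rtx_SET (VOIDmode, pc_rtx,
                        gen_rtx_LABEL_REF (VOIDmode, label)));

   is classified as a JUMP_INSN, routed through emit_jump_insn, and,
   because it is an unconditional jump, followed by an emitted
   BARRIER.  */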
5087 /* Space for free sequence stack entries. */
5088 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5090 /* Begin emitting insns to a sequence. If this sequence will contain
5091 something that might cause the compiler to pop arguments to function
5092 calls (because those pops have previously been deferred; see
5093 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5094 before calling this function. That will ensure that the deferred
5095 pops are not accidentally emitted in the middle of this sequence. */
5097 void
5098 start_sequence (void)
5100 struct sequence_stack *tem;
5102 if (free_sequence_stack != NULL)
5104 tem = free_sequence_stack;
5105 free_sequence_stack = tem->next;
5107 else
5108 tem = ggc_alloc_sequence_stack ();
5110 tem->next = seq_stack;
5111 tem->first = get_insns ();
5112 tem->last = get_last_insn ();
5114 seq_stack = tem;
5116 set_first_insn (0);
5117 set_last_insn (0);
5120 /* Set up the insn chain starting with FIRST as the current sequence,
5121 saving the previously current one. See the documentation for
5122 start_sequence for more information about how to use this function. */
5124 void
5125 push_to_sequence (rtx first)
5127 rtx last;
5129 start_sequence ();
5131 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last))
5134 set_first_insn (first);
5135 set_last_insn (last);
5138 /* Like push_to_sequence, but take the last insn as an argument to avoid
5139 looping through the list. */
5141 void
5142 push_to_sequence2 (rtx first, rtx last)
5144 start_sequence ();
5146 set_first_insn (first);
5147 set_last_insn (last);
5150 /* Set up the outer-level insn chain
5151 as the current sequence, saving the previously current one. */
5153 void
5154 push_topmost_sequence (void)
5156 struct sequence_stack *stack, *top = NULL;
5158 start_sequence ();
5160 for (stack = seq_stack; stack; stack = stack->next)
5161 top = stack;
5163 set_first_insn (top->first);
5164 set_last_insn (top->last);
5167 /* After emitting to the outer-level insn chain, update the outer-level
5168 insn chain, and restore the previous saved state. */
5170 void
5171 pop_topmost_sequence (void)
5173 struct sequence_stack *stack, *top = NULL;
5175 for (stack = seq_stack; stack; stack = stack->next)
5176 top = stack;
5178 top->first = get_insns ();
5179 top->last = get_last_insn ();
5181 end_sequence ();
5184 /* After emitting to a sequence, restore previous saved state.
5186 To get the contents of the sequence just made, you must call
5187 `get_insns' *before* calling here.
5189 If the compiler might have deferred popping arguments while
5190 generating this sequence, and this sequence will not be immediately
5191 inserted into the instruction stream, use do_pending_stack_adjust
5192 before calling get_insns. That will ensure that the deferred
5193 pops are inserted into this sequence, and not into some random
5194 location in the instruction stream. See INHIBIT_DEFER_POP for more
5195 information about deferred popping of arguments. */
5197 void
5198 end_sequence (void)
5200 struct sequence_stack *tem = seq_stack;
5202 set_first_insn (tem->first);
5203 set_last_insn (tem->last);
5204 seq_stack = tem->next;
5206 memset (tem, 0, sizeof (*tem));
5207 tem->next = free_sequence_stack;
5208 free_sequence_stack = tem;
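/* Editorial sketch (assumed variable names): sequences nest, so code
   that is already inside a sequence can safely build another one:

     start_sequence ();             ... outer sequence ...
     start_sequence ();             ... inner sequence ...
     emit_use (reg);
     inner = get_insns ();
     end_sequence ();
     emit_insn (inner);             ... splice inner insns into outer ...
     outer = get_insns ();
     end_sequence ();

   Each start_sequence pushes one entry on seq_stack and the matching
   end_sequence pops it, restoring the enclosing insn chain.  */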
5211 /* Return 1 if currently emitting into a sequence. */
5214 in_sequence_p (void)
5216 return seq_stack != 0;
5219 /* Put the various virtual registers into REGNO_REG_RTX. */
5221 static void
5222 init_virtual_regs (void)
5224 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5225 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5226 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5227 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5228 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5229 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5230 = virtual_preferred_stack_boundary_rtx;
5234 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5235 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5236 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5237 static int copy_insn_n_scratches;
5239 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5240 copied an ASM_OPERANDS.
5241 In that case, it is the original input-operand vector. */
5242 static rtvec orig_asm_operands_vector;
5244 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5245 copied an ASM_OPERANDS.
5246 In that case, it is the copied input-operand vector. */
5247 static rtvec copy_asm_operands_vector;
5249 /* Likewise for the constraints vector. */
5250 static rtvec orig_asm_constraints_vector;
5251 static rtvec copy_asm_constraints_vector;
5253 /* Recursively create a new copy of an rtx for copy_insn.
5254 This function differs from copy_rtx in that it handles SCRATCHes and
5255 ASM_OPERANDs properly.
5256 Normally, this function is not used directly; use copy_insn as front end.
5257 However, you could first copy an insn pattern with copy_insn and then use
5258 this function afterwards to properly copy any REG_NOTEs containing
5259 SCRATCHes. */
5262 copy_insn_1 (rtx orig)
5264 rtx copy;
5265 int i, j;
5266 RTX_CODE code;
5267 const char *format_ptr;
5269 if (orig == NULL)
5270 return NULL;
5272 code = GET_CODE (orig);
5274 switch (code)
5276 case REG:
5277 case DEBUG_EXPR:
5278 case CONST_INT:
5279 case CONST_DOUBLE:
5280 case CONST_FIXED:
5281 case CONST_VECTOR:
5282 case SYMBOL_REF:
5283 case CODE_LABEL:
5284 case PC:
5285 case CC0:
5286 case RETURN:
5287 case SIMPLE_RETURN:
5288 return orig;
5289 case CLOBBER:
5290 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5291 return orig;
5292 break;
5294 case SCRATCH:
5295 for (i = 0; i < copy_insn_n_scratches; i++)
5296 if (copy_insn_scratch_in[i] == orig)
5297 return copy_insn_scratch_out[i];
5298 break;
5300 case CONST:
5301 if (shared_const_p (orig))
5302 return orig;
5303 break;
5305 /* A MEM with a constant address is not sharable. The problem is that
5306 the constant address may need to be reloaded. If the mem is shared,
5307 then reloading one copy of this mem will cause all copies to appear
5308 to have been reloaded. */
5310 default:
5311 break;
5314 /* Copy the various flags, fields, and other information. We assume
5315 that all fields need copying, and then clear the fields that should
5316 not be copied. That is the sensible default behavior, and forces
5317 us to explicitly document why we are *not* copying a flag. */
5318 copy = shallow_copy_rtx (orig);
5320 /* We do not copy the USED flag, which is used as a mark bit during
5321 walks over the RTL. */
5322 RTX_FLAG (copy, used) = 0;
5324 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5325 if (INSN_P (orig))
5327 RTX_FLAG (copy, jump) = 0;
5328 RTX_FLAG (copy, call) = 0;
5329 RTX_FLAG (copy, frame_related) = 0;
5332 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5334 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5335 switch (*format_ptr++)
5337 case 'e':
5338 if (XEXP (orig, i) != NULL)
5339 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5340 break;
5342 case 'E':
5343 case 'V':
5344 if (XVEC (orig, i) == orig_asm_constraints_vector)
5345 XVEC (copy, i) = copy_asm_constraints_vector;
5346 else if (XVEC (orig, i) == orig_asm_operands_vector)
5347 XVEC (copy, i) = copy_asm_operands_vector;
5348 else if (XVEC (orig, i) != NULL)
5350 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5351 for (j = 0; j < XVECLEN (copy, i); j++)
5352 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5354 break;
5356 case 't':
5357 case 'w':
5358 case 'i':
5359 case 's':
5360 case 'S':
5361 case 'u':
5362 case '0':
5363 /* These are left unchanged. */
5364 break;
5366 default:
5367 gcc_unreachable ();
5370 if (code == SCRATCH)
5372 i = copy_insn_n_scratches++;
5373 gcc_assert (i < MAX_RECOG_OPERANDS);
5374 copy_insn_scratch_in[i] = orig;
5375 copy_insn_scratch_out[i] = copy;
5377 else if (code == ASM_OPERANDS)
5379 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5380 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5381 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5382 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5385 return copy;
5388 /* Create a new copy of an rtx.
5389 This function differs from copy_rtx in that it handles SCRATCHes and
5390 ASM_OPERANDs properly.
5391 INSN doesn't really have to be a full INSN; it could be just the
5392 pattern. */
5394 copy_insn (rtx insn)
5396 copy_insn_n_scratches = 0;
5397 orig_asm_operands_vector = 0;
5398 orig_asm_constraints_vector = 0;
5399 copy_asm_operands_vector = 0;
5400 copy_asm_constraints_vector = 0;
5401 return copy_insn_1 (insn);
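/* Editorial note: the usual pattern for duplicating an existing insn
   elsewhere in the stream is

     new_insn = emit_insn_after (copy_insn (PATTERN (insn)), after);

   exactly as emit_copy_of_insn_after does near the end of this file;
   copy_insn_1 can then be applied to individual REG_NOTE expressions
   when they may contain SCRATCHes, as described above.  */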
5404 /* Initialize data structures and variables in this file
5405 before generating rtl for each function. */
5407 void
5408 init_emit (void)
5410 set_first_insn (NULL);
5411 set_last_insn (NULL);
5412 if (MIN_NONDEBUG_INSN_UID)
5413 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5414 else
5415 cur_insn_uid = 1;
5416 cur_debug_insn_uid = 1;
5417 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5418 last_location = UNKNOWN_LOCATION;
5419 first_label_num = label_num;
5420 seq_stack = NULL;
5422 /* Init the tables that describe all the pseudo regs. */
5424 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5426 crtl->emit.regno_pointer_align
5427 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5429 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5431 /* Put copies of all the hard registers into regno_reg_rtx. */
5432 memcpy (regno_reg_rtx,
5433 initial_regno_reg_rtx,
5434 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5436 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5437 init_virtual_regs ();
5439 /* Indicate that the virtual registers and stack locations are
5440 all pointers. */
5441 REG_POINTER (stack_pointer_rtx) = 1;
5442 REG_POINTER (frame_pointer_rtx) = 1;
5443 REG_POINTER (hard_frame_pointer_rtx) = 1;
5444 REG_POINTER (arg_pointer_rtx) = 1;
5446 REG_POINTER (virtual_incoming_args_rtx) = 1;
5447 REG_POINTER (virtual_stack_vars_rtx) = 1;
5448 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5449 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5450 REG_POINTER (virtual_cfa_rtx) = 1;
5452 #ifdef STACK_BOUNDARY
5453 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5454 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5455 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5456 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5458 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5459 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5460 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5461 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5462 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5463 #endif
5465 #ifdef INIT_EXPANDERS
5466 INIT_EXPANDERS;
5467 #endif
5470 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5472 static rtx
5473 gen_const_vector (enum machine_mode mode, int constant)
5475 rtx tem;
5476 rtvec v;
5477 int units, i;
5478 enum machine_mode inner;
5480 units = GET_MODE_NUNITS (mode);
5481 inner = GET_MODE_INNER (mode);
5483 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5485 v = rtvec_alloc (units);
5487 /* We need to call this function after we set the scalar const_tiny_rtx
5488 entries. */
5489 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5491 for (i = 0; i < units; ++i)
5492 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5494 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5495 return tem;
5498 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but return the shared
5499 zero, one, or all-ones vector when all elements are 0, 1, or -1. */
5501 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5503 enum machine_mode inner = GET_MODE_INNER (mode);
5504 int nunits = GET_MODE_NUNITS (mode);
5505 rtx x;
5506 int i;
5508 /* Check to see if all of the elements have the same value. */
5509 x = RTVEC_ELT (v, nunits - 1);
5510 for (i = nunits - 2; i >= 0; i--)
5511 if (RTVEC_ELT (v, i) != x)
5512 break;
5514 /* If the values are all the same, check to see if we can use one of the
5515 standard constant vectors. */
5516 if (i == -1)
5518 if (x == CONST0_RTX (inner))
5519 return CONST0_RTX (mode);
5520 else if (x == CONST1_RTX (inner))
5521 return CONST1_RTX (mode);
5522 else if (x == CONSTM1_RTX (inner))
5523 return CONSTM1_RTX (mode);
5526 return gen_rtx_raw_CONST_VECTOR (mode, v);
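/* Editorial sketch (assuming the target supports V4SImode): a
   consequence of the sharing above is that building a vector whose
   elements are all const0_rtx allocates nothing new, e.g.

     rtvec v = rtvec_alloc (4);
     int k;
     for (k = 0; k < 4; k++)
       RTVEC_ELT (v, k) = const0_rtx;
     gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v)
                 == CONST0_RTX (V4SImode));

   so the zero vector can be recognized by pointer comparison.  */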
5529 /* Initialise global register information required by all functions. */
5531 void
5532 init_emit_regs (void)
5534 int i;
5535 enum machine_mode mode;
5536 mem_attrs *attrs;
5538 /* Reset register attributes. */
5539 htab_empty (reg_attrs_htab);
5541 /* We need reg_raw_mode, so initialize the modes now. */
5542 init_reg_modes_target ();
5544 /* Assign register numbers to the globally defined register rtx. */
5545 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5546 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5547 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5548 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5549 virtual_incoming_args_rtx =
5550 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5551 virtual_stack_vars_rtx =
5552 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5553 virtual_stack_dynamic_rtx =
5554 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5555 virtual_outgoing_args_rtx =
5556 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5557 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5558 virtual_preferred_stack_boundary_rtx =
5559 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5561 /* Initialize RTL for commonly used hard registers. These are
5562 copied into regno_reg_rtx as we begin to compile each function. */
5563 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5564 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5566 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5567 return_address_pointer_rtx
5568 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5569 #endif
5571 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5572 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5573 else
5574 pic_offset_table_rtx = NULL_RTX;
5576 for (i = 0; i < (int) MAX_MACHINE_MODE; i++)
5578 mode = (enum machine_mode) i;
5579 attrs = ggc_alloc_cleared_mem_attrs ();
5580 attrs->align = BITS_PER_UNIT;
5581 attrs->addrspace = ADDR_SPACE_GENERIC;
5582 if (mode != BLKmode)
5584 attrs->size_known_p = true;
5585 attrs->size = GET_MODE_SIZE (mode);
5586 if (STRICT_ALIGNMENT)
5587 attrs->align = GET_MODE_ALIGNMENT (mode);
5589 mode_mem_attrs[i] = attrs;
5593 /* Create some permanent unique rtl objects shared between all functions. */
5595 void
5596 init_emit_once (void)
5598 int i;
5599 enum machine_mode mode;
5600 enum machine_mode double_mode;
5602 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5603 hash tables. */
5604 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5605 const_int_htab_eq, NULL);
5607 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5608 const_double_htab_eq, NULL);
5610 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5611 const_fixed_htab_eq, NULL);
5613 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5614 mem_attrs_htab_eq, NULL);
5615 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5616 reg_attrs_htab_eq, NULL);
5618 /* Compute the byte, word and double modes. */
5620 byte_mode = VOIDmode;
5621 word_mode = VOIDmode;
5622 double_mode = VOIDmode;
5624 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5625 mode != VOIDmode;
5626 mode = GET_MODE_WIDER_MODE (mode))
5628 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5629 && byte_mode == VOIDmode)
5630 byte_mode = mode;
5632 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5633 && word_mode == VOIDmode)
5634 word_mode = mode;
5637 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5638 mode != VOIDmode;
5639 mode = GET_MODE_WIDER_MODE (mode))
5641 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5642 && double_mode == VOIDmode)
5643 double_mode = mode;
5646 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5648 #ifdef INIT_EXPANDERS
5649 /* This is to initialize {init|mark|free}_machine_status before the first
5650 call to push_function_context_to. This is needed by the Chill front
5651 end which calls push_function_context_to before the first call to
5652 init_function_start. */
5653 INIT_EXPANDERS;
5654 #endif
5656 /* Create the unique rtx's for certain rtx codes and operand values. */
5658 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5659 tries to use these variables. */
5660 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5661 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5662 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5664 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5665 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5666 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5667 else
5668 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5670 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5671 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5672 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5674 dconstm1 = dconst1;
5675 dconstm1.sign = 1;
5677 dconsthalf = dconst1;
5678 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5680 for (i = 0; i < 3; i++)
5682 const REAL_VALUE_TYPE *const r =
5683 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5685 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5686 mode != VOIDmode;
5687 mode = GET_MODE_WIDER_MODE (mode))
5688 const_tiny_rtx[i][(int) mode] =
5689 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5691 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5692 mode != VOIDmode;
5693 mode = GET_MODE_WIDER_MODE (mode))
5694 const_tiny_rtx[i][(int) mode] =
5695 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5697 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5699 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5700 mode != VOIDmode;
5701 mode = GET_MODE_WIDER_MODE (mode))
5702 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5704 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5705 mode != VOIDmode;
5706 mode = GET_MODE_WIDER_MODE (mode))
5707 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5710 const_tiny_rtx[3][(int) VOIDmode] = constm1_rtx;
5712 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5713 mode != VOIDmode;
5714 mode = GET_MODE_WIDER_MODE (mode))
5715 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5717 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5718 mode != VOIDmode;
5719 mode = GET_MODE_WIDER_MODE (mode))
5720 const_tiny_rtx[3][(int) mode] = constm1_rtx;
5722 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5723 mode != VOIDmode;
5724 mode = GET_MODE_WIDER_MODE (mode))
5726 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5727 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5730 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5731 mode != VOIDmode;
5732 mode = GET_MODE_WIDER_MODE (mode))
5734 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5735 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5738 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5739 mode != VOIDmode;
5740 mode = GET_MODE_WIDER_MODE (mode))
5742 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5743 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5744 const_tiny_rtx[3][(int) mode] = gen_const_vector (mode, 3);
5747 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5748 mode != VOIDmode;
5749 mode = GET_MODE_WIDER_MODE (mode))
5751 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5752 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5755 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5756 mode != VOIDmode;
5757 mode = GET_MODE_WIDER_MODE (mode))
5759 FCONST0(mode).data.high = 0;
5760 FCONST0(mode).data.low = 0;
5761 FCONST0(mode).mode = mode;
5762 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5763 FCONST0 (mode), mode);
5766 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5767 mode != VOIDmode;
5768 mode = GET_MODE_WIDER_MODE (mode))
5770 FCONST0(mode).data.high = 0;
5771 FCONST0(mode).data.low = 0;
5772 FCONST0(mode).mode = mode;
5773 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5774 FCONST0 (mode), mode);
5777 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5778 mode != VOIDmode;
5779 mode = GET_MODE_WIDER_MODE (mode))
5781 FCONST0(mode).data.high = 0;
5782 FCONST0(mode).data.low = 0;
5783 FCONST0(mode).mode = mode;
5784 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5785 FCONST0 (mode), mode);
5787 /* We store the value 1. */
5788 FCONST1(mode).data.high = 0;
5789 FCONST1(mode).data.low = 0;
5790 FCONST1(mode).mode = mode;
5791 lshift_double (1, 0, GET_MODE_FBIT (mode),
5792 2 * HOST_BITS_PER_WIDE_INT,
5793 &FCONST1(mode).data.low,
5794 &FCONST1(mode).data.high,
5795 SIGNED_FIXED_POINT_MODE_P (mode));
5796 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5797 FCONST1 (mode), mode);
5800 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5801 mode != VOIDmode;
5802 mode = GET_MODE_WIDER_MODE (mode))
5804 FCONST0(mode).data.high = 0;
5805 FCONST0(mode).data.low = 0;
5806 FCONST0(mode).mode = mode;
5807 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5808 FCONST0 (mode), mode);
5810 /* We store the value 1. */
5811 FCONST1(mode).data.high = 0;
5812 FCONST1(mode).data.low = 0;
5813 FCONST1(mode).mode = mode;
5814 lshift_double (1, 0, GET_MODE_FBIT (mode),
5815 2 * HOST_BITS_PER_WIDE_INT,
5816 &FCONST1(mode).data.low,
5817 &FCONST1(mode).data.high,
5818 SIGNED_FIXED_POINT_MODE_P (mode));
5819 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5820 FCONST1 (mode), mode);
5823 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5824 mode != VOIDmode;
5825 mode = GET_MODE_WIDER_MODE (mode))
5827 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5830 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5831 mode != VOIDmode;
5832 mode = GET_MODE_WIDER_MODE (mode))
5834 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5837 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5838 mode != VOIDmode;
5839 mode = GET_MODE_WIDER_MODE (mode))
5841 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5842 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5845 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5846 mode != VOIDmode;
5847 mode = GET_MODE_WIDER_MODE (mode))
5849 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5850 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5853 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5854 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5855 const_tiny_rtx[0][i] = const0_rtx;
5857 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5858 if (STORE_FLAG_VALUE == 1)
5859 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5861 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5862 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5863 simple_return_rtx = gen_rtx_fmt_ (SIMPLE_RETURN, VOIDmode);
5864 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
5867 /* Produce an exact duplicate of insn INSN after AFTER.
5868 Take care to update libcall regions if present. */
5871 emit_copy_of_insn_after (rtx insn, rtx after)
5873 rtx new_rtx, link;
5875 switch (GET_CODE (insn))
5877 case INSN:
5878 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5879 break;
5881 case JUMP_INSN:
5882 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5883 break;
5885 case DEBUG_INSN:
5886 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5887 break;
5889 case CALL_INSN:
5890 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5891 if (CALL_INSN_FUNCTION_USAGE (insn))
5892 CALL_INSN_FUNCTION_USAGE (new_rtx)
5893 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5894 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5895 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5896 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5897 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5898 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5899 break;
5901 default:
5902 gcc_unreachable ();
5905 /* Update LABEL_NUSES. */
5906 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
5908 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
5910 /* If the old insn is frame related, then so is the new one. This is
5911 primarily needed for IA-64 unwind info which marks epilogue insns,
5912 which may be duplicated by the basic block reordering code. */
5913 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
5915 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5916 will make them. REG_LABEL_TARGETs are created there too, but are
5917 supposed to be sticky, so we copy them. */
5918 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5919 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5921 if (GET_CODE (link) == EXPR_LIST)
5922 add_reg_note (new_rtx, REG_NOTE_KIND (link),
5923 copy_insn_1 (XEXP (link, 0)));
5924 else
5925 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
5928 INSN_CODE (new_rtx) = INSN_CODE (insn);
5929 return new_rtx;
5932 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5934 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5936 if (hard_reg_clobbers[mode][regno])
5937 return hard_reg_clobbers[mode][regno];
5938 else
5939 return (hard_reg_clobbers[mode][regno] =
5940 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
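/* Editorial sketch (FLAGS_REG stands for some target-specific hard
   register number, used purely for illustration): the CLOBBER
   returned above is cached per (mode, regno) pair and shared, so

     rtx clob = gen_hard_reg_clobber (CCmode, FLAGS_REG);

   must be treated as read-only, and two such clobbers of the same
   register in the same mode compare equal by pointer.  */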
5943 #include "gt-emit-rtl.h"