/* Emit RTL for the GCC expander.
   Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
/* Middle-to-low level generation of rtx code and insns.

   This file contains support functions for creating rtl expressions
   and manipulating them in the doubly-linked chain of insns.

   The patterns of the insns are created by machine-dependent
   routines in insn-emit.c, which is generated automatically from
   the machine description.  These routines make the individual rtx's
   of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
   which are automatically generated from rtl.def; what is machine
   dependent is the kind of rtx's they make and what arguments they
   use.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "hashtab.h"
#include "insn-config.h"
#include "recog.h"
#include "bitmap.h"
#include "basic-block.h"
#include "ggc.h"
#include "debug.h"
#include "langhooks.h"
#include "tree-pass.h"
#include "df.h"
#include "params.h"
#include "target.h"
#include "tree-flow.h"

struct target_rtl default_target_rtl;
#if SWITCHABLE_TARGET
struct target_rtl *this_target_rtl = &default_target_rtl;
#endif

#define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
/* Commonly used modes.  */

enum machine_mode byte_mode;	/* Mode whose width is BITS_PER_UNIT.  */
enum machine_mode word_mode;	/* Mode whose width is BITS_PER_WORD.  */
enum machine_mode double_mode;	/* Mode whose width is DOUBLE_TYPE_SIZE.  */
enum machine_mode ptr_mode;	/* Mode whose width is POINTER_SIZE.  */

/* Data structures maintained for the currently processed function in
   RTL form.  */

struct rtl_data x_rtl;

/* Indexed by pseudo register number, gives the rtx for that pseudo.
   Allocated in parallel with regno_pointer_align.
   FIXME: We could put it into the emit_status struct, but gengtype is
   unable to deal with a length attribute nested in a top-level
   structure.  */

rtx * regno_reg_rtx;

/* This is *not* reset after each function.  It gives each CODE_LABEL
   in the entire compilation a unique label number.  */

static GTY(()) int label_num = 1;
/* We record floating-point CONST_DOUBLEs in each floating-point mode for
   the values of 0, 1, and 2.  For the integer entries and VOIDmode, we
   record a copy of const[012]_rtx.  */

rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];

rtx const_true_rtx;

REAL_VALUE_TYPE dconst0;
REAL_VALUE_TYPE dconst1;
REAL_VALUE_TYPE dconst2;
REAL_VALUE_TYPE dconstm1;
REAL_VALUE_TYPE dconsthalf;

/* Record the fixed-point constants 0 and 1.  */
FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
FIXED_VALUE_TYPE fconst1[MAX_FCONST1];

/* We make one copy of (const_int C) where C is in
   [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
   to save space during the compilation and simplify comparisons of
   integers.  */

rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];

/* A hash table storing CONST_INTs whose absolute value is greater
   than MAX_SAVED_CONST_INT.  */

static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_int_htab;

/* A hash table storing memory attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
     htab_t mem_attrs_htab;

/* A hash table storing register attribute structures.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
     htab_t reg_attrs_htab;

/* A hash table storing all CONST_DOUBLEs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_double_htab;

/* A hash table storing all CONST_FIXEDs.  */
static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
     htab_t const_fixed_htab;

#define cur_insn_uid (crtl->emit.x_cur_insn_uid)
#define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
#define last_location (crtl->emit.x_last_location)
#define first_label_num (crtl->emit.x_first_label_num)
static rtx make_call_insn_raw (rtx);
static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
static void set_used_decls (tree);
static void mark_label_nuses (rtx);
static hashval_t const_int_htab_hash (const void *);
static int const_int_htab_eq (const void *, const void *);
static hashval_t const_double_htab_hash (const void *);
static int const_double_htab_eq (const void *, const void *);
static rtx lookup_const_double (rtx);
static hashval_t const_fixed_htab_hash (const void *);
static int const_fixed_htab_eq (const void *, const void *);
static rtx lookup_const_fixed (rtx);
static hashval_t mem_attrs_htab_hash (const void *);
static int mem_attrs_htab_eq (const void *, const void *);
static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
				 addr_space_t, enum machine_mode);
static hashval_t reg_attrs_htab_hash (const void *);
static int reg_attrs_htab_eq (const void *, const void *);
static reg_attrs *get_reg_attrs (tree, int);
static rtx gen_const_vector (enum machine_mode, int);
static void copy_rtx_if_shared_1 (rtx *orig);

/* Probability of the conditional branch currently being processed by
   try_split.  Set to -1 otherwise.  */
int split_branch_probability = -1;
/* Returns a hash code for X (which is really a CONST_INT).  */

static hashval_t
const_int_htab_hash (const void *x)
{
  return (hashval_t) INTVAL ((const_rtx) x);
}

/* Returns nonzero if the value represented by X (which is really a
   CONST_INT) is the same as that given by Y (which is really a
   HOST_WIDE_INT *).  */

static int
const_int_htab_eq (const void *x, const void *y)
{
  return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
}
/* Returns a hash code for X (which is really a CONST_DOUBLE).  */
static hashval_t
const_double_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  if (GET_MODE (value) == VOIDmode)
    h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
  else
    {
      h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
      /* MODE is used in the comparison, so it should be in the hash.  */
      h ^= GET_MODE (value);
    }
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
   is the same as that represented by Y (really a CONST_DOUBLE).  */
static int
const_double_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  if (GET_MODE (a) == VOIDmode)
    return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
	    && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
  else
    return real_identical (CONST_DOUBLE_REAL_VALUE (a),
			   CONST_DOUBLE_REAL_VALUE (b));
}
/* Returns a hash code for X (which is really a CONST_FIXED).  */

static hashval_t
const_fixed_htab_hash (const void *x)
{
  const_rtx const value = (const_rtx) x;
  hashval_t h;

  h = fixed_hash (CONST_FIXED_VALUE (value));
  /* MODE is used in the comparison, so it should be in the hash.  */
  h ^= GET_MODE (value);
  return h;
}

/* Returns nonzero if the value represented by X (really a CONST_FIXED)
   is the same as that represented by Y (really a CONST_FIXED).  */

static int
const_fixed_htab_eq (const void *x, const void *y)
{
  const_rtx const a = (const_rtx) x, b = (const_rtx) y;

  if (GET_MODE (a) != GET_MODE (b))
    return 0;
  return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
}
/* Returns a hash code for X (which is really a mem_attrs *).  */

static hashval_t
mem_attrs_htab_hash (const void *x)
{
  const mem_attrs *const p = (const mem_attrs *) x;

  return (p->alias ^ (p->align * 1000)
	  ^ (p->addrspace * 4000)
	  ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
	  ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
	  ^ (size_t) iterative_hash_expr (p->expr, 0));
}

/* Returns nonzero if the value represented by X (which is really a
   mem_attrs *) is the same as that given by Y (which is also really a
   mem_attrs *).  */

static int
mem_attrs_htab_eq (const void *x, const void *y)
{
  const mem_attrs *const p = (const mem_attrs *) x;
  const mem_attrs *const q = (const mem_attrs *) y;

  return (p->alias == q->alias && p->offset == q->offset
	  && p->size == q->size && p->align == q->align
	  && p->addrspace == q->addrspace
	  && (p->expr == q->expr
	      || (p->expr != NULL_TREE && q->expr != NULL_TREE
		  && operand_equal_p (p->expr, q->expr, 0))));
}
/* Allocate a new mem_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a MEM of mode MODE.  */

static mem_attrs *
get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
	       unsigned int align, addr_space_t addrspace,
	       enum machine_mode mode)
{
  mem_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.
     This must match what the corresponding MEM_* macros return when the
     field is not present.  */
  if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
      && (size == 0
	  || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
      && (STRICT_ALIGNMENT && mode != BLKmode
	  ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
    return 0;

  attrs.alias = alias;
  attrs.expr = expr;
  attrs.offset = offset;
  attrs.size = size;
  attrs.align = align;
  attrs.addrspace = addrspace;

  slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_mem_attrs ();
      memcpy (*slot, &attrs, sizeof (mem_attrs));
    }

  return (mem_attrs *) *slot;
}
/* Returns a hash code for X (which is really a reg_attrs *).  */

static hashval_t
reg_attrs_htab_hash (const void *x)
{
  const reg_attrs *const p = (const reg_attrs *) x;

  return ((p->offset * 1000) ^ (intptr_t) p->decl);
}

/* Returns nonzero if the value represented by X (which is really a
   reg_attrs *) is the same as that given by Y (which is also really a
   reg_attrs *).  */

static int
reg_attrs_htab_eq (const void *x, const void *y)
{
  const reg_attrs *const p = (const reg_attrs *) x;
  const reg_attrs *const q = (const reg_attrs *) y;

  return (p->decl == q->decl && p->offset == q->offset);
}
/* Allocate a new reg_attrs structure and insert it into the hash table if
   one identical to it is not already in the table.  We are doing this for
   a REG with decl DECL and offset OFFSET.  */

static reg_attrs *
get_reg_attrs (tree decl, int offset)
{
  reg_attrs attrs;
  void **slot;

  /* If everything is the default, we can just return zero.  */
  if (decl == 0 && offset == 0)
    return 0;

  attrs.decl = decl;
  attrs.offset = offset;

  slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
  if (*slot == 0)
    {
      *slot = ggc_alloc_reg_attrs ();
      memcpy (*slot, &attrs, sizeof (reg_attrs));
    }

  return (reg_attrs *) *slot;
}
#if !HAVE_blockage
/* Generate an empty ASM_INPUT, which is used to block attempts to schedule
   across this insn. */

rtx
gen_blockage (void)
{
  rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
  MEM_VOLATILE_P (x) = true;
  return x;
}
#endif
/* Generate a new REG rtx.  Make sure ORIGINAL_REGNO is set properly, and
   don't attempt to share with the various global pieces of rtl (such as
   frame_pointer_rtx).  */

rtx
gen_raw_REG (enum machine_mode mode, int regno)
{
  rtx x = gen_rtx_raw_REG (mode, regno);
  ORIGINAL_REGNO (x) = regno;
  return x;
}
/* There are some RTL codes that require special attention; the generation
   functions do the raw handling.  If you add to this list, modify
   special_rtx in gengenrtl.c as well.  */

rtx
gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
{
  void **slot;

  if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
    return const_int_rtx[arg + MAX_SAVED_CONST_INT];

#if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
  if (const_true_rtx && arg == STORE_FLAG_VALUE)
    return const_true_rtx;
#endif

  /* Look up the CONST_INT in the hash table.  */
  slot = htab_find_slot_with_hash (const_int_htab, &arg,
				   (hashval_t) arg, INSERT);
  if (*slot == 0)
    *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);

  return (rtx) *slot;
}
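
/* Because CONST_INTs are shared in this way, pointer equality suffices
   to compare them; for instance (an illustrative note, not part of the
   original source), GEN_INT (0) returns the very same rtx object as
   const0_rtx.  */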

rtx
gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  return GEN_INT (trunc_int_for_mode (c, mode));
}
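
/* A small worked example (editorial illustration, assuming the usual
   8-bit QImode): gen_int_mode (0x1ff, QImode) truncates the value to
   its low eight bits and sign-extends, yielding (const_int -1) -- the
   shared constm1_rtx -- whereas a bare GEN_INT (0x1ff) would produce a
   CONST_INT that is not a valid QImode value.  */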

/* CONST_DOUBLEs might be created from pairs of integers, or from
   REAL_VALUE_TYPEs.  Also, their length is known only at run time,
   so we cannot use gen_rtx_raw_CONST_DOUBLE.  */

/* Determine whether REAL, a CONST_DOUBLE, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */
static rtx
lookup_const_double (rtx real)
{
  void **slot = htab_find_slot (const_double_htab, real, INSERT);
  if (*slot == 0)
    *slot = real;

  return (rtx) *slot;
}
/* Return a CONST_DOUBLE rtx for a floating-point value specified by
   VALUE in mode MODE.  */
rtx
const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
{
  rtx real = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (real, mode);

  real->u.rv = value;

  return lookup_const_double (real);
}
/* Determine whether FIXED, a CONST_FIXED, already exists in the
   hash table.  If so, return its counterpart; otherwise add it
   to the hash table and return it.  */

static rtx
lookup_const_fixed (rtx fixed)
{
  void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
  if (*slot == 0)
    *slot = fixed;

  return (rtx) *slot;
}

/* Return a CONST_FIXED rtx for a fixed-point value specified by
   VALUE in mode MODE.  */

rtx
const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
{
  rtx fixed = rtx_alloc (CONST_FIXED);
  PUT_MODE (fixed, mode);

  fixed->u.fv = value;

  return lookup_const_fixed (fixed);
}
/* Constructs double_int from rtx CST.  */

double_int
rtx_to_double_int (const_rtx cst)
{
  double_int r;

  if (CONST_INT_P (cst))
    r = shwi_to_double_int (INTVAL (cst));
  else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
    {
      r.low = CONST_DOUBLE_LOW (cst);
      r.high = CONST_DOUBLE_HIGH (cst);
    }
  else
    gcc_unreachable ();

  return r;
}

/* Return a CONST_DOUBLE or CONST_INT for a value specified as
   a double_int.  */

rtx
immed_double_int_const (double_int i, enum machine_mode mode)
{
  return immed_double_const (i.low, i.high, mode);
}
/* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
   of ints: I0 is the low-order word and I1 is the high-order word.
   Do not use this routine for non-integer modes; convert to
   REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE.  */

rtx
immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
{
  rtx value;
  unsigned int i;

  /* There are the following cases (note that there are no modes with
     HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode)
     < 2 * HOST_BITS_PER_WIDE_INT):

     1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
	gen_int_mode.
     2) If GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the
	value fits into a HOST_WIDE_INT anyway (i.e., i1 consists only of
	copies of the sign bit, and the signs of i0 and i1 agree), then we
	return a CONST_INT for i0.
     3) Otherwise, we create a CONST_DOUBLE for i0 and i1.  */
  if (mode != VOIDmode)
    {
      gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
		  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
		  /* We can get a 0 for an error mark.  */
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
		  || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);

      if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
	return gen_int_mode (i0, mode);

      gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
    }

  /* If this integer fits in one word, return a CONST_INT.  */
  if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
    return GEN_INT (i0);

  /* We use VOIDmode for integers.  */
  value = rtx_alloc (CONST_DOUBLE);
  PUT_MODE (value, VOIDmode);

  CONST_DOUBLE_LOW (value) = i0;
  CONST_DOUBLE_HIGH (value) = i1;

  for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
    XWINT (value, i) = 0;

  return lookup_const_double (value);
}
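
/* Worked example (editorial illustration, assuming a 64-bit
   HOST_WIDE_INT and a target providing TImode):
   immed_double_const (5, 0, TImode) falls into case 2 above and
   returns (const_int 5), while immed_double_const (0, 1, TImode),
   i.e. the value 2^64, falls into case 3 and returns a VOIDmode
   CONST_DOUBLE with low word 0 and high word 1.  */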

rtx
gen_rtx_REG (enum machine_mode mode, unsigned int regno)
{
  /* In case the MD file explicitly references the frame pointer, have
     all such references point to the same frame pointer.  This is
     used during frame pointer elimination to distinguish the explicit
     references to these registers from pseudos that happened to be
     assigned to them.

     If we have eliminated the frame pointer or arg pointer, we will
     be using it as a normal register, for example as a spill
     register.  In such cases, we might be accessing it in a mode that
     is not Pmode and therefore cannot use the pre-allocated rtx.

     Also don't do this when we are making new REGs in reload, since
     we don't want to get confused with the real pointers.  */

  if (mode == Pmode && !reload_in_progress)
    {
      if (regno == FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return frame_pointer_rtx;
#if !HARD_FRAME_POINTER_IS_FRAME_POINTER
      if (regno == HARD_FRAME_POINTER_REGNUM
	  && (!reload_completed || frame_pointer_needed))
	return hard_frame_pointer_rtx;
#endif
#if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
      if (regno == ARG_POINTER_REGNUM)
	return arg_pointer_rtx;
#endif
#ifdef RETURN_ADDRESS_POINTER_REGNUM
      if (regno == RETURN_ADDRESS_POINTER_REGNUM)
	return return_address_pointer_rtx;
#endif
      if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
	  && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
	  && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
	return pic_offset_table_rtx;
      if (regno == STACK_POINTER_REGNUM)
	return stack_pointer_rtx;
    }

#if 0
  /* If the per-function register table has been set up, try to re-use
     an existing entry in that table to avoid useless generation of RTL.

     This code is disabled for now until we can fix the various backends
     which depend on having non-shared hard registers in some cases.  Long
     term we want to re-enable this code as it can significantly cut down
     on the amount of useless RTL that gets generated.

     We'll also need to fix some code that runs after reload that wants to
     set ORIGINAL_REGNO.  */

  if (cfun
      && cfun->emit
      && regno_reg_rtx
      && regno < FIRST_PSEUDO_REGISTER
      && reg_raw_mode[regno] == mode)
    return regno_reg_rtx[regno];
#endif

  return gen_raw_REG (mode, regno);
}

rtx
gen_rtx_MEM (enum machine_mode mode, rtx addr)
{
  rtx rt = gen_rtx_raw_MEM (mode, addr);

  /* This field is not cleared by the mere allocation of the rtx, so
     we clear it here.  */
  MEM_ATTRS (rt) = 0;

  return rt;
}

/* Generate a memory referring to non-trapping constant memory.  */

rtx
gen_const_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_READONLY_P (mem) = 1;
  MEM_NOTRAP_P (mem) = 1;
  return mem;
}

/* Generate a MEM referring to fixed portions of the frame, e.g., register
   save areas.  */

rtx
gen_frame_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}

/* Generate a MEM referring to a temporary use of the stack, not part
   of the fixed stack frame.  For example, something which is pushed
   by a target splitter.  */

rtx
gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
{
  rtx mem = gen_rtx_MEM (mode, addr);
  MEM_NOTRAP_P (mem) = 1;
  if (!cfun->calls_alloca)
    set_mem_alias_set (mem, get_frame_alias_set ());
  return mem;
}
/* We want to create (subreg:OMODE (obj:IMODE) OFFSET).  Return true if
   this construct would be valid, and false otherwise.  */

bool
validate_subreg (enum machine_mode omode, enum machine_mode imode,
		 const_rtx reg, unsigned int offset)
{
  unsigned int isize = GET_MODE_SIZE (imode);
  unsigned int osize = GET_MODE_SIZE (omode);

  /* All subregs must be aligned.  */
  if (offset % osize != 0)
    return false;

  /* The subreg offset cannot be outside the inner object.  */
  if (offset >= isize)
    return false;

  /* ??? This should not be here.  Temporarily continue to allow word_mode
     subregs of anything.  The most common offender is (subreg:SI (reg:DF)).
     Generally, backends are doing something sketchy but it'll take time to
     fix them all.  */
  if (omode == word_mode)
    ;
  /* ??? Similarly, e.g. with (subreg:DF (reg:TI)).  Though store_bit_field
     is the culprit here, and not the backends.  */
  else if (osize >= UNITS_PER_WORD && isize >= osize)
    ;
  /* Allow component subregs of complex and vector.  Though given the below
     extraction rules, it's not always clear what that means.  */
  else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	   && GET_MODE_INNER (imode) == omode)
    ;
  /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
     i.e. (subreg:V4SF (reg:SF) 0).  This surely isn't the cleanest way to
     represent this.  It's questionable if this ought to be represented at
     all -- why can't this all be hidden in post-reload splitters that make
     arbitrary mode changes to the registers themselves.  */
  else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
    ;
  /* Subregs involving floating point modes are not allowed to
     change size.  Therefore (subreg:DI (reg:DF) 0) is fine, but
     (subreg:SI (reg:DF) 0) isn't.  */
  else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
    {
      if (isize != osize)
	return false;
    }

  /* Paradoxical subregs must have offset zero.  */
  if (osize > isize)
    return offset == 0;

  /* This is a normal subreg.  Verify that the offset is representable.  */

  /* For hard registers, we already have most of these rules collected in
     subreg_offset_representable_p.  */
  if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
    {
      unsigned int regno = REGNO (reg);

#ifdef CANNOT_CHANGE_MODE_CLASS
      if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
	  && GET_MODE_INNER (imode) == omode)
	;
      else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
	return false;
#endif

      return subreg_offset_representable_p (regno, imode, offset, omode);
    }

  /* For pseudo registers, we want most of the same checks.  Namely:
     If the register is no larger than a word, the subreg must be lowpart.
     If the register is larger than a word, the subreg must be the lowpart
     of a subword.  A subreg does *not* perform arbitrary bit extraction.
     Given that we've already checked mode/offset alignment, we only have
     to check subword subregs here.  */
  if (osize < UNITS_PER_WORD)
    {
      enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
      unsigned int low_off = subreg_lowpart_offset (omode, wmode);
      if (offset % UNITS_PER_WORD != low_off)
	return false;
    }
  return true;
}
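
/* Some illustrative calls (editorial addition, assuming a little-endian
   32-bit target with 4-byte UNITS_PER_WORD): (subreg:HI (reg:SI) 0) is
   valid, (subreg:HI (reg:SI) 1) is rejected by the "all subregs must be
   aligned" check (1 % 2 != 0), and (subreg:SI (reg:DF) 0) is accepted
   only because of the word_mode escape hatch above.  */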

rtx
gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
{
  gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
  return gen_rtx_raw_SUBREG (mode, reg, offset);
}

/* Generate a SUBREG representing the least-significant part of REG if MODE
   is smaller than the mode of REG, otherwise a paradoxical SUBREG.  */

rtx
gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
{
  enum machine_mode inmode;

  inmode = GET_MODE (reg);
  if (inmode == VOIDmode)
    inmode = mode;
  return gen_rtx_SUBREG (mode, reg,
			 subreg_lowpart_offset (mode, inmode));
}

/* Create an rtvec and store within it the RTXen passed in the arguments.  */

rtvec
gen_rtvec (int n, ...)
{
  int i;
  rtvec rt_val;
  va_list p;

  va_start (p, n);

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    {
      va_end (p);
      return NULL_RTVEC;
    }

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = va_arg (p, rtx);

  va_end (p);
  return rt_val;
}
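
/* Typical usage (editorial illustration): building the vector for a
   two-element PARALLEL, e.g.
     gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, set0, set1));
   where set0 and set1 are previously constructed SET rtxen.  */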

rtvec
gen_rtvec_v (int n, rtx *argp)
{
  int i;
  rtvec rt_val;

  /* Don't allocate an empty rtvec...  */
  if (n == 0)
    return NULL_RTVEC;

  rt_val = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    rt_val->elem[i] = *argp++;

  return rt_val;
}

/* Return the number of bytes between the start of an OUTER_MODE
   in-memory value and the start of an INNER_MODE in-memory value,
   given that the former is a lowpart of the latter.  It may be a
   paradoxical lowpart, in which case the offset will be negative
   on big-endian targets.  */

int
byte_lowpart_offset (enum machine_mode outer_mode,
		     enum machine_mode inner_mode)
{
  if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
    return subreg_lowpart_offset (outer_mode, inner_mode);
  else
    return -subreg_lowpart_offset (inner_mode, outer_mode);
}
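
/* Worked example (editorial illustration, assuming 4-byte words):
   byte_lowpart_offset (SImode, DImode) is 0 on a little-endian target
   and 4 on a big-endian one; the paradoxical case
   byte_lowpart_offset (DImode, SImode) is correspondingly 0 or -4.  */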

/* Generate a REG rtx for a new pseudo register of mode MODE.
   This pseudo is assigned the next sequential register number.  */

rtx
gen_reg_rtx (enum machine_mode mode)
{
  rtx val;
  unsigned int align = GET_MODE_ALIGNMENT (mode);

  gcc_assert (can_create_pseudo_p ());

  /* If a virtual register with bigger mode alignment is generated,
     increase stack alignment estimation because it might be spilled
     to stack later.  */
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align
      && !crtl->stack_realign_processed)
    {
      unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
      if (crtl->stack_alignment_estimated < min_align)
	crtl->stack_alignment_estimated = min_align;
    }

  if (generating_concat_p
      && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
	  || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
    {
      /* For complex modes, don't make a single pseudo.
	 Instead, make a CONCAT of two pseudos.
	 This allows noncontiguous allocation of the real and imaginary parts,
	 which makes much better code.  Besides, allocating DCmode
	 pseudos overstrains reload on some machines like the 386.  */
      rtx realpart, imagpart;
      enum machine_mode partmode = GET_MODE_INNER (mode);

      realpart = gen_reg_rtx (partmode);
      imagpart = gen_reg_rtx (partmode);
      return gen_rtx_CONCAT (mode, realpart, imagpart);
    }

  /* Make sure regno_pointer_align and regno_reg_rtx are large
     enough to have an element for this pseudo reg number.  */

  if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
    {
      int old_size = crtl->emit.regno_pointer_align_length;
      char *tmp;
      rtx *new1;

      tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
      memset (tmp + old_size, 0, old_size);
      crtl->emit.regno_pointer_align = (unsigned char *) tmp;

      new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
      memset (new1 + old_size, 0, old_size * sizeof (rtx));
      regno_reg_rtx = new1;

      crtl->emit.regno_pointer_align_length = old_size * 2;
    }

  val = gen_raw_REG (mode, reg_rtx_no);
  regno_reg_rtx[reg_rtx_no++] = val;
  return val;
}
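
/* For instance (editorial illustration), with generating_concat_p set,
   a request for a DCmode pseudo yields something like
     (concat:DC (reg:DF 101) (reg:DF 102))
   rather than a single DCmode register, so the real and imaginary
   parts can be allocated independently.  The register numbers here are
   of course hypothetical.  */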

/* Update NEW_RTX with the same attributes as REG, but with OFFSET added
   to the REG_OFFSET.  */

static void
update_reg_offset (rtx new_rtx, rtx reg, int offset)
{
  REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
				       REG_OFFSET (reg) + offset);
}

/* Generate a register with the same attributes as REG, but with OFFSET
   added to the REG_OFFSET.  */

rtx
gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
		    int offset)
{
  rtx new_rtx = gen_rtx_REG (mode, regno);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Generate a new pseudo-register with the same attributes as REG, but
   with OFFSET added to the REG_OFFSET.  */

rtx
gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
{
  rtx new_rtx = gen_reg_rtx (mode);

  update_reg_offset (new_rtx, reg, offset);
  return new_rtx;
}

/* Adjust REG in-place so that it has mode MODE.  It is assumed that the
   new register is a (possibly paradoxical) lowpart of the old one.  */

void
adjust_reg_mode (rtx reg, enum machine_mode mode)
{
  update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
  PUT_MODE (reg, mode);
}

/* Copy REG's attributes from X, if X has any attributes.  If REG and X
   have different modes, REG is a (possibly paradoxical) lowpart of X.  */

void
set_reg_attrs_from_value (rtx reg, rtx x)
{
  int offset;

  /* Hard registers can be reused for multiple purposes within the same
     function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
     on them is wrong.  */
  if (HARD_REGISTER_P (reg))
    return;

  offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
  if (MEM_P (x))
    {
      if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
	REG_ATTRS (reg)
	  = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
      if (MEM_POINTER (x))
	mark_reg_pointer (reg, 0);
    }
  else if (REG_P (x))
    {
      if (REG_ATTRS (x))
	update_reg_offset (reg, x, offset);
      if (REG_POINTER (x))
	mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
    }
}

/* Generate a REG rtx for a new pseudo register, copying the mode
   and attributes from X.  */

rtx
gen_reg_rtx_and_attrs (rtx x)
{
  rtx reg = gen_reg_rtx (GET_MODE (x));
  set_reg_attrs_from_value (reg, x);
  return reg;
}

/* Set the register attributes for registers contained in PARM_RTX.
   Use needed values from memory attributes of MEM.  */

void
set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
{
  if (REG_P (parm_rtx))
    set_reg_attrs_from_value (parm_rtx, mem);
  else if (GET_CODE (parm_rtx) == PARALLEL)
    {
      /* Check for a NULL entry in the first slot, used to indicate that the
	 parameter goes both on the stack and in registers.  */
      int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
      for (; i < XVECLEN (parm_rtx, 0); i++)
	{
	  rtx x = XVECEXP (parm_rtx, 0, i);
	  if (REG_P (XEXP (x, 0)))
	    REG_ATTRS (XEXP (x, 0))
	      = get_reg_attrs (MEM_EXPR (mem),
			       INTVAL (XEXP (x, 1)));
	}
    }
}

/* Set the REG_ATTRS for registers in value X, given that X represents
   decl T.  */

void
set_reg_attrs_for_decl_rtl (tree t, rtx x)
{
  if (GET_CODE (x) == SUBREG)
    {
      gcc_assert (subreg_lowpart_p (x));
      x = SUBREG_REG (x);
    }
  if (REG_P (x))
    REG_ATTRS (x)
      = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
					       DECL_MODE (t)));
  if (GET_CODE (x) == CONCAT)
    {
      if (REG_P (XEXP (x, 0)))
	REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
      if (REG_P (XEXP (x, 1)))
	REG_ATTRS (XEXP (x, 1))
	  = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int i, start;

      /* Check for a NULL entry, used to indicate that the parameter goes
	 both on the stack and in registers.  */
      if (XEXP (XVECEXP (x, 0, 0), 0))
	start = 0;
      else
	start = 1;

      for (i = start; i < XVECLEN (x, 0); i++)
	{
	  rtx y = XVECEXP (x, 0, i);
	  if (REG_P (XEXP (y, 0)))
	    REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
	}
    }
}

/* Assign the RTX X to declaration T.  */

void
set_decl_rtl (tree t, rtx x)
{
  DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
  if (x)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Assign the RTX X to parameter declaration T.  BY_REFERENCE_P is true
   if the ABI requires the parameter to be passed by reference.  */

void
set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
{
  DECL_INCOMING_RTL (t) = x;
  if (x && !by_reference_p)
    set_reg_attrs_for_decl_rtl (t, x);
}

/* Identify REG (which may be a CONCAT) as a user register.  */

void
mark_user_reg (rtx reg)
{
  if (GET_CODE (reg) == CONCAT)
    {
      REG_USERVAR_P (XEXP (reg, 0)) = 1;
      REG_USERVAR_P (XEXP (reg, 1)) = 1;
    }
  else
    {
      gcc_assert (REG_P (reg));
      REG_USERVAR_P (reg) = 1;
    }
}

/* Identify REG as a probable pointer register and show its alignment
   as ALIGN, if nonzero.  */

void
mark_reg_pointer (rtx reg, int align)
{
  if (! REG_POINTER (reg))
    {
      REG_POINTER (reg) = 1;

      if (align)
	REGNO_POINTER_ALIGN (REGNO (reg)) = align;
    }
  else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
    /* We can no longer be sure just how aligned this pointer is.  */
    REGNO_POINTER_ALIGN (REGNO (reg)) = align;
}

/* Return 1 plus the largest pseudo reg number used in the current
   function.  */

int
max_reg_num (void)
{
  return reg_rtx_no;
}

/* Return 1 + the largest label number used so far in the current function.  */

int
max_label_num (void)
{
  return label_num;
}

/* Return the first label number used in this function (if any were used).  */

int
get_first_label_num (void)
{
  return first_label_num;
}

/* If the rtx for a label was created during the expansion of a nested
   function, then first_label_num won't include this label number.
   Fix this now so that array indices work later.  */

void
maybe_set_first_label_num (rtx x)
{
  if (CODE_LABEL_NUMBER (x) < first_label_num)
    first_label_num = CODE_LABEL_NUMBER (x);
}

/* Return a value representing some low-order bits of X, where the number
   of low-order bits is given by MODE.  Note that no conversion is done
   between floating-point and fixed-point values, rather, the bit
   representation is returned.

   This function handles the cases in common between gen_lowpart, below,
   and two variants in cse.c and combine.c.  These are the cases that can
   be safely handled at all points in the compilation.

   If this is not a case we can handle, return 0.  */

rtx
gen_lowpart_common (enum machine_mode mode, rtx x)
{
  int msize = GET_MODE_SIZE (mode);
  int xsize;
  int offset = 0;
  enum machine_mode innermode;

  /* Unfortunately, this routine doesn't take a parameter for the mode of X,
     so we have to make one up.  Yuk.  */
  innermode = GET_MODE (x);
  if (CONST_INT_P (x)
      && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
  else if (innermode == VOIDmode)
    innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);

  xsize = GET_MODE_SIZE (innermode);

  gcc_assert (innermode != VOIDmode && innermode != BLKmode);

  if (innermode == mode)
    return x;

  /* MODE must occupy no more words than the mode of X.  */
  if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
      > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
    return 0;

  /* Don't allow generating paradoxical FLOAT_MODE subregs.  */
  if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
    return 0;

  offset = subreg_lowpart_offset (mode, innermode);

  if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
      && (GET_MODE_CLASS (mode) == MODE_INT
	  || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
    {
      /* If we are getting the low-order part of something that has been
	 sign- or zero-extended, we can either just use the object being
	 extended or make a narrower extension.  If we want an even smaller
	 piece than the size of the object being extended, call ourselves
	 recursively.

	 This case is used mostly by combine and cse.  */

      if (GET_MODE (XEXP (x, 0)) == mode)
	return XEXP (x, 0);
      else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
	return gen_lowpart_common (mode, XEXP (x, 0));
      else if (msize < xsize)
	return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
    }
  else if (GET_CODE (x) == SUBREG || REG_P (x)
	   || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
	   || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
    return simplify_gen_subreg (mode, x, innermode, offset);

  /* Otherwise, we can't do this.  */
  return 0;
}
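
/* Two illustrative cases (editorial addition):
   gen_lowpart_common (SImode, (sign_extend:DI (reg:SI x))) simply
   returns (reg:SI x), and gen_lowpart_common (QImode, (const_int
   0x1234)) folds to (const_int 0x34) via simplify_gen_subreg, since
   the lowpart of an integer constant is always its low-order bits.  */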

rtx
gen_highpart (enum machine_mode mode, rtx x)
{
  unsigned int msize = GET_MODE_SIZE (mode);
  rtx result;

  /* This case loses if X is a subreg.  To catch bugs early,
     complain if an invalid MODE is used even in other cases.  */
  gcc_assert (msize <= UNITS_PER_WORD
	      || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));

  result = simplify_gen_subreg (mode, x, GET_MODE (x),
				subreg_highpart_offset (mode, GET_MODE (x)));
  gcc_assert (result);

  /* simplify_gen_subreg is not guaranteed to return a valid operand for
     the target if we have a MEM.  gen_highpart must return a valid operand,
     emitting code if necessary to do so.  */
  if (MEM_P (result))
    {
      result = validize_mem (result);
      gcc_assert (result);
    }

  return result;
}

/* Like gen_highpart, but accept the mode of EXP's operand in case EXP
   is a VOIDmode constant.  */

rtx
gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode,
		   rtx exp)
{
  if (GET_MODE (exp) != VOIDmode)
    {
      gcc_assert (GET_MODE (exp) == innermode);
      return gen_highpart (outermode, exp);
    }
  return simplify_gen_subreg (outermode, exp, innermode,
			      subreg_highpart_offset (outermode, innermode));
}

/* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value.  */

unsigned int
subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}

/* Return the offset in bytes of the OUTERMODE high part of a value in
   mode INNERMODE, stored in memory in target format.  */
unsigned int
subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
{
  unsigned int offset = 0;
  int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));

  gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));

  if (difference > 0)
    {
      if (! WORDS_BIG_ENDIAN)
	offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
      if (! BYTES_BIG_ENDIAN)
	offset += difference % UNITS_PER_WORD;
    }

  return offset;
}
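
/* Worked example (editorial illustration, assuming 4-byte words): with
   OUTERMODE = SImode and INNERMODE = DImode, subreg_lowpart_offset
   returns 0 on a little-endian target and 4 on a big-endian one, and
   subreg_highpart_offset returns the opposite (4 and 0).  */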

/* Return 1 iff X, assumed to be a SUBREG,
   refers to the least significant part of its containing reg.
   If X is not a SUBREG, always return 1 (it is its own low part!).  */

int
subreg_lowpart_p (const_rtx x)
{
  if (GET_CODE (x) != SUBREG)
    return 1;
  else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
    return 0;

  return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
	  == SUBREG_BYTE (x));
}

/* Return subword OFFSET of operand OP.
   The word number, OFFSET, is interpreted as the word number starting
   at the low-order address.  OFFSET 0 is the low-order word if not
   WORDS_BIG_ENDIAN, otherwise it is the high-order word.

   If we cannot extract the required word, we return zero.  Otherwise,
   an rtx corresponding to the requested word will be returned.

   VALIDATE_ADDRESS is nonzero if the address should be validated.  Before
   reload has completed, a valid address will always be returned.  After
   reload, if a valid address cannot be returned, we return zero.

   If VALIDATE_ADDRESS is zero, we simply form the required address;
   validating it is the responsibility of the caller.

   MODE is the mode of OP in case it is a CONST_INT.

   ??? This is still rather broken for some cases.  The problem for the
   moment is that all callers of this thing provide no 'goal mode' to
   tell us to work with.  This exists because all callers were written
   in a word-based SUBREG world.
   Now use of this function can be deprecated by simplify_subreg in most
   cases.  */

rtx
operand_subword (rtx op, unsigned int offset, int validate_address,
		 enum machine_mode mode)
{
  if (mode == VOIDmode)
    mode = GET_MODE (op);

  gcc_assert (mode != VOIDmode);

  /* If OP is narrower than a word, fail.  */
  if (mode != BLKmode
      && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
    return 0;

  /* If we want a word outside OP, return zero.  */
  if (mode != BLKmode
      && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
    return const0_rtx;

  /* Form a new MEM at the requested address.  */
  if (MEM_P (op))
    {
      rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);

      if (! validate_address)
	return new_rtx;

      else if (reload_completed)
	{
	  if (! strict_memory_address_addr_space_p (word_mode,
						    XEXP (new_rtx, 0),
						    MEM_ADDR_SPACE (op)))
	    return 0;
	}
      else
	return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
    }

  /* The rest can be handled by simplify_subreg.  */
  return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
}
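
/* For example (editorial illustration), on a little-endian target with
   4-byte words and a DImode pseudo OP,
     operand_subword (op, 1, 0, DImode)
   reduces to simplify_gen_subreg (SImode, op, DImode, 4), i.e.
   (subreg:SI (reg:DI ...) 4), the high-order subword.  */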

/* Similar to `operand_subword', but never return 0.  If we can't
   extract the required subword, put OP into a register and try again.
   The second attempt must succeed.  We always validate the address in
   this case.

   MODE is the mode of OP, in case it is CONST_INT.  */

rtx
operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
{
  rtx result = operand_subword (op, offset, 1, mode);

  if (result)
    return result;

  if (mode != BLKmode && mode != VOIDmode)
    {
      /* If this is a register which cannot be accessed by words, copy it
	 to a pseudo register.  */
      if (REG_P (op))
	op = copy_to_reg (op);
      else
	op = force_reg (mode, op);
    }

  result = operand_subword (op, offset, 1, mode);
  gcc_assert (result);

  return result;
}

/* Return 1 if the expressions EXPR1 and EXPR2, as used in MEM_EXPRs,
   can be considered equal, and 0 otherwise.  */

int
mem_expr_equal_p (const_tree expr1, const_tree expr2)
{
  if (expr1 == expr2)
    return 1;

  if (! expr1 || ! expr2)
    return 0;

  if (TREE_CODE (expr1) != TREE_CODE (expr2))
    return 0;

  return operand_equal_p (expr1, expr2, 0);
}

/* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
   bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
   -1 if not known.  */

int
get_mem_align_offset (rtx mem, unsigned int align)
{
  tree expr;
  unsigned HOST_WIDE_INT offset;

  /* This function can't use
       if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
	   || !CONST_INT_P (MEM_OFFSET (mem))
	   || (MAX (MEM_ALIGN (mem),
		    get_object_alignment (MEM_EXPR (mem), align))
	       < align))
	 return -1;
       else
	 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
     for two reasons:
     - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
       for <variable>.  get_inner_reference doesn't handle it and
       even if it did, the alignment in that case needs to be determined
       from DECL_FIELD_CONTEXT's TYPE_ALIGN.
     - it would do a suboptimal job for COMPONENT_REFs; even if MEM_EXPR
       isn't sufficiently aligned, the object it is in might be.  */
  gcc_assert (MEM_P (mem));
  expr = MEM_EXPR (mem);
  if (expr == NULL_TREE
      || MEM_OFFSET (mem) == NULL_RTX
      || !CONST_INT_P (MEM_OFFSET (mem)))
    return -1;

  offset = INTVAL (MEM_OFFSET (mem));
  if (DECL_P (expr))
    {
      if (DECL_ALIGN (expr) < align)
	return -1;
    }
  else if (INDIRECT_REF_P (expr))
    {
      if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
	return -1;
    }
  else if (TREE_CODE (expr) == COMPONENT_REF)
    {
      while (1)
	{
	  tree inner = TREE_OPERAND (expr, 0);
	  tree field = TREE_OPERAND (expr, 1);
	  tree byte_offset = component_ref_field_offset (expr);
	  tree bit_offset = DECL_FIELD_BIT_OFFSET (field);

	  if (!byte_offset
	      || !host_integerp (byte_offset, 1)
	      || !host_integerp (bit_offset, 1))
	    return -1;

	  offset += tree_low_cst (byte_offset, 1);
	  offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;

	  if (inner == NULL_TREE)
	    {
	      if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
		  < (unsigned int) align)
		return -1;
	      break;
	    }
	  else if (DECL_P (inner))
	    {
	      if (DECL_ALIGN (inner) < align)
		return -1;
	      break;
	    }
	  else if (TREE_CODE (inner) != COMPONENT_REF)
	    return -1;
	  expr = inner;
	}
    }
  else
    return -1;

  return offset & ((align / BITS_PER_UNIT) - 1);
}
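
/* A small worked example (editorial illustration): for a MEM whose
   MEM_EXPR is a decl with DECL_ALIGN of at least 32 bits and whose
   MEM_OFFSET is 6, get_mem_align_offset (mem, 32) returns 6 & 3 == 2,
   i.e. the address is known to be 2 bytes past a 32-bit boundary.  */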

/* Given REF (a MEM) and T, either the type of X or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  BITPOS is nonzero if
   there is an offset outstanding on T that will be applied later.  */

void
set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
				 HOST_WIDE_INT bitpos)
{
  alias_set_type alias;
  tree expr = NULL;
  rtx offset = NULL_RTX;
  rtx size = NULL_RTX;
  unsigned int align = BITS_PER_UNIT;
  HOST_WIDE_INT apply_bitpos = 0;
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type.  In that case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);
  if (type == error_mark_node)
    return;

  /* If we have already set DECL_RTL = ref, get_alias_set will get the
     wrong answer, as it assumes that DECL_RTL already has the right alias
     info.  Callers should not set DECL_RTL until after the call to
     set_mem_attributes.  */
  gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and use it.  */
  alias = get_alias_set (t);

  MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref)
    = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
  MEM_POINTER (ref) = POINTER_TYPE_P (type);

  /* If we are making an object of this type, or if this is a DECL, we know
     that it is a scalar if the type is not an aggregate.  */
  if ((objectp || DECL_P (t))
      && ! AGGREGATE_TYPE_P (type)
      && TREE_CODE (type) != COMPLEX_TYPE)
    MEM_SCALAR_P (ref) = 1;

  /* Default values from pre-existing memory attributes if present.  */
  if (MEM_ATTRS (ref))
    {
      /* ??? Can this ever happen?  Calling this routine on a MEM that
	 already carries memory attributes should probably be invalid.  */
      expr = MEM_EXPR (ref);
      offset = MEM_OFFSET (ref);
      size = MEM_SIZE (ref);
      align = MEM_ALIGN (ref);
    }

  /* Otherwise, default values from the mode of the MEM reference.  */
  else if (GET_MODE (ref) != BLKmode)
    {
      /* Respect mode size.  */
      size = GEN_INT (GET_MODE_SIZE (GET_MODE (ref)));
      /* ??? Is this really necessary?  We probably should always get
	 the size from the type below.  */

      /* Respect mode alignment for STRICT_ALIGNMENT targets if T is a type;
         if T is an object, always compute the object alignment below.  */
      if (STRICT_ALIGNMENT && TYPE_P (t))
	align = GET_MODE_ALIGNMENT (GET_MODE (ref));
      /* ??? If T is a type, respecting mode alignment may *also* be wrong
	 e.g. if the type carries an alignment attribute.  Should we be
	 able to simply always use TYPE_ALIGN?  */
    }

  /* We can set the alignment from the type if we are making an object,
     this is an INDIRECT_REF, or if TYPE_ALIGN_OK.  */
  if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
    align = MAX (align, TYPE_ALIGN (type));
  else if (TREE_CODE (t) == MEM_REF)
    {
      tree op0 = TREE_OPERAND (t, 0);
      if (TREE_CODE (op0) == ADDR_EXPR
	  && (DECL_P (TREE_OPERAND (op0, 0))
	      || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
	{
	  if (DECL_P (TREE_OPERAND (op0, 0)))
	    align = DECL_ALIGN (TREE_OPERAND (op0, 0));
	  else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
	    {
	      align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
#ifdef CONSTANT_ALIGNMENT
	      align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0), align);
#endif
	    }
	  if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
	    {
	      unsigned HOST_WIDE_INT ioff
		= TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
	      unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
	      align = MIN (aoff, align);
	    }
	}
      else
	/* ??? This isn't fully correct, we can't set the alignment from the
	   type in all cases.  */
	align = MAX (align, TYPE_ALIGN (type));
    }
  else if (TREE_CODE (t) == TARGET_MEM_REF)
    /* ??? This isn't fully correct, we can't set the alignment from the
       type in all cases.  */
    align = MAX (align, TYPE_ALIGN (type));

  /* If the size is known, we can set that.  */
  if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
    size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));

  /* If T is not a type, we may be able to deduce some more information about
     the expression.  */
  if (! TYPE_P (t))
    {
      tree base;
      bool align_computed = false;

      if (TREE_THIS_VOLATILE (t))
	MEM_VOLATILE_P (ref) = 1;

      /* Now remove any conversions: they don't change what the underlying
	 object is.  Likewise for SAVE_EXPR.  */
      while (CONVERT_EXPR_P (t)
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || TREE_CODE (t) == SAVE_EXPR)
	t = TREE_OPERAND (t, 0);

      /* Note whether this expression can trap.  */
      MEM_NOTRAP_P (ref) = !tree_could_trap_p (t);

      base = get_base_address (t);
      if (base && DECL_P (base)
	  && TREE_READONLY (base)
	  && (TREE_STATIC (base) || DECL_EXTERNAL (base))
	  && !TREE_THIS_VOLATILE (base))
	MEM_READONLY_P (ref) = 1;

      /* If this expression uses its parent's alias set, mark it such
	 that we won't change it.  */
      if (component_uses_parent_alias_set (t))
	MEM_KEEP_ALIAS_SET_P (ref) = 1;

      /* If this is a decl, set the attributes of the MEM from it.  */
      if (DECL_P (t))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  size = (DECL_SIZE_UNIT (t)
		  && host_integerp (DECL_SIZE_UNIT (t), 1)
		  ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
	  align = DECL_ALIGN (t);
	  align_computed = true;
	}

      /* If this is a constant, we know the alignment.  */
      else if (CONSTANT_CLASS_P (t))
	{
	  align = TYPE_ALIGN (type);
#ifdef CONSTANT_ALIGNMENT
	  align = CONSTANT_ALIGNMENT (t, align);
#endif
	  align_computed = true;
	}

      /* If this is a field reference and not a bit-field, record it.  */
      /* ??? There is some information that can be gleaned from bit-fields,
	 such as the word offset in the structure that might be modified.
	 But skip it for now.  */
      else if (TREE_CODE (t) == COMPONENT_REF
	       && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	  /* ??? Any reason the field size would be different than
	     the size we got from the type?  */
	}

      /* If this is an array reference, look for an outer field reference.  */
      else if (TREE_CODE (t) == ARRAY_REF)
	{
	  tree off_tree = size_zero_node;
	  /* We can't modify t, because we use it at the end of the
	     function.  */
	  tree t2 = t;

	  do
	    {
	      tree index = TREE_OPERAND (t2, 1);
	      tree low_bound = array_ref_low_bound (t2);
	      tree unit_size = array_ref_element_size (t2);

	      /* We assume all arrays have sizes that are a multiple of a byte.
		 First subtract the lower bound, if any, in the type of the
		 index, then convert to sizetype and multiply by the size of
		 the array element.  */
	      if (! integer_zerop (low_bound))
		index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
				     index, low_bound);

	      off_tree = size_binop (PLUS_EXPR,
				     size_binop (MULT_EXPR,
						 fold_convert (sizetype,
							       index),
						 unit_size),
				     off_tree);
	      t2 = TREE_OPERAND (t2, 0);
	    }
	  while (TREE_CODE (t2) == ARRAY_REF);

	  if (DECL_P (t2))
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
		  HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
		  align = DECL_ALIGN (t2);
		  if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
		    align = aoff;
		  align_computed = true;
		  offset = GEN_INT (ioff);
		  apply_bitpos = bitpos;
		}
	    }
	  else if (TREE_CODE (t2) == COMPONENT_REF)
	    {
	      expr = t2;
	      offset = NULL;
	      if (host_integerp (off_tree, 1))
		{
		  offset = GEN_INT (tree_low_cst (off_tree, 1));
		  apply_bitpos = bitpos;
		}
	      /* ??? Any reason the field size would be different than
		 the size we got from the type?  */
	    }
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF)
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	}

      /* If this is an indirect reference, record it.  */
      else if (TREE_CODE (t) == MEM_REF
	       || TREE_CODE (t) == TARGET_MEM_REF)
	{
	  expr = t;
	  offset = const0_rtx;
	  apply_bitpos = bitpos;
	}

      if (!align_computed && !INDIRECT_REF_P (t))
	{
	  unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
	  align = MAX (align, obj_align);
	}
    }

  /* If we modified OFFSET based on T, then subtract the outstanding
     bit position offset.  Similarly, increase the size of the accessed
     object to contain the negative offset.  */
  if (apply_bitpos)
    {
      offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
      if (size)
	size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
    }

  /* Now set the attributes we computed above.  */
  MEM_ATTRS (ref)
    = get_mem_attrs (alias, expr, offset, size, align,
		     TYPE_ADDR_SPACE (type), GET_MODE (ref));

  /* If this is already known to be a scalar or aggregate, we are done.  */
  if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
    return;

  /* If it is a reference into an aggregate, this is part of an aggregate.
     Otherwise we don't know.  */
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
	   || TREE_CODE (t) == ARRAY_RANGE_REF
	   || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}

void
set_mem_attributes (rtx ref, tree t, int objectp)
{
  set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
}
1847 /* Set the alias set of MEM to SET. */
1849 void
1850 set_mem_alias_set (rtx mem, alias_set_type set)
1852 /* If the new and old alias sets don't conflict, something is wrong. */
1853 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1855 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1856 MEM_SIZE (mem), MEM_ALIGN (mem),
1857 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1860 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1862 void
1863 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1865 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1866 MEM_OFFSET (mem), MEM_SIZE (mem),
1867 MEM_ALIGN (mem), addrspace, GET_MODE (mem));
1870 /* Set the alignment of MEM to ALIGN bits. */
1872 void
1873 set_mem_align (rtx mem, unsigned int align)
1875 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1876 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1877 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1880 /* Set the expr for MEM to EXPR. */
1882 void
1883 set_mem_expr (rtx mem, tree expr)
1885 MEM_ATTRS (mem)
1886 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1887 MEM_SIZE (mem), MEM_ALIGN (mem),
1888 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1891 /* Set the offset of MEM to OFFSET. */
1893 void
1894 set_mem_offset (rtx mem, rtx offset)
1896 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1897 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1898 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1901 /* Set the size of MEM to SIZE. */
1903 void
1904 set_mem_size (rtx mem, rtx size)
1906 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1907 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1908 MEM_ADDR_SPACE (mem), GET_MODE (mem));
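/* Editor's sketch, not part of emit-rtl.c: a typical way the set_mem_*
   accessors above are chained after building a raw MEM by hand.  ADDR is
   assumed to be an already-valid address rtx in the generic address space.

     rtx mem = gen_rtx_MEM (SImode, addr);
     set_mem_align (mem, GET_MODE_ALIGNMENT (SImode));
     set_mem_size (mem, GEN_INT (GET_MODE_SIZE (SImode)));
     set_mem_alias_set (mem, new_alias_set ());

   Note that set_mem_align takes a bit count, while set_mem_size (in this
   revision) takes an rtx byte count.  */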
1911 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1912 and its address changed to ADDR. (VOIDmode means don't change the mode.
1913 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1914 returned memory location is required to be valid. The memory
1915 attributes are not changed. */
1917 static rtx
1918 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1920 addr_space_t as;
1921 rtx new_rtx;
1923 gcc_assert (MEM_P (memref));
1924 as = MEM_ADDR_SPACE (memref);
1925 if (mode == VOIDmode)
1926 mode = GET_MODE (memref);
1927 if (addr == 0)
1928 addr = XEXP (memref, 0);
1929 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1930 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1931 return memref;
1933 if (validate)
1935 if (reload_in_progress || reload_completed)
1936 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1937 else
1938 addr = memory_address_addr_space (mode, addr, as);
1941 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1942 return memref;
1944 new_rtx = gen_rtx_MEM (mode, addr);
1945 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1946 return new_rtx;
1949 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1950 way we are changing MEMREF, so we only preserve the alias set. */
1953 change_address (rtx memref, enum machine_mode mode, rtx addr)
1955 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1956 enum machine_mode mmode = GET_MODE (new_rtx);
1957 unsigned int align;
1959 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1960 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1962 /* If there are no changes, just return the original memory reference. */
1963 if (new_rtx == memref)
1965 if (MEM_ATTRS (memref) == 0
1966 || (MEM_EXPR (memref) == NULL
1967 && MEM_OFFSET (memref) == NULL
1968 && MEM_SIZE (memref) == size
1969 && MEM_ALIGN (memref) == align))
1970 return new_rtx;
1972 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1973 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1976 MEM_ATTRS (new_rtx)
1977 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
1978 MEM_ADDR_SPACE (memref), mmode);
1980 return new_rtx;
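/* Editor's sketch, not part of emit-rtl.c: change_address is the blunt
   instrument of this family -- it keeps only the alias set.  Assuming
   BLK_MEM is an existing BLKmode MEM, a word-sized view of the same
   location (NULL_RTX meaning "keep the address") might look like:

     rtx word = change_address (blk_mem, word_mode, NULL_RTX);  */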
1983 /* Return a memory reference like MEMREF, but with its mode changed
1984 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1985 nonzero, the memory address is forced to be valid.
1986 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1987 and caller is responsible for adjusting MEMREF base register. */
1990 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1991 int validate, int adjust)
1993 rtx addr = XEXP (memref, 0);
1994 rtx new_rtx;
1995 rtx memoffset = MEM_OFFSET (memref);
1996 rtx size = 0;
1997 unsigned int memalign = MEM_ALIGN (memref);
1998 addr_space_t as = MEM_ADDR_SPACE (memref);
1999 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2000 int pbits;
2002 /* If there are no changes, just return the original memory reference. */
2003 if (mode == GET_MODE (memref) && !offset
2004 && (!validate || memory_address_addr_space_p (mode, addr, as)))
2005 return memref;
2007 /* ??? Prefer to create garbage instead of creating shared rtl.
2008 This may happen even if offset is nonzero -- consider
2009 (plus (plus reg reg) const_int) -- so do this always. */
2010 addr = copy_rtx (addr);
2012 /* Convert a possibly large offset to a signed value within the
2013 range of the target address space. */
2014 pbits = GET_MODE_BITSIZE (address_mode);
2015 if (HOST_BITS_PER_WIDE_INT > pbits)
2017 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2018 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2019 >> shift);
2022 if (adjust)
2024 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2025 object, we can merge it into the LO_SUM. */
2026 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2027 && offset >= 0
2028 && (unsigned HOST_WIDE_INT) offset
2029 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2030 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2031 plus_constant (XEXP (addr, 1), offset));
2032 else
2033 addr = plus_constant (addr, offset);
2036 new_rtx = change_address_1 (memref, mode, addr, validate);
2038 /* If the address is a REG, change_address_1 rightfully returns memref,
2039 but this would destroy memref's MEM_ATTRS. */
2040 if (new_rtx == memref && offset != 0)
2041 new_rtx = copy_rtx (new_rtx);
2043 /* Compute the new values of the memory attributes due to this adjustment.
2044 We add the offsets and update the alignment. */
2045 if (memoffset)
2046 memoffset = GEN_INT (offset + INTVAL (memoffset));
2048 /* Compute the new alignment by taking the MIN of the alignment and the
2049 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2050 is zero. */
2051 if (offset != 0)
2052 memalign
2053 = MIN (memalign,
2054 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2056 /* We can compute the size in a number of ways. */
2057 if (GET_MODE (new_rtx) != BLKmode)
2058 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2059 else if (MEM_SIZE (memref))
2060 size = plus_constant (MEM_SIZE (memref), -offset);
2062 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2063 memoffset, size, memalign, as,
2064 GET_MODE (new_rtx));
2066 /* At some point, we should validate that this offset is within the object,
2067 if all the appropriate values are known. */
2068 return new_rtx;
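/* Editor's note: callers normally reach adjust_address_1 through the
   adjust_address and adjust_address_nv convenience macros (defined in
   expr.h), which pass ADJUST = 1 and differ only in VALIDATE.  A common
   sketch -- splitting a DImode MEM into word halves, assuming DIMEM is a
   valid DImode MEM:

     rtx lo = adjust_address (dimem, SImode, 0);
     rtx hi = adjust_address (dimem, SImode, GET_MODE_SIZE (SImode));  */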
2071 /* Return a memory reference like MEMREF, but with its mode changed
2072 to MODE and its address changed to ADDR, which is assumed to be
2073 MEMREF offset by OFFSET bytes. If VALIDATE is
2074 nonzero, the memory address is forced to be valid. */
2077 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2078 HOST_WIDE_INT offset, int validate)
2080 memref = change_address_1 (memref, VOIDmode, addr, validate);
2081 return adjust_address_1 (memref, mode, offset, validate, 0);
2084 /* Return a memory reference like MEMREF, but whose address is changed by
2085 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2086 known to be in OFFSET (possibly 1). */
2089 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2091 rtx new_rtx, addr = XEXP (memref, 0);
2092 addr_space_t as = MEM_ADDR_SPACE (memref);
2093 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2095 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2097 /* At this point we don't know _why_ the address is invalid. It
2098 could have secondary memory references, multiplies or anything.
2100 However, if we did go and rearrange things, we can wind up not
2101 being able to recognize the magic around pic_offset_table_rtx.
2102 This stuff is fragile, and is yet another example of why it is
2103 bad to expose PIC machinery too early. */
2104 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
2105 && GET_CODE (addr) == PLUS
2106 && XEXP (addr, 0) == pic_offset_table_rtx)
2108 addr = force_reg (GET_MODE (addr), addr);
2109 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2112 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2113 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2115 /* If there are no changes, just return the original memory reference. */
2116 if (new_rtx == memref)
2117 return new_rtx;
2119 /* Update the alignment to reflect the offset. Reset the offset, which
2120 we don't know. */
2121 MEM_ATTRS (new_rtx)
2122 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2123 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2124 as, GET_MODE (new_rtx));
2125 return new_rtx;
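/* Editor's sketch, not part of emit-rtl.c: offset_address serves variable
   offsets.  Assuming IDX is a Pmode register holding a byte offset known
   to be a multiple of 4, indexing into ARRAY_MEM might look like:

     rtx elt = offset_address (array_mem, idx, 4);

   The 4 is POW2, used only to derive a conservative alignment for the
   result; the MEM_OFFSET itself is reset, since it is no longer known.  */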
2128 /* Return a memory reference like MEMREF, but with its address changed to
2129 ADDR. The caller is asserting that the actual piece of memory pointed
2130 to is the same, just the form of the address is being changed, such as
2131 by putting something into a register. */
2134 replace_equiv_address (rtx memref, rtx addr)
2136 /* change_address_1 copies the memory attribute structure without change
2137 and that's exactly what we want here. */
2138 update_temp_slot_address (XEXP (memref, 0), addr);
2139 return change_address_1 (memref, VOIDmode, addr, 1);
2142 /* Likewise, but the reference is not required to be valid. */
2145 replace_equiv_address_nv (rtx memref, rtx addr)
2147 return change_address_1 (memref, VOIDmode, addr, 0);
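/* Editor's sketch, not part of emit-rtl.c: a common use of
   replace_equiv_address is to legitimize an address by forcing it into a
   register when it is not directly usable, e.g. in a backend expander:

     if (! memory_address_p (GET_MODE (mem), XEXP (mem, 0)))
       mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));  */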
2150 /* Return a memory reference like MEMREF, but with its mode widened to
2151 MODE and offset by OFFSET. This would be used by targets that e.g.
2152 cannot issue QImode memory operations and have to use SImode memory
2153 operations plus masking logic. */
2156 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2158 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2159 tree expr = MEM_EXPR (new_rtx);
2160 rtx memoffset = MEM_OFFSET (new_rtx);
2161 unsigned int size = GET_MODE_SIZE (mode);
2163 /* If there are no changes, just return the original memory reference. */
2164 if (new_rtx == memref)
2165 return new_rtx;
2167 /* If we don't know what offset we were at within the expression, then
2168 we can't know if we've overstepped the bounds. */
2169 if (! memoffset)
2170 expr = NULL_TREE;
2172 while (expr)
2174 if (TREE_CODE (expr) == COMPONENT_REF)
2176 tree field = TREE_OPERAND (expr, 1);
2177 tree offset = component_ref_field_offset (expr);
2179 if (! DECL_SIZE_UNIT (field))
2181 expr = NULL_TREE;
2182 break;
2185 /* Is the field at least as large as the access? If so, ok,
2186 otherwise strip back to the containing structure. */
2187 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2188 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2189 && INTVAL (memoffset) >= 0)
2190 break;
2192 if (! host_integerp (offset, 1))
2194 expr = NULL_TREE;
2195 break;
2198 expr = TREE_OPERAND (expr, 0);
2199 memoffset
2200 = (GEN_INT (INTVAL (memoffset)
2201 + tree_low_cst (offset, 1)
2202 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2203 / BITS_PER_UNIT)));
2205 /* Similarly for the decl. */
2206 else if (DECL_P (expr)
2207 && DECL_SIZE_UNIT (expr)
2208 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2209 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2210 && (! memoffset || INTVAL (memoffset) >= 0))
2211 break;
2212 else
2214 /* The widened memory access overflows the expression, which means
2215 that it could alias another expression. Zap it. */
2216 expr = NULL_TREE;
2217 break;
2221 if (! expr)
2222 memoffset = NULL_RTX;
2224 /* The widened memory may alias other stuff, so zap the alias set. */
2225 /* ??? Maybe use get_alias_set on any remaining expression. */
2227 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2228 MEM_ALIGN (new_rtx),
2229 MEM_ADDR_SPACE (new_rtx), mode);
2231 return new_rtx;
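/* Editor's sketch, not part of emit-rtl.c: a target that cannot do byte
   loads could use widen_memory_access to read the containing word and then
   mask out the byte.  Assuming BYTE_MEM is a QImode MEM whose containing
   SImode word is known to be accessible:

     rtx wide = widen_memory_access (byte_mem, SImode, 0);

   The caller is responsible for the subsequent shift/mask logic; note that
   the alias set of the result is zapped, as the code above explains.  */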
2234 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2235 static GTY(()) tree spill_slot_decl;
2237 tree
2238 get_spill_slot_decl (bool force_build_p)
2240 tree d = spill_slot_decl;
2241 rtx rd;
2243 if (d || !force_build_p)
2244 return d;
2246 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2247 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2248 DECL_ARTIFICIAL (d) = 1;
2249 DECL_IGNORED_P (d) = 1;
2250 TREE_USED (d) = 1;
2251 spill_slot_decl = d;
2253 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2254 MEM_NOTRAP_P (rd) = 1;
2255 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2256 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
2257 SET_DECL_RTL (d, rd);
2259 return d;
2262 /* Given MEM, a result from assign_stack_local, fill in the memory
2263 attributes as appropriate for a register allocator spill slot.
2264 These slots are not aliasable by other memory. We arrange for
2265 them all to use a single MEM_EXPR, so that the aliasing code can
2266 work properly in the case of shared spill slots. */
2268 void
2269 set_mem_attrs_for_spill (rtx mem)
2271 alias_set_type alias;
2272 rtx addr, offset;
2273 tree expr;
2275 expr = get_spill_slot_decl (true);
2276 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2278 /* We expect the incoming memory to be of the form:
2279 (mem:MODE (plus (reg sfp) (const_int offset)))
2280 with perhaps the plus missing for offset = 0. */
2281 addr = XEXP (mem, 0);
2282 offset = const0_rtx;
2283 if (GET_CODE (addr) == PLUS
2284 && CONST_INT_P (XEXP (addr, 1)))
2285 offset = XEXP (addr, 1);
2287 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2288 MEM_SIZE (mem), MEM_ALIGN (mem),
2289 ADDR_SPACE_GENERIC, GET_MODE (mem));
2290 MEM_NOTRAP_P (mem) = 1;
2293 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2296 gen_label_rtx (void)
2298 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2299 NULL, label_num++, NULL);
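/* Editor's sketch, not part of emit-rtl.c: the usual pairing of
   gen_label_rtx with emit_jump and emit_label when expanding a forward
   branch:

     rtx label = gen_label_rtx ();
     emit_jump (label);
     ...emit the code to be skipped...
     emit_label (label);

   The CODE_LABEL is not placed in the insn chain until emit_label.  */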
2302 /* For procedure integration. */
2304 /* Install new pointers to the first and last insns in the chain.
2305 Also, set cur_insn_uid to one higher than the last in use.
2306 Used for an inline-procedure after copying the insn chain. */
2308 void
2309 set_new_first_and_last_insn (rtx first, rtx last)
2311 rtx insn;
2313 set_first_insn (first);
2314 set_last_insn (last);
2315 cur_insn_uid = 0;
2317 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2319 int debug_count = 0;
2321 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2322 cur_debug_insn_uid = 0;
2324 for (insn = first; insn; insn = NEXT_INSN (insn))
2325 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2326 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2327 else
2329 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2330 if (DEBUG_INSN_P (insn))
2331 debug_count++;
2334 if (debug_count)
2335 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2336 else
2337 cur_debug_insn_uid++;
2339 else
2340 for (insn = first; insn; insn = NEXT_INSN (insn))
2341 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2343 cur_insn_uid++;
2346 /* Go through all the RTL insn bodies and copy any invalid shared
2347 structure. This routine should only be called once. */
2349 static void
2350 unshare_all_rtl_1 (rtx insn)
2352 /* Unshare just about everything else. */
2353 unshare_all_rtl_in_chain (insn);
2355 /* Make sure the addresses of stack slots found outside the insn chain
2356 (such as in DECL_RTL of a variable) are not shared
2357 with the insn chain.
2359 This special care is necessary when the stack slot MEM does not
2360 actually appear in the insn chain. If it does appear, its address
2361 is unshared from all else at that point. */
2362 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2365 /* Go through all the RTL insn bodies and copy any invalid shared
2366 structure, again. This is a fairly expensive thing to do so it
2367 should be done sparingly. */
2369 void
2370 unshare_all_rtl_again (rtx insn)
2372 rtx p;
2373 tree decl;
2375 for (p = insn; p; p = NEXT_INSN (p))
2376 if (INSN_P (p))
2378 reset_used_flags (PATTERN (p));
2379 reset_used_flags (REG_NOTES (p));
2382 /* Make sure that virtual stack slots are not shared. */
2383 set_used_decls (DECL_INITIAL (cfun->decl));
2385 /* Make sure that virtual parameters are not shared. */
2386 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2387 set_used_flags (DECL_RTL (decl));
2389 reset_used_flags (stack_slot_list);
2391 unshare_all_rtl_1 (insn);
2394 unsigned int
2395 unshare_all_rtl (void)
2397 unshare_all_rtl_1 (get_insns ());
2398 return 0;
2401 struct rtl_opt_pass pass_unshare_all_rtl =
2404 RTL_PASS,
2405 "unshare", /* name */
2406 NULL, /* gate */
2407 unshare_all_rtl, /* execute */
2408 NULL, /* sub */
2409 NULL, /* next */
2410 0, /* static_pass_number */
2411 TV_NONE, /* tv_id */
2412 0, /* properties_required */
2413 0, /* properties_provided */
2414 0, /* properties_destroyed */
2415 0, /* todo_flags_start */
2416 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2421 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2422 Recursively does the same for subexpressions. */
2424 static void
2425 verify_rtx_sharing (rtx orig, rtx insn)
2427 rtx x = orig;
2428 int i;
2429 enum rtx_code code;
2430 const char *format_ptr;
2432 if (x == 0)
2433 return;
2435 code = GET_CODE (x);
2437 /* These types may be freely shared. */
2439 switch (code)
2441 case REG:
2442 case DEBUG_EXPR:
2443 case VALUE:
2444 case CONST_INT:
2445 case CONST_DOUBLE:
2446 case CONST_FIXED:
2447 case CONST_VECTOR:
2448 case SYMBOL_REF:
2449 case LABEL_REF:
2450 case CODE_LABEL:
2451 case PC:
2452 case CC0:
2453 case RETURN:
2454 case SCRATCH:
2455 return;
2456 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
2457 case CLOBBER:
2458 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2459 return;
2460 break;
2462 case CONST:
2463 if (shared_const_p (orig))
2464 return;
2465 break;
2467 case MEM:
2468 /* A MEM is allowed to be shared if its address is constant. */
2469 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2470 || reload_completed || reload_in_progress)
2471 return;
2473 break;
2475 default:
2476 break;
2479 /* This rtx may not be shared. If it has already been seen,
2480 replace it with a copy of itself. */
2481 #ifdef ENABLE_CHECKING
2482 if (RTX_FLAG (x, used))
2484 error ("invalid rtl sharing found in the insn");
2485 debug_rtx (insn);
2486 error ("shared rtx");
2487 debug_rtx (x);
2488 internal_error ("internal consistency failure");
2490 #endif
2491 gcc_assert (!RTX_FLAG (x, used));
2493 RTX_FLAG (x, used) = 1;
2495 /* Now scan the subexpressions recursively. */
2497 format_ptr = GET_RTX_FORMAT (code);
2499 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2501 switch (*format_ptr++)
2503 case 'e':
2504 verify_rtx_sharing (XEXP (x, i), insn);
2505 break;
2507 case 'E':
2508 if (XVEC (x, i) != NULL)
2510 int j;
2511 int len = XVECLEN (x, i);
2513 for (j = 0; j < len; j++)
2515 /* We allow sharing of ASM_OPERANDS inside a single
2516 instruction. */
2517 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2518 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2519 == ASM_OPERANDS))
2520 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2521 else
2522 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2525 break;
2528 return;
2531 /* Go through all the RTL insn bodies and check that there is no unexpected
2532 sharing in between the subexpressions. */
2534 DEBUG_FUNCTION void
2535 verify_rtl_sharing (void)
2537 rtx p;
2539 timevar_push (TV_VERIFY_RTL_SHARING);
2541 for (p = get_insns (); p; p = NEXT_INSN (p))
2542 if (INSN_P (p))
2544 reset_used_flags (PATTERN (p));
2545 reset_used_flags (REG_NOTES (p));
2546 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2548 int i;
2549 rtx q, sequence = PATTERN (p);
2551 for (i = 0; i < XVECLEN (sequence, 0); i++)
2553 q = XVECEXP (sequence, 0, i);
2554 gcc_assert (INSN_P (q));
2555 reset_used_flags (PATTERN (q));
2556 reset_used_flags (REG_NOTES (q));
2561 for (p = get_insns (); p; p = NEXT_INSN (p))
2562 if (INSN_P (p))
2564 verify_rtx_sharing (PATTERN (p), p);
2565 verify_rtx_sharing (REG_NOTES (p), p);
2568 timevar_pop (TV_VERIFY_RTL_SHARING);
2571 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2572 Assumes the mark bits are cleared at entry. */
2574 void
2575 unshare_all_rtl_in_chain (rtx insn)
2577 for (; insn; insn = NEXT_INSN (insn))
2578 if (INSN_P (insn))
2580 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2581 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2585 /* Go through all virtual stack slots of a function and mark them as
2586 shared. We never replace the DECL_RTLs themselves with a copy,
2587 but expressions mentioned in a DECL_RTL cannot be shared with
2588 expressions in the instruction stream.
2590 Note that reload may convert pseudo registers into memories in-place.
2591 Pseudo registers are always shared, but MEMs never are. Thus if we
2592 reset the used flags on MEMs in the instruction stream, we must set
2593 them again on MEMs that appear in DECL_RTLs. */
2595 static void
2596 set_used_decls (tree blk)
2598 tree t;
2600 /* Mark decls. */
2601 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2602 if (DECL_RTL_SET_P (t))
2603 set_used_flags (DECL_RTL (t));
2605 /* Now process sub-blocks. */
2606 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2607 set_used_decls (t);
2610 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2611 Recursively does the same for subexpressions. Uses
2612 copy_rtx_if_shared_1 to reduce stack space. */
2615 copy_rtx_if_shared (rtx orig)
2617 copy_rtx_if_shared_1 (&orig);
2618 return orig;
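/* Editor's sketch, not part of emit-rtl.c: the canonical unsharing idiom,
   as used by unshare_all_rtl_again above -- clear the used bits, then let
   copy_rtx_if_shared copy anything it encounters a second time:

     reset_used_flags (PATTERN (insn));
     PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));  */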
2621 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2622 use. Recursively does the same for subexpressions. */
2624 static void
2625 copy_rtx_if_shared_1 (rtx *orig1)
2627 rtx x;
2628 int i;
2629 enum rtx_code code;
2630 rtx *last_ptr;
2631 const char *format_ptr;
2632 int copied = 0;
2633 int length;
2635 /* Repeat is used to turn tail-recursion into iteration. */
2636 repeat:
2637 x = *orig1;
2639 if (x == 0)
2640 return;
2642 code = GET_CODE (x);
2644 /* These types may be freely shared. */
2646 switch (code)
2648 case REG:
2649 case DEBUG_EXPR:
2650 case VALUE:
2651 case CONST_INT:
2652 case CONST_DOUBLE:
2653 case CONST_FIXED:
2654 case CONST_VECTOR:
2655 case SYMBOL_REF:
2656 case LABEL_REF:
2657 case CODE_LABEL:
2658 case PC:
2659 case CC0:
2660 case SCRATCH:
2661 /* SCRATCH must be shared because each SCRATCH represents a distinct value. */
2662 return;
2663 case CLOBBER:
2664 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2665 return;
2666 break;
2668 case CONST:
2669 if (shared_const_p (x))
2670 return;
2671 break;
2673 case DEBUG_INSN:
2674 case INSN:
2675 case JUMP_INSN:
2676 case CALL_INSN:
2677 case NOTE:
2678 case BARRIER:
2679 /* The chain of insns is not being copied. */
2680 return;
2682 default:
2683 break;
2686 /* This rtx may not be shared. If it has already been seen,
2687 replace it with a copy of itself. */
2689 if (RTX_FLAG (x, used))
2691 x = shallow_copy_rtx (x);
2692 copied = 1;
2694 RTX_FLAG (x, used) = 1;
2696 /* Now scan the subexpressions recursively.
2697 We can store any replaced subexpressions directly into X
2698 since we know X is not shared! Any vectors in X
2699 must be copied if X was copied. */
2701 format_ptr = GET_RTX_FORMAT (code);
2702 length = GET_RTX_LENGTH (code);
2703 last_ptr = NULL;
2705 for (i = 0; i < length; i++)
2707 switch (*format_ptr++)
2709 case 'e':
2710 if (last_ptr)
2711 copy_rtx_if_shared_1 (last_ptr);
2712 last_ptr = &XEXP (x, i);
2713 break;
2715 case 'E':
2716 if (XVEC (x, i) != NULL)
2718 int j;
2719 int len = XVECLEN (x, i);
2721 /* Copy the vector iff I copied the rtx and the length
2722 is nonzero. */
2723 if (copied && len > 0)
2724 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2726 /* Call recursively on all inside the vector. */
2727 for (j = 0; j < len; j++)
2729 if (last_ptr)
2730 copy_rtx_if_shared_1 (last_ptr);
2731 last_ptr = &XVECEXP (x, i, j);
2734 break;
2737 *orig1 = x;
2738 if (last_ptr)
2740 orig1 = last_ptr;
2741 goto repeat;
2743 return;
2746 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2748 static void
2749 mark_used_flags (rtx x, int flag)
2751 int i, j;
2752 enum rtx_code code;
2753 const char *format_ptr;
2754 int length;
2756 /* Repeat is used to turn tail-recursion into iteration. */
2757 repeat:
2758 if (x == 0)
2759 return;
2761 code = GET_CODE (x);
2763 /* These types may be freely shared so we needn't do any resetting
2764 for them. */
2766 switch (code)
2768 case REG:
2769 case DEBUG_EXPR:
2770 case VALUE:
2771 case CONST_INT:
2772 case CONST_DOUBLE:
2773 case CONST_FIXED:
2774 case CONST_VECTOR:
2775 case SYMBOL_REF:
2776 case CODE_LABEL:
2777 case PC:
2778 case CC0:
2779 return;
2781 case DEBUG_INSN:
2782 case INSN:
2783 case JUMP_INSN:
2784 case CALL_INSN:
2785 case NOTE:
2786 case LABEL_REF:
2787 case BARRIER:
2788 /* The chain of insns is not being copied. */
2789 return;
2791 default:
2792 break;
2795 RTX_FLAG (x, used) = flag;
2797 format_ptr = GET_RTX_FORMAT (code);
2798 length = GET_RTX_LENGTH (code);
2800 for (i = 0; i < length; i++)
2802 switch (*format_ptr++)
2804 case 'e':
2805 if (i == length-1)
2807 x = XEXP (x, i);
2808 goto repeat;
2810 mark_used_flags (XEXP (x, i), flag);
2811 break;
2813 case 'E':
2814 for (j = 0; j < XVECLEN (x, i); j++)
2815 mark_used_flags (XVECEXP (x, i, j), flag);
2816 break;
2821 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2822 to look for shared sub-parts. */
2824 void
2825 reset_used_flags (rtx x)
2827 mark_used_flags (x, 0);
2830 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2831 to look for shared sub-parts. */
2833 void
2834 set_used_flags (rtx x)
2836 mark_used_flags (x, 1);
2839 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2840 Return X or the rtx for the pseudo reg the value of X was copied into.
2841 OTHER must be valid as a SET_DEST. */
2844 make_safe_from (rtx x, rtx other)
2846 while (1)
2847 switch (GET_CODE (other))
2849 case SUBREG:
2850 other = SUBREG_REG (other);
2851 break;
2852 case STRICT_LOW_PART:
2853 case SIGN_EXTEND:
2854 case ZERO_EXTEND:
2855 other = XEXP (other, 0);
2856 break;
2857 default:
2858 goto done;
2860 done:
2861 if ((MEM_P (other)
2862 && ! CONSTANT_P (x)
2863 && !REG_P (x)
2864 && GET_CODE (x) != SUBREG)
2865 || (REG_P (other)
2866 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2867 || reg_mentioned_p (other, x))))
2869 rtx temp = gen_reg_rtx (GET_MODE (x));
2870 emit_move_insn (temp, x);
2871 return temp;
2873 return x;
2876 /* Emission of insns (adding them to the doubly-linked list). */
2878 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2881 get_last_insn_anywhere (void)
2883 struct sequence_stack *stack;
2884 if (get_last_insn ())
2885 return get_last_insn ();
2886 for (stack = seq_stack; stack; stack = stack->next)
2887 if (stack->last != 0)
2888 return stack->last;
2889 return 0;
2892 /* Return the first nonnote insn emitted in current sequence or current
2893 function. This routine looks inside SEQUENCEs. */
2896 get_first_nonnote_insn (void)
2898 rtx insn = get_insns ();
2900 if (insn)
2902 if (NOTE_P (insn))
2903 for (insn = next_insn (insn);
2904 insn && NOTE_P (insn);
2905 insn = next_insn (insn))
2906 continue;
2907 else
2909 if (NONJUMP_INSN_P (insn)
2910 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2911 insn = XVECEXP (PATTERN (insn), 0, 0);
2915 return insn;
2918 /* Return the last nonnote insn emitted in current sequence or current
2919 function. This routine looks inside SEQUENCEs. */
2922 get_last_nonnote_insn (void)
2924 rtx insn = get_last_insn ();
2926 if (insn)
2928 if (NOTE_P (insn))
2929 for (insn = previous_insn (insn);
2930 insn && NOTE_P (insn);
2931 insn = previous_insn (insn))
2932 continue;
2933 else
2935 if (NONJUMP_INSN_P (insn)
2936 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2937 insn = XVECEXP (PATTERN (insn), 0,
2938 XVECLEN (PATTERN (insn), 0) - 1);
2942 return insn;
2945 /* Return the number of actual (non-debug) insns emitted in this
2946 function. */
2949 get_max_insn_count (void)
2951 int n = cur_insn_uid;
2953 /* The table size must be stable across -g, to avoid codegen
2954 differences due to debug insns, and not be affected by
2955 -fmin-insn-uid, to avoid excessive table size and to simplify
2956 debugging of -fcompare-debug failures. */
2957 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
2958 n -= cur_debug_insn_uid;
2959 else
2960 n -= MIN_NONDEBUG_INSN_UID;
2962 return n;
2966 /* Return the next insn. If it is a SEQUENCE, return the first insn
2967 of the sequence. */
2970 next_insn (rtx insn)
2972 if (insn)
2974 insn = NEXT_INSN (insn);
2975 if (insn && NONJUMP_INSN_P (insn)
2976 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2977 insn = XVECEXP (PATTERN (insn), 0, 0);
2980 return insn;
2983 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2984 of the sequence. */
2987 previous_insn (rtx insn)
2989 if (insn)
2991 insn = PREV_INSN (insn);
2992 if (insn && NONJUMP_INSN_P (insn)
2993 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2994 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2997 return insn;
3000 /* Return the next insn after INSN that is not a NOTE. This routine does not
3001 look inside SEQUENCEs. */
3004 next_nonnote_insn (rtx insn)
3006 while (insn)
3008 insn = NEXT_INSN (insn);
3009 if (insn == 0 || !NOTE_P (insn))
3010 break;
3013 return insn;
3016 /* Return the next insn after INSN that is not a NOTE, but stop the
3017 search before we enter another basic block. This routine does not
3018 look inside SEQUENCEs. */
3021 next_nonnote_insn_bb (rtx insn)
3023 while (insn)
3025 insn = NEXT_INSN (insn);
3026 if (insn == 0 || !NOTE_P (insn))
3027 break;
3028 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3029 return NULL_RTX;
3032 return insn;
3035 /* Return the previous insn before INSN that is not a NOTE. This routine does
3036 not look inside SEQUENCEs. */
3039 prev_nonnote_insn (rtx insn)
3041 while (insn)
3043 insn = PREV_INSN (insn);
3044 if (insn == 0 || !NOTE_P (insn))
3045 break;
3048 return insn;
3051 /* Return the previous insn before INSN that is not a NOTE, but stop
3052 the search before we enter another basic block. This routine does
3053 not look inside SEQUENCEs. */
3056 prev_nonnote_insn_bb (rtx insn)
3058 while (insn)
3060 insn = PREV_INSN (insn);
3061 if (insn == 0 || !NOTE_P (insn))
3062 break;
3063 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3064 return NULL_RTX;
3067 return insn;
3070 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3071 routine does not look inside SEQUENCEs. */
3074 next_nondebug_insn (rtx insn)
3076 while (insn)
3078 insn = NEXT_INSN (insn);
3079 if (insn == 0 || !DEBUG_INSN_P (insn))
3080 break;
3083 return insn;
3086 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3087 This routine does not look inside SEQUENCEs. */
3090 prev_nondebug_insn (rtx insn)
3092 while (insn)
3094 insn = PREV_INSN (insn);
3095 if (insn == 0 || !DEBUG_INSN_P (insn))
3096 break;
3099 return insn;
3102 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3103 This routine does not look inside SEQUENCEs. */
3106 next_nonnote_nondebug_insn (rtx insn)
3108 while (insn)
3110 insn = NEXT_INSN (insn);
3111 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3112 break;
3115 return insn;
3118 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3119 This routine does not look inside SEQUENCEs. */
3122 prev_nonnote_nondebug_insn (rtx insn)
3124 while (insn)
3126 insn = PREV_INSN (insn);
3127 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3128 break;
3131 return insn;
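/* Editor's sketch, not part of emit-rtl.c: these walkers are typically
   used to scan the whole chain while ignoring notes and debug insns.
   PROCESS_INSN is a hypothetical callback:

     rtx insn;
     for (insn = get_insns (); insn; insn = next_nonnote_nondebug_insn (insn))
       if (INSN_P (insn))
         process_insn (insn);  */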
3134 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3135 or 0, if there is none. This routine does not look inside
3136 SEQUENCEs. */
3139 next_real_insn (rtx insn)
3141 while (insn)
3143 insn = NEXT_INSN (insn);
3144 if (insn == 0 || INSN_P (insn))
3145 break;
3148 return insn;
3151 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3152 or 0, if there is none. This routine does not look inside
3153 SEQUENCEs. */
3156 prev_real_insn (rtx insn)
3158 while (insn)
3160 insn = PREV_INSN (insn);
3161 if (insn == 0 || INSN_P (insn))
3162 break;
3165 return insn;
3168 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3169 This routine does not look inside SEQUENCEs. */
3172 last_call_insn (void)
3174 rtx insn;
3176 for (insn = get_last_insn ();
3177 insn && !CALL_P (insn);
3178 insn = PREV_INSN (insn))
3181 return insn;
3184 /* Find the next insn after INSN that really does something. This routine
3185 does not look inside SEQUENCEs. After reload this also skips over
3186 standalone USE and CLOBBER insns. */
3189 active_insn_p (const_rtx insn)
3191 return (CALL_P (insn) || JUMP_P (insn)
3192 || (NONJUMP_INSN_P (insn)
3193 && (! reload_completed
3194 || (GET_CODE (PATTERN (insn)) != USE
3195 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3199 next_active_insn (rtx insn)
3201 while (insn)
3203 insn = NEXT_INSN (insn);
3204 if (insn == 0 || active_insn_p (insn))
3205 break;
3208 return insn;
3211 /* Find the last insn before INSN that really does something. This routine
3212 does not look inside SEQUENCEs. After reload this also skips over
3213 standalone USE and CLOBBER insns. */
3216 prev_active_insn (rtx insn)
3218 while (insn)
3220 insn = PREV_INSN (insn);
3221 if (insn == 0 || active_insn_p (insn))
3222 break;
3225 return insn;
3228 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3231 next_label (rtx insn)
3233 while (insn)
3235 insn = NEXT_INSN (insn);
3236 if (insn == 0 || LABEL_P (insn))
3237 break;
3240 return insn;
3243 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3246 prev_label (rtx insn)
3248 while (insn)
3250 insn = PREV_INSN (insn);
3251 if (insn == 0 || LABEL_P (insn))
3252 break;
3255 return insn;
3258 /* Return the last label to mark the same position as LABEL. Return null
3259 if LABEL itself is null. */
3262 skip_consecutive_labels (rtx label)
3264 rtx insn;
3266 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3267 if (LABEL_P (insn))
3268 label = insn;
3270 return label;
3273 #ifdef HAVE_cc0
3274 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3275 and REG_CC_USER notes so we can find it. */
3277 void
3278 link_cc0_insns (rtx insn)
3280 rtx user = next_nonnote_insn (insn);
3282 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3283 user = XVECEXP (PATTERN (user), 0, 0);
3285 add_reg_note (user, REG_CC_SETTER, insn);
3286 add_reg_note (insn, REG_CC_USER, user);
3289 /* Return the next insn that uses CC0 after INSN, which is assumed to
3290 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3291 applied to the result of this function should yield INSN).
3293 Normally, this is simply the next insn. However, if a REG_CC_USER note
3294 is present, it contains the insn that uses CC0.
3296 Return 0 if we can't find the insn. */
3299 next_cc0_user (rtx insn)
3301 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3303 if (note)
3304 return XEXP (note, 0);
3306 insn = next_nonnote_insn (insn);
3307 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3308 insn = XVECEXP (PATTERN (insn), 0, 0);
3310 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3311 return insn;
3313 return 0;
3316 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3317 note, it is the previous insn. */
3320 prev_cc0_setter (rtx insn)
3322 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3324 if (note)
3325 return XEXP (note, 0);
3327 insn = prev_nonnote_insn (insn);
3328 gcc_assert (sets_cc0_p (PATTERN (insn)));
3330 return insn;
3332 #endif
3334 #ifdef AUTO_INC_DEC
3335 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3337 static int
3338 find_auto_inc (rtx *xp, void *data)
3340 rtx x = *xp;
3341 rtx reg = (rtx) data;
3343 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3344 return 0;
3346 switch (GET_CODE (x))
3348 case PRE_DEC:
3349 case PRE_INC:
3350 case POST_DEC:
3351 case POST_INC:
3352 case PRE_MODIFY:
3353 case POST_MODIFY:
3354 if (rtx_equal_p (reg, XEXP (x, 0)))
3355 return 1;
3356 break;
3358 default:
3359 gcc_unreachable ();
3361 return -1;
3363 #endif
3365 /* Increment the label uses for all labels present in rtx. */
3367 static void
3368 mark_label_nuses (rtx x)
3370 enum rtx_code code;
3371 int i, j;
3372 const char *fmt;
3374 code = GET_CODE (x);
3375 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3376 LABEL_NUSES (XEXP (x, 0))++;
3378 fmt = GET_RTX_FORMAT (code);
3379 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3381 if (fmt[i] == 'e')
3382 mark_label_nuses (XEXP (x, i));
3383 else if (fmt[i] == 'E')
3384 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3385 mark_label_nuses (XVECEXP (x, i, j));
3390 /* Try splitting insns that can be split for better scheduling.
3391 PAT is the pattern which might split.
3392 TRIAL is the insn providing PAT.
3393 LAST is nonzero if we should return the last insn of the sequence produced.
3395 If this routine succeeds in splitting, it returns the first or last
3396 replacement insn depending on the value of LAST. Otherwise, it
3397 returns TRIAL. If the insn to be returned can be split, it will be. */
3400 try_split (rtx pat, rtx trial, int last)
3402 rtx before = PREV_INSN (trial);
3403 rtx after = NEXT_INSN (trial);
3404 int has_barrier = 0;
3405 rtx note, seq, tem;
3406 int probability;
3407 rtx insn_last, insn;
3408 int njumps = 0;
3410 /* We're not good at redistributing frame information. */
3411 if (RTX_FRAME_RELATED_P (trial))
3412 return trial;
3414 if (any_condjump_p (trial)
3415 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3416 split_branch_probability = INTVAL (XEXP (note, 0));
3417 probability = split_branch_probability;
3419 seq = split_insns (pat, trial);
3421 split_branch_probability = -1;
3423 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3424 We may need to handle this specially. */
3425 if (after && BARRIER_P (after))
3427 has_barrier = 1;
3428 after = NEXT_INSN (after);
3431 if (!seq)
3432 return trial;
3434 /* Avoid infinite loop if any insn of the result matches
3435 the original pattern. */
3436 insn_last = seq;
3437 while (1)
3439 if (INSN_P (insn_last)
3440 && rtx_equal_p (PATTERN (insn_last), pat))
3441 return trial;
3442 if (!NEXT_INSN (insn_last))
3443 break;
3444 insn_last = NEXT_INSN (insn_last);
3447 /* We will be adding the new sequence to the function. The splitters
3448 may have introduced invalid RTL sharing, so unshare the sequence now. */
3449 unshare_all_rtl_in_chain (seq);
3451 /* Mark labels. */
3452 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3454 if (JUMP_P (insn))
3456 mark_jump_label (PATTERN (insn), insn, 0);
3457 njumps++;
3458 if (probability != -1
3459 && any_condjump_p (insn)
3460 && !find_reg_note (insn, REG_BR_PROB, 0))
3462 /* We can preserve the REG_BR_PROB notes only if exactly
3463 one jump is created, otherwise the machine description
3464 is responsible for this step using
3465 split_branch_probability variable. */
3466 gcc_assert (njumps == 1);
3467 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3472 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3473 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3474 if (CALL_P (trial))
3476 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3477 if (CALL_P (insn))
3479 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3480 while (*p)
3481 p = &XEXP (*p, 1);
3482 *p = CALL_INSN_FUNCTION_USAGE (trial);
3483 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3487 /* Copy notes, particularly those related to the CFG. */
3488 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3490 switch (REG_NOTE_KIND (note))
3492 case REG_EH_REGION:
3493 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3494 break;
3496 case REG_NORETURN:
3497 case REG_SETJMP:
3498 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3500 if (CALL_P (insn))
3501 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3503 break;
3505 case REG_NON_LOCAL_GOTO:
3506 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3508 if (JUMP_P (insn))
3509 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3511 break;
3513 #ifdef AUTO_INC_DEC
3514 case REG_INC:
3515 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3517 rtx reg = XEXP (note, 0);
3518 if (!FIND_REG_INC_NOTE (insn, reg)
3519 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3520 add_reg_note (insn, REG_INC, reg);
3522 break;
3523 #endif
3525 default:
3526 break;
3530 /* If there are LABELS inside the split insns increment the
3531 usage count so we don't delete the label. */
3532 if (INSN_P (trial))
3534 insn = insn_last;
3535 while (insn != NULL_RTX)
3537 /* JUMP_P insns have already been "marked" above. */
3538 if (NONJUMP_INSN_P (insn))
3539 mark_label_nuses (PATTERN (insn));
3541 insn = PREV_INSN (insn);
3545 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3547 delete_insn (trial);
3548 if (has_barrier)
3549 emit_barrier_after (tem);
3551 /* Recursively call try_split for each new insn created; by the
3552 time control returns here that insn will be fully split, so
3553 set LAST and continue from the insn after the one returned.
3554 We can't use next_active_insn here since AFTER may be a note.
3555 Ignore deleted insns, which can occur if not optimizing. */
3556 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3557 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3558 tem = try_split (PATTERN (tem), tem, 1);
3560 /* Return either the first or the last insn, depending on which was
3561 requested. */
3562 return last
3563 ? (after ? PREV_INSN (after) : get_last_insn ())
3564 : NEXT_INSN (before);
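/* Editor's note: a typical call, as made from the insn splitting passes,
   is try_split (PATTERN (insn), insn, 1).  If no machine-description
   splitter matches, TRIAL comes back unchanged, so callers can simply
   continue from the returned insn either way.  */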
3567 /* Make and return an INSN rtx, initializing all its slots.
3568 Store PATTERN in the pattern slots. */
3571 make_insn_raw (rtx pattern)
3573 rtx insn;
3575 insn = rtx_alloc (INSN);
3577 INSN_UID (insn) = cur_insn_uid++;
3578 PATTERN (insn) = pattern;
3579 INSN_CODE (insn) = -1;
3580 REG_NOTES (insn) = NULL;
3581 INSN_LOCATOR (insn) = curr_insn_locator ();
3582 BLOCK_FOR_INSN (insn) = NULL;
3584 #ifdef ENABLE_RTL_CHECKING
3585 if (insn
3586 && INSN_P (insn)
3587 && (returnjump_p (insn)
3588 || (GET_CODE (insn) == SET
3589 && SET_DEST (insn) == pc_rtx)))
3591 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3592 debug_rtx (insn);
3594 #endif
3596 return insn;
3599 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3602 make_debug_insn_raw (rtx pattern)
3604 rtx insn;
3606 insn = rtx_alloc (DEBUG_INSN);
3607 INSN_UID (insn) = cur_debug_insn_uid++;
3608 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3609 INSN_UID (insn) = cur_insn_uid++;
3611 PATTERN (insn) = pattern;
3612 INSN_CODE (insn) = -1;
3613 REG_NOTES (insn) = NULL;
3614 INSN_LOCATOR (insn) = curr_insn_locator ();
3615 BLOCK_FOR_INSN (insn) = NULL;
3617 return insn;
3620 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3623 make_jump_insn_raw (rtx pattern)
3625 rtx insn;
3627 insn = rtx_alloc (JUMP_INSN);
3628 INSN_UID (insn) = cur_insn_uid++;
3630 PATTERN (insn) = pattern;
3631 INSN_CODE (insn) = -1;
3632 REG_NOTES (insn) = NULL;
3633 JUMP_LABEL (insn) = NULL;
3634 INSN_LOCATOR (insn) = curr_insn_locator ();
3635 BLOCK_FOR_INSN (insn) = NULL;
3637 return insn;
3640 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3642 static rtx
3643 make_call_insn_raw (rtx pattern)
3645 rtx insn;
3647 insn = rtx_alloc (CALL_INSN);
3648 INSN_UID (insn) = cur_insn_uid++;
3650 PATTERN (insn) = pattern;
3651 INSN_CODE (insn) = -1;
3652 REG_NOTES (insn) = NULL;
3653 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3654 INSN_LOCATOR (insn) = curr_insn_locator ();
3655 BLOCK_FOR_INSN (insn) = NULL;
3657 return insn;
3660 /* Add INSN to the end of the doubly-linked list.
3661 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3663 void
3664 add_insn (rtx insn)
3666 PREV_INSN (insn) = get_last_insn();
3667 NEXT_INSN (insn) = 0;
3669 if (NULL != get_last_insn())
3670 NEXT_INSN (get_last_insn ()) = insn;
3672 if (NULL == get_insns ())
3673 set_first_insn (insn);
3675 set_last_insn (insn);
3678 /* Add INSN into the doubly-linked list after insn AFTER. This and
3679 the next should be the only functions called to insert an insn once
3680 delay slots have been filled since only they know how to update a
3681 SEQUENCE. */
3683 void
3684 add_insn_after (rtx insn, rtx after, basic_block bb)
3686 rtx next = NEXT_INSN (after);
3688 gcc_assert (!optimize || !INSN_DELETED_P (after));
3690 NEXT_INSN (insn) = next;
3691 PREV_INSN (insn) = after;
3693 if (next)
3695 PREV_INSN (next) = insn;
3696 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3697 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3699 else if (get_last_insn () == after)
3700 set_last_insn (insn);
3701 else
3703 struct sequence_stack *stack = seq_stack;
3704 /* Scan all pending sequences too. */
3705 for (; stack; stack = stack->next)
3706 if (after == stack->last)
3708 stack->last = insn;
3709 break;
3712 gcc_assert (stack);
3715 if (!BARRIER_P (after)
3716 && !BARRIER_P (insn)
3717 && (bb = BLOCK_FOR_INSN (after)))
3719 set_block_for_insn (insn, bb);
3720 if (INSN_P (insn))
3721 df_insn_rescan (insn);
3722 /* Should not happen as first in the BB is always
3723 either NOTE or LABEL. */
3724 if (BB_END (bb) == after
3725 /* Avoid clobbering of structure when creating new BB. */
3726 && !BARRIER_P (insn)
3727 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3728 BB_END (bb) = insn;
3731 NEXT_INSN (after) = insn;
3732 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3734 rtx sequence = PATTERN (after);
3735 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3739 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3740 the previous should be the only functions called to insert an insn
3741 once delay slots have been filled since only they know how to
3742 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3743 BB from BEFORE. */
3745 void
3746 add_insn_before (rtx insn, rtx before, basic_block bb)
3748 rtx prev = PREV_INSN (before);
3750 gcc_assert (!optimize || !INSN_DELETED_P (before));
3752 PREV_INSN (insn) = prev;
3753 NEXT_INSN (insn) = before;
3755 if (prev)
3757 NEXT_INSN (prev) = insn;
3758 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3760 rtx sequence = PATTERN (prev);
3761 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3764 else if (get_insns () == before)
3765 set_first_insn (insn);
3766 else
3768 struct sequence_stack *stack = seq_stack;
3769 /* Scan all pending sequences too. */
3770 for (; stack; stack = stack->next)
3771 if (before == stack->first)
3773 stack->first = insn;
3774 break;
3777 gcc_assert (stack);
3780 if (!bb
3781 && !BARRIER_P (before)
3782 && !BARRIER_P (insn))
3783 bb = BLOCK_FOR_INSN (before);
3785 if (bb)
3787 set_block_for_insn (insn, bb);
3788 if (INSN_P (insn))
3789 df_insn_rescan (insn);
3790 /* Should not happen as first in the BB is always either NOTE or
3791 LABEL. */
3792 gcc_assert (BB_HEAD (bb) != insn
3793 /* Avoid clobbering of structure when creating new BB. */
3794 || BARRIER_P (insn)
3795 || NOTE_INSN_BASIC_BLOCK_P (insn));
3798 PREV_INSN (before) = insn;
3799 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3800 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3804 /* Replace INSN with a deleted instruction note. */
3806 void
3807 set_insn_deleted (rtx insn)
3809 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3810 PUT_CODE (insn, NOTE);
3811 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3815 /* Remove an insn from its doubly-linked list. This function knows how
3816 to handle sequences. */
3817 void
3818 remove_insn (rtx insn)
3820 rtx next = NEXT_INSN (insn);
3821 rtx prev = PREV_INSN (insn);
3822 basic_block bb;
3824 /* Later in the code, the block will be marked dirty. */
3825 df_insn_delete (NULL, INSN_UID (insn));
3827 if (prev)
3829 NEXT_INSN (prev) = next;
3830 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3832 rtx sequence = PATTERN (prev);
3833 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3836 else if (get_insns () == insn)
3838 if (next)
3839 PREV_INSN (next) = NULL;
3840 set_first_insn (next);
3842 else
3844 struct sequence_stack *stack = seq_stack;
3845 /* Scan all pending sequences too. */
3846 for (; stack; stack = stack->next)
3847 if (insn == stack->first)
3849 stack->first = next;
3850 break;
3853 gcc_assert (stack);
3856 if (next)
3858 PREV_INSN (next) = prev;
3859 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3860 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3862 else if (get_last_insn () == insn)
3863 set_last_insn (prev);
3864 else
3866 struct sequence_stack *stack = seq_stack;
3867 /* Scan all pending sequences too. */
3868 for (; stack; stack = stack->next)
3869 if (insn == stack->last)
3871 stack->last = prev;
3872 break;
3875 gcc_assert (stack);
3877 if (!BARRIER_P (insn)
3878 && (bb = BLOCK_FOR_INSN (insn)))
3880 if (NONDEBUG_INSN_P (insn))
3881 df_set_bb_dirty (bb);
3882 if (BB_HEAD (bb) == insn)
3884 /* Never ever delete the basic block note without deleting whole
3885 basic block. */
3886 gcc_assert (!NOTE_P (insn));
3887 BB_HEAD (bb) = next;
3889 if (BB_END (bb) == insn)
3890 BB_END (bb) = prev;
3894 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3896 void
3897 add_function_usage_to (rtx call_insn, rtx call_fusage)
3899 gcc_assert (call_insn && CALL_P (call_insn));
3901 /* Put the register usage information on the CALL. If there is already
3902 some usage information, put ours at the end. */
3903 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3905 rtx link;
3907 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3908 link = XEXP (link, 1))
3911 XEXP (link, 1) = call_fusage;
3913 else
3914 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3917 /* Delete all insns made since FROM.
3918 FROM becomes the new last instruction. */
3920 void
3921 delete_insns_since (rtx from)
3923 if (from == 0)
3924 set_first_insn (0);
3925 else
3926 NEXT_INSN (from) = 0;
3927 set_last_insn (from);
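/* Editor's sketch, not part of emit-rtl.c: delete_insns_since supports the
   speculative-emission idiom used throughout the expanders -- remember the
   last insn, try an expansion, and roll back on failure.  EXPAND_ATTEMPT
   is hypothetical:

     rtx last = get_last_insn ();
     rtx result = expand_attempt (...);
     if (result == NULL_RTX)
       delete_insns_since (last);  */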
3930 /* This function is deprecated; please use sequences instead.
3932 Move a consecutive bunch of insns to a different place in the chain.
3933 The insns to be moved are those between FROM and TO.
3934 They are moved to a new position after the insn AFTER.
3935 AFTER must not be FROM or TO or any insn in between.
3937 This function does not know about SEQUENCEs and hence should not be
3938 called after delay-slot filling has been done. */
3940 void
3941 reorder_insns_nobb (rtx from, rtx to, rtx after)
3943 #ifdef ENABLE_CHECKING
3944 rtx x;
3945 for (x = from; x != to; x = NEXT_INSN (x))
3946 gcc_assert (after != x);
3947 gcc_assert (after != to);
3948 #endif
3950 /* Splice this bunch out of where it is now. */
3951 if (PREV_INSN (from))
3952 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3953 if (NEXT_INSN (to))
3954 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3955 if (get_last_insn () == to)
3956 set_last_insn (PREV_INSN (from));
3957 if (get_insns () == from)
3958 set_first_insn (NEXT_INSN (to));
3960 /* Make the new neighbors point to it and it to them. */
3961 if (NEXT_INSN (after))
3962 PREV_INSN (NEXT_INSN (after)) = to;
3964 NEXT_INSN (to) = NEXT_INSN (after);
3965 PREV_INSN (from) = after;
3966 NEXT_INSN (after) = from;
3967 if (after == get_last_insn())
3968 set_last_insn (to);
3971 /* Same as function above, but take care to update BB boundaries. */
3972 void
3973 reorder_insns (rtx from, rtx to, rtx after)
3975 rtx prev = PREV_INSN (from);
3976 basic_block bb, bb2;
3978 reorder_insns_nobb (from, to, after);
3980 if (!BARRIER_P (after)
3981 && (bb = BLOCK_FOR_INSN (after)))
3983 rtx x;
3984 df_set_bb_dirty (bb);
3986 if (!BARRIER_P (from)
3987 && (bb2 = BLOCK_FOR_INSN (from)))
3989 if (BB_END (bb2) == to)
3990 BB_END (bb2) = prev;
3991 df_set_bb_dirty (bb2);
3994 if (BB_END (bb) == after)
3995 BB_END (bb) = to;
3997 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3998 if (!BARRIER_P (x))
3999 df_insn_change_bb (x, bb);
4004 /* Emit insn(s) of given code and pattern
4005 at a specified place within the doubly-linked list.
4007 All of the emit_foo global entry points accept an object
4008 X which is either an insn list or a PATTERN of a single
4009 instruction.
4011 There are thus a few canonical ways to generate code and
4012 emit it at a specific place in the instruction stream. For
4013 example, consider the instruction named SPOT and the fact that
4014 we would like to emit some instructions before SPOT. We might
4015 do it like this:
4017 start_sequence ();
4018 ... emit the new instructions ...
4019 insns_head = get_insns ();
4020 end_sequence ();
4022 emit_insn_before (insns_head, SPOT);
4024 It used to be common to generate SEQUENCE rtl instead, but that
4025 is a relic of the past which no longer occurs. The reason is that
4026 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4027 generated would almost certainly die right after it was created. */
4029 static rtx
4030 emit_pattern_before_noloc (rtx x, rtx before, rtx last, basic_block bb,
4031 rtx (*make_raw) (rtx))
4033 rtx insn;
4035 gcc_assert (before);
4037 if (x == NULL_RTX)
4038 return last;
4040 switch (GET_CODE (x))
4042 case DEBUG_INSN:
4043 case INSN:
4044 case JUMP_INSN:
4045 case CALL_INSN:
4046 case CODE_LABEL:
4047 case BARRIER:
4048 case NOTE:
4049 insn = x;
4050 while (insn)
4052 rtx next = NEXT_INSN (insn);
4053 add_insn_before (insn, before, bb);
4054 last = insn;
4055 insn = next;
4057 break;
4059 #ifdef ENABLE_RTL_CHECKING
4060 case SEQUENCE:
4061 gcc_unreachable ();
4062 break;
4063 #endif
4065 default:
4066 last = (*make_raw) (x);
4067 add_insn_before (last, before, bb);
4068 break;
4071 return last;
4074 /* Make X be output before the instruction BEFORE. */
4077 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4079 return emit_pattern_before_noloc (x, before, before, bb, make_insn_raw);
4082 /* Make an instruction with body X and code JUMP_INSN
4083 and output it before the instruction BEFORE. */
4086 emit_jump_insn_before_noloc (rtx x, rtx before)
4088 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4089 make_jump_insn_raw);
4092 /* Make an instruction with body X and code CALL_INSN
4093 and output it before the instruction BEFORE. */
4096 emit_call_insn_before_noloc (rtx x, rtx before)
4098 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4099 make_call_insn_raw);
4102 /* Make an instruction with body X and code DEBUG_INSN
4103 and output it before the instruction BEFORE. */
4106 emit_debug_insn_before_noloc (rtx x, rtx before)
4108 return emit_pattern_before_noloc (x, before, NULL_RTX, NULL,
4109 make_debug_insn_raw);
4112 /* Make an insn of code BARRIER
4113 and output it before the insn BEFORE. */
4116 emit_barrier_before (rtx before)
4118 rtx insn = rtx_alloc (BARRIER);
4120 INSN_UID (insn) = cur_insn_uid++;
4122 add_insn_before (insn, before, NULL);
4123 return insn;
4126 /* Emit the label LABEL before the insn BEFORE. */
4129 emit_label_before (rtx label, rtx before)
4131 /* This can be called twice for the same label as a result of the
4132 confusion that follows a syntax error! So make it harmless. */
4133 if (INSN_UID (label) == 0)
4135 INSN_UID (label) = cur_insn_uid++;
4136 add_insn_before (label, before, NULL);
4139 return label;
4142 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4145 emit_note_before (enum insn_note subtype, rtx before)
4147 rtx note = rtx_alloc (NOTE);
4148 INSN_UID (note) = cur_insn_uid++;
4149 NOTE_KIND (note) = subtype;
4150 BLOCK_FOR_INSN (note) = NULL;
4151 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4153 add_insn_before (note, before, NULL);
4154 return note;
4157 /* Helper for emit_insn_after, handles lists of instructions
4158 efficiently. */
4160 static rtx
4161 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4163 rtx last;
4164 rtx after_after;
4165 if (!bb && !BARRIER_P (after))
4166 bb = BLOCK_FOR_INSN (after);
4168 if (bb)
4170 df_set_bb_dirty (bb);
4171 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4172 if (!BARRIER_P (last))
4174 set_block_for_insn (last, bb);
4175 df_insn_rescan (last);
4177 if (!BARRIER_P (last))
4179 set_block_for_insn (last, bb);
4180 df_insn_rescan (last);
4182 if (BB_END (bb) == after)
4183 BB_END (bb) = last;
4185 else
4186 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4187 continue;
4189 after_after = NEXT_INSN (after);
4191 NEXT_INSN (after) = first;
4192 PREV_INSN (first) = after;
4193 NEXT_INSN (last) = after_after;
4194 if (after_after)
4195 PREV_INSN (after_after) = last;
4197 if (after == get_last_insn())
4198 set_last_insn (last);
4200 return last;
4203 static rtx
4204 emit_pattern_after_noloc (rtx x, rtx after, basic_block bb,
4205 rtx (*make_raw)(rtx))
4207 rtx last = after;
4209 gcc_assert (after);
4211 if (x == NULL_RTX)
4212 return last;
4214 switch (GET_CODE (x))
4216 case DEBUG_INSN:
4217 case INSN:
4218 case JUMP_INSN:
4219 case CALL_INSN:
4220 case CODE_LABEL:
4221 case BARRIER:
4222 case NOTE:
4223 last = emit_insn_after_1 (x, after, bb);
4224 break;
4226 #ifdef ENABLE_RTL_CHECKING
4227 case SEQUENCE:
4228 gcc_unreachable ();
4229 break;
4230 #endif
4232 default:
4233 last = (*make_raw) (x);
4234 add_insn_after (last, after, bb);
4235 break;
4238 return last;
4241 /* Make X be output after the insn AFTER and set its basic block to BB.
4242 If BB is NULL, an attempt is made to infer the BB from AFTER. */
4245 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4247 return emit_pattern_after_noloc (x, after, bb, make_insn_raw);
4251 /* Make an insn of code JUMP_INSN with body X
4252 and output it after the insn AFTER. */
4255 emit_jump_insn_after_noloc (rtx x, rtx after)
4257 return emit_pattern_after_noloc (x, after, NULL, make_jump_insn_raw);
4260 /* Make an instruction with body X and code CALL_INSN
4261 and output it after the instruction AFTER. */
4264 emit_call_insn_after_noloc (rtx x, rtx after)
4266 return emit_pattern_after_noloc (x, after, NULL, make_call_insn_raw);
4269 /* Make an instruction with body X and code DEBUG_INSN
4270 and output it after the instruction AFTER. */
4273 emit_debug_insn_after_noloc (rtx x, rtx after)
4275 return emit_pattern_after_noloc (x, after, NULL, make_debug_insn_raw);
4278 /* Make an insn of code BARRIER
4279 and output it after the insn AFTER. */
4282 emit_barrier_after (rtx after)
4284 rtx insn = rtx_alloc (BARRIER);
4286 INSN_UID (insn) = cur_insn_uid++;
4288 add_insn_after (insn, after, NULL);
4289 return insn;
4292 /* Emit the label LABEL after the insn AFTER. */
4295 emit_label_after (rtx label, rtx after)
4297 /* This can be called twice for the same label
4298 as a result of the confusion that follows a syntax error!
4299 So make it harmless. */
4300 if (INSN_UID (label) == 0)
4301 {
4302 INSN_UID (label) = cur_insn_uid++;
4303 add_insn_after (label, after, NULL);
4304 }
4306 return label;
4309 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4312 emit_note_after (enum insn_note subtype, rtx after)
4314 rtx note = rtx_alloc (NOTE);
4315 INSN_UID (note) = cur_insn_uid++;
4316 NOTE_KIND (note) = subtype;
4317 BLOCK_FOR_INSN (note) = NULL;
4318 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4319 add_insn_after (note, after, NULL);
4320 return note;
4323 /* Insert PATTERN after AFTER, setting its INSN_LOCATOR to LOC.
4324 MAKE_RAW indicates how to turn PATTERN into a real insn. */
4326 static rtx
4327 emit_pattern_after_setloc (rtx pattern, rtx after, int loc,
4328 rtx (*make_raw) (rtx))
4330 rtx last = emit_pattern_after_noloc (pattern, after, NULL, make_raw);
4332 if (pattern == NULL_RTX || !loc)
4333 return last;
4335 after = NEXT_INSN (after);
4336 while (1)
4337 {
4338 if (active_insn_p (after) && !INSN_LOCATOR (after))
4339 INSN_LOCATOR (after) = loc;
4340 if (after == last)
4341 break;
4342 after = NEXT_INSN (after);
4343 }
4344 return last;
4347 /* Insert PATTERN after AFTER. MAKE_RAW indicates how to turn PATTERN
4348 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert after
4349 any DEBUG_INSNs. */
4351 static rtx
4352 emit_pattern_after (rtx pattern, rtx after, bool skip_debug_insns,
4353 rtx (*make_raw) (rtx))
4355 rtx prev = after;
4357 if (skip_debug_insns)
4358 while (DEBUG_INSN_P (prev))
4359 prev = PREV_INSN (prev);
4361 if (INSN_P (prev))
4362 return emit_pattern_after_setloc (pattern, after, INSN_LOCATOR (prev),
4363 make_raw);
4364 else
4365 return emit_pattern_after_noloc (pattern, after, NULL, make_raw);
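/* Illustrative sketch (not part of this file): the public wrappers
   below just fix the MAKE_RAW callback, so

     rtx new_insn = emit_insn_after (pat, after);

   is equivalent to emit_pattern_after (pat, after, true,
   make_insn_raw): it skips any DEBUG_INSNs before AFTER to find a
   locator, and falls back to the no-location variant when no active
   insn is found there.  */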
4368 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4370 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4372 return emit_pattern_after_setloc (pattern, after, loc, make_insn_raw);
4375 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4377 emit_insn_after (rtx pattern, rtx after)
4379 return emit_pattern_after (pattern, after, true, make_insn_raw);
4382 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4384 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4386 return emit_pattern_after_setloc (pattern, after, loc, make_jump_insn_raw);
4389 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4391 emit_jump_insn_after (rtx pattern, rtx after)
4393 return emit_pattern_after (pattern, after, true, make_jump_insn_raw);
4396 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4398 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4400 return emit_pattern_after_setloc (pattern, after, loc, make_call_insn_raw);
4403 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4405 emit_call_insn_after (rtx pattern, rtx after)
4407 return emit_pattern_after (pattern, after, true, make_call_insn_raw);
4410 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC. */
4412 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4414 return emit_pattern_after_setloc (pattern, after, loc, make_debug_insn_raw);
4417 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4419 emit_debug_insn_after (rtx pattern, rtx after)
4421 return emit_pattern_after (pattern, after, false, make_debug_insn_raw);
4424 /* Insert PATTERN before BEFORE, setting its INSN_LOCATOR to LOC.
4425 MAKE_RAW indicates how to turn PATTERN into a real insn. INSNP
4426 indicates if PATTERN is meant for an INSN as opposed to a JUMP_INSN,
4427 CALL_INSN, etc. */
4429 static rtx
4430 emit_pattern_before_setloc (rtx pattern, rtx before, int loc, bool insnp,
4431 rtx (*make_raw) (rtx))
4433 rtx first = PREV_INSN (before);
4434 rtx last = emit_pattern_before_noloc (pattern, before,
4435 insnp ? before : NULL_RTX,
4436 NULL, make_raw);
4438 if (pattern == NULL_RTX || !loc)
4439 return last;
4441 if (!first)
4442 first = get_insns ();
4443 else
4444 first = NEXT_INSN (first);
4445 while (1)
4446 {
4447 if (active_insn_p (first) && !INSN_LOCATOR (first))
4448 INSN_LOCATOR (first) = loc;
4449 if (first == last)
4450 break;
4451 first = NEXT_INSN (first);
4452 }
4453 return last;
4456 /* Insert PATTERN before BEFORE. MAKE_RAW indicates how to turn PATTERN
4457 into a real insn. SKIP_DEBUG_INSNS indicates whether to insert
4458 before any DEBUG_INSNs. INSNP indicates if PATTERN is meant for an
4459 INSN as opposed to a JUMP_INSN, CALL_INSN, etc. */
4461 static rtx
4462 emit_pattern_before (rtx pattern, rtx before, bool skip_debug_insns,
4463 bool insnp, rtx (*make_raw) (rtx))
4465 rtx next = before;
4467 if (skip_debug_insns)
4468 while (DEBUG_INSN_P (next))
4469 next = PREV_INSN (next);
4471 if (INSN_P (next))
4472 return emit_pattern_before_setloc (pattern, before, INSN_LOCATOR (next),
4473 insnp, make_raw);
4474 else
4475 return emit_pattern_before_noloc (pattern, before,
4476 insnp ? before : NULL_RTX,
4477 NULL, make_raw);
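/* Illustrative sketch (not part of this file): INSNP only changes
   the fallback argument handed to emit_pattern_before_noloc, e.g.

     emit_pattern_before (pat, before, true, true, make_insn_raw);

   passes BEFORE through (insnp == true), while the jump, call and
   debug wrappers pass insnp == false and hence NULL_RTX.  */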
4480 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4482 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4484 return emit_pattern_before_setloc (pattern, before, loc, true,
4485 make_insn_raw);
4488 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4490 emit_insn_before (rtx pattern, rtx before)
4492 return emit_pattern_before (pattern, before, true, true, make_insn_raw);
4495 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4497 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4499 return emit_pattern_before_setloc (pattern, before, loc, false,
4500 make_jump_insn_raw);
4503 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4505 emit_jump_insn_before (rtx pattern, rtx before)
4507 return emit_pattern_before (pattern, before, true, false,
4508 make_jump_insn_raw);
4511 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4513 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4515 return emit_pattern_before_setloc (pattern, before, loc, false,
4516 make_call_insn_raw);
4519 /* Like emit_call_insn_before_noloc,
4520 but set INSN_LOCATOR according to BEFORE. */
4522 emit_call_insn_before (rtx pattern, rtx before)
4524 return emit_pattern_before (pattern, before, true, false,
4525 make_call_insn_raw);
4528 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC. */
4530 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4532 return emit_pattern_before_setloc (pattern, before, loc, false,
4533 make_debug_insn_raw);
4536 /* Like emit_debug_insn_before_noloc,
4537 but set INSN_LOCATOR according to BEFORE. */
4539 emit_debug_insn_before (rtx pattern, rtx before)
4541 return emit_pattern_before (pattern, before, false, false,
4542 make_debug_insn_raw);
4545 /* Take X and emit it at the end of the doubly-linked
4546 INSN list.
4548 Returns the last insn emitted. */
4551 emit_insn (rtx x)
4553 rtx last = get_last_insn();
4554 rtx insn;
4556 if (x == NULL_RTX)
4557 return last;
4559 switch (GET_CODE (x))
4561 case DEBUG_INSN:
4562 case INSN:
4563 case JUMP_INSN:
4564 case CALL_INSN:
4565 case CODE_LABEL:
4566 case BARRIER:
4567 case NOTE:
4568 insn = x;
4569 while (insn)
4570 {
4571 rtx next = NEXT_INSN (insn);
4572 add_insn (insn);
4573 last = insn;
4574 insn = next;
4575 }
4576 break;
4578 #ifdef ENABLE_RTL_CHECKING
4579 case SEQUENCE:
4580 gcc_unreachable ();
4581 break;
4582 #endif
4584 default:
4585 last = make_insn_raw (x);
4586 add_insn (last);
4587 break;
4590 return last;
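/* Usage sketch (illustrative only; DST and SRC are hypothetical
   operands):

     emit_insn (gen_rtx_SET (VOIDmode, dst, src));

   A raw pattern such as this SET takes the `default' arm and is
   wrapped by make_insn_raw; an already-made insn, or a whole list of
   insns taken from a sequence, is chained onto the stream as-is.  */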
4593 /* Make an insn of code DEBUG_INSN with pattern X
4594 and add it to the end of the doubly-linked list. */
4597 emit_debug_insn (rtx x)
4599 rtx last = get_last_insn();
4600 rtx insn;
4602 if (x == NULL_RTX)
4603 return last;
4605 switch (GET_CODE (x))
4607 case DEBUG_INSN:
4608 case INSN:
4609 case JUMP_INSN:
4610 case CALL_INSN:
4611 case CODE_LABEL:
4612 case BARRIER:
4613 case NOTE:
4614 insn = x;
4615 while (insn)
4616 {
4617 rtx next = NEXT_INSN (insn);
4618 add_insn (insn);
4619 last = insn;
4620 insn = next;
4621 }
4622 break;
4624 #ifdef ENABLE_RTL_CHECKING
4625 case SEQUENCE:
4626 gcc_unreachable ();
4627 break;
4628 #endif
4630 default:
4631 last = make_debug_insn_raw (x);
4632 add_insn (last);
4633 break;
4636 return last;
4639 /* Make an insn of code JUMP_INSN with pattern X
4640 and add it to the end of the doubly-linked list. */
4643 emit_jump_insn (rtx x)
4645 rtx last = NULL_RTX, insn;
4647 switch (GET_CODE (x))
4649 case DEBUG_INSN:
4650 case INSN:
4651 case JUMP_INSN:
4652 case CALL_INSN:
4653 case CODE_LABEL:
4654 case BARRIER:
4655 case NOTE:
4656 insn = x;
4657 while (insn)
4658 {
4659 rtx next = NEXT_INSN (insn);
4660 add_insn (insn);
4661 last = insn;
4662 insn = next;
4663 }
4664 break;
4666 #ifdef ENABLE_RTL_CHECKING
4667 case SEQUENCE:
4668 gcc_unreachable ();
4669 break;
4670 #endif
4672 default:
4673 last = make_jump_insn_raw (x);
4674 add_insn (last);
4675 break;
4678 return last;
4681 /* Make an insn of code CALL_INSN with pattern X
4682 and add it to the end of the doubly-linked list. */
4685 emit_call_insn (rtx x)
4687 rtx insn;
4689 switch (GET_CODE (x))
4691 case DEBUG_INSN:
4692 case INSN:
4693 case JUMP_INSN:
4694 case CALL_INSN:
4695 case CODE_LABEL:
4696 case BARRIER:
4697 case NOTE:
4698 insn = emit_insn (x);
4699 break;
4701 #ifdef ENABLE_RTL_CHECKING
4702 case SEQUENCE:
4703 gcc_unreachable ();
4704 break;
4705 #endif
4707 default:
4708 insn = make_call_insn_raw (x);
4709 add_insn (insn);
4710 break;
4713 return insn;
4716 /* Add the label LABEL to the end of the doubly-linked list. */
4719 emit_label (rtx label)
4721 /* This can be called twice for the same label
4722 as a result of the confusion that follows a syntax error!
4723 So make it harmless. */
4724 if (INSN_UID (label) == 0)
4725 {
4726 INSN_UID (label) = cur_insn_uid++;
4727 add_insn (label);
4728 }
4729 return label;
4732 /* Make an insn of code BARRIER
4733 and add it to the end of the doubly-linked list. */
4736 emit_barrier (void)
4738 rtx barrier = rtx_alloc (BARRIER);
4739 INSN_UID (barrier) = cur_insn_uid++;
4740 add_insn (barrier);
4741 return barrier;
4744 /* Emit a copy of note ORIG. */
4747 emit_note_copy (rtx orig)
4749 rtx note;
4751 note = rtx_alloc (NOTE);
4753 INSN_UID (note) = cur_insn_uid++;
4754 NOTE_DATA (note) = NOTE_DATA (orig);
4755 NOTE_KIND (note) = NOTE_KIND (orig);
4756 BLOCK_FOR_INSN (note) = NULL;
4757 add_insn (note);
4759 return note;
4762 /* Make an insn of code NOTE with kind KIND
4763 and add it to the end of the doubly-linked list. */
4766 emit_note (enum insn_note kind)
4768 rtx note;
4770 note = rtx_alloc (NOTE);
4771 INSN_UID (note) = cur_insn_uid++;
4772 NOTE_KIND (note) = kind;
4773 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4774 BLOCK_FOR_INSN (note) = NULL;
4775 add_insn (note);
4776 return note;
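/* Usage sketch (illustrative only): passes emit marker notes with,
   e.g.,

     rtx note = emit_note (NOTE_INSN_DELETED);

   where NOTE_INSN_DELETED is one of the standard insn_note kinds.  */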
4779 /* Emit a clobber of lvalue X. */
4782 emit_clobber (rtx x)
4784 /* CONCATs should not appear in the insn stream. */
4785 if (GET_CODE (x) == CONCAT)
4786 {
4787 emit_clobber (XEXP (x, 0));
4788 return emit_clobber (XEXP (x, 1));
4789 }
4790 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
4793 /* Return a sequence of insns to clobber lvalue X. */
4796 gen_clobber (rtx x)
4798 rtx seq;
4800 start_sequence ();
4801 emit_clobber (x);
4802 seq = get_insns ();
4803 end_sequence ();
4804 return seq;
4807 /* Emit a use of rvalue X. */
4810 emit_use (rtx x)
4812 /* CONCATs should not appear in the insn stream. */
4813 if (GET_CODE (x) == CONCAT)
4814 {
4815 emit_use (XEXP (x, 0));
4816 return emit_use (XEXP (x, 1));
4817 }
4818 return emit_insn (gen_rtx_USE (VOIDmode, x));
4821 /* Return a sequence of insns to use rvalue X. */
4824 gen_use (rtx x)
4826 rtx seq;
4828 start_sequence ();
4829 emit_use (x);
4830 seq = get_insns ();
4831 end_sequence ();
4832 return seq;
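/* Illustrative sketch (not part of this file): gen_clobber and
   gen_use return a detached insn list built inside a temporary
   sequence, so callers can splice it anywhere, e.g.

     emit_insn_before (gen_use (reg), insn);  */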
4835 /* Cause next statement to emit a line note even if the line number
4836 has not changed. */
4838 void
4839 force_next_line_note (void)
4841 last_location = -1;
4844 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4845 note of this type already exists, remove it first. */
4848 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4850 rtx note = find_reg_note (insn, kind, NULL_RTX);
4852 switch (kind)
4854 case REG_EQUAL:
4855 case REG_EQUIV:
4856 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4857 has multiple sets (some callers assume single_set
4858 means the insn only has one set, when in fact it
4859 means the insn only has one *useful* set). */
4860 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4861 {
4862 gcc_assert (!note);
4863 return NULL_RTX;
4864 }
4866 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
4867 It serves no useful purpose and breaks eliminate_regs. */
4868 if (GET_CODE (datum) == ASM_OPERANDS)
4869 return NULL_RTX;
4871 if (note)
4872 {
4873 XEXP (note, 0) = datum;
4874 df_notes_rescan (insn);
4875 return note;
4876 }
4877 break;
4879 default:
4880 if (note)
4881 {
4882 XEXP (note, 0) = datum;
4883 return note;
4884 }
4885 break;
4888 add_reg_note (insn, kind, datum);
4890 switch (kind)
4892 case REG_EQUAL:
4893 case REG_EQUIV:
4894 df_notes_rescan (insn);
4895 break;
4896 default:
4897 break;
4900 return REG_NOTES (insn);
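/* Usage sketch (illustrative only; INSN is assumed to compute a
   known constant):

     set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   If INSN already carries a REG_EQUAL note, its datum is replaced
   instead of a second note being added.  */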
4903 /* Return an indication of which type of insn should have X as a body.
4904 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4906 static enum rtx_code
4907 classify_insn (rtx x)
4909 if (LABEL_P (x))
4910 return CODE_LABEL;
4911 if (GET_CODE (x) == CALL)
4912 return CALL_INSN;
4913 if (GET_CODE (x) == RETURN)
4914 return JUMP_INSN;
4915 if (GET_CODE (x) == SET)
4917 if (SET_DEST (x) == pc_rtx)
4918 return JUMP_INSN;
4919 else if (GET_CODE (SET_SRC (x)) == CALL)
4920 return CALL_INSN;
4921 else
4922 return INSN;
4924 if (GET_CODE (x) == PARALLEL)
4925 {
4926 int j;
4927 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4928 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4929 return CALL_INSN;
4930 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4931 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4932 return JUMP_INSN;
4933 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4934 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4935 return CALL_INSN;
4936 }
4937 return INSN;
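/* Illustrative classifications (not part of this file):

     (set (reg:SI 100) (reg:SI 101))   -> INSN
     (set (pc) (label_ref 23))         -> JUMP_INSN
     (set (reg:SI 100) (call ...))     -> CALL_INSN  */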
4940 /* Emit the rtl pattern X as an appropriate kind of insn.
4941 If X is a label, it is simply added into the insn chain. */
4944 emit (rtx x)
4946 enum rtx_code code = classify_insn (x);
4948 switch (code)
4950 case CODE_LABEL:
4951 return emit_label (x);
4952 case INSN:
4953 return emit_insn (x);
4954 case JUMP_INSN:
4955 {
4956 rtx insn = emit_jump_insn (x);
4957 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4958 return emit_barrier ();
4959 return insn;
4960 }
4961 case CALL_INSN:
4962 return emit_call_insn (x);
4963 case DEBUG_INSN:
4964 return emit_debug_insn (x);
4965 default:
4966 gcc_unreachable ();
4970 /* Space for free sequence stack entries. */
4971 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4973 /* Begin emitting insns to a sequence. If this sequence will contain
4974 something that might cause the compiler to pop arguments to function
4975 calls (because those pops have previously been deferred; see
4976 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4977 before calling this function. That will ensure that the deferred
4978 pops are not accidentally emitted in the middle of this sequence. */
4980 void
4981 start_sequence (void)
4983 struct sequence_stack *tem;
4985 if (free_sequence_stack != NULL)
4986 {
4987 tem = free_sequence_stack;
4988 free_sequence_stack = tem->next;
4989 }
4990 else
4991 tem = ggc_alloc_sequence_stack ();
4993 tem->next = seq_stack;
4994 tem->first = get_insns ();
4995 tem->last = get_last_insn ();
4997 seq_stack = tem;
4999 set_first_insn (0);
5000 set_last_insn (0);
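/* Canonical usage sketch (illustrative only; WHERE, DST and SRC are
   hypothetical):

     rtx seq;
     start_sequence ();
     emit_move_insn (dst, src);
     seq = get_insns ();
     end_sequence ();
     emit_insn_before (seq, where);  */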
5003 /* Set up the insn chain starting with FIRST as the current sequence,
5004 saving the previously current one. See the documentation for
5005 start_sequence for more information about how to use this function. */
5007 void
5008 push_to_sequence (rtx first)
5010 rtx last;
5012 start_sequence ();
5014 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5016 set_first_insn (first);
5017 set_last_insn (last);
5020 /* Like push_to_sequence, but take the last insn as an argument to avoid
5021 looping through the list. */
5023 void
5024 push_to_sequence2 (rtx first, rtx last)
5026 start_sequence ();
5028 set_first_insn (first);
5029 set_last_insn (last);
5032 /* Set up the outer-level insn chain
5033 as the current sequence, saving the previously current one. */
5035 void
5036 push_topmost_sequence (void)
5038 struct sequence_stack *stack, *top = NULL;
5040 start_sequence ();
5042 for (stack = seq_stack; stack; stack = stack->next)
5043 top = stack;
5045 set_first_insn (top->first);
5046 set_last_insn (top->last);
5049 /* After emitting to the outer-level insn chain, update the outer-level
5050 insn chain, and restore the previous saved state. */
5052 void
5053 pop_topmost_sequence (void)
5055 struct sequence_stack *stack, *top = NULL;
5057 for (stack = seq_stack; stack; stack = stack->next)
5058 top = stack;
5060 top->first = get_insns ();
5061 top->last = get_last_insn ();
5063 end_sequence ();
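/* Illustrative sketch (not part of this file): these two are used as
   a pair to peek at, or emit into, the function's outermost insn
   chain while nested sequences are active:

     push_topmost_sequence ();
     first = get_insns ();
     pop_topmost_sequence ();  */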
5066 /* After emitting to a sequence, restore previous saved state.
5068 To get the contents of the sequence just made, you must call
5069 `get_insns' *before* calling here.
5071 If the compiler might have deferred popping arguments while
5072 generating this sequence, and this sequence will not be immediately
5073 inserted into the instruction stream, use do_pending_stack_adjust
5074 before calling get_insns. That will ensure that the deferred
5075 pops are inserted into this sequence, and not into some random
5076 location in the instruction stream. See INHIBIT_DEFER_POP for more
5077 information about deferred popping of arguments. */
5079 void
5080 end_sequence (void)
5082 struct sequence_stack *tem = seq_stack;
5084 set_first_insn (tem->first);
5085 set_last_insn (tem->last);
5086 seq_stack = tem->next;
5088 memset (tem, 0, sizeof (*tem));
5089 tem->next = free_sequence_stack;
5090 free_sequence_stack = tem;
5093 /* Return 1 if currently emitting into a sequence. */
5096 in_sequence_p (void)
5098 return seq_stack != 0;
5101 /* Put the various virtual registers into REGNO_REG_RTX. */
5103 static void
5104 init_virtual_regs (void)
5106 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5107 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5108 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5109 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5110 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5111 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5112 = virtual_preferred_stack_boundary_rtx;
5116 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5117 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5118 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5119 static int copy_insn_n_scratches;
5121 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5122 copied an ASM_OPERANDS.
5123 In that case, it is the original input-operand vector. */
5124 static rtvec orig_asm_operands_vector;
5126 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5127 copied an ASM_OPERANDS.
5128 In that case, it is the copied input-operand vector. */
5129 static rtvec copy_asm_operands_vector;
5131 /* Likewise for the constraints vector. */
5132 static rtvec orig_asm_constraints_vector;
5133 static rtvec copy_asm_constraints_vector;
5135 /* Recursively create a new copy of an rtx for copy_insn.
5136 This function differs from copy_rtx in that it handles SCRATCHes and
5137 ASM_OPERANDs properly.
5138 Normally, this function is not used directly; use copy_insn as front end.
5139 However, you could first copy an insn pattern with copy_insn and then use
5140 this function afterwards to properly copy any REG_NOTEs containing
5141 SCRATCHes. */
5144 copy_insn_1 (rtx orig)
5146 rtx copy;
5147 int i, j;
5148 RTX_CODE code;
5149 const char *format_ptr;
5151 if (orig == NULL)
5152 return NULL;
5154 code = GET_CODE (orig);
5156 switch (code)
5158 case REG:
5159 case CONST_INT:
5160 case CONST_DOUBLE:
5161 case CONST_FIXED:
5162 case CONST_VECTOR:
5163 case SYMBOL_REF:
5164 case CODE_LABEL:
5165 case PC:
5166 case CC0:
5167 return orig;
5168 case CLOBBER:
5169 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5170 return orig;
5171 break;
5173 case SCRATCH:
5174 for (i = 0; i < copy_insn_n_scratches; i++)
5175 if (copy_insn_scratch_in[i] == orig)
5176 return copy_insn_scratch_out[i];
5177 break;
5179 case CONST:
5180 if (shared_const_p (orig))
5181 return orig;
5182 break;
5184 /* A MEM with a constant address is not sharable. The problem is that
5185 the constant address may need to be reloaded. If the mem is shared,
5186 then reloading one copy of this mem will cause all copies to appear
5187 to have been reloaded. */
5189 default:
5190 break;
5193 /* Copy the various flags, fields, and other information. We assume
5194 that all fields need copying, and then clear the fields that should
5195 not be copied. That is the sensible default behavior, and forces
5196 us to explicitly document why we are *not* copying a flag. */
5197 copy = shallow_copy_rtx (orig);
5199 /* We do not copy the USED flag, which is used as a mark bit during
5200 walks over the RTL. */
5201 RTX_FLAG (copy, used) = 0;
5203 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5204 if (INSN_P (orig))
5206 RTX_FLAG (copy, jump) = 0;
5207 RTX_FLAG (copy, call) = 0;
5208 RTX_FLAG (copy, frame_related) = 0;
5211 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5213 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5214 switch (*format_ptr++)
5216 case 'e':
5217 if (XEXP (orig, i) != NULL)
5218 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5219 break;
5221 case 'E':
5222 case 'V':
5223 if (XVEC (orig, i) == orig_asm_constraints_vector)
5224 XVEC (copy, i) = copy_asm_constraints_vector;
5225 else if (XVEC (orig, i) == orig_asm_operands_vector)
5226 XVEC (copy, i) = copy_asm_operands_vector;
5227 else if (XVEC (orig, i) != NULL)
5228 {
5229 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5230 for (j = 0; j < XVECLEN (copy, i); j++)
5231 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5232 }
5233 break;
5235 case 't':
5236 case 'w':
5237 case 'i':
5238 case 's':
5239 case 'S':
5240 case 'u':
5241 case '0':
5242 /* These are left unchanged. */
5243 break;
5245 default:
5246 gcc_unreachable ();
5249 if (code == SCRATCH)
5250 {
5251 i = copy_insn_n_scratches++;
5252 gcc_assert (i < MAX_RECOG_OPERANDS);
5253 copy_insn_scratch_in[i] = orig;
5254 copy_insn_scratch_out[i] = copy;
5255 }
5256 else if (code == ASM_OPERANDS)
5257 {
5258 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5259 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5260 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5261 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5262 }
5264 return copy;
5267 /* Create a new copy of an rtx.
5268 This function differs from copy_rtx in that it handles SCRATCHes and
5269 ASM_OPERANDs properly.
5270 INSN doesn't really have to be a full INSN; it could be just the
5271 pattern. */
5273 copy_insn (rtx insn)
5275 copy_insn_n_scratches = 0;
5276 orig_asm_operands_vector = 0;
5277 orig_asm_constraints_vector = 0;
5278 copy_asm_operands_vector = 0;
5279 copy_asm_constraints_vector = 0;
5280 return copy_insn_1 (insn);
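/* Usage sketch (illustrative only), following the comment above
   copy_insn_1: copy the pattern first, then any notes, so that
   SCRATCHes stay matched between pattern and notes:

     pat = copy_insn (PATTERN (insn));
     datum = copy_insn_1 (XEXP (note, 0));  */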
5283 /* Initialize data structures and variables in this file
5284 before generating rtl for each function. */
5286 void
5287 init_emit (void)
5289 set_first_insn (NULL);
5290 set_last_insn (NULL);
5291 if (MIN_NONDEBUG_INSN_UID)
5292 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5293 else
5294 cur_insn_uid = 1;
5295 cur_debug_insn_uid = 1;
5296 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5297 last_location = UNKNOWN_LOCATION;
5298 first_label_num = label_num;
5299 seq_stack = NULL;
5301 /* Init the tables that describe all the pseudo regs. */
5303 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5305 crtl->emit.regno_pointer_align
5306 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5308 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5310 /* Put copies of all the hard registers into regno_reg_rtx. */
5311 memcpy (regno_reg_rtx,
5312 initial_regno_reg_rtx,
5313 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5315 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5316 init_virtual_regs ();
5318 /* Indicate that the virtual registers and stack locations are
5319 all pointers. */
5320 REG_POINTER (stack_pointer_rtx) = 1;
5321 REG_POINTER (frame_pointer_rtx) = 1;
5322 REG_POINTER (hard_frame_pointer_rtx) = 1;
5323 REG_POINTER (arg_pointer_rtx) = 1;
5325 REG_POINTER (virtual_incoming_args_rtx) = 1;
5326 REG_POINTER (virtual_stack_vars_rtx) = 1;
5327 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5328 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5329 REG_POINTER (virtual_cfa_rtx) = 1;
5331 #ifdef STACK_BOUNDARY
5332 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5333 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5334 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5335 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5337 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5338 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5339 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5340 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5341 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5342 #endif
5344 #ifdef INIT_EXPANDERS
5345 INIT_EXPANDERS;
5346 #endif
5349 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5351 static rtx
5352 gen_const_vector (enum machine_mode mode, int constant)
5354 rtx tem;
5355 rtvec v;
5356 int units, i;
5357 enum machine_mode inner;
5359 units = GET_MODE_NUNITS (mode);
5360 inner = GET_MODE_INNER (mode);
5362 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5364 v = rtvec_alloc (units);
5366 /* We need to call this function after we set the scalar const_tiny_rtx
5367 entries. */
5368 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5370 for (i = 0; i < units; ++i)
5371 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5373 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5374 return tem;
5377 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
5378 when all elements are zero, and the one vector when all elements are one. */
5380 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5382 enum machine_mode inner = GET_MODE_INNER (mode);
5383 int nunits = GET_MODE_NUNITS (mode);
5384 rtx x;
5385 int i;
5387 /* Check to see if all of the elements have the same value. */
5388 x = RTVEC_ELT (v, nunits - 1);
5389 for (i = nunits - 2; i >= 0; i--)
5390 if (RTVEC_ELT (v, i) != x)
5391 break;
5393 /* If the values are all the same, check to see if we can use one of the
5394 standard constant vectors. */
5395 if (i == -1)
5397 if (x == CONST0_RTX (inner))
5398 return CONST0_RTX (mode);
5399 else if (x == CONST1_RTX (inner))
5400 return CONST1_RTX (mode);
5403 return gen_rtx_raw_CONST_VECTOR (mode, v);
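/* Illustrative example (not part of this file): a V4SImode vector
   whose four elements are all const0_rtx collapses to
   CONST0_RTX (V4SImode) rather than allocating a fresh
   CONST_VECTOR.  */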
5406 /* Initialize global register information required by all functions. */
5408 void
5409 init_emit_regs (void)
5411 int i;
5413 /* Reset register attributes */
5414 htab_empty (reg_attrs_htab);
5416 /* We need reg_raw_mode, so initialize the modes now. */
5417 init_reg_modes_target ();
5419 /* Assign register numbers to the globally defined register rtx. */
5420 pc_rtx = gen_rtx_fmt_ (PC, VOIDmode);
5421 ret_rtx = gen_rtx_fmt_ (RETURN, VOIDmode);
5422 cc0_rtx = gen_rtx_fmt_ (CC0, VOIDmode);
5423 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5424 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5425 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5426 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5427 virtual_incoming_args_rtx =
5428 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5429 virtual_stack_vars_rtx =
5430 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5431 virtual_stack_dynamic_rtx =
5432 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5433 virtual_outgoing_args_rtx =
5434 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5435 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5436 virtual_preferred_stack_boundary_rtx =
5437 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5439 /* Initialize RTL for commonly used hard registers. These are
5440 copied into regno_reg_rtx as we begin to compile each function. */
5441 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5442 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5444 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5445 return_address_pointer_rtx
5446 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5447 #endif
5449 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5450 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5451 else
5452 pic_offset_table_rtx = NULL_RTX;
5455 /* Create some permanent unique rtl objects shared between all functions. */
5457 void
5458 init_emit_once (void)
5460 int i;
5461 enum machine_mode mode;
5462 enum machine_mode double_mode;
5464 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5465 hash tables. */
5466 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5467 const_int_htab_eq, NULL);
5469 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5470 const_double_htab_eq, NULL);
5472 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5473 const_fixed_htab_eq, NULL);
5475 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5476 mem_attrs_htab_eq, NULL);
5477 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5478 reg_attrs_htab_eq, NULL);
5480 /* Compute the word and byte modes. */
5482 byte_mode = VOIDmode;
5483 word_mode = VOIDmode;
5484 double_mode = VOIDmode;
5486 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5487 mode != VOIDmode;
5488 mode = GET_MODE_WIDER_MODE (mode))
5489 {
5490 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5491 && byte_mode == VOIDmode)
5492 byte_mode = mode;
5494 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5495 && word_mode == VOIDmode)
5496 word_mode = mode;
5497 }
5499 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5500 mode != VOIDmode;
5501 mode = GET_MODE_WIDER_MODE (mode))
5503 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5504 && double_mode == VOIDmode)
5505 double_mode = mode;
5508 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5510 #ifdef INIT_EXPANDERS
5511 /* This is to initialize {init|mark|free}_machine_status before the first
5512 call to push_function_context_to. This is needed by the Chill front
5513 end which calls push_function_context_to before the first call to
5514 init_function_start. */
5515 INIT_EXPANDERS;
5516 #endif
5518 /* Create the unique rtx's for certain rtx codes and operand values. */
5520 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5521 tries to use these variables. */
5522 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5523 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5524 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5526 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5527 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5528 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5529 else
5530 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5532 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5533 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5534 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5536 dconstm1 = dconst1;
5537 dconstm1.sign = 1;
5539 dconsthalf = dconst1;
5540 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5542 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5543 {
5544 const REAL_VALUE_TYPE *const r =
5545 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5547 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5548 mode != VOIDmode;
5549 mode = GET_MODE_WIDER_MODE (mode))
5550 const_tiny_rtx[i][(int) mode] =
5551 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5553 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5554 mode != VOIDmode;
5555 mode = GET_MODE_WIDER_MODE (mode))
5556 const_tiny_rtx[i][(int) mode] =
5557 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5559 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5561 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5562 mode != VOIDmode;
5563 mode = GET_MODE_WIDER_MODE (mode))
5564 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5566 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5567 mode != VOIDmode;
5568 mode = GET_MODE_WIDER_MODE (mode))
5569 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5570 }
5572 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5573 mode != VOIDmode;
5574 mode = GET_MODE_WIDER_MODE (mode))
5575 {
5576 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5577 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5578 }
5580 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5581 mode != VOIDmode;
5582 mode = GET_MODE_WIDER_MODE (mode))
5583 {
5584 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5585 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5586 }
5588 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5589 mode != VOIDmode;
5590 mode = GET_MODE_WIDER_MODE (mode))
5591 {
5592 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5593 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5594 }
5596 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5597 mode != VOIDmode;
5598 mode = GET_MODE_WIDER_MODE (mode))
5599 {
5600 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5601 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5602 }
5604 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5605 mode != VOIDmode;
5606 mode = GET_MODE_WIDER_MODE (mode))
5607 {
5608 FCONST0(mode).data.high = 0;
5609 FCONST0(mode).data.low = 0;
5610 FCONST0(mode).mode = mode;
5611 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5612 FCONST0 (mode), mode);
5613 }
5615 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5616 mode != VOIDmode;
5617 mode = GET_MODE_WIDER_MODE (mode))
5618 {
5619 FCONST0(mode).data.high = 0;
5620 FCONST0(mode).data.low = 0;
5621 FCONST0(mode).mode = mode;
5622 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5623 FCONST0 (mode), mode);
5624 }
5626 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5627 mode != VOIDmode;
5628 mode = GET_MODE_WIDER_MODE (mode))
5629 {
5630 FCONST0(mode).data.high = 0;
5631 FCONST0(mode).data.low = 0;
5632 FCONST0(mode).mode = mode;
5633 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5634 FCONST0 (mode), mode);
5636 /* We store the value 1. */
5637 FCONST1(mode).data.high = 0;
5638 FCONST1(mode).data.low = 0;
5639 FCONST1(mode).mode = mode;
5640 lshift_double (1, 0, GET_MODE_FBIT (mode),
5641 2 * HOST_BITS_PER_WIDE_INT,
5642 &FCONST1(mode).data.low,
5643 &FCONST1(mode).data.high,
5644 SIGNED_FIXED_POINT_MODE_P (mode));
5645 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5646 FCONST1 (mode), mode);
5647 }
5649 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5650 mode != VOIDmode;
5651 mode = GET_MODE_WIDER_MODE (mode))
5652 {
5653 FCONST0(mode).data.high = 0;
5654 FCONST0(mode).data.low = 0;
5655 FCONST0(mode).mode = mode;
5656 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5657 FCONST0 (mode), mode);
5659 /* We store the value 1. */
5660 FCONST1(mode).data.high = 0;
5661 FCONST1(mode).data.low = 0;
5662 FCONST1(mode).mode = mode;
5663 lshift_double (1, 0, GET_MODE_FBIT (mode),
5664 2 * HOST_BITS_PER_WIDE_INT,
5665 &FCONST1(mode).data.low,
5666 &FCONST1(mode).data.high,
5667 SIGNED_FIXED_POINT_MODE_P (mode));
5668 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5669 FCONST1 (mode), mode);
5670 }
5672 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5673 mode != VOIDmode;
5674 mode = GET_MODE_WIDER_MODE (mode))
5676 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5679 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5680 mode != VOIDmode;
5681 mode = GET_MODE_WIDER_MODE (mode))
5683 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5686 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5687 mode != VOIDmode;
5688 mode = GET_MODE_WIDER_MODE (mode))
5689 {
5690 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5691 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5692 }
5694 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5695 mode != VOIDmode;
5696 mode = GET_MODE_WIDER_MODE (mode))
5697 {
5698 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5699 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5700 }
5702 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5703 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5704 const_tiny_rtx[0][i] = const0_rtx;
5706 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5707 if (STORE_FLAG_VALUE == 1)
5708 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5711 /* Produce an exact duplicate of insn INSN after AFTER, taking care
5712 to update any libcall regions if present. */
5715 emit_copy_of_insn_after (rtx insn, rtx after)
5717 rtx new_rtx, link;
5719 switch (GET_CODE (insn))
5721 case INSN:
5722 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5723 break;
5725 case JUMP_INSN:
5726 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5727 break;
5729 case DEBUG_INSN:
5730 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5731 break;
5733 case CALL_INSN:
5734 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5735 if (CALL_INSN_FUNCTION_USAGE (insn))
5736 CALL_INSN_FUNCTION_USAGE (new_rtx)
5737 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5738 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5739 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5740 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5741 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5742 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5743 break;
5745 default:
5746 gcc_unreachable ();
5749 /* Update LABEL_NUSES. */
5750 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
5752 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
5754 /* If the old insn is frame related, then so is the new one. This is
5755 primarily needed for IA-64 unwind info which marks epilogue insns,
5756 which may be duplicated by the basic block reordering code. */
5757 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
5759 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5760 will make them. REG_LABEL_TARGETs are created there too, but are
5761 supposed to be sticky, so we copy them. */
5762 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5763 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5765 if (GET_CODE (link) == EXPR_LIST)
5766 add_reg_note (new_rtx, REG_NOTE_KIND (link),
5767 copy_insn_1 (XEXP (link, 0)));
5768 else
5769 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
5772 INSN_CODE (new_rtx) = INSN_CODE (insn);
5773 return new_rtx;
5776 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5778 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5780 if (hard_reg_clobbers[mode][regno])
5781 return hard_reg_clobbers[mode][regno];
5782 else
5783 return (hard_reg_clobbers[mode][regno] =
5784 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
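/* Illustrative note (not part of this file): thanks to the cache
   above, repeated calls with the same mode/register pair yield the
   same shared rtx, e.g.

     gen_hard_reg_clobber (SImode, 1) == gen_hard_reg_clobber (SImode, 1)

   always holds, so the clobbers can be compared by pointer.  */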
5787 #include "gt-emit-rtl.h"