Fix to expose more LIM when creating mem_ref
[official-gcc.git] / gcc / emit-rtl.c
blob 32741dc6b809af671e1e6e457f9f911699037379
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010
5 Free Software Foundation, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* Middle-to-low level generation of rtx code and insns.
26 This file contains support functions for creating rtl expressions
27 and manipulating them in the doubly-linked chain of insns.
29 The patterns of the insns are created by machine-dependent
30 routines in insn-emit.c, which is generated automatically from
31 the machine description. These routines make the individual rtx's
32 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
33 which are automatically generated from rtl.def; what is machine
34 dependent is the kind of rtx's they make and what arguments they
35 use. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "diagnostic-core.h"
42 #include "toplev.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "tm_p.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "bitmap.h"
55 #include "basic-block.h"
56 #include "ggc.h"
57 #include "debug.h"
58 #include "langhooks.h"
59 #include "tree-pass.h"
60 #include "df.h"
61 #include "params.h"
62 #include "target.h"
64 struct target_rtl default_target_rtl;
65 #if SWITCHABLE_TARGET
66 struct target_rtl *this_target_rtl = &default_target_rtl;
67 #endif
69 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
71 /* Commonly used modes. */
73 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
74 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
75 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
76 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
78 /* Datastructures maintained for currently processed function in RTL form. */
80 struct rtl_data x_rtl;
82 /* Indexed by pseudo register number, gives the rtx for that pseudo.
83 Allocated in parallel with regno_pointer_align.
84 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
85 with the length attribute nested in top-level structures. */
87 rtx * regno_reg_rtx;
89 /* This is *not* reset after each function. It gives each CODE_LABEL
90 in the entire compilation a unique label number. */
92 static GTY(()) int label_num = 1;
94 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
95 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
96 record a copy of const[012]_rtx. */
98 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
100 rtx const_true_rtx;
102 REAL_VALUE_TYPE dconst0;
103 REAL_VALUE_TYPE dconst1;
104 REAL_VALUE_TYPE dconst2;
105 REAL_VALUE_TYPE dconstm1;
106 REAL_VALUE_TYPE dconsthalf;
108 /* Record fixed-point constant 0 and 1. */
109 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
112 /* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
117 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
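/* Illustrative sketch, not part of the original file: because CONST_INTs
   in this range are interned in const_int_rtx, repeated requests for the
   same small value return the identical rtx, so pointer comparison is
   enough.  The example_* helper below is hypothetical.  */
#if 0
static void
example_const_int_sharing (void)
{
  rtx a = GEN_INT (5);
  rtx b = GEN_INT (5);

  gcc_assert (a == b);			/* Same interned object.  */
  gcc_assert (GEN_INT (0) == const0_rtx);
}
#endif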
119 /* A hash table storing CONST_INTs whose absolute value is greater
120 than MAX_SAVED_CONST_INT. */
122 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
123 htab_t const_int_htab;
125 /* A hash table storing memory attribute structures. */
126 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
127 htab_t mem_attrs_htab;
129 /* A hash table storing register attribute structures. */
130 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
131 htab_t reg_attrs_htab;
133 /* A hash table storing all CONST_DOUBLEs. */
134 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
135 htab_t const_double_htab;
137 /* A hash table storing all CONST_FIXEDs. */
138 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
139 htab_t const_fixed_htab;
141 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
142 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
143 #define last_location (crtl->emit.x_last_location)
144 #define first_label_num (crtl->emit.x_first_label_num)
146 static rtx make_call_insn_raw (rtx);
147 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
148 static void set_used_decls (tree);
149 static void mark_label_nuses (rtx);
150 static hashval_t const_int_htab_hash (const void *);
151 static int const_int_htab_eq (const void *, const void *);
152 static hashval_t const_double_htab_hash (const void *);
153 static int const_double_htab_eq (const void *, const void *);
154 static rtx lookup_const_double (rtx);
155 static hashval_t const_fixed_htab_hash (const void *);
156 static int const_fixed_htab_eq (const void *, const void *);
157 static rtx lookup_const_fixed (rtx);
158 static hashval_t mem_attrs_htab_hash (const void *);
159 static int mem_attrs_htab_eq (const void *, const void *);
160 static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
161 addr_space_t, enum machine_mode);
162 static hashval_t reg_attrs_htab_hash (const void *);
163 static int reg_attrs_htab_eq (const void *, const void *);
164 static reg_attrs *get_reg_attrs (tree, int);
165 static rtx gen_const_vector (enum machine_mode, int);
166 static void copy_rtx_if_shared_1 (rtx *orig);
168 /* Probability of the conditional branch currently processed by try_split.
169 Set to -1 otherwise. */
170 int split_branch_probability = -1;
172 /* Returns a hash code for X (which is really a CONST_INT). */
174 static hashval_t
175 const_int_htab_hash (const void *x)
177 return (hashval_t) INTVAL ((const_rtx) x);
180 /* Returns nonzero if the value represented by X (which is really a
181 CONST_INT) is the same as that given by Y (which is really a
182 HOST_WIDE_INT *). */
184 static int
185 const_int_htab_eq (const void *x, const void *y)
187 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
190 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
191 static hashval_t
192 const_double_htab_hash (const void *x)
194 const_rtx const value = (const_rtx) x;
195 hashval_t h;
197 if (GET_MODE (value) == VOIDmode)
198 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
199 else
201 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
202 /* MODE is used in the comparison, so it should be in the hash. */
203 h ^= GET_MODE (value);
205 return h;
208 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
209 is the same as that represented by Y (really a CONST_DOUBLE). */
210 static int
211 const_double_htab_eq (const void *x, const void *y)
213 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
215 if (GET_MODE (a) != GET_MODE (b))
216 return 0;
217 if (GET_MODE (a) == VOIDmode)
218 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
219 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
220 else
221 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
222 CONST_DOUBLE_REAL_VALUE (b));
225 /* Returns a hash code for X (which is really a CONST_FIXED). */
227 static hashval_t
228 const_fixed_htab_hash (const void *x)
230 const_rtx const value = (const_rtx) x;
231 hashval_t h;
233 h = fixed_hash (CONST_FIXED_VALUE (value));
234 /* MODE is used in the comparison, so it should be in the hash. */
235 h ^= GET_MODE (value);
236 return h;
239 /* Returns nonzero if the value represented by X (really a CONST_FIXED)
240 is the same as that represented by Y (really a CONST_FIXED). */
242 static int
243 const_fixed_htab_eq (const void *x, const void *y)
245 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
247 if (GET_MODE (a) != GET_MODE (b))
248 return 0;
249 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
252 /* Returns a hash code for X (which is really a mem_attrs *). */
254 static hashval_t
255 mem_attrs_htab_hash (const void *x)
257 const mem_attrs *const p = (const mem_attrs *) x;
259 return (p->alias ^ (p->align * 1000)
260 ^ (p->addrspace * 4000)
261 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
262 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
263 ^ (size_t) iterative_hash_expr (p->expr, 0));
266 /* Returns nonzero if the value represented by X (which is really a
267 mem_attrs *) is the same as that given by Y (which is also really a
268 mem_attrs *). */
270 static int
271 mem_attrs_htab_eq (const void *x, const void *y)
273 const mem_attrs *const p = (const mem_attrs *) x;
274 const mem_attrs *const q = (const mem_attrs *) y;
276 return (p->alias == q->alias && p->offset == q->offset
277 && p->size == q->size && p->align == q->align
278 && p->addrspace == q->addrspace
279 && (p->expr == q->expr
280 || (p->expr != NULL_TREE && q->expr != NULL_TREE
281 && operand_equal_p (p->expr, q->expr, 0))));
284 /* Allocate a new mem_attrs structure and insert it into the hash table if
285 one identical to it is not already in the table. We are doing this for
286 MEM of mode MODE. */
288 static mem_attrs *
289 get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
290 unsigned int align, addr_space_t addrspace, enum machine_mode mode)
292 mem_attrs attrs;
293 void **slot;
295 /* If everything is the default, we can just return zero.
296 This must match what the corresponding MEM_* macros return when the
297 field is not present. */
298 if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
299 && (size == 0
300 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
301 && (STRICT_ALIGNMENT && mode != BLKmode
302 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
303 return 0;
305 attrs.alias = alias;
306 attrs.expr = expr;
307 attrs.offset = offset;
308 attrs.size = size;
309 attrs.align = align;
310 attrs.addrspace = addrspace;
312 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
313 if (*slot == 0)
315 *slot = ggc_alloc_mem_attrs ();
316 memcpy (*slot, &attrs, sizeof (mem_attrs));
319 return (mem_attrs *) *slot;
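/* Illustrative sketch, not part of the original file: mem_attrs are
   hash-consed by get_mem_attrs, so MEMs carrying identical attributes
   share one structure, and an all-default set is represented by a null
   MEM_ATTRS.  The example_* helper is hypothetical.  */
#if 0
static void
example_mem_attrs_sharing (void)
{
  rtx m1 = gen_rtx_MEM (SImode, stack_pointer_rtx);
  rtx m2 = gen_rtx_MEM (SImode, stack_pointer_rtx);

  set_mem_align (m1, 64);
  set_mem_align (m2, 64);
  gcc_assert (MEM_ATTRS (m1) == MEM_ATTRS (m2));
}
#endif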
322 /* Returns a hash code for X (which is really a reg_attrs *). */
324 static hashval_t
325 reg_attrs_htab_hash (const void *x)
327 const reg_attrs *const p = (const reg_attrs *) x;
329 return ((p->offset * 1000) ^ (long) p->decl);
332 /* Returns nonzero if the value represented by X (which is really a
333 reg_attrs *) is the same as that given by Y (which is also really a
334 reg_attrs *). */
336 static int
337 reg_attrs_htab_eq (const void *x, const void *y)
339 const reg_attrs *const p = (const reg_attrs *) x;
340 const reg_attrs *const q = (const reg_attrs *) y;
342 return (p->decl == q->decl && p->offset == q->offset);
344 /* Allocate a new reg_attrs structure and insert it into the hash table if
345 one identical to it is not already in the table. We are doing this for
346 a REG with decl DECL and offset OFFSET. */
348 static reg_attrs *
349 get_reg_attrs (tree decl, int offset)
351 reg_attrs attrs;
352 void **slot;
354 /* If everything is the default, we can just return zero. */
355 if (decl == 0 && offset == 0)
356 return 0;
358 attrs.decl = decl;
359 attrs.offset = offset;
361 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
362 if (*slot == 0)
364 *slot = ggc_alloc_reg_attrs ();
365 memcpy (*slot, &attrs, sizeof (reg_attrs));
368 return (reg_attrs *) *slot;
372 #if !HAVE_blockage
373 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
374 across this insn. */
377 gen_blockage (void)
379 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
380 MEM_VOLATILE_P (x) = true;
381 return x;
383 #endif
386 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
387 don't attempt to share with the various global pieces of rtl (such as
388 frame_pointer_rtx). */
391 gen_raw_REG (enum machine_mode mode, int regno)
393 rtx x = gen_rtx_raw_REG (mode, regno);
394 ORIGINAL_REGNO (x) = regno;
395 return x;
398 /* There are some RTL codes that require special attention; the generation
399 functions do the raw handling. If you add to this list, modify
400 special_rtx in gengenrtl.c as well. */
403 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
405 void **slot;
407 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
408 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
410 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
411 if (const_true_rtx && arg == STORE_FLAG_VALUE)
412 return const_true_rtx;
413 #endif
415 /* Look up the CONST_INT in the hash table. */
416 slot = htab_find_slot_with_hash (const_int_htab, &arg,
417 (hashval_t) arg, INSERT);
418 if (*slot == 0)
419 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
421 return (rtx) *slot;
425 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
427 return GEN_INT (trunc_int_for_mode (c, mode));
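/* Illustrative sketch, not part of the original file: gen_int_mode
   canonicalizes C by sign-extending it from MODE's width, so every
   CONST_INT denoting the same QImode value is shared.  The example_*
   helper is hypothetical.  */
#if 0
static void
example_gen_int_mode (void)
{
  /* 0xff sign-extends from 8 bits to -1, so this is constm1_rtx.  */
  gcc_assert (gen_int_mode (0xff, QImode) == constm1_rtx);
}
#endif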
430 /* CONST_DOUBLEs might be created from pairs of integers, or from
431 REAL_VALUE_TYPEs. Also, their length is known only at run time,
432 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
434 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
435 hash table. If so, return its counterpart; otherwise add it
436 to the hash table and return it. */
437 static rtx
438 lookup_const_double (rtx real)
440 void **slot = htab_find_slot (const_double_htab, real, INSERT);
441 if (*slot == 0)
442 *slot = real;
444 return (rtx) *slot;
447 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
448 VALUE in mode MODE. */
450 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
452 rtx real = rtx_alloc (CONST_DOUBLE);
453 PUT_MODE (real, mode);
455 real->u.rv = value;
457 return lookup_const_double (real);
460 /* Determine whether FIXED, a CONST_FIXED, already exists in the
461 hash table. If so, return its counterpart; otherwise add it
462 to the hash table and return it. */
464 static rtx
465 lookup_const_fixed (rtx fixed)
467 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
468 if (*slot == 0)
469 *slot = fixed;
471 return (rtx) *slot;
474 /* Return a CONST_FIXED rtx for a fixed-point value specified by
475 VALUE in mode MODE. */
478 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
480 rtx fixed = rtx_alloc (CONST_FIXED);
481 PUT_MODE (fixed, mode);
483 fixed->u.fv = value;
485 return lookup_const_fixed (fixed);
488 /* Constructs a double_int from rtx CST. */
490 double_int
491 rtx_to_double_int (const_rtx cst)
493 double_int r;
495 if (CONST_INT_P (cst))
496 r = shwi_to_double_int (INTVAL (cst));
497 else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
499 r.low = CONST_DOUBLE_LOW (cst);
500 r.high = CONST_DOUBLE_HIGH (cst);
502 else
503 gcc_unreachable ();
505 return r;
509 /* Return a CONST_DOUBLE or CONST_INT for a value specified as
510 a double_int. */
513 immed_double_int_const (double_int i, enum machine_mode mode)
515 return immed_double_const (i.low, i.high, mode);
518 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
519 of ints: I0 is the low-order word and I1 is the high-order word.
520 Do not use this routine for non-integer modes; convert to
521 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
524 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
526 rtx value;
527 unsigned int i;
529 /* There are the following cases (note that there are no modes with
530 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
532 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
533 gen_int_mode.
534 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
535 the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
536 from copies of the sign bit, and sign of i0 and i1 are the same), then
537 we return a CONST_INT for i0.
538 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
539 if (mode != VOIDmode)
541 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
542 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
543 /* We can get a 0 for an error mark. */
544 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
545 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
547 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
548 return gen_int_mode (i0, mode);
550 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
553 /* If this integer fits in one word, return a CONST_INT. */
554 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
555 return GEN_INT (i0);
557 /* We use VOIDmode for integers. */
558 value = rtx_alloc (CONST_DOUBLE);
559 PUT_MODE (value, VOIDmode);
561 CONST_DOUBLE_LOW (value) = i0;
562 CONST_DOUBLE_HIGH (value) = i1;
564 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
565 XWINT (value, i) = 0;
567 return lookup_const_double (value);
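/* Illustrative sketch, not part of the original file; assumes a host
   where HOST_BITS_PER_WIDE_INT is 32, so DImode constants take case 2
   or 3 above.  The example_* helper is hypothetical.  */
#if 0
static void
example_immed_double_const (void)
{
  rtx a = immed_double_const (5, 0, DImode);	/* Case 2.  */
  rtx b = immed_double_const (0, 1, DImode);	/* Case 3.  */

  gcc_assert (CONST_INT_P (a));
  gcc_assert (CONST_DOUBLE_P (b) && GET_MODE (b) == VOIDmode);
}
#endif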
571 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
573 /* In case the MD file explicitly references the frame pointer, have
574 all such references point to the same frame pointer. This is
575 used during frame pointer elimination to distinguish the explicit
576 references to these registers from pseudos that happened to be
577 assigned to them.
579 If we have eliminated the frame pointer or arg pointer, we will
580 be using it as a normal register, for example as a spill
581 register. In such cases, we might be accessing it in a mode that
582 is not Pmode and therefore cannot use the pre-allocated rtx.
584 Also don't do this when we are making new REGs in reload, since
585 we don't want to get confused with the real pointers. */
587 if (mode == Pmode && !reload_in_progress)
589 if (regno == FRAME_POINTER_REGNUM
590 && (!reload_completed || frame_pointer_needed))
591 return frame_pointer_rtx;
592 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
593 if (regno == HARD_FRAME_POINTER_REGNUM
594 && (!reload_completed || frame_pointer_needed))
595 return hard_frame_pointer_rtx;
596 #endif
597 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
598 if (regno == ARG_POINTER_REGNUM)
599 return arg_pointer_rtx;
600 #endif
601 #ifdef RETURN_ADDRESS_POINTER_REGNUM
602 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
603 return return_address_pointer_rtx;
604 #endif
605 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
606 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
607 return pic_offset_table_rtx;
608 if (regno == STACK_POINTER_REGNUM)
609 return stack_pointer_rtx;
612 #if 0
613 /* If the per-function register table has been set up, try to re-use
614 an existing entry in that table to avoid useless generation of RTL.
616 This code is disabled for now until we can fix the various backends
617 which depend on having non-shared hard registers in some cases. Long
618 term we want to re-enable this code as it can significantly cut down
619 on the amount of useless RTL that gets generated.
621 We'll also need to fix some code that runs after reload that wants to
622 set ORIGINAL_REGNO. */
624 if (cfun
625 && cfun->emit
626 && regno_reg_rtx
627 && regno < FIRST_PSEUDO_REGISTER
628 && reg_raw_mode[regno] == mode)
629 return regno_reg_rtx[regno];
630 #endif
632 return gen_raw_REG (mode, regno);
636 gen_rtx_MEM (enum machine_mode mode, rtx addr)
638 rtx rt = gen_rtx_raw_MEM (mode, addr);
640 /* This field is not cleared by the mere allocation of the rtx, so
641 we clear it here. */
642 MEM_ATTRS (rt) = 0;
644 return rt;
647 /* Generate a MEM referring to non-trapping constant memory. */
650 gen_const_mem (enum machine_mode mode, rtx addr)
652 rtx mem = gen_rtx_MEM (mode, addr);
653 MEM_READONLY_P (mem) = 1;
654 MEM_NOTRAP_P (mem) = 1;
655 return mem;
658 /* Generate a MEM referring to fixed portions of the frame, e.g., register
659 save areas. */
662 gen_frame_mem (enum machine_mode mode, rtx addr)
664 rtx mem = gen_rtx_MEM (mode, addr);
665 MEM_NOTRAP_P (mem) = 1;
666 set_mem_alias_set (mem, get_frame_alias_set ());
667 return mem;
670 /* Generate a MEM referring to a temporary use of the stack, not part
671 of the fixed stack frame. For example, something which is pushed
672 by a target splitter. */
674 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
676 rtx mem = gen_rtx_MEM (mode, addr);
677 MEM_NOTRAP_P (mem) = 1;
678 if (!cfun->calls_alloca)
679 set_mem_alias_set (mem, get_frame_alias_set ());
680 return mem;
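/* Illustrative sketch, not part of the original file: the MEM
   generators above differ only in which attribute bits they preset.
   The example_* helper is hypothetical.  */
#if 0
static void
example_mem_generators (void)
{
  rtx addr = plus_constant (stack_pointer_rtx, 8);

  gcc_assert (MEM_NOTRAP_P (gen_frame_mem (Pmode, addr)));
  gcc_assert (MEM_READONLY_P (gen_const_mem (Pmode, addr)));
}
#endif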
683 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
684 this construct would be valid, and false otherwise. */
686 bool
687 validate_subreg (enum machine_mode omode, enum machine_mode imode,
688 const_rtx reg, unsigned int offset)
690 unsigned int isize = GET_MODE_SIZE (imode);
691 unsigned int osize = GET_MODE_SIZE (omode);
693 /* All subregs must be aligned. */
694 if (offset % osize != 0)
695 return false;
697 /* The subreg offset cannot be outside the inner object. */
698 if (offset >= isize)
699 return false;
701 /* ??? This should not be here. Temporarily continue to allow word_mode
702 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
703 Generally, backends are doing something sketchy but it'll take time to
704 fix them all. */
705 if (omode == word_mode)
707 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
708 is the culprit here, and not the backends. */
709 else if (osize >= UNITS_PER_WORD && isize >= osize)
711 /* Allow component subregs of complex and vector. Though given the below
712 extraction rules, it's not always clear what that means. */
713 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
714 && GET_MODE_INNER (imode) == omode)
716 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
717 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
718 represent this. It's questionable if this ought to be represented at
719 all -- why can't this all be hidden in post-reload splitters that make
720 arbitrary mode changes to the registers themselves. */
721 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
723 /* Subregs involving floating point modes are not allowed to
724 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
725 (subreg:SI (reg:DF) 0) isn't. */
726 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
728 if (isize != osize)
729 return false;
732 /* Paradoxical subregs must have offset zero. */
733 if (osize > isize)
734 return offset == 0;
736 /* This is a normal subreg. Verify that the offset is representable. */
738 /* For hard registers, we already have most of these rules collected in
739 subreg_offset_representable_p. */
740 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
742 unsigned int regno = REGNO (reg);
744 #ifdef CANNOT_CHANGE_MODE_CLASS
745 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
746 && GET_MODE_INNER (imode) == omode)
748 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
749 return false;
750 #endif
752 return subreg_offset_representable_p (regno, imode, offset, omode);
755 /* For pseudo registers, we want most of the same checks. Namely:
756 If the register is no larger than a word, the subreg must be lowpart.
757 If the register is larger than a word, the subreg must be the lowpart
758 of a subword. A subreg does *not* perform arbitrary bit extraction.
759 Given that we've already checked mode/offset alignment, we only have
760 to check subword subregs here. */
761 if (osize < UNITS_PER_WORD)
763 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
764 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
765 if (offset % UNITS_PER_WORD != low_off)
766 return false;
768 return true;
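/* Illustrative sketch, not part of the original file; assumes a 32-bit
   target where word_mode is SImode and UNITS_PER_WORD is 4.  The
   example_* helper is hypothetical.  */
#if 0
static void
example_validate_subreg (void)
{
  rtx r = gen_reg_rtx (DFmode);

  /* Same-size float subreg, allowed by the FLOAT_MODE_P rule.  */
  gcc_assert (validate_subreg (DImode, DFmode, r, 0));
  /* Size-changing float subreg narrower than a word: rejected.  */
  gcc_assert (!validate_subreg (HImode, DFmode, r, 0));
}
#endif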
772 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
774 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
775 return gen_rtx_raw_SUBREG (mode, reg, offset);
778 /* Generate a SUBREG representing the least-significant part of REG if MODE
779 is smaller than the mode of REG; otherwise return a paradoxical SUBREG. */
782 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
784 enum machine_mode inmode;
786 inmode = GET_MODE (reg);
787 if (inmode == VOIDmode)
788 inmode = mode;
789 return gen_rtx_SUBREG (mode, reg,
790 subreg_lowpart_offset (mode, inmode));
794 /* Create an rtvec and store within it the RTXen passed in the arguments. */
796 rtvec
797 gen_rtvec (int n, ...)
799 int i;
800 rtvec rt_val;
801 va_list p;
803 va_start (p, n);
805 /* Don't allocate an empty rtvec... */
806 if (n == 0)
807 return NULL_RTVEC;
809 rt_val = rtvec_alloc (n);
811 for (i = 0; i < n; i++)
812 rt_val->elem[i] = va_arg (p, rtx);
814 va_end (p);
815 return rt_val;
818 rtvec
819 gen_rtvec_v (int n, rtx *argp)
821 int i;
822 rtvec rt_val;
824 /* Don't allocate an empty rtvec... */
825 if (n == 0)
826 return NULL_RTVEC;
828 rt_val = rtvec_alloc (n);
830 for (i = 0; i < n; i++)
831 rt_val->elem[i] = *argp++;
833 return rt_val;
836 /* Return the number of bytes between the start of an OUTER_MODE
837 in-memory value and the start of an INNER_MODE in-memory value,
838 given that the former is a lowpart of the latter. It may be a
839 paradoxical lowpart, in which case the offset will be negative
840 on big-endian targets. */
843 byte_lowpart_offset (enum machine_mode outer_mode,
844 enum machine_mode inner_mode)
846 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
847 return subreg_lowpart_offset (outer_mode, inner_mode);
848 else
849 return -subreg_lowpart_offset (inner_mode, outer_mode);
852 /* Generate a REG rtx for a new pseudo register of mode MODE.
853 This pseudo is assigned the next sequential register number. */
856 gen_reg_rtx (enum machine_mode mode)
858 rtx val;
859 unsigned int align = GET_MODE_ALIGNMENT (mode);
861 gcc_assert (can_create_pseudo_p ());
863 /* If a virtual register with bigger mode alignment is generated,
864 increase the stack alignment estimate, because it might be spilled
865 to the stack later. */
866 if (SUPPORTS_STACK_ALIGNMENT
867 && crtl->stack_alignment_estimated < align
868 && !crtl->stack_realign_processed)
870 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
871 if (crtl->stack_alignment_estimated < min_align)
872 crtl->stack_alignment_estimated = min_align;
875 if (generating_concat_p
876 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
877 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
879 /* For complex modes, don't make a single pseudo.
880 Instead, make a CONCAT of two pseudos.
881 This allows noncontiguous allocation of the real and imaginary parts,
882 which makes much better code. Besides, allocating DCmode
883 pseudos overstrains reload on some machines like the 386. */
884 rtx realpart, imagpart;
885 enum machine_mode partmode = GET_MODE_INNER (mode);
887 realpart = gen_reg_rtx (partmode);
888 imagpart = gen_reg_rtx (partmode);
889 return gen_rtx_CONCAT (mode, realpart, imagpart);
892 /* Make sure regno_pointer_align and regno_reg_rtx are large
893 enough to have an element for this pseudo reg number. */
895 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
897 int old_size = crtl->emit.regno_pointer_align_length;
898 char *tmp;
899 rtx *new1;
901 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
902 memset (tmp + old_size, 0, old_size);
903 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
905 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
906 memset (new1 + old_size, 0, old_size * sizeof (rtx));
907 regno_reg_rtx = new1;
909 crtl->emit.regno_pointer_align_length = old_size * 2;
912 val = gen_raw_REG (mode, reg_rtx_no);
913 regno_reg_rtx[reg_rtx_no++] = val;
914 return val;
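/* Illustrative sketch, not part of the original file: during expansion,
   while generating_concat_p is set, a complex-mode pseudo comes back as
   a CONCAT of two scalar pseudos.  The example_* helper is
   hypothetical.  */
#if 0
static void
example_gen_reg_rtx_complex (void)
{
  rtx c = gen_reg_rtx (DCmode);

  gcc_assert (GET_CODE (c) == CONCAT
	      && GET_MODE (XEXP (c, 0)) == DFmode);
}
#endif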
917 /* Update NEW_RTX with the same attributes as REG, but with OFFSET added
918 to the REG_OFFSET. */
920 static void
921 update_reg_offset (rtx new_rtx, rtx reg, int offset)
923 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
924 REG_OFFSET (reg) + offset);
927 /* Generate a register with the same attributes as REG, but with OFFSET
928 added to the REG_OFFSET. */
931 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
932 int offset)
934 rtx new_rtx = gen_rtx_REG (mode, regno);
936 update_reg_offset (new_rtx, reg, offset);
937 return new_rtx;
940 /* Generate a new pseudo-register with the same attributes as REG, but
941 with OFFSET added to the REG_OFFSET. */
944 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
946 rtx new_rtx = gen_reg_rtx (mode);
948 update_reg_offset (new_rtx, reg, offset);
949 return new_rtx;
952 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
953 new register is a (possibly paradoxical) lowpart of the old one. */
955 void
956 adjust_reg_mode (rtx reg, enum machine_mode mode)
958 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
959 PUT_MODE (reg, mode);
962 /* Copy REG's attributes from X, if X has any attributes. If REG and X
963 have different modes, REG is a (possibly paradoxical) lowpart of X. */
965 void
966 set_reg_attrs_from_value (rtx reg, rtx x)
968 int offset;
970 /* Hard registers can be reused for multiple purposes within the same
971 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
972 on them is wrong. */
973 if (HARD_REGISTER_P (reg))
974 return;
976 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
977 if (MEM_P (x))
979 if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
980 REG_ATTRS (reg)
981 = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
982 if (MEM_POINTER (x))
983 mark_reg_pointer (reg, 0);
985 else if (REG_P (x))
987 if (REG_ATTRS (x))
988 update_reg_offset (reg, x, offset);
989 if (REG_POINTER (x))
990 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
994 /* Generate a REG rtx for a new pseudo register, copying the mode
995 and attributes from X. */
998 gen_reg_rtx_and_attrs (rtx x)
1000 rtx reg = gen_reg_rtx (GET_MODE (x));
1001 set_reg_attrs_from_value (reg, x);
1002 return reg;
1005 /* Set the register attributes for registers contained in PARM_RTX.
1006 Use needed values from memory attributes of MEM. */
1008 void
1009 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1011 if (REG_P (parm_rtx))
1012 set_reg_attrs_from_value (parm_rtx, mem);
1013 else if (GET_CODE (parm_rtx) == PARALLEL)
1015 /* Check for a NULL entry in the first slot, used to indicate that the
1016 parameter goes both on the stack and in registers. */
1017 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1018 for (; i < XVECLEN (parm_rtx, 0); i++)
1020 rtx x = XVECEXP (parm_rtx, 0, i);
1021 if (REG_P (XEXP (x, 0)))
1022 REG_ATTRS (XEXP (x, 0))
1023 = get_reg_attrs (MEM_EXPR (mem),
1024 INTVAL (XEXP (x, 1)));
1029 /* Set the REG_ATTRS for registers in value X, given that X represents
1030 decl T. */
1032 void
1033 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1035 if (GET_CODE (x) == SUBREG)
1037 gcc_assert (subreg_lowpart_p (x));
1038 x = SUBREG_REG (x);
1040 if (REG_P (x))
1041 REG_ATTRS (x)
1042 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1043 DECL_MODE (t)));
1044 if (GET_CODE (x) == CONCAT)
1046 if (REG_P (XEXP (x, 0)))
1047 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1048 if (REG_P (XEXP (x, 1)))
1049 REG_ATTRS (XEXP (x, 1))
1050 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1052 if (GET_CODE (x) == PARALLEL)
1054 int i, start;
1056 /* Check for a NULL entry, used to indicate that the parameter goes
1057 both on the stack and in registers. */
1058 if (XEXP (XVECEXP (x, 0, 0), 0))
1059 start = 0;
1060 else
1061 start = 1;
1063 for (i = start; i < XVECLEN (x, 0); i++)
1065 rtx y = XVECEXP (x, 0, i);
1066 if (REG_P (XEXP (y, 0)))
1067 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1072 /* Assign the RTX X to declaration T. */
1074 void
1075 set_decl_rtl (tree t, rtx x)
1077 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1078 if (x)
1079 set_reg_attrs_for_decl_rtl (t, x);
1082 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1083 if the ABI requires the parameter to be passed by reference. */
1085 void
1086 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1088 DECL_INCOMING_RTL (t) = x;
1089 if (x && !by_reference_p)
1090 set_reg_attrs_for_decl_rtl (t, x);
1093 /* Identify REG (which may be a CONCAT) as a user register. */
1095 void
1096 mark_user_reg (rtx reg)
1098 if (GET_CODE (reg) == CONCAT)
1100 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1101 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1103 else
1105 gcc_assert (REG_P (reg));
1106 REG_USERVAR_P (reg) = 1;
1110 /* Identify REG as a probable pointer register and show its alignment
1111 as ALIGN, if nonzero. */
1113 void
1114 mark_reg_pointer (rtx reg, int align)
1116 if (! REG_POINTER (reg))
1118 REG_POINTER (reg) = 1;
1120 if (align)
1121 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1123 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1124 /* We can no longer be sure just how aligned this pointer is. */
1125 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1128 /* Return 1 plus the largest pseudo reg number used in the current function. */
1131 max_reg_num (void)
1133 return reg_rtx_no;
1136 /* Return 1 + the largest label number used so far in the current function. */
1139 max_label_num (void)
1141 return label_num;
1144 /* Return first label number used in this function (if any were used). */
1147 get_first_label_num (void)
1149 return first_label_num;
1152 /* If the rtx for label was created during the expansion of a nested
1153 function, then first_label_num won't include this label number.
1154 Fix this now so that array indices work later. */
1156 void
1157 maybe_set_first_label_num (rtx x)
1159 if (CODE_LABEL_NUMBER (x) < first_label_num)
1160 first_label_num = CODE_LABEL_NUMBER (x);
1163 /* Return a value representing some low-order bits of X, where the number
1164 of low-order bits is given by MODE. Note that no conversion is done
1165 between floating-point and fixed-point values, rather, the bit
1166 representation is returned.
1168 This function handles the cases in common between gen_lowpart, below,
1169 and two variants in cse.c and combine.c. These are the cases that can
1170 be safely handled at all points in the compilation.
1172 If this is not a case we can handle, return 0. */
1175 gen_lowpart_common (enum machine_mode mode, rtx x)
1177 int msize = GET_MODE_SIZE (mode);
1178 int xsize;
1179 int offset = 0;
1180 enum machine_mode innermode;
1182 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1183 so we have to make one up. Yuk. */
1184 innermode = GET_MODE (x);
1185 if (CONST_INT_P (x)
1186 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1187 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1188 else if (innermode == VOIDmode)
1189 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1191 xsize = GET_MODE_SIZE (innermode);
1193 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1195 if (innermode == mode)
1196 return x;
1198 /* MODE must occupy no more words than the mode of X. */
1199 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1200 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1201 return 0;
1203 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1204 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1205 return 0;
1207 offset = subreg_lowpart_offset (mode, innermode);
1209 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1210 && (GET_MODE_CLASS (mode) == MODE_INT
1211 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1213 /* If we are getting the low-order part of something that has been
1214 sign- or zero-extended, we can either just use the object being
1215 extended or make a narrower extension. If we want an even smaller
1216 piece than the size of the object being extended, call ourselves
1217 recursively.
1219 This case is used mostly by combine and cse. */
1221 if (GET_MODE (XEXP (x, 0)) == mode)
1222 return XEXP (x, 0);
1223 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1224 return gen_lowpart_common (mode, XEXP (x, 0));
1225 else if (msize < xsize)
1226 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1228 else if (GET_CODE (x) == SUBREG || REG_P (x)
1229 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1230 || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
1231 return simplify_gen_subreg (mode, x, innermode, offset);
1233 /* Otherwise, we can't do this. */
1234 return 0;
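/* Illustrative sketch, not part of the original file: taking the SImode
   lowpart of a DImode sign-extension simply strips the extension, per
   the ZERO_EXTEND/SIGN_EXTEND case above.  The example_* helper is
   hypothetical.  */
#if 0
static void
example_gen_lowpart_common (void)
{
  rtx r = gen_reg_rtx (SImode);
  rtx x = gen_rtx_SIGN_EXTEND (DImode, r);

  gcc_assert (gen_lowpart_common (SImode, x) == r);
}
#endif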
1238 gen_highpart (enum machine_mode mode, rtx x)
1240 unsigned int msize = GET_MODE_SIZE (mode);
1241 rtx result;
1243 /* This case loses if X is a subreg. To catch bugs early,
1244 complain if an invalid MODE is used even in other cases. */
1245 gcc_assert (msize <= UNITS_PER_WORD
1246 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1248 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1249 subreg_highpart_offset (mode, GET_MODE (x)));
1250 gcc_assert (result);
1252 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1253 the target if we have a MEM. gen_highpart must return a valid operand,
1254 emitting code if necessary to do so. */
1255 if (MEM_P (result))
1257 result = validize_mem (result);
1258 gcc_assert (result);
1261 return result;
1264 /* Like gen_highpart, but accept the mode of EXP's operand in case EXP can
1265 be a VOIDmode constant. */
1267 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1269 if (GET_MODE (exp) != VOIDmode)
1271 gcc_assert (GET_MODE (exp) == innermode);
1272 return gen_highpart (outermode, exp);
1274 return simplify_gen_subreg (outermode, exp, innermode,
1275 subreg_highpart_offset (outermode, innermode));
1278 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1280 unsigned int
1281 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1283 unsigned int offset = 0;
1284 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1286 if (difference > 0)
1288 if (WORDS_BIG_ENDIAN)
1289 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1290 if (BYTES_BIG_ENDIAN)
1291 offset += difference % UNITS_PER_WORD;
1294 return offset;
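/* Illustrative worked example, not part of the original file; assumes
   UNITS_PER_WORD is 4.  For the SImode lowpart of a DImode value the
   difference is 4 bytes, a whole word, so only WORDS_BIG_ENDIAN
   matters.  The example_* helper is hypothetical.  */
#if 0
static void
example_subreg_lowpart_offset (void)
{
  gcc_assert (subreg_lowpart_offset (SImode, DImode)
	      == (WORDS_BIG_ENDIAN ? 4 : 0));
}
#endif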
1297 /* Return the offset in bytes needed to get the OUTERMODE high part
1298 of a value in mode INNERMODE stored in memory in target format. */
1299 unsigned int
1300 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1302 unsigned int offset = 0;
1303 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1305 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1307 if (difference > 0)
1309 if (! WORDS_BIG_ENDIAN)
1310 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1311 if (! BYTES_BIG_ENDIAN)
1312 offset += difference % UNITS_PER_WORD;
1315 return offset;
1318 /* Return 1 iff X, assumed to be a SUBREG,
1319 refers to the least significant part of its containing reg.
1320 If X is not a SUBREG, always return 1 (it is its own low part!). */
1323 subreg_lowpart_p (const_rtx x)
1325 if (GET_CODE (x) != SUBREG)
1326 return 1;
1327 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1328 return 0;
1330 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1331 == SUBREG_BYTE (x));
1334 /* Return subword OFFSET of operand OP.
1335 The word number, OFFSET, is interpreted as the word number starting
1336 at the low-order address. OFFSET 0 is the low-order word if not
1337 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1339 If we cannot extract the required word, we return zero. Otherwise,
1340 an rtx corresponding to the requested word will be returned.
1342 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1343 reload has completed, a valid address will always be returned. After
1344 reload, if a valid address cannot be returned, we return zero.
1346 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1347 it is the responsibility of the caller.
1349 MODE is the mode of OP in case it is a CONST_INT.
1351 ??? This is still rather broken for some cases. The problem for the
1352 moment is that all callers of this thing provide no 'goal mode' to
1353 tell us to work with. This exists because all callers were written
1354 in a word-based SUBREG world.
1355 Now use of this function can be deprecated by simplify_subreg in most
1356 cases.
1360 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1362 if (mode == VOIDmode)
1363 mode = GET_MODE (op);
1365 gcc_assert (mode != VOIDmode);
1367 /* If OP is narrower than a word, fail. */
1368 if (mode != BLKmode
1369 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1370 return 0;
1372 /* If we want a word outside OP, return zero. */
1373 if (mode != BLKmode
1374 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1375 return const0_rtx;
1377 /* Form a new MEM at the requested address. */
1378 if (MEM_P (op))
1380 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1382 if (! validate_address)
1383 return new_rtx;
1385 else if (reload_completed)
1387 if (! strict_memory_address_addr_space_p (word_mode,
1388 XEXP (new_rtx, 0),
1389 MEM_ADDR_SPACE (op)))
1390 return 0;
1392 else
1393 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1396 /* Rest can be handled by simplify_subreg. */
1397 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
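/* Illustrative sketch, not part of the original file: for a DImode
   pseudo, word 1 comes back as a word_mode SUBREG via
   simplify_gen_subreg.  The example_* helper is hypothetical.  */
#if 0
static void
example_operand_subword (void)
{
  rtx r = gen_reg_rtx (DImode);
  rtx w = operand_subword (r, 1, 0, DImode);

  gcc_assert (w != 0 && GET_MODE (w) == word_mode);
}
#endif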
1400 /* Similar to `operand_subword', but never return 0. If we can't
1401 extract the required subword, put OP into a register and try again.
1402 The second attempt must succeed. We always validate the address in
1403 this case.
1405 MODE is the mode of OP, in case it is CONST_INT. */
1408 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1410 rtx result = operand_subword (op, offset, 1, mode);
1412 if (result)
1413 return result;
1415 if (mode != BLKmode && mode != VOIDmode)
1417 /* If this is a register which cannot be accessed by words, copy it
1418 to a pseudo register. */
1419 if (REG_P (op))
1420 op = copy_to_reg (op);
1421 else
1422 op = force_reg (mode, op);
1425 result = operand_subword (op, offset, 1, mode);
1426 gcc_assert (result);
1428 return result;
1431 /* Returns 1 if both MEM_EXPRs can be considered equal,
1432 and 0 otherwise. */
1435 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1437 if (expr1 == expr2)
1438 return 1;
1440 if (! expr1 || ! expr2)
1441 return 0;
1443 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1444 return 0;
1446 return operand_equal_p (expr1, expr2, 0);
1449 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1450 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1451 -1 if not known. */
1454 get_mem_align_offset (rtx mem, unsigned int align)
1456 tree expr;
1457 unsigned HOST_WIDE_INT offset;
1459 /* This function can't use
1460 if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
1461 || !CONST_INT_P (MEM_OFFSET (mem))
1462 || (get_object_alignment (MEM_EXPR (mem), MEM_ALIGN (mem), align)
1463 < align))
1464 return -1;
1465 else
1466 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
1467 for two reasons:
1468 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1469 for <variable>. get_inner_reference doesn't handle it and
1470 even if it did, the alignment in that case needs to be determined
1471 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1472 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1473 isn't sufficiently aligned, the object it is in might be. */
1474 gcc_assert (MEM_P (mem));
1475 expr = MEM_EXPR (mem);
1476 if (expr == NULL_TREE
1477 || MEM_OFFSET (mem) == NULL_RTX
1478 || !CONST_INT_P (MEM_OFFSET (mem)))
1479 return -1;
1481 offset = INTVAL (MEM_OFFSET (mem));
1482 if (DECL_P (expr))
1484 if (DECL_ALIGN (expr) < align)
1485 return -1;
1487 else if (INDIRECT_REF_P (expr))
1489 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1490 return -1;
1492 else if (TREE_CODE (expr) == COMPONENT_REF)
1494 while (1)
1496 tree inner = TREE_OPERAND (expr, 0);
1497 tree field = TREE_OPERAND (expr, 1);
1498 tree byte_offset = component_ref_field_offset (expr);
1499 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1501 if (!byte_offset
1502 || !host_integerp (byte_offset, 1)
1503 || !host_integerp (bit_offset, 1))
1504 return -1;
1506 offset += tree_low_cst (byte_offset, 1);
1507 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1509 if (inner == NULL_TREE)
1511 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1512 < (unsigned int) align)
1513 return -1;
1514 break;
1516 else if (DECL_P (inner))
1518 if (DECL_ALIGN (inner) < align)
1519 return -1;
1520 break;
1522 else if (TREE_CODE (inner) != COMPONENT_REF)
1523 return -1;
1524 expr = inner;
1527 else
1528 return -1;
1530 return offset & ((align / BITS_PER_UNIT) - 1);
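/* Illustrative worked example, not part of the original file: for a MEM
   whose MEM_EXPR is a decl aligned to at least 32 bits and whose
   MEM_OFFSET is 6, get_mem_align_offset (mem, 32) returns 6 & 3 == 2,
   i.e. the address is known to be 2 bytes past a 32-bit boundary.  */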
1533 /* Given REF (a MEM) and T, either the type of REF or the expression
1534 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1535 if we are making a new object of this type. BITPOS is nonzero if
1536 there is an offset outstanding on T that will be applied later. */
1538 void
1539 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1540 HOST_WIDE_INT bitpos)
1542 alias_set_type alias = MEM_ALIAS_SET (ref);
1543 tree expr = MEM_EXPR (ref);
1544 rtx offset = MEM_OFFSET (ref);
1545 rtx size = MEM_SIZE (ref);
1546 unsigned int align = MEM_ALIGN (ref);
1547 HOST_WIDE_INT apply_bitpos = 0;
1548 tree type;
1550 /* It can happen that type_for_mode was given a mode for which there
1551 is no language-level type, in which case it returns NULL, which
1552 we can see here. */
1553 if (t == NULL_TREE)
1554 return;
1556 type = TYPE_P (t) ? t : TREE_TYPE (t);
1557 if (type == error_mark_node)
1558 return;
1560 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1561 wrong answer, as it assumes that DECL_RTL already has the right alias
1562 info. Callers should not set DECL_RTL until after the call to
1563 set_mem_attributes. */
1564 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1566 /* Get the alias set from the expression or type (perhaps using a
1567 front-end routine) and use it. */
1568 alias = get_alias_set (t);
1570 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1571 MEM_IN_STRUCT_P (ref)
1572 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
1573 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1575 /* If we are making an object of this type, or if this is a DECL, we know
1576 that it is a scalar if the type is not an aggregate. */
1577 if ((objectp || DECL_P (t))
1578 && ! AGGREGATE_TYPE_P (type)
1579 && TREE_CODE (type) != COMPLEX_TYPE)
1580 MEM_SCALAR_P (ref) = 1;
1582 /* We can set the alignment from the type if we are making an object,
1583 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1584 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1585 align = MAX (align, TYPE_ALIGN (type));
1587 else if (TREE_CODE (t) == MEM_REF)
1589 tree op0 = TREE_OPERAND (t, 0);
1590 unsigned HOST_WIDE_INT aoff = BITS_PER_UNIT;
1591 if (host_integerp (TREE_OPERAND (t, 1), 1))
1593 unsigned HOST_WIDE_INT ioff = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1594 aoff = (ioff & -ioff) * BITS_PER_UNIT;
1596 if (TREE_CODE (op0) == ADDR_EXPR && DECL_P (TREE_OPERAND (op0, 0)))
1597 align = MAX (align, DECL_ALIGN (TREE_OPERAND (op0, 0)));
1598 else if (TREE_CODE (op0) == ADDR_EXPR
1599 && CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1601 align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
1602 #ifdef CONSTANT_ALIGNMENT
1603 align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0), align);
1604 #endif
1606 else
1607 /* ??? This isn't fully correct, we can't set the alignment from the
1608 type in all cases. */
1609 align = MAX (align, TYPE_ALIGN (type));
1611 if (!integer_zerop (TREE_OPERAND (t, 1)) && aoff < align)
1612 align = aoff;
1615 else if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1617 if (integer_zerop (TREE_OPERAND (t, 1)))
1618 /* We don't know anything about the alignment. */
1619 align = BITS_PER_UNIT;
1620 else
1621 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1624 /* If the size is known, we can set that. */
1625 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1626 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1628 /* If T is not a type, we may be able to deduce some more information about
1629 the expression. */
1630 if (! TYPE_P (t))
1632 tree base;
1633 bool align_computed = false;
1635 if (TREE_THIS_VOLATILE (t))
1636 MEM_VOLATILE_P (ref) = 1;
1638 /* Now remove any conversions: they don't change what the underlying
1639 object is. Likewise for SAVE_EXPR. */
1640 while (CONVERT_EXPR_P (t)
1641 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1642 || TREE_CODE (t) == SAVE_EXPR)
1643 t = TREE_OPERAND (t, 0);
1645 /* We may look through structure-like accesses for the purposes of
1646 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1647 base = t;
1648 while (TREE_CODE (base) == COMPONENT_REF
1649 || TREE_CODE (base) == REALPART_EXPR
1650 || TREE_CODE (base) == IMAGPART_EXPR
1651 || TREE_CODE (base) == BIT_FIELD_REF)
1652 base = TREE_OPERAND (base, 0);
1654 if (TREE_CODE (base) == MEM_REF
1655 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1656 base = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1657 if (DECL_P (base))
1659 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1660 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1661 else
1662 MEM_NOTRAP_P (ref) = 1;
1664 else
1665 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1667 base = get_base_address (base);
1668 if (base && DECL_P (base)
1669 && TREE_READONLY (base)
1670 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1671 MEM_READONLY_P (ref) = 1;
1673 /* If this expression uses its parent's alias set, mark it such
1674 that we won't change it. */
1675 if (component_uses_parent_alias_set (t))
1676 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1678 /* If this is a decl, set the attributes of the MEM from it. */
1679 if (DECL_P (t))
1681 expr = t;
1682 offset = const0_rtx;
1683 apply_bitpos = bitpos;
1684 size = (DECL_SIZE_UNIT (t)
1685 && host_integerp (DECL_SIZE_UNIT (t), 1)
1686 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1687 align = DECL_ALIGN (t);
1688 align_computed = true;
1691 /* If this is a constant, we know the alignment. */
1692 else if (CONSTANT_CLASS_P (t))
1694 align = TYPE_ALIGN (type);
1695 #ifdef CONSTANT_ALIGNMENT
1696 align = CONSTANT_ALIGNMENT (t, align);
1697 #endif
1698 align_computed = true;
1701 /* If this is a field reference and not a bit-field, record it. */
1702 /* ??? There is some information that can be gleaned from bit-fields,
1703 such as the word offset in the structure that might be modified.
1704 But skip it for now. */
1705 else if (TREE_CODE (t) == COMPONENT_REF
1706 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1708 expr = t;
1709 offset = const0_rtx;
1710 apply_bitpos = bitpos;
1711 /* ??? Any reason the field size would be different than
1712 the size we got from the type? */
1715 /* If this is an array reference, look for an outer field reference. */
1716 else if (TREE_CODE (t) == ARRAY_REF)
1718 tree off_tree = size_zero_node;
1719 /* We can't modify t, because we use it at the end of the
1720 function. */
1721 tree t2 = t;
1725 tree index = TREE_OPERAND (t2, 1);
1726 tree low_bound = array_ref_low_bound (t2);
1727 tree unit_size = array_ref_element_size (t2);
1729 /* We assume all arrays have sizes that are a multiple of a byte.
1730 First subtract the lower bound, if any, in the type of the
1731 index, then convert to sizetype and multiply by the size of
1732 the array element. */
1733 if (! integer_zerop (low_bound))
1734 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1735 index, low_bound);
1737 off_tree = size_binop (PLUS_EXPR,
1738 size_binop (MULT_EXPR,
1739 fold_convert (sizetype,
1740 index),
1741 unit_size),
1742 off_tree);
1743 t2 = TREE_OPERAND (t2, 0);
1745 while (TREE_CODE (t2) == ARRAY_REF);
1747 if (DECL_P (t2))
1749 expr = t2;
1750 offset = NULL;
1751 if (host_integerp (off_tree, 1))
1753 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1754 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1755 align = DECL_ALIGN (t2);
1756 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1757 align = aoff;
1758 align_computed = true;
1759 offset = GEN_INT (ioff);
1760 apply_bitpos = bitpos;
1763 else if (TREE_CODE (t2) == COMPONENT_REF)
1765 expr = t2;
1766 offset = NULL;
1767 if (host_integerp (off_tree, 1))
1769 offset = GEN_INT (tree_low_cst (off_tree, 1));
1770 apply_bitpos = bitpos;
1772 /* ??? Any reason the field size would be different than
1773 the size we got from the type? */
1776 /* If this is an indirect reference, record it. */
1777 else if (TREE_CODE (t) == MEM_REF
1778 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1780 expr = t;
1781 offset = const0_rtx;
1782 apply_bitpos = bitpos;
1795 if (!align_computed && !INDIRECT_REF_P (t))
1797 unsigned int obj_align
1798 = get_object_alignment (t, align, BIGGEST_ALIGNMENT);
1799 align = MAX (align, obj_align);
1803 /* If we modified OFFSET based on T, then subtract the outstanding
1804 bit position offset. Similarly, increase the size of the accessed
1805 object to contain the negative offset. */
1806 if (apply_bitpos)
1808 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1809 if (size)
1810 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1813 /* Now set the attributes we computed above. */
1814 MEM_ATTRS (ref)
1815 = get_mem_attrs (alias, expr, offset, size, align,
1816 TYPE_ADDR_SPACE (type), GET_MODE (ref));
1818 /* If this is already known to be a scalar or aggregate, we are done. */
1819 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1820 return;
1822 /* If it is a reference into an aggregate, this is part of an aggregate.
1823 Otherwise we don't know. */
1824 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1825 || TREE_CODE (t) == ARRAY_RANGE_REF
1826 || TREE_CODE (t) == BIT_FIELD_REF)
1827 MEM_IN_STRUCT_P (ref) = 1;
1830 void
1831 set_mem_attributes (rtx ref, tree t, int objectp)
1833 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1836 /* Set the alias set of MEM to SET. */
1838 void
1839 set_mem_alias_set (rtx mem, alias_set_type set)
1841 #ifdef ENABLE_CHECKING
1842 /* If the new and old alias sets don't conflict, something is wrong. */
1843 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1844 #endif
1846 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1847 MEM_SIZE (mem), MEM_ALIGN (mem),
1848 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1851 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1853 void
1854 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1856 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1857 MEM_OFFSET (mem), MEM_SIZE (mem),
1858 MEM_ALIGN (mem), addrspace, GET_MODE (mem));
1861 /* Set the alignment of MEM to ALIGN bits. */
1863 void
1864 set_mem_align (rtx mem, unsigned int align)
1866 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1867 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1868 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1871 /* Set the expr for MEM to EXPR. */
1873 void
1874 set_mem_expr (rtx mem, tree expr)
1876 MEM_ATTRS (mem)
1877 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1878 MEM_SIZE (mem), MEM_ALIGN (mem),
1879 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1882 /* Set the offset of MEM to OFFSET. */
1884 void
1885 set_mem_offset (rtx mem, rtx offset)
1887 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1888 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1889 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1892 /* Set the size of MEM to SIZE. */
1894 void
1895 set_mem_size (rtx mem, rtx size)
1897 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1898 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1899 MEM_ADDR_SPACE (mem), GET_MODE (mem));
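/* Illustrative sketch, not part of the original file: because mem_attrs
   are shared and immutable, each setter above rebuilds the attribute
   block through get_mem_attrs instead of mutating it in place.  The
   example_* helper is hypothetical.  */
#if 0
static void
example_set_mem_align (void)
{
  rtx m = gen_rtx_MEM (SImode, stack_pointer_rtx);
  mem_attrs *before = MEM_ATTRS (m);	/* Null: all defaults.  */

  set_mem_align (m, 64);
  gcc_assert (MEM_ATTRS (m) != before);
}
#endif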
1902 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1903 and its address changed to ADDR. (VOIDmode means don't change the mode.
1904 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1905 returned memory location is required to be valid. The memory
1906 attributes are not changed. */
1908 static rtx
1909 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1911 addr_space_t as;
1912 rtx new_rtx;
1914 gcc_assert (MEM_P (memref));
1915 as = MEM_ADDR_SPACE (memref);
1916 if (mode == VOIDmode)
1917 mode = GET_MODE (memref);
1918 if (addr == 0)
1919 addr = XEXP (memref, 0);
1920 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1921 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1922 return memref;
1924 if (validate)
1926 if (reload_in_progress || reload_completed)
1927 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1928 else
1929 addr = memory_address_addr_space (mode, addr, as);
1932 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1933 return memref;
1935 new_rtx = gen_rtx_MEM (mode, addr);
1936 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1937 return new_rtx;
1940 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1941 way we are changing MEMREF, so we only preserve the alias set. */
1943 rtx
1944 change_address (rtx memref, enum machine_mode mode, rtx addr)
1946 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1947 enum machine_mode mmode = GET_MODE (new_rtx);
1948 unsigned int align;
1950 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1951 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1953 /* If there are no changes, just return the original memory reference. */
1954 if (new_rtx == memref)
1956 if (MEM_ATTRS (memref) == 0
1957 || (MEM_EXPR (memref) == NULL
1958 && MEM_OFFSET (memref) == NULL
1959 && MEM_SIZE (memref) == size
1960 && MEM_ALIGN (memref) == align))
1961 return new_rtx;
1963 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1964 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1967 MEM_ATTRS (new_rtx)
1968 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
1969 MEM_ADDR_SPACE (memref), mmode);
1971 return new_rtx;
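/* Sketch (hypothetical helper): reinterpret the same slot in a new mode,
   e.g. to move an SFmode value with integer loads and stores.  Passing
   NULL_RTX keeps the address and merely validates it for SImode.  */
static rtx ATTRIBUTE_UNUSED
example_change_address (rtx float_mem)
{
  return change_address (float_mem, SImode, NULL_RTX);
}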
1974 /* Return a memory reference like MEMREF, but with its mode changed
1975 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1976 nonzero, the memory address is forced to be valid.
1977 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1978 and caller is responsible for adjusting MEMREF base register. */
1980 rtx
1981 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1982 int validate, int adjust)
1984 rtx addr = XEXP (memref, 0);
1985 rtx new_rtx;
1986 rtx memoffset = MEM_OFFSET (memref);
1987 rtx size = 0;
1988 unsigned int memalign = MEM_ALIGN (memref);
1989 addr_space_t as = MEM_ADDR_SPACE (memref);
1990 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
1991 int pbits;
1993 /* If there are no changes, just return the original memory reference. */
1994 if (mode == GET_MODE (memref) && !offset
1995 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1996 return memref;
1998 /* ??? Prefer to create garbage instead of creating shared rtl.
1999 This may happen even if offset is nonzero -- consider
2000 (plus (plus reg reg) const_int) -- so do this always. */
2001 addr = copy_rtx (addr);
2003 /* Convert a possibly large offset to a signed value within the
2004 range of the target address space. */
2005 pbits = GET_MODE_BITSIZE (address_mode);
2006 if (HOST_BITS_PER_WIDE_INT > pbits)
2008 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2009 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2010 >> shift);
2013 if (adjust)
2015 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2016 object, we can merge it into the LO_SUM. */
2017 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2018 && offset >= 0
2019 && (unsigned HOST_WIDE_INT) offset
2020 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2021 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2022 plus_constant (XEXP (addr, 1), offset));
2023 else
2024 addr = plus_constant (addr, offset);
2027 new_rtx = change_address_1 (memref, mode, addr, validate);
2029 /* If the address is a REG, change_address_1 rightfully returns memref,
2030 but this would destroy memref's MEM_ATTRS. */
2031 if (new_rtx == memref && offset != 0)
2032 new_rtx = copy_rtx (new_rtx);
2034 /* Compute the new values of the memory attributes due to this adjustment.
2035 We add the offsets and update the alignment. */
2036 if (memoffset)
2037 memoffset = GEN_INT (offset + INTVAL (memoffset));
2039 /* Compute the new alignment by taking the MIN of the alignment and the
2040 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2041 is zero. */
2042 if (offset != 0)
2043 memalign
2044 = MIN (memalign,
2045 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2047 /* We can compute the size in a number of ways. */
2048 if (GET_MODE (new_rtx) != BLKmode)
2049 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2050 else if (MEM_SIZE (memref))
2051 size = plus_constant (MEM_SIZE (memref), -offset);
2053 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2054 memoffset, size, memalign, as,
2055 GET_MODE (new_rtx));
2057 /* At some point, we should validate that this offset is within the object,
2058 if all the appropriate values are known. */
2059 return new_rtx;
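/* Sketch (hypothetical helper): pull one SImode word out of a DImode MEM
   via the adjust_address macro, which wraps adjust_address_1 with VALIDATE
   and ADJUST both nonzero.  Which word sits at which offset is
   target-dependent; UNITS_PER_WORD is only right for a 32-bit target.  */
static rtx ATTRIBUTE_UNUSED
example_second_word (rtx dimode_mem)
{
  return adjust_address (dimode_mem, SImode, UNITS_PER_WORD);
}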
2062 /* Return a memory reference like MEMREF, but with its mode changed
2063 to MODE and its address changed to ADDR, which is assumed to be
2064 MEMREF offset by OFFSET bytes. If VALIDATE is
2065 nonzero, the memory address is forced to be valid. */
2067 rtx
2068 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2069 HOST_WIDE_INT offset, int validate)
2071 memref = change_address_1 (memref, VOIDmode, addr, validate);
2072 return adjust_address_1 (memref, mode, offset, validate, 0);
2075 /* Return a memory reference like MEMREF, but whose address is changed by
2076 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2077 known to be in OFFSET (possibly 1). */
2079 rtx
2080 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2082 rtx new_rtx, addr = XEXP (memref, 0);
2083 addr_space_t as = MEM_ADDR_SPACE (memref);
2084 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2086 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2088 /* At this point we don't know _why_ the address is invalid. It
2089 could have secondary memory references, multiplies or anything.
2091 However, if we did go and rearrange things, we can wind up not
2092 being able to recognize the magic around pic_offset_table_rtx.
2093 This stuff is fragile, and is yet another example of why it is
2094 bad to expose PIC machinery too early. */
2095 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
2096 && GET_CODE (addr) == PLUS
2097 && XEXP (addr, 0) == pic_offset_table_rtx)
2099 addr = force_reg (GET_MODE (addr), addr);
2100 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2103 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2104 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2106 /* If there are no changes, just return the original memory reference. */
2107 if (new_rtx == memref)
2108 return new_rtx;
2110 /* Update the alignment to reflect the offset. Reset the offset, which
2111 we don't know. */
2112 MEM_ATTRS (new_rtx)
2113 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2114 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2115 as, GET_MODE (new_rtx));
2116 return new_rtx;
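/* Sketch (hypothetical helper): index a BLKmode buffer by a run-time byte
   count IDX.  POW2 == 4 asserts the caller knows IDX is a multiple of 4,
   so 32-bit alignment can be retained in the new attributes.  */
static rtx ATTRIBUTE_UNUSED
example_offset_address (rtx buf_mem, rtx idx)
{
  return offset_address (buf_mem, idx, 4);
}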
2119 /* Return a memory reference like MEMREF, but with its address changed to
2120 ADDR. The caller is asserting that the actual piece of memory pointed
2121 to is the same, just the form of the address is being changed, such as
2122 by putting something into a register. */
2124 rtx
2125 replace_equiv_address (rtx memref, rtx addr)
2127 /* change_address_1 copies the memory attribute structure without change
2128 and that's exactly what we want here. */
2129 update_temp_slot_address (XEXP (memref, 0), addr);
2130 return change_address_1 (memref, VOIDmode, addr, 1);
2133 /* Likewise, but the reference is not required to be valid. */
2135 rtx
2136 replace_equiv_address_nv (rtx memref, rtx addr)
2138 return change_address_1 (memref, VOIDmode, addr, 0);
2141 /* Return a memory reference like MEMREF, but with its mode widened to
2142 MODE and offset by OFFSET. This would be used by targets that e.g.
2143 cannot issue QImode memory operations and have to use SImode memory
2144 operations plus masking logic. */
2146 rtx
2147 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2149 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2150 tree expr = MEM_EXPR (new_rtx);
2151 rtx memoffset = MEM_OFFSET (new_rtx);
2152 unsigned int size = GET_MODE_SIZE (mode);
2154 /* If there are no changes, just return the original memory reference. */
2155 if (new_rtx == memref)
2156 return new_rtx;
2158 /* If we don't know what offset we were at within the expression, then
2159 we can't know if we've overstepped the bounds. */
2160 if (! memoffset)
2161 expr = NULL_TREE;
2163 while (expr)
2165 if (TREE_CODE (expr) == COMPONENT_REF)
2167 tree field = TREE_OPERAND (expr, 1);
2168 tree offset = component_ref_field_offset (expr);
2170 if (! DECL_SIZE_UNIT (field))
2172 expr = NULL_TREE;
2173 break;
2176 /* Is the field at least as large as the access? If so, ok,
2177 otherwise strip back to the containing structure. */
2178 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2179 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2180 && INTVAL (memoffset) >= 0)
2181 break;
2183 if (! host_integerp (offset, 1))
2185 expr = NULL_TREE;
2186 break;
2189 expr = TREE_OPERAND (expr, 0);
2190 memoffset
2191 = (GEN_INT (INTVAL (memoffset)
2192 + tree_low_cst (offset, 1)
2193 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2194 / BITS_PER_UNIT)));
2196 /* Similarly for the decl. */
2197 else if (DECL_P (expr)
2198 && DECL_SIZE_UNIT (expr)
2199 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2200 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2201 && (! memoffset || INTVAL (memoffset) >= 0))
2202 break;
2203 else
2205 /* The widened memory access overflows the expression, which means
2206 that it could alias another expression. Zap it. */
2207 expr = NULL_TREE;
2208 break;
2212 if (! expr)
2213 memoffset = NULL_RTX;
2215 /* The widened memory may alias other stuff, so zap the alias set. */
2216 /* ??? Maybe use get_alias_set on any remaining expression. */
2218 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2219 MEM_ALIGN (new_rtx),
2220 MEM_ADDR_SPACE (new_rtx), mode);
2222 return new_rtx;
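/* Sketch (hypothetical helper): a target without byte loads widens a
   QImode reference to SImode; the caller then masks or shifts the byte
   back out of the wider value.  */
static rtx ATTRIBUTE_UNUSED
example_widen_byte (rtx byte_mem)
{
  return widen_memory_access (byte_mem, SImode, 0);
}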
2225 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2226 static GTY(()) tree spill_slot_decl;
2228 tree
2229 get_spill_slot_decl (bool force_build_p)
2231 tree d = spill_slot_decl;
2232 rtx rd;
2234 if (d || !force_build_p)
2235 return d;
2237 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2238 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2239 DECL_ARTIFICIAL (d) = 1;
2240 DECL_IGNORED_P (d) = 1;
2241 TREE_USED (d) = 1;
2242 TREE_THIS_NOTRAP (d) = 1;
2243 spill_slot_decl = d;
2245 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2246 MEM_NOTRAP_P (rd) = 1;
2247 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2248 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
2249 SET_DECL_RTL (d, rd);
2251 return d;
2254 /* Given MEM, a result from assign_stack_local, fill in the memory
2255 attributes as appropriate for a register allocator spill slot.
2256 These slots are not aliasable by other memory. We arrange for
2257 them all to use a single MEM_EXPR, so that the aliasing code can
2258 work properly in the case of shared spill slots. */
2260 void
2261 set_mem_attrs_for_spill (rtx mem)
2263 alias_set_type alias;
2264 rtx addr, offset;
2265 tree expr;
2267 expr = get_spill_slot_decl (true);
2268 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2270 /* We expect the incoming memory to be of the form:
2271 (mem:MODE (plus (reg sfp) (const_int offset)))
2272 with perhaps the plus missing for offset = 0. */
2273 addr = XEXP (mem, 0);
2274 offset = const0_rtx;
2275 if (GET_CODE (addr) == PLUS
2276 && CONST_INT_P (XEXP (addr, 1)))
2277 offset = XEXP (addr, 1);
2279 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2280 MEM_SIZE (mem), MEM_ALIGN (mem),
2281 ADDR_SPACE_GENERIC, GET_MODE (mem));
2282 MEM_NOTRAP_P (mem) = 1;
2285 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2287 rtx
2288 gen_label_rtx (void)
2290 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2291 NULL, label_num++, NULL);
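/* Sketch (hypothetical helper): gen_label_rtx only allocates the
   CODE_LABEL; emit_label is what links it into the insn chain.  */
static rtx ATTRIBUTE_UNUSED
example_emit_label (void)
{
  rtx label = gen_label_rtx ();
  emit_label (label);
  return label;
}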
2294 /* For procedure integration. */
2296 /* Install new pointers to the first and last insns in the chain.
2297 Also, set cur_insn_uid to one higher than the last in use.
2298 Used for an inline-procedure after copying the insn chain. */
2300 void
2301 set_new_first_and_last_insn (rtx first, rtx last)
2303 rtx insn;
2305 set_first_insn (first);
2306 set_last_insn (last);
2307 cur_insn_uid = 0;
2309 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2311 int debug_count = 0;
2313 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2314 cur_debug_insn_uid = 0;
2316 for (insn = first; insn; insn = NEXT_INSN (insn))
2317 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2318 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2319 else
2321 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2322 if (DEBUG_INSN_P (insn))
2323 debug_count++;
2326 if (debug_count)
2327 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2328 else
2329 cur_debug_insn_uid++;
2331 else
2332 for (insn = first; insn; insn = NEXT_INSN (insn))
2333 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2335 cur_insn_uid++;
2338 /* Go through all the RTL insn bodies and copy any invalid shared
2339 structure. This routine should only be called once. */
2341 static void
2342 unshare_all_rtl_1 (rtx insn)
2344 /* Unshare just about everything else. */
2345 unshare_all_rtl_in_chain (insn);
2347 /* Make sure the addresses of stack slots found outside the insn chain
2348 (such as, in DECL_RTL of a variable) are not shared
2349 with the insn chain.
2351 This special care is necessary when the stack slot MEM does not
2352 actually appear in the insn chain. If it does appear, its address
2353 is unshared from all else at that point. */
2354 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2357 /* Go through all the RTL insn bodies and copy any invalid shared
2358 structure, again. This is a fairly expensive thing to do so it
2359 should be done sparingly. */
2361 void
2362 unshare_all_rtl_again (rtx insn)
2364 rtx p;
2365 tree decl;
2367 for (p = insn; p; p = NEXT_INSN (p))
2368 if (INSN_P (p))
2370 reset_used_flags (PATTERN (p));
2371 reset_used_flags (REG_NOTES (p));
2374 /* Make sure that virtual stack slots are not shared. */
2375 set_used_decls (DECL_INITIAL (cfun->decl));
2377 /* Make sure that virtual parameters are not shared. */
2378 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2379 set_used_flags (DECL_RTL (decl));
2381 reset_used_flags (stack_slot_list);
2383 unshare_all_rtl_1 (insn);
2386 unsigned int
2387 unshare_all_rtl (void)
2389 unshare_all_rtl_1 (get_insns ());
2390 return 0;
2393 struct rtl_opt_pass pass_unshare_all_rtl =
2394 {
2395 {
2396 RTL_PASS,
2397 "unshare", /* name */
2398 NULL, /* gate */
2399 unshare_all_rtl, /* execute */
2400 NULL, /* sub */
2401 NULL, /* next */
2402 0, /* static_pass_number */
2403 TV_NONE, /* tv_id */
2404 0, /* properties_required */
2405 0, /* properties_provided */
2406 0, /* properties_destroyed */
2407 0, /* todo_flags_start */
2408 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2409 }
2410 };
2413 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2414 Recursively does the same for subexpressions. */
2416 static void
2417 verify_rtx_sharing (rtx orig, rtx insn)
2419 rtx x = orig;
2420 int i;
2421 enum rtx_code code;
2422 const char *format_ptr;
2424 if (x == 0)
2425 return;
2427 code = GET_CODE (x);
2429 /* These types may be freely shared. */
2431 switch (code)
2433 case REG:
2434 case DEBUG_EXPR:
2435 case VALUE:
2436 case CONST_INT:
2437 case CONST_DOUBLE:
2438 case CONST_FIXED:
2439 case CONST_VECTOR:
2440 case SYMBOL_REF:
2441 case LABEL_REF:
2442 case CODE_LABEL:
2443 case PC:
2444 case CC0:
2445 case SCRATCH:
2446 return;
2447 /* SCRATCH rtxes must be shared because each represents a distinct value. */
2448 case CLOBBER:
2449 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2450 return;
2451 break;
2453 case CONST:
2454 if (shared_const_p (orig))
2455 return;
2456 break;
2458 case MEM:
2459 /* A MEM is allowed to be shared if its address is constant. */
2460 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2461 || reload_completed || reload_in_progress)
2462 return;
2464 break;
2466 default:
2467 break;
2470 /* This rtx may not be shared. If it has already been seen,
2471 replace it with a copy of itself. */
2472 #ifdef ENABLE_CHECKING
2473 if (RTX_FLAG (x, used))
2474 {
2475 error ("invalid rtl sharing found in the insn");
2476 debug_rtx (insn);
2477 error ("shared rtx");
2478 debug_rtx (x);
2479 internal_error ("internal consistency failure");
2480 }
2481 #endif
2482 gcc_assert (!RTX_FLAG (x, used));
2484 RTX_FLAG (x, used) = 1;
2486 /* Now scan the subexpressions recursively. */
2488 format_ptr = GET_RTX_FORMAT (code);
2490 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2492 switch (*format_ptr++)
2494 case 'e':
2495 verify_rtx_sharing (XEXP (x, i), insn);
2496 break;
2498 case 'E':
2499 if (XVEC (x, i) != NULL)
2501 int j;
2502 int len = XVECLEN (x, i);
2504 for (j = 0; j < len; j++)
2506 /* We allow sharing of ASM_OPERANDS inside single
2507 instruction. */
2508 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2509 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2510 == ASM_OPERANDS))
2511 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2512 else
2513 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2516 break;
2519 return;
2522 /* Go through all the RTL insn bodies and check that there is no unexpected
2523 sharing in between the subexpressions. */
2525 DEBUG_FUNCTION void
2526 verify_rtl_sharing (void)
2528 rtx p;
2530 for (p = get_insns (); p; p = NEXT_INSN (p))
2531 if (INSN_P (p))
2533 reset_used_flags (PATTERN (p));
2534 reset_used_flags (REG_NOTES (p));
2535 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2537 int i;
2538 rtx q, sequence = PATTERN (p);
2540 for (i = 0; i < XVECLEN (sequence, 0); i++)
2542 q = XVECEXP (sequence, 0, i);
2543 gcc_assert (INSN_P (q));
2544 reset_used_flags (PATTERN (q));
2545 reset_used_flags (REG_NOTES (q));
2550 for (p = get_insns (); p; p = NEXT_INSN (p))
2551 if (INSN_P (p))
2553 verify_rtx_sharing (PATTERN (p), p);
2554 verify_rtx_sharing (REG_NOTES (p), p);
2558 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2559 Assumes the mark bits are cleared at entry. */
2561 void
2562 unshare_all_rtl_in_chain (rtx insn)
2564 for (; insn; insn = NEXT_INSN (insn))
2565 if (INSN_P (insn))
2567 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2568 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2572 /* Go through all virtual stack slots of a function and mark them as
2573 shared. We never replace the DECL_RTLs themselves with a copy,
2574 but expressions mentioned into a DECL_RTL cannot be shared with
2575 expressions in the instruction stream.
2577 Note that reload may convert pseudo registers into memories in-place.
2578 Pseudo registers are always shared, but MEMs never are. Thus if we
2579 reset the used flags on MEMs in the instruction stream, we must set
2580 them again on MEMs that appear in DECL_RTLs. */
2582 static void
2583 set_used_decls (tree blk)
2585 tree t;
2587 /* Mark decls. */
2588 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2589 if (DECL_RTL_SET_P (t))
2590 set_used_flags (DECL_RTL (t));
2592 /* Now process sub-blocks. */
2593 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2594 set_used_decls (t);
2597 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2598 Recursively does the same for subexpressions. Uses
2599 copy_rtx_if_shared_1 to reduce stack space. */
2601 rtx
2602 copy_rtx_if_shared (rtx orig)
2604 copy_rtx_if_shared_1 (&orig);
2605 return orig;
2608 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2609 use. Recursively does the same for subexpressions. */
2611 static void
2612 copy_rtx_if_shared_1 (rtx *orig1)
2614 rtx x;
2615 int i;
2616 enum rtx_code code;
2617 rtx *last_ptr;
2618 const char *format_ptr;
2619 int copied = 0;
2620 int length;
2622 /* Repeat is used to turn tail-recursion into iteration. */
2623 repeat:
2624 x = *orig1;
2626 if (x == 0)
2627 return;
2629 code = GET_CODE (x);
2631 /* These types may be freely shared. */
2633 switch (code)
2635 case REG:
2636 case DEBUG_EXPR:
2637 case VALUE:
2638 case CONST_INT:
2639 case CONST_DOUBLE:
2640 case CONST_FIXED:
2641 case CONST_VECTOR:
2642 case SYMBOL_REF:
2643 case LABEL_REF:
2644 case CODE_LABEL:
2645 case PC:
2646 case CC0:
2647 case SCRATCH:
2648 /* SCRATCH rtxes must be shared because each represents a distinct value. */
2649 return;
2650 case CLOBBER:
2651 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2652 return;
2653 break;
2655 case CONST:
2656 if (shared_const_p (x))
2657 return;
2658 break;
2660 case DEBUG_INSN:
2661 case INSN:
2662 case JUMP_INSN:
2663 case CALL_INSN:
2664 case NOTE:
2665 case BARRIER:
2666 /* The chain of insns is not being copied. */
2667 return;
2669 default:
2670 break;
2673 /* This rtx may not be shared. If it has already been seen,
2674 replace it with a copy of itself. */
2676 if (RTX_FLAG (x, used))
2678 x = shallow_copy_rtx (x);
2679 copied = 1;
2681 RTX_FLAG (x, used) = 1;
2683 /* Now scan the subexpressions recursively.
2684 We can store any replaced subexpressions directly into X
2685 since we know X is not shared! Any vectors in X
2686 must be copied if X was copied. */
2688 format_ptr = GET_RTX_FORMAT (code);
2689 length = GET_RTX_LENGTH (code);
2690 last_ptr = NULL;
2692 for (i = 0; i < length; i++)
2694 switch (*format_ptr++)
2696 case 'e':
2697 if (last_ptr)
2698 copy_rtx_if_shared_1 (last_ptr);
2699 last_ptr = &XEXP (x, i);
2700 break;
2702 case 'E':
2703 if (XVEC (x, i) != NULL)
2705 int j;
2706 int len = XVECLEN (x, i);
2708 /* Copy the vector iff I copied the rtx and the length
2709 is nonzero. */
2710 if (copied && len > 0)
2711 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2713 /* Call recursively on all inside the vector. */
2714 for (j = 0; j < len; j++)
2716 if (last_ptr)
2717 copy_rtx_if_shared_1 (last_ptr);
2718 last_ptr = &XVECEXP (x, i, j);
2721 break;
2724 *orig1 = x;
2725 if (last_ptr)
2727 orig1 = last_ptr;
2728 goto repeat;
2730 return;
2733 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2734 to look for shared sub-parts. */
2736 void
2737 reset_used_flags (rtx x)
2739 int i, j;
2740 enum rtx_code code;
2741 const char *format_ptr;
2742 int length;
2744 /* Repeat is used to turn tail-recursion into iteration. */
2745 repeat:
2746 if (x == 0)
2747 return;
2749 code = GET_CODE (x);
2751 /* These types may be freely shared so we needn't do any resetting
2752 for them. */
2754 switch (code)
2756 case REG:
2757 case DEBUG_EXPR:
2758 case VALUE:
2759 case CONST_INT:
2760 case CONST_DOUBLE:
2761 case CONST_FIXED:
2762 case CONST_VECTOR:
2763 case SYMBOL_REF:
2764 case CODE_LABEL:
2765 case PC:
2766 case CC0:
2767 return;
2769 case DEBUG_INSN:
2770 case INSN:
2771 case JUMP_INSN:
2772 case CALL_INSN:
2773 case NOTE:
2774 case LABEL_REF:
2775 case BARRIER:
2776 /* The chain of insns is not being copied. */
2777 return;
2779 default:
2780 break;
2783 RTX_FLAG (x, used) = 0;
2785 format_ptr = GET_RTX_FORMAT (code);
2786 length = GET_RTX_LENGTH (code);
2788 for (i = 0; i < length; i++)
2790 switch (*format_ptr++)
2792 case 'e':
2793 if (i == length-1)
2795 x = XEXP (x, i);
2796 goto repeat;
2798 reset_used_flags (XEXP (x, i));
2799 break;
2801 case 'E':
2802 for (j = 0; j < XVECLEN (x, i); j++)
2803 reset_used_flags (XVECEXP (x, i, j));
2804 break;
2809 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2810 to look for shared sub-parts. */
2812 void
2813 set_used_flags (rtx x)
2815 int i, j;
2816 enum rtx_code code;
2817 const char *format_ptr;
2819 if (x == 0)
2820 return;
2822 code = GET_CODE (x);
2824 /* These types may be freely shared so we needn't do any resetting
2825 for them. */
2827 switch (code)
2829 case REG:
2830 case DEBUG_EXPR:
2831 case VALUE:
2832 case CONST_INT:
2833 case CONST_DOUBLE:
2834 case CONST_FIXED:
2835 case CONST_VECTOR:
2836 case SYMBOL_REF:
2837 case CODE_LABEL:
2838 case PC:
2839 case CC0:
2840 return;
2842 case DEBUG_INSN:
2843 case INSN:
2844 case JUMP_INSN:
2845 case CALL_INSN:
2846 case NOTE:
2847 case LABEL_REF:
2848 case BARRIER:
2849 /* The chain of insns is not being copied. */
2850 return;
2852 default:
2853 break;
2856 RTX_FLAG (x, used) = 1;
2858 format_ptr = GET_RTX_FORMAT (code);
2859 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2861 switch (*format_ptr++)
2863 case 'e':
2864 set_used_flags (XEXP (x, i));
2865 break;
2867 case 'E':
2868 for (j = 0; j < XVECLEN (x, i); j++)
2869 set_used_flags (XVECEXP (x, i, j));
2870 break;
2875 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2876 Return X or the rtx for the pseudo reg the value of X was copied into.
2877 OTHER must be valid as a SET_DEST. */
2879 rtx
2880 make_safe_from (rtx x, rtx other)
2882 while (1)
2883 switch (GET_CODE (other))
2885 case SUBREG:
2886 other = SUBREG_REG (other);
2887 break;
2888 case STRICT_LOW_PART:
2889 case SIGN_EXTEND:
2890 case ZERO_EXTEND:
2891 other = XEXP (other, 0);
2892 break;
2893 default:
2894 goto done;
2896 done:
2897 if ((MEM_P (other)
2898 && ! CONSTANT_P (x)
2899 && !REG_P (x)
2900 && GET_CODE (x) != SUBREG)
2901 || (REG_P (other)
2902 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2903 || reg_mentioned_p (other, x))))
2905 rtx temp = gen_reg_rtx (GET_MODE (x));
2906 emit_move_insn (temp, x);
2907 return temp;
2909 return x;
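/* Sketch (hypothetical helper): before emitting a store into OTHER, guard
   any source value that the store could clobber.  */
static void ATTRIBUTE_UNUSED
example_make_safe (rtx x, rtx other)
{
  x = make_safe_from (x, other);	/* may copy X into a fresh pseudo */
  emit_move_insn (other, x);
}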
2912 /* Emission of insns (adding them to the doubly-linked list). */
2914 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2916 rtx
2917 get_last_insn_anywhere (void)
2919 struct sequence_stack *stack;
2920 if (get_last_insn ())
2921 return get_last_insn ();
2922 for (stack = seq_stack; stack; stack = stack->next)
2923 if (stack->last != 0)
2924 return stack->last;
2925 return 0;
2928 /* Return the first nonnote insn emitted in current sequence or current
2929 function. This routine looks inside SEQUENCEs. */
2931 rtx
2932 get_first_nonnote_insn (void)
2934 rtx insn = get_insns ();
2936 if (insn)
2938 if (NOTE_P (insn))
2939 for (insn = next_insn (insn);
2940 insn && NOTE_P (insn);
2941 insn = next_insn (insn))
2942 continue;
2943 else
2945 if (NONJUMP_INSN_P (insn)
2946 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2947 insn = XVECEXP (PATTERN (insn), 0, 0);
2951 return insn;
2954 /* Return the last nonnote insn emitted in current sequence or current
2955 function. This routine looks inside SEQUENCEs. */
2957 rtx
2958 get_last_nonnote_insn (void)
2960 rtx insn = get_last_insn ();
2962 if (insn)
2964 if (NOTE_P (insn))
2965 for (insn = previous_insn (insn);
2966 insn && NOTE_P (insn);
2967 insn = previous_insn (insn))
2968 continue;
2969 else
2971 if (NONJUMP_INSN_P (insn)
2972 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2973 insn = XVECEXP (PATTERN (insn), 0,
2974 XVECLEN (PATTERN (insn), 0) - 1);
2978 return insn;
2981 /* Return the number of actual (non-debug) insns emitted in this
2982 function. */
2984 int
2985 get_max_insn_count (void)
2987 int n = cur_insn_uid;
2989 /* The table size must be stable across -g, to avoid codegen
2990 differences due to debug insns, and not be affected by
2991 -fmin-insn-uid, to avoid excessive table size and to simplify
2992 debugging of -fcompare-debug failures. */
2993 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
2994 n -= cur_debug_insn_uid;
2995 else
2996 n -= MIN_NONDEBUG_INSN_UID;
2998 return n;
3002 /* Return the next insn. If it is a SEQUENCE, return the first insn
3003 of the sequence. */
3005 rtx
3006 next_insn (rtx insn)
3008 if (insn)
3010 insn = NEXT_INSN (insn);
3011 if (insn && NONJUMP_INSN_P (insn)
3012 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3013 insn = XVECEXP (PATTERN (insn), 0, 0);
3016 return insn;
3019 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3020 of the sequence. */
3022 rtx
3023 previous_insn (rtx insn)
3025 if (insn)
3027 insn = PREV_INSN (insn);
3028 if (insn && NONJUMP_INSN_P (insn)
3029 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3030 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3033 return insn;
3036 /* Return the next insn after INSN that is not a NOTE. This routine does not
3037 look inside SEQUENCEs. */
3039 rtx
3040 next_nonnote_insn (rtx insn)
3042 while (insn)
3044 insn = NEXT_INSN (insn);
3045 if (insn == 0 || !NOTE_P (insn))
3046 break;
3049 return insn;
3052 /* Return the next insn after INSN that is not a NOTE, but stop the
3053 search before we enter another basic block. This routine does not
3054 look inside SEQUENCEs. */
3056 rtx
3057 next_nonnote_insn_bb (rtx insn)
3059 while (insn)
3061 insn = NEXT_INSN (insn);
3062 if (insn == 0 || !NOTE_P (insn))
3063 break;
3064 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3065 return NULL_RTX;
3068 return insn;
3071 /* Return the previous insn before INSN that is not a NOTE. This routine does
3072 not look inside SEQUENCEs. */
3074 rtx
3075 prev_nonnote_insn (rtx insn)
3077 while (insn)
3079 insn = PREV_INSN (insn);
3080 if (insn == 0 || !NOTE_P (insn))
3081 break;
3084 return insn;
3087 /* Return the previous insn before INSN that is not a NOTE, but stop
3088 the search before we enter another basic block. This routine does
3089 not look inside SEQUENCEs. */
3091 rtx
3092 prev_nonnote_insn_bb (rtx insn)
3094 while (insn)
3096 insn = PREV_INSN (insn);
3097 if (insn == 0 || !NOTE_P (insn))
3098 break;
3099 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3100 return NULL_RTX;
3103 return insn;
3106 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3107 routine does not look inside SEQUENCEs. */
3109 rtx
3110 next_nondebug_insn (rtx insn)
3112 while (insn)
3114 insn = NEXT_INSN (insn);
3115 if (insn == 0 || !DEBUG_INSN_P (insn))
3116 break;
3119 return insn;
3122 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3123 This routine does not look inside SEQUENCEs. */
3125 rtx
3126 prev_nondebug_insn (rtx insn)
3128 while (insn)
3130 insn = PREV_INSN (insn);
3131 if (insn == 0 || !DEBUG_INSN_P (insn))
3132 break;
3135 return insn;
3138 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3139 or 0, if there is none. This routine does not look inside
3140 SEQUENCEs. */
3142 rtx
3143 next_real_insn (rtx insn)
3145 while (insn)
3147 insn = NEXT_INSN (insn);
3148 if (insn == 0 || INSN_P (insn))
3149 break;
3152 return insn;
3155 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3156 or 0, if there is none. This routine does not look inside
3157 SEQUENCEs. */
3159 rtx
3160 prev_real_insn (rtx insn)
3162 while (insn)
3164 insn = PREV_INSN (insn);
3165 if (insn == 0 || INSN_P (insn))
3166 break;
3169 return insn;
3172 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3173 This routine does not look inside SEQUENCEs. */
3175 rtx
3176 last_call_insn (void)
3178 rtx insn;
3180 for (insn = get_last_insn ();
3181 insn && !CALL_P (insn);
3182 insn = PREV_INSN (insn))
3185 return insn;
3188 /* Find the next insn after INSN that really does something. This routine
3189 does not look inside SEQUENCEs. After reload this also skips over
3190 standalone USE and CLOBBER insns. */
3192 int
3193 active_insn_p (const_rtx insn)
3195 return (CALL_P (insn) || JUMP_P (insn)
3196 || (NONJUMP_INSN_P (insn)
3197 && (! reload_completed
3198 || (GET_CODE (PATTERN (insn)) != USE
3199 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3202 rtx
3203 next_active_insn (rtx insn)
3205 while (insn)
3207 insn = NEXT_INSN (insn);
3208 if (insn == 0 || active_insn_p (insn))
3209 break;
3212 return insn;
3215 /* Find the last insn before INSN that really does something. This routine
3216 does not look inside SEQUENCEs. After reload this also skips over
3217 standalone USE and CLOBBER insns. */
3219 rtx
3220 prev_active_insn (rtx insn)
3222 while (insn)
3224 insn = PREV_INSN (insn);
3225 if (insn == 0 || active_insn_p (insn))
3226 break;
3229 return insn;
3232 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3234 rtx
3235 next_label (rtx insn)
3237 while (insn)
3239 insn = NEXT_INSN (insn);
3240 if (insn == 0 || LABEL_P (insn))
3241 break;
3244 return insn;
3247 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3249 rtx
3250 prev_label (rtx insn)
3252 while (insn)
3254 insn = PREV_INSN (insn);
3255 if (insn == 0 || LABEL_P (insn))
3256 break;
3259 return insn;
3262 /* Return the last label to mark the same position as LABEL. Return null
3263 if LABEL itself is null. */
3265 rtx
3266 skip_consecutive_labels (rtx label)
3268 rtx insn;
3270 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3271 if (LABEL_P (insn))
3272 label = insn;
3274 return label;
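/* Sketch (hypothetical helper): the walkers above compose, e.g. to find
   the first real insn of the current function after folding away any run
   of labels marking the same position.  */
static rtx ATTRIBUTE_UNUSED
example_first_real_insn (void)
{
  rtx head = skip_consecutive_labels (get_insns ());
  return next_real_insn (head);
}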
3277 #ifdef HAVE_cc0
3278 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3279 and REG_CC_USER notes so we can find it. */
3281 void
3282 link_cc0_insns (rtx insn)
3284 rtx user = next_nonnote_insn (insn);
3286 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3287 user = XVECEXP (PATTERN (user), 0, 0);
3289 add_reg_note (user, REG_CC_SETTER, insn);
3290 add_reg_note (insn, REG_CC_USER, user);
3293 /* Return the next insn that uses CC0 after INSN, which is assumed to
3294 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3295 applied to the result of this function should yield INSN).
3297 Normally, this is simply the next insn. However, if a REG_CC_USER note
3298 is present, it contains the insn that uses CC0.
3300 Return 0 if we can't find the insn. */
3302 rtx
3303 next_cc0_user (rtx insn)
3305 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3307 if (note)
3308 return XEXP (note, 0);
3310 insn = next_nonnote_insn (insn);
3311 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3312 insn = XVECEXP (PATTERN (insn), 0, 0);
3314 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3315 return insn;
3317 return 0;
3320 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3321 note, it is the previous insn. */
3323 rtx
3324 prev_cc0_setter (rtx insn)
3326 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3328 if (note)
3329 return XEXP (note, 0);
3331 insn = prev_nonnote_insn (insn);
3332 gcc_assert (sets_cc0_p (PATTERN (insn)));
3334 return insn;
3336 #endif
3338 #ifdef AUTO_INC_DEC
3339 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3341 static int
3342 find_auto_inc (rtx *xp, void *data)
3344 rtx x = *xp;
3345 rtx reg = (rtx) data;
3347 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3348 return 0;
3350 switch (GET_CODE (x))
3352 case PRE_DEC:
3353 case PRE_INC:
3354 case POST_DEC:
3355 case POST_INC:
3356 case PRE_MODIFY:
3357 case POST_MODIFY:
3358 if (rtx_equal_p (reg, XEXP (x, 0)))
3359 return 1;
3360 break;
3362 default:
3363 gcc_unreachable ();
3365 return -1;
3367 #endif
3369 /* Increment the label uses for all labels present in rtx. */
3371 static void
3372 mark_label_nuses (rtx x)
3374 enum rtx_code code;
3375 int i, j;
3376 const char *fmt;
3378 code = GET_CODE (x);
3379 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3380 LABEL_NUSES (XEXP (x, 0))++;
3382 fmt = GET_RTX_FORMAT (code);
3383 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3385 if (fmt[i] == 'e')
3386 mark_label_nuses (XEXP (x, i));
3387 else if (fmt[i] == 'E')
3388 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3389 mark_label_nuses (XVECEXP (x, i, j));
3394 /* Try splitting insns that can be split for better scheduling.
3395 PAT is the pattern which might split.
3396 TRIAL is the insn providing PAT.
3397 LAST is nonzero if we should return the last insn of the sequence produced.
3399 If this routine succeeds in splitting, it returns the first or last
3400 replacement insn depending on the value of LAST. Otherwise, it
3401 returns TRIAL. If the insn to be returned can be split, it will be. */
3403 rtx
3404 try_split (rtx pat, rtx trial, int last)
3406 rtx before = PREV_INSN (trial);
3407 rtx after = NEXT_INSN (trial);
3408 int has_barrier = 0;
3409 rtx note, seq, tem;
3410 int probability;
3411 rtx insn_last, insn;
3412 int njumps = 0;
3414 /* We're not good at redistributing frame information. */
3415 if (RTX_FRAME_RELATED_P (trial))
3416 return trial;
3418 if (any_condjump_p (trial)
3419 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3420 split_branch_probability = INTVAL (XEXP (note, 0));
3421 probability = split_branch_probability;
3423 seq = split_insns (pat, trial);
3425 split_branch_probability = -1;
3427 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3428 We may need to handle this specially. */
3429 if (after && BARRIER_P (after))
3431 has_barrier = 1;
3432 after = NEXT_INSN (after);
3435 if (!seq)
3436 return trial;
3438 /* Avoid infinite loop if any insn of the result matches
3439 the original pattern. */
3440 insn_last = seq;
3441 while (1)
3443 if (INSN_P (insn_last)
3444 && rtx_equal_p (PATTERN (insn_last), pat))
3445 return trial;
3446 if (!NEXT_INSN (insn_last))
3447 break;
3448 insn_last = NEXT_INSN (insn_last);
3451 /* We will be adding the new sequence to the function. The splitters
3452 may have introduced invalid RTL sharing, so unshare the sequence now. */
3453 unshare_all_rtl_in_chain (seq);
3455 /* Mark labels. */
3456 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3458 if (JUMP_P (insn))
3460 mark_jump_label (PATTERN (insn), insn, 0);
3461 njumps++;
3462 if (probability != -1
3463 && any_condjump_p (insn)
3464 && !find_reg_note (insn, REG_BR_PROB, 0))
3466 /* We can preserve the REG_BR_PROB notes only if exactly
3467 one jump is created, otherwise the machine description
3468 is responsible for this step using
3469 split_branch_probability variable. */
3470 gcc_assert (njumps == 1);
3471 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3476 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3477 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3478 if (CALL_P (trial))
3480 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3481 if (CALL_P (insn))
3483 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3484 while (*p)
3485 p = &XEXP (*p, 1);
3486 *p = CALL_INSN_FUNCTION_USAGE (trial);
3487 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3489 /* Update the debug information for the CALL_INSN. */
3490 if (flag_enable_icf_debug)
3491 (*debug_hooks->copy_call_info) (trial, insn);
3495 /* Copy notes, particularly those related to the CFG. */
3496 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3498 switch (REG_NOTE_KIND (note))
3500 case REG_EH_REGION:
3501 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3502 break;
3504 case REG_NORETURN:
3505 case REG_SETJMP:
3506 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3508 if (CALL_P (insn))
3509 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3511 break;
3513 case REG_NON_LOCAL_GOTO:
3514 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3516 if (JUMP_P (insn))
3517 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3519 break;
3521 #ifdef AUTO_INC_DEC
3522 case REG_INC:
3523 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3525 rtx reg = XEXP (note, 0);
3526 if (!FIND_REG_INC_NOTE (insn, reg)
3527 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3528 add_reg_note (insn, REG_INC, reg);
3530 break;
3531 #endif
3533 default:
3534 break;
3538 /* If there are LABELS inside the split insns, increment the
3539 usage count so we don't delete the label. */
3540 if (INSN_P (trial))
3542 insn = insn_last;
3543 while (insn != NULL_RTX)
3545 /* JUMP_P insns have already been "marked" above. */
3546 if (NONJUMP_INSN_P (insn))
3547 mark_label_nuses (PATTERN (insn));
3549 insn = PREV_INSN (insn);
3553 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3555 delete_insn (trial);
3556 if (has_barrier)
3557 emit_barrier_after (tem);
3559 /* Recursively call try_split for each new insn created; by the
3560 time control returns here that insn will be fully split, so
3561 set LAST and continue from the insn after the one returned.
3562 We can't use next_active_insn here since AFTER may be a note.
3563 Ignore deleted insns, which can occur if not optimizing. */
3564 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3565 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3566 tem = try_split (PATTERN (tem), tem, 1);
3568 /* Return either the first or the last insn, depending on which was
3569 requested. */
3570 return last
3571 ? (after ? PREV_INSN (after) : get_last_insn ())
3572 : NEXT_INSN (before);
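/* Sketch (hypothetical helper): ask the machine description to split a
   just-emitted insn for better scheduling; TRIAL comes back unchanged
   when no splitter matches.  */
static rtx ATTRIBUTE_UNUSED
example_split (rtx trial)
{
  return try_split (PATTERN (trial), trial, /*last=*/1);
}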
3575 /* Make and return an INSN rtx, initializing all its slots.
3576 Store PATTERN in the pattern slots. */
3578 rtx
3579 make_insn_raw (rtx pattern)
3581 rtx insn;
3583 insn = rtx_alloc (INSN);
3585 INSN_UID (insn) = cur_insn_uid++;
3586 PATTERN (insn) = pattern;
3587 INSN_CODE (insn) = -1;
3588 REG_NOTES (insn) = NULL;
3589 INSN_LOCATOR (insn) = curr_insn_locator ();
3590 BLOCK_FOR_INSN (insn) = NULL;
3592 #ifdef ENABLE_RTL_CHECKING
3593 if (insn
3594 && INSN_P (insn)
3595 && (returnjump_p (insn)
3596 || (GET_CODE (insn) == SET
3597 && SET_DEST (insn) == pc_rtx)))
3599 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3600 debug_rtx (insn);
3602 #endif
3604 return insn;
3607 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3609 rtx
3610 make_debug_insn_raw (rtx pattern)
3612 rtx insn;
3614 insn = rtx_alloc (DEBUG_INSN);
3615 INSN_UID (insn) = cur_debug_insn_uid++;
3616 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3617 INSN_UID (insn) = cur_insn_uid++;
3619 PATTERN (insn) = pattern;
3620 INSN_CODE (insn) = -1;
3621 REG_NOTES (insn) = NULL;
3622 INSN_LOCATOR (insn) = curr_insn_locator ();
3623 BLOCK_FOR_INSN (insn) = NULL;
3625 return insn;
3628 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3630 rtx
3631 make_jump_insn_raw (rtx pattern)
3633 rtx insn;
3635 insn = rtx_alloc (JUMP_INSN);
3636 INSN_UID (insn) = cur_insn_uid++;
3638 PATTERN (insn) = pattern;
3639 INSN_CODE (insn) = -1;
3640 REG_NOTES (insn) = NULL;
3641 JUMP_LABEL (insn) = NULL;
3642 INSN_LOCATOR (insn) = curr_insn_locator ();
3643 BLOCK_FOR_INSN (insn) = NULL;
3645 return insn;
3648 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3650 static rtx
3651 make_call_insn_raw (rtx pattern)
3653 rtx insn;
3655 insn = rtx_alloc (CALL_INSN);
3656 INSN_UID (insn) = cur_insn_uid++;
3658 PATTERN (insn) = pattern;
3659 INSN_CODE (insn) = -1;
3660 REG_NOTES (insn) = NULL;
3661 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3662 INSN_LOCATOR (insn) = curr_insn_locator ();
3663 BLOCK_FOR_INSN (insn) = NULL;
3665 return insn;
3668 /* Add INSN to the end of the doubly-linked list.
3669 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3671 void
3672 add_insn (rtx insn)
3674 PREV_INSN (insn) = get_last_insn();
3675 NEXT_INSN (insn) = 0;
3677 if (NULL != get_last_insn())
3678 NEXT_INSN (get_last_insn ()) = insn;
3680 if (NULL == get_insns ())
3681 set_first_insn (insn);
3683 set_last_insn (insn);
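/* Sketch (hypothetical helper): the lowest-level emission path; build a
   raw INSN from PATTERN and append it to the chain.  emit_insn is the
   public wrapper that additionally copes with insn lists.  */
static rtx ATTRIBUTE_UNUSED
example_raw_emit (rtx pattern)
{
  rtx insn = make_insn_raw (pattern);
  add_insn (insn);
  return insn;
}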
3686 /* Add INSN into the doubly-linked list after insn AFTER. This and
3687 the next should be the only functions called to insert an insn once
3688 delay slots have been filled since only they know how to update a
3689 SEQUENCE. */
3691 void
3692 add_insn_after (rtx insn, rtx after, basic_block bb)
3694 rtx next = NEXT_INSN (after);
3696 gcc_assert (!optimize || !INSN_DELETED_P (after));
3698 NEXT_INSN (insn) = next;
3699 PREV_INSN (insn) = after;
3701 if (next)
3703 PREV_INSN (next) = insn;
3704 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3705 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3707 else if (get_last_insn () == after)
3708 set_last_insn (insn);
3709 else
3711 struct sequence_stack *stack = seq_stack;
3712 /* Scan all pending sequences too. */
3713 for (; stack; stack = stack->next)
3714 if (after == stack->last)
3716 stack->last = insn;
3717 break;
3720 gcc_assert (stack);
3723 if (!BARRIER_P (after)
3724 && !BARRIER_P (insn)
3725 && (bb = BLOCK_FOR_INSN (after)))
3727 set_block_for_insn (insn, bb);
3728 if (INSN_P (insn))
3729 df_insn_rescan (insn);
3730 /* Should not happen as first in the BB is always
3731 either NOTE or LABEL. */
3732 if (BB_END (bb) == after
3733 /* Avoid clobbering of structure when creating new BB. */
3734 && !BARRIER_P (insn)
3735 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3736 BB_END (bb) = insn;
3739 NEXT_INSN (after) = insn;
3740 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3742 rtx sequence = PATTERN (after);
3743 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3747 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3748 the previous should be the only functions called to insert an insn
3749 once delay slots have been filled since only they know how to
3750 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3751 bb from before. */
3753 void
3754 add_insn_before (rtx insn, rtx before, basic_block bb)
3756 rtx prev = PREV_INSN (before);
3758 gcc_assert (!optimize || !INSN_DELETED_P (before));
3760 PREV_INSN (insn) = prev;
3761 NEXT_INSN (insn) = before;
3763 if (prev)
3765 NEXT_INSN (prev) = insn;
3766 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3768 rtx sequence = PATTERN (prev);
3769 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3772 else if (get_insns () == before)
3773 set_first_insn (insn);
3774 else
3776 struct sequence_stack *stack = seq_stack;
3777 /* Scan all pending sequences too. */
3778 for (; stack; stack = stack->next)
3779 if (before == stack->first)
3781 stack->first = insn;
3782 break;
3785 gcc_assert (stack);
3788 if (!bb
3789 && !BARRIER_P (before)
3790 && !BARRIER_P (insn))
3791 bb = BLOCK_FOR_INSN (before);
3793 if (bb)
3795 set_block_for_insn (insn, bb);
3796 if (INSN_P (insn))
3797 df_insn_rescan (insn);
3798 /* Should not happen as first in the BB is always either NOTE or
3799 LABEL. */
3800 gcc_assert (BB_HEAD (bb) != insn
3801 /* Avoid clobbering of structure when creating new BB. */
3802 || BARRIER_P (insn)
3803 || NOTE_INSN_BASIC_BLOCK_P (insn));
3806 PREV_INSN (before) = insn;
3807 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3808 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3812 /* Replace insn with a deleted instruction note. */
3814 void
3815 set_insn_deleted (rtx insn)
3817 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3818 PUT_CODE (insn, NOTE);
3819 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3823 /* Remove an insn from its doubly-linked list. This function knows how
3824 to handle sequences. */
3825 void
3826 remove_insn (rtx insn)
3828 rtx next = NEXT_INSN (insn);
3829 rtx prev = PREV_INSN (insn);
3830 basic_block bb;
3832 /* Later in the code, the block will be marked dirty. */
3833 df_insn_delete (NULL, INSN_UID (insn));
3835 if (prev)
3837 NEXT_INSN (prev) = next;
3838 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3840 rtx sequence = PATTERN (prev);
3841 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3844 else if (get_insns () == insn)
3846 if (next)
3847 PREV_INSN (next) = NULL;
3848 set_first_insn (next);
3850 else
3852 struct sequence_stack *stack = seq_stack;
3853 /* Scan all pending sequences too. */
3854 for (; stack; stack = stack->next)
3855 if (insn == stack->first)
3857 stack->first = next;
3858 break;
3861 gcc_assert (stack);
3864 if (next)
3866 PREV_INSN (next) = prev;
3867 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3868 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3870 else if (get_last_insn () == insn)
3871 set_last_insn (prev);
3872 else
3874 struct sequence_stack *stack = seq_stack;
3875 /* Scan all pending sequences too. */
3876 for (; stack; stack = stack->next)
3877 if (insn == stack->last)
3879 stack->last = prev;
3880 break;
3883 gcc_assert (stack);
3885 if (!BARRIER_P (insn)
3886 && (bb = BLOCK_FOR_INSN (insn)))
3888 if (INSN_P (insn))
3889 df_set_bb_dirty (bb);
3890 if (BB_HEAD (bb) == insn)
3892 /* Never ever delete the basic block note without deleting whole
3893 basic block. */
3894 gcc_assert (!NOTE_P (insn));
3895 BB_HEAD (bb) = next;
3897 if (BB_END (bb) == insn)
3898 BB_END (bb) = prev;
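/* Sketch (hypothetical helper): remove_insn only unlinks; the
   higher-level delete_insn in cfgrtl.c additionally turns the insn into
   a NOTE_INSN_DELETED so stale pointers stay harmless.  */
static void ATTRIBUTE_UNUSED
example_unlink (rtx insn)
{
  remove_insn (insn);	/* INSN is now detached but not freed */
}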
3902 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3904 void
3905 add_function_usage_to (rtx call_insn, rtx call_fusage)
3907 gcc_assert (call_insn && CALL_P (call_insn));
3909 /* Put the register usage information on the CALL. If there is already
3910 some usage information, put ours at the end. */
3911 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3913 rtx link;
3915 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3916 link = XEXP (link, 1))
3919 XEXP (link, 1) = call_fusage;
3921 else
3922 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3925 /* Delete all insns made since FROM.
3926 FROM becomes the new last instruction. */
3928 void
3929 delete_insns_since (rtx from)
3931 if (from == 0)
3932 set_first_insn (0);
3933 else
3934 NEXT_INSN (from) = 0;
3935 set_last_insn (from);
3938 /* This function is deprecated, please use sequences instead.
3940 Move a consecutive bunch of insns to a different place in the chain.
3941 The insns to be moved are those between FROM and TO.
3942 They are moved to a new position after the insn AFTER.
3943 AFTER must not be FROM or TO or any insn in between.
3945 This function does not know about SEQUENCEs and hence should not be
3946 called after delay-slot filling has been done. */
3948 void
3949 reorder_insns_nobb (rtx from, rtx to, rtx after)
3951 /* Splice this bunch out of where it is now. */
3952 if (PREV_INSN (from))
3953 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3954 if (NEXT_INSN (to))
3955 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3956 if (get_last_insn () == to)
3957 set_last_insn (PREV_INSN (from));
3958 if (get_insns () == from)
3959 set_first_insn (NEXT_INSN (to));
3961 /* Make the new neighbors point to it and it to them. */
3962 if (NEXT_INSN (after))
3963 PREV_INSN (NEXT_INSN (after)) = to;
3965 NEXT_INSN (to) = NEXT_INSN (after);
3966 PREV_INSN (from) = after;
3967 NEXT_INSN (after) = from;
3968 if (after == get_last_insn())
3969 set_last_insn (to);
3972 /* Same as function above, but take care to update BB boundaries. */
3973 void
3974 reorder_insns (rtx from, rtx to, rtx after)
3976 rtx prev = PREV_INSN (from);
3977 basic_block bb, bb2;
3979 reorder_insns_nobb (from, to, after);
3981 if (!BARRIER_P (after)
3982 && (bb = BLOCK_FOR_INSN (after)))
3984 rtx x;
3985 df_set_bb_dirty (bb);
3987 if (!BARRIER_P (from)
3988 && (bb2 = BLOCK_FOR_INSN (from)))
3990 if (BB_END (bb2) == to)
3991 BB_END (bb2) = prev;
3992 df_set_bb_dirty (bb2);
3995 if (BB_END (bb) == after)
3996 BB_END (bb) = to;
3998 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3999 if (!BARRIER_P (x))
4000 df_insn_change_bb (x, bb);
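/* Sketch (hypothetical helper): hoist a single insn so it follows AFTER,
   letting reorder_insns keep BB_END and block membership consistent.  */
static void ATTRIBUTE_UNUSED
example_hoist_insn (rtx insn, rtx after)
{
  reorder_insns (insn, insn, after);
}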
4005 /* Emit insn(s) of given code and pattern
4006 at a specified place within the doubly-linked list.
4008 All of the emit_foo global entry points accept an object
4009 X which is either an insn list or a PATTERN of a single
4010 instruction.
4012 There are thus a few canonical ways to generate code and
4013 emit it at a specific place in the instruction stream. For
4014 example, consider the instruction named SPOT and the fact that
4015 we would like to emit some instructions before SPOT. We might
4016 do it like this:
4018 start_sequence ();
4019 ... emit the new instructions ...
4020 insns_head = get_insns ();
4021 end_sequence ();
4023 emit_insn_before (insns_head, SPOT);
4025 It used to be common to generate SEQUENCE rtl instead, but that
4026 is a relic of the past which no longer occurs. The reason is that
4027 SEQUENCE rtl results in badly fragmented RTL memory since the SEQUENCE
4028 generated would almost certainly die right after it was created. */
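/* A concrete instance of the idiom above (hypothetical helper): emit a
   register copy into a detached sequence, then splice it in before SPOT.  */
static void ATTRIBUTE_UNUSED
example_emit_before_spot (rtx spot, rtx dst, rtx src)
{
  rtx insns_head;

  start_sequence ();
  emit_move_insn (dst, src);
  insns_head = get_insns ();
  end_sequence ();

  emit_insn_before (insns_head, spot);
}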
4030 /* Make X be output before the instruction BEFORE. */
4032 rtx
4033 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4035 rtx last = before;
4036 rtx insn;
4038 gcc_assert (before);
4040 if (x == NULL_RTX)
4041 return last;
4043 switch (GET_CODE (x))
4045 case DEBUG_INSN:
4046 case INSN:
4047 case JUMP_INSN:
4048 case CALL_INSN:
4049 case CODE_LABEL:
4050 case BARRIER:
4051 case NOTE:
4052 insn = x;
4053 while (insn)
4055 rtx next = NEXT_INSN (insn);
4056 add_insn_before (insn, before, bb);
4057 last = insn;
4058 insn = next;
4060 break;
4062 #ifdef ENABLE_RTL_CHECKING
4063 case SEQUENCE:
4064 gcc_unreachable ();
4065 break;
4066 #endif
4068 default:
4069 last = make_insn_raw (x);
4070 add_insn_before (last, before, bb);
4071 break;
4074 return last;
4077 /* Make an instruction with body X and code JUMP_INSN
4078 and output it before the instruction BEFORE. */
4080 rtx
4081 emit_jump_insn_before_noloc (rtx x, rtx before)
4083 rtx insn, last = NULL_RTX;
4085 gcc_assert (before);
4087 switch (GET_CODE (x))
4089 case DEBUG_INSN:
4090 case INSN:
4091 case JUMP_INSN:
4092 case CALL_INSN:
4093 case CODE_LABEL:
4094 case BARRIER:
4095 case NOTE:
4096 insn = x;
4097 while (insn)
4099 rtx next = NEXT_INSN (insn);
4100 add_insn_before (insn, before, NULL);
4101 last = insn;
4102 insn = next;
4104 break;
4106 #ifdef ENABLE_RTL_CHECKING
4107 case SEQUENCE:
4108 gcc_unreachable ();
4109 break;
4110 #endif
4112 default:
4113 last = make_jump_insn_raw (x);
4114 add_insn_before (last, before, NULL);
4115 break;
4118 return last;
4121 /* Make an instruction with body X and code CALL_INSN
4122 and output it before the instruction BEFORE. */
4124 rtx
4125 emit_call_insn_before_noloc (rtx x, rtx before)
4127 rtx last = NULL_RTX, insn;
4129 gcc_assert (before);
4131 switch (GET_CODE (x))
4133 case DEBUG_INSN:
4134 case INSN:
4135 case JUMP_INSN:
4136 case CALL_INSN:
4137 case CODE_LABEL:
4138 case BARRIER:
4139 case NOTE:
4140 insn = x;
4141 while (insn)
4143 rtx next = NEXT_INSN (insn);
4144 add_insn_before (insn, before, NULL);
4145 last = insn;
4146 insn = next;
4148 break;
4150 #ifdef ENABLE_RTL_CHECKING
4151 case SEQUENCE:
4152 gcc_unreachable ();
4153 break;
4154 #endif
4156 default:
4157 last = make_call_insn_raw (x);
4158 add_insn_before (last, before, NULL);
4159 break;
4162 return last;
4165 /* Make an instruction with body X and code DEBUG_INSN
4166 and output it before the instruction BEFORE. */
4168 rtx
4169 emit_debug_insn_before_noloc (rtx x, rtx before)
4171 rtx last = NULL_RTX, insn;
4173 gcc_assert (before);
4175 switch (GET_CODE (x))
4177 case DEBUG_INSN:
4178 case INSN:
4179 case JUMP_INSN:
4180 case CALL_INSN:
4181 case CODE_LABEL:
4182 case BARRIER:
4183 case NOTE:
4184 insn = x;
4185 while (insn)
4187 rtx next = NEXT_INSN (insn);
4188 add_insn_before (insn, before, NULL);
4189 last = insn;
4190 insn = next;
4192 break;
4194 #ifdef ENABLE_RTL_CHECKING
4195 case SEQUENCE:
4196 gcc_unreachable ();
4197 break;
4198 #endif
4200 default:
4201 last = make_debug_insn_raw (x);
4202 add_insn_before (last, before, NULL);
4203 break;
4206 return last;
4209 /* Make an insn of code BARRIER
4210 and output it before the insn BEFORE. */
4212 rtx
4213 emit_barrier_before (rtx before)
4215 rtx insn = rtx_alloc (BARRIER);
4217 INSN_UID (insn) = cur_insn_uid++;
4219 add_insn_before (insn, before, NULL);
4220 return insn;
4223 /* Emit the label LABEL before the insn BEFORE. */
4225 rtx
4226 emit_label_before (rtx label, rtx before)
4228 /* This can be called twice for the same label as a result of the
4229 confusion that follows a syntax error! So make it harmless. */
4230 if (INSN_UID (label) == 0)
4232 INSN_UID (label) = cur_insn_uid++;
4233 add_insn_before (label, before, NULL);
4236 return label;
4239 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4241 rtx
4242 emit_note_before (enum insn_note subtype, rtx before)
4244 rtx note = rtx_alloc (NOTE);
4245 INSN_UID (note) = cur_insn_uid++;
4246 NOTE_KIND (note) = subtype;
4247 BLOCK_FOR_INSN (note) = NULL;
4248 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4250 add_insn_before (note, before, NULL);
4251 return note;
4254 /* Helper for emit_insn_after, handles lists of instructions
4255 efficiently. */
4257 static rtx
4258 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4260 rtx last;
4261 rtx after_after;
4262 if (!bb && !BARRIER_P (after))
4263 bb = BLOCK_FOR_INSN (after);
4265 if (bb)
4267 df_set_bb_dirty (bb);
4268 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4269 if (!BARRIER_P (last))
4271 set_block_for_insn (last, bb);
4272 df_insn_rescan (last);
4274 if (!BARRIER_P (last))
4276 set_block_for_insn (last, bb);
4277 df_insn_rescan (last);
4279 if (BB_END (bb) == after)
4280 BB_END (bb) = last;
4282 else
4283 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4284 continue;
4286 after_after = NEXT_INSN (after);
4288 NEXT_INSN (after) = first;
4289 PREV_INSN (first) = after;
4290 NEXT_INSN (last) = after_after;
4291 if (after_after)
4292 PREV_INSN (after_after) = last;
4294 if (after == get_last_insn())
4295 set_last_insn (last);
4297 return last;
4300 /* Make X be output after the insn AFTER and set the BB of insn. If
4301 BB is NULL, an attempt is made to infer the BB from AFTER. */
4304 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4306 rtx last = after;
4308 gcc_assert (after);
4310 if (x == NULL_RTX)
4311 return last;
4313 switch (GET_CODE (x))
4315 case DEBUG_INSN:
4316 case INSN:
4317 case JUMP_INSN:
4318 case CALL_INSN:
4319 case CODE_LABEL:
4320 case BARRIER:
4321 case NOTE:
4322 last = emit_insn_after_1 (x, after, bb);
4323 break;
4325 #ifdef ENABLE_RTL_CHECKING
4326 case SEQUENCE:
4327 gcc_unreachable ();
4328 break;
4329 #endif
4331 default:
4332 last = make_insn_raw (x);
4333 add_insn_after (last, after, bb);
4334 break;
4337 return last;
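/* For illustration, a pass holding an insn INSN that lives inside a
   basic block could append a register copy right after it like so
   (a sketch; REG1 and REG2 stand for caller-provided REG rtxen):

       emit_insn_after_noloc (gen_rtx_SET (VOIDmode, reg1, reg2),
                              insn, NULL);

   Passing NULL for BB makes the code above recover the block from
   INSN via BLOCK_FOR_INSN and move BB_END if INSN was the block's
   last insn.  */
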
/* Make an insn of code JUMP_INSN with body X
   and output it after the insn AFTER.  */

rtx
emit_jump_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code CALL_INSN
   and output it after the instruction AFTER.  */

rtx
emit_call_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_call_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an instruction with body X and code DEBUG_INSN
   and output it after the instruction AFTER.  */

rtx
emit_debug_insn_after_noloc (rtx x, rtx after)
{
  rtx last;

  gcc_assert (after);

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      last = emit_insn_after_1 (x, after, NULL);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn_after (last, after, NULL);
      break;
    }

  return last;
}

/* Make an insn of code BARRIER
   and output it after the insn AFTER.  */

rtx
emit_barrier_after (rtx after)
{
  rtx insn = rtx_alloc (BARRIER);

  INSN_UID (insn) = cur_insn_uid++;

  add_insn_after (insn, after, NULL);
  return insn;
}

/* Emit the label LABEL after the insn AFTER.  */

rtx
emit_label_after (rtx label, rtx after)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn_after (label, after, NULL);
    }

  return label;
}

/* Emit a note of subtype SUBTYPE after the insn AFTER.  */

rtx
emit_note_after (enum insn_note subtype, rtx after)
{
  rtx note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = subtype;
  BLOCK_FOR_INSN (note) = NULL;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  add_insn_after (note, after, NULL);
  return note;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_insn_after_noloc (pattern, after, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_insn_after_noloc (pattern, after, NULL);
}

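/* Of the two variants above, emit_insn_after is the one most callers
   want: it walks back past any debug insns and, if it finds a real
   insn, propagates that insn's INSN_LOCATOR onto every new active
   insn, so diagnostics and debug info keep a sensible source
   position.  A sketch, where INSN and PAT stand for a caller's insn
   and pattern:

       rtx new_insn = emit_insn_after (pat, insn);

   emit_insn_after_noloc is for the rare case where no meaningful
   location exists, e.g. when emitting bookkeeping code that belongs
   to no particular statement.  */
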
/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_jump_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_jump_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_jump_insn_after_noloc (pattern, after);
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_call_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_call_insn_after (rtx pattern, rtx after)
{
  rtx prev = after;

  while (DEBUG_INSN_P (prev))
    prev = PREV_INSN (prev);

  if (INSN_P (prev))
    return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
  else
    return emit_call_insn_after_noloc (pattern, after);
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
{
  rtx last = emit_debug_insn_after_noloc (pattern, after);

  if (pattern == NULL_RTX || !loc)
    return last;

  after = NEXT_INSN (after);
  while (1)
    {
      if (active_insn_p (after) && !INSN_LOCATOR (after))
        INSN_LOCATOR (after) = loc;
      if (after == last)
        break;
      after = NEXT_INSN (after);
    }
  return last;
}

/* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER.  */
rtx
emit_debug_insn_after (rtx pattern, rtx after)
{
  if (INSN_P (after))
    return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
  else
    return emit_debug_insn_after_noloc (pattern, after);
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_insn_before_noloc (pattern, before, NULL);

  if (pattern == NULL_RTX || !loc)
    return last;

  if (!first)
    first = get_insns ();
  else
    first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_insn_before_noloc (pattern, before, NULL);
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_jump_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_jump_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_jump_insn_before_noloc (pattern, before);
}

/* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_call_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_call_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_call_insn_before (rtx pattern, rtx before)
{
  rtx next = before;

  while (DEBUG_INSN_P (next))
    next = PREV_INSN (next);

  if (INSN_P (next))
    return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
  else
    return emit_call_insn_before_noloc (pattern, before);
}

/* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to LOC.  */
rtx
emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
{
  rtx first = PREV_INSN (before);
  rtx last = emit_debug_insn_before_noloc (pattern, before);

  if (pattern == NULL_RTX)
    return last;

  first = NEXT_INSN (first);
  while (1)
    {
      if (active_insn_p (first) && !INSN_LOCATOR (first))
        INSN_LOCATOR (first) = loc;
      if (first == last)
        break;
      first = NEXT_INSN (first);
    }
  return last;
}

/* Like emit_debug_insn_before_noloc,
   but set INSN_LOCATOR according to BEFORE.  */
rtx
emit_debug_insn_before (rtx pattern, rtx before)
{
  if (INSN_P (before))
    return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
  else
    return emit_debug_insn_before_noloc (pattern, before);
}

/* Take X and emit it at the end of the doubly-linked
   INSN list.

   Returns the last insn emitted.  */

rtx
emit_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

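/* As the switch above shows, emit_insn accepts either a bare pattern,
   which gets wrapped in a fresh INSN via make_insn_raw, or a chain of
   already-made insns (e.g. the result of get_insns on a completed
   sequence), which is spliced onto the end of the stream insn by
   insn.  A sketch, with REG and VAL standing for caller-provided
   rtxen:

       emit_insn (gen_rtx_SET (VOIDmode, reg, val));
       emit_insn (get_insns ());

   The first call wraps; the second splices.  */
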
/* Make an insn of code DEBUG_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_debug_insn (rtx x)
{
  rtx last = get_last_insn ();
  rtx insn;

  if (x == NULL_RTX)
    return last;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_debug_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code JUMP_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_jump_insn (rtx x)
{
  rtx last = NULL_RTX, insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = x;
      while (insn)
        {
          rtx next = NEXT_INSN (insn);
          add_insn (insn);
          last = insn;
          insn = next;
        }
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      last = make_jump_insn_raw (x);
      add_insn (last);
      break;
    }

  return last;
}

/* Make an insn of code CALL_INSN with pattern X
   and add it to the end of the doubly-linked list.  */

rtx
emit_call_insn (rtx x)
{
  rtx insn;

  switch (GET_CODE (x))
    {
    case DEBUG_INSN:
    case INSN:
    case JUMP_INSN:
    case CALL_INSN:
    case CODE_LABEL:
    case BARRIER:
    case NOTE:
      insn = emit_insn (x);
      break;

#ifdef ENABLE_RTL_CHECKING
    case SEQUENCE:
      gcc_unreachable ();
      break;
#endif

    default:
      insn = make_call_insn_raw (x);
      add_insn (insn);
      break;
    }

  return insn;
}

/* Add the label LABEL to the end of the doubly-linked list.  */

rtx
emit_label (rtx label)
{
  /* This can be called twice for the same label
     as a result of the confusion that follows a syntax error!
     So make it harmless.  */
  if (INSN_UID (label) == 0)
    {
      INSN_UID (label) = cur_insn_uid++;
      add_insn (label);
    }
  return label;
}

/* Make an insn of code BARRIER
   and add it to the end of the doubly-linked list.  */

rtx
emit_barrier (void)
{
  rtx barrier = rtx_alloc (BARRIER);
  INSN_UID (barrier) = cur_insn_uid++;
  add_insn (barrier);
  return barrier;
}

/* Emit a copy of note ORIG.  */

rtx
emit_note_copy (rtx orig)
{
  rtx note;

  note = rtx_alloc (NOTE);

  INSN_UID (note) = cur_insn_uid++;
  NOTE_DATA (note) = NOTE_DATA (orig);
  NOTE_KIND (note) = NOTE_KIND (orig);
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);

  return note;
}

/* Make an insn of code NOTE with kind KIND
   and add it to the end of the doubly-linked list.  */

rtx
emit_note (enum insn_note kind)
{
  rtx note;

  note = rtx_alloc (NOTE);
  INSN_UID (note) = cur_insn_uid++;
  NOTE_KIND (note) = kind;
  memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
  BLOCK_FOR_INSN (note) = NULL;
  add_insn (note);
  return note;
}

/* Emit a clobber of lvalue X.  */

rtx
emit_clobber (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_clobber (XEXP (x, 0));
      return emit_clobber (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
}

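/* A CONCAT stands for a complex value living in a pair of registers,
   so the recursion above turns one logical clobber into two.  As a
   sketch, if X were (concat:SC (reg:SF 90) (reg:SF 91)), emit_clobber
   would emit (clobber (reg:SF 90)) and then (clobber (reg:SF 91)).  */
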
/* Return a sequence of insns to clobber lvalue X.  */

rtx
gen_clobber (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_clobber (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Emit a use of rvalue X.  */

rtx
emit_use (rtx x)
{
  /* CONCATs should not appear in the insn stream.  */
  if (GET_CODE (x) == CONCAT)
    {
      emit_use (XEXP (x, 0));
      return emit_use (XEXP (x, 1));
    }
  return emit_insn (gen_rtx_USE (VOIDmode, x));
}

/* Return a sequence of insns to use rvalue X.  */

rtx
gen_use (rtx x)
{
  rtx seq;

  start_sequence ();
  emit_use (x);
  seq = get_insns ();
  end_sequence ();
  return seq;
}

/* Cause next statement to emit a line note even if the line number
   has not changed.  */

void
force_next_line_note (void)
{
  last_location = -1;
}

/* Place a note of KIND on insn INSN with DATUM as the datum.  If a
   note of this type already exists, remove it first.  */

rtx
set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
{
  rtx note = find_reg_note (insn, kind, NULL_RTX);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
         has multiple sets (some callers assume single_set
         means the insn only has one set, when in fact it
         means the insn only has one *useful* set).  */
      if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
        {
          gcc_assert (!note);
          return NULL_RTX;
        }

      /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
         It serves no useful purpose and breaks eliminate_regs.  */
      if (GET_CODE (datum) == ASM_OPERANDS)
        return NULL_RTX;

      if (note)
        {
          XEXP (note, 0) = datum;
          df_notes_rescan (insn);
          return note;
        }
      break;

    default:
      if (note)
        {
          XEXP (note, 0) = datum;
          return note;
        }
      break;
    }

  add_reg_note (insn, kind, datum);

  switch (kind)
    {
    case REG_EQUAL:
    case REG_EQUIV:
      df_notes_rescan (insn);
      break;
    default:
      break;
    }

  return REG_NOTES (insn);
}

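/* For example, a pass that has just proved the source of a single-set
   insn INSN equal to the constant 42 might record that fact for later
   optimizers with (a sketch; INSN is the caller's insn):

       set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));

   Any previous REG_EQUAL note on INSN is reused rather than
   duplicated, which keeps at most one note of each kind per insn.  */
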
/* Return an indication of which type of insn should have X as a body.
   The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN.  */

static enum rtx_code
classify_insn (rtx x)
{
  if (LABEL_P (x))
    return CODE_LABEL;
  if (GET_CODE (x) == CALL)
    return CALL_INSN;
  if (GET_CODE (x) == RETURN)
    return JUMP_INSN;
  if (GET_CODE (x) == SET)
    {
      if (SET_DEST (x) == pc_rtx)
        return JUMP_INSN;
      else if (GET_CODE (SET_SRC (x)) == CALL)
        return CALL_INSN;
      else
        return INSN;
    }
  if (GET_CODE (x) == PARALLEL)
    {
      int j;
      for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
        if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
          return CALL_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
          return JUMP_INSN;
        else if (GET_CODE (XVECEXP (x, 0, j)) == SET
                 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
          return CALL_INSN;
    }
  return INSN;
}

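/* A few sketches of the classification: (set (reg:SI 60)
   (const_int 1)) yields INSN; (set (pc) (label_ref 23)) yields
   JUMP_INSN because its SET_DEST is pc_rtx; and a PARALLEL containing
   (set (reg:SI 60) (call ...)) yields CALL_INSN.  */
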
/* Emit the rtl pattern X as an appropriate kind of insn.
   If X is a label, it is simply added into the insn chain.  */

rtx
emit (rtx x)
{
  enum rtx_code code = classify_insn (x);

  switch (code)
    {
    case CODE_LABEL:
      return emit_label (x);
    case INSN:
      return emit_insn (x);
    case JUMP_INSN:
      {
        rtx insn = emit_jump_insn (x);
        if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
          return emit_barrier ();
        return insn;
      }
    case CALL_INSN:
      return emit_call_insn (x);
    case DEBUG_INSN:
      return emit_debug_insn (x);
    default:
      gcc_unreachable ();
    }
}

/* Space for free sequence stack entries.  */
static GTY ((deletable)) struct sequence_stack *free_sequence_stack;

/* Begin emitting insns to a sequence.  If this sequence will contain
   something that might cause the compiler to pop arguments to function
   calls (because those pops have previously been deferred; see
   INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
   before calling this function.  That will ensure that the deferred
   pops are not accidentally emitted in the middle of this sequence.  */

void
start_sequence (void)
{
  struct sequence_stack *tem;

  if (free_sequence_stack != NULL)
    {
      tem = free_sequence_stack;
      free_sequence_stack = tem->next;
    }
  else
    tem = ggc_alloc_sequence_stack ();

  tem->next = seq_stack;
  tem->first = get_insns ();
  tem->last = get_last_insn ();

  seq_stack = tem;

  set_first_insn (0);
  set_last_insn (0);
}

/* Set up the insn chain starting with FIRST as the current sequence,
   saving the previously current one.  See the documentation for
   start_sequence for more information about how to use this function.  */

void
push_to_sequence (rtx first)
{
  rtx last;

  start_sequence ();

  for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));

  set_first_insn (first);
  set_last_insn (last);
}

/* Like push_to_sequence, but take the last insn as an argument to avoid
   looping through the list.  */

void
push_to_sequence2 (rtx first, rtx last)
{
  start_sequence ();

  set_first_insn (first);
  set_last_insn (last);
}

/* Set up the outer-level insn chain
   as the current sequence, saving the previously current one.  */

void
push_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  start_sequence ();

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  set_first_insn (top->first);
  set_last_insn (top->last);
}

/* After emitting to the outer-level insn chain, update the outer-level
   insn chain, and restore the previous saved state.  */

void
pop_topmost_sequence (void)
{
  struct sequence_stack *stack, *top = NULL;

  for (stack = seq_stack; stack; stack = stack->next)
    top = stack;

  top->first = get_insns ();
  top->last = get_last_insn ();

  end_sequence ();
}

/* After emitting to a sequence, restore previous saved state.

   To get the contents of the sequence just made, you must call
   `get_insns' *before* calling here.

   If the compiler might have deferred popping arguments while
   generating this sequence, and this sequence will not be immediately
   inserted into the instruction stream, use do_pending_stack_adjust
   before calling get_insns.  That will ensure that the deferred
   pops are inserted into this sequence, and not into some random
   location in the instruction stream.  See INHIBIT_DEFER_POP for more
   information about deferred popping of arguments.  */

void
end_sequence (void)
{
  struct sequence_stack *tem = seq_stack;

  set_first_insn (tem->first);
  set_last_insn (tem->last);
  seq_stack = tem->next;

  memset (tem, 0, sizeof (*tem));
  tem->next = free_sequence_stack;
  free_sequence_stack = tem;
}

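/* The canonical use of the sequence machinery, as a sketch (REG and
   VAL stand for caller-provided rtxen, INSN for an existing insn):

       rtx seq;

       start_sequence ();
       emit_insn (gen_rtx_SET (VOIDmode, reg, val));
       seq = get_insns ();
       end_sequence ();
       emit_insn_before (seq, insn);

   Note that get_insns is called before end_sequence, exactly as the
   comment above requires, and that the detached chain only becomes
   part of the function's insn stream at the final emit.  */
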
/* Return 1 if currently emitting into a sequence.  */

int
in_sequence_p (void)
{
  return seq_stack != 0;
}

/* Put the various virtual registers into REGNO_REG_RTX.  */

static void
init_virtual_regs (void)
{
  regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
  regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
  regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
  regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
  regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
}


/* Used by copy_insn_1 to avoid copying SCRATCHes more than once.  */
static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
static int copy_insn_n_scratches;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the original input-operand vector.  */
static rtvec orig_asm_operands_vector;

/* When an insn is being copied by copy_insn_1, this is nonzero if we have
   copied an ASM_OPERANDS.
   In that case, it is the copied input-operand vector.  */
static rtvec copy_asm_operands_vector;

/* Likewise for the constraints vector.  */
static rtvec orig_asm_constraints_vector;
static rtvec copy_asm_constraints_vector;

/* Recursively create a new copy of an rtx for copy_insn.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   Normally, this function is not used directly; use copy_insn as front end.
   However, you could first copy an insn pattern with copy_insn and then use
   this function afterwards to properly copy any REG_NOTEs containing
   SCRATCHes.  */

rtx
copy_insn_1 (rtx orig)
{
  rtx copy;
  int i, j;
  RTX_CODE code;
  const char *format_ptr;

  if (orig == NULL)
    return NULL;

  code = GET_CODE (orig);

  switch (code)
    {
    case REG:
    case CONST_INT:
    case CONST_DOUBLE:
    case CONST_FIXED:
    case CONST_VECTOR:
    case SYMBOL_REF:
    case CODE_LABEL:
    case PC:
    case CC0:
      return orig;
    case CLOBBER:
      if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
        return orig;
      break;

    case SCRATCH:
      for (i = 0; i < copy_insn_n_scratches; i++)
        if (copy_insn_scratch_in[i] == orig)
          return copy_insn_scratch_out[i];
      break;

    case CONST:
      if (shared_const_p (orig))
        return orig;
      break;

      /* A MEM with a constant address is not sharable.  The problem is that
         the constant address may need to be reloaded.  If the mem is shared,
         then reloading one copy of this mem will cause all copies to appear
         to have been reloaded.  */

    default:
      break;
    }

  /* Copy the various flags, fields, and other information.  We assume
     that all fields need copying, and then clear the fields that should
     not be copied.  That is the sensible default behavior, and forces
     us to explicitly document why we are *not* copying a flag.  */
  copy = shallow_copy_rtx (orig);

  /* We do not copy the USED flag, which is used as a mark bit during
     walks over the RTL.  */
  RTX_FLAG (copy, used) = 0;

  /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs.  */
  if (INSN_P (orig))
    {
      RTX_FLAG (copy, jump) = 0;
      RTX_FLAG (copy, call) = 0;
      RTX_FLAG (copy, frame_related) = 0;
    }

  format_ptr = GET_RTX_FORMAT (GET_CODE (copy));

  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        if (XEXP (orig, i) != NULL)
          XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
        break;

      case 'E':
      case 'V':
        if (XVEC (orig, i) == orig_asm_constraints_vector)
          XVEC (copy, i) = copy_asm_constraints_vector;
        else if (XVEC (orig, i) == orig_asm_operands_vector)
          XVEC (copy, i) = copy_asm_operands_vector;
        else if (XVEC (orig, i) != NULL)
          {
            XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
            for (j = 0; j < XVECLEN (copy, i); j++)
              XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
          }
        break;

      case 't':
      case 'w':
      case 'i':
      case 's':
      case 'S':
      case 'u':
      case '0':
        /* These are left unchanged.  */
        break;

      default:
        gcc_unreachable ();
      }

  if (code == SCRATCH)
    {
      i = copy_insn_n_scratches++;
      gcc_assert (i < MAX_RECOG_OPERANDS);
      copy_insn_scratch_in[i] = orig;
      copy_insn_scratch_out[i] = copy;
    }
  else if (code == ASM_OPERANDS)
    {
      orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
      copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
      orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
      copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
    }

  return copy;
}

/* Create a new copy of an rtx.
   This function differs from copy_rtx in that it handles SCRATCHes and
   ASM_OPERANDs properly.
   INSN doesn't really have to be a full INSN; it could be just the
   pattern.  */

rtx
copy_insn (rtx insn)
{
  copy_insn_n_scratches = 0;
  orig_asm_operands_vector = 0;
  orig_asm_constraints_vector = 0;
  copy_asm_operands_vector = 0;
  copy_asm_constraints_vector = 0;
  return copy_insn_1 (insn);
}

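/* The protocol described above copy_insn_1 looks like this in
   practice (a sketch; INSN is a caller's insn and LINK one of its
   REG_NOTES):

       pat = copy_insn (PATTERN (insn));
       ...
       datum = copy_insn_1 (XEXP (link, 0));

   copy_insn resets the scratch and ASM_OPERANDS state first; the
   later copy_insn_1 calls then map any SCRATCH appearing in a note to
   the copy already made for the pattern.  emit_copy_of_insn_after
   near the end of this file follows exactly this protocol.  */
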
/* Initialize data structures and variables in this file
   before generating rtl for each function.  */

void
init_emit (void)
{
  set_first_insn (NULL);
  set_last_insn (NULL);
  if (MIN_NONDEBUG_INSN_UID)
    cur_insn_uid = MIN_NONDEBUG_INSN_UID;
  else
    cur_insn_uid = 1;
  cur_debug_insn_uid = 1;
  reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
  last_location = UNKNOWN_LOCATION;
  first_label_num = label_num;
  seq_stack = NULL;

  /* Init the tables that describe all the pseudo regs.  */

  crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;

  crtl->emit.regno_pointer_align
    = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);

  regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);

  /* Put copies of all the hard registers into regno_reg_rtx.  */
  memcpy (regno_reg_rtx,
          initial_regno_reg_rtx,
          FIRST_PSEUDO_REGISTER * sizeof (rtx));

  /* Put copies of all the virtual register rtx into regno_reg_rtx.  */
  init_virtual_regs ();

  /* Indicate that the virtual registers and stack locations are
     all pointers.  */
  REG_POINTER (stack_pointer_rtx) = 1;
  REG_POINTER (frame_pointer_rtx) = 1;
  REG_POINTER (hard_frame_pointer_rtx) = 1;
  REG_POINTER (arg_pointer_rtx) = 1;

  REG_POINTER (virtual_incoming_args_rtx) = 1;
  REG_POINTER (virtual_stack_vars_rtx) = 1;
  REG_POINTER (virtual_stack_dynamic_rtx) = 1;
  REG_POINTER (virtual_outgoing_args_rtx) = 1;
  REG_POINTER (virtual_cfa_rtx) = 1;

#ifdef STACK_BOUNDARY
  REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;

  REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
  REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
#endif

#ifdef INIT_EXPANDERS
  INIT_EXPANDERS;
#endif
}

/* Generate a vector constant for mode MODE and constant value CONSTANT.  */

static rtx
gen_const_vector (enum machine_mode mode, int constant)
{
  rtx tem;
  rtvec v;
  int units, i;
  enum machine_mode inner;

  units = GET_MODE_NUNITS (mode);
  inner = GET_MODE_INNER (mode);

  gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));

  v = rtvec_alloc (units);

  /* We need to call this function after we set the scalar const_tiny_rtx
     entries.  */
  gcc_assert (const_tiny_rtx[constant][(int) inner]);

  for (i = 0; i < units; ++i)
    RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];

  tem = gen_rtx_raw_CONST_VECTOR (mode, v);
  return tem;
}

/* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector
   when all elements are zero, and the one vector when all elements are
   one.  */
rtx
gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
{
  enum machine_mode inner = GET_MODE_INNER (mode);
  int nunits = GET_MODE_NUNITS (mode);
  rtx x;
  int i;

  /* Check to see if all of the elements have the same value.  */
  x = RTVEC_ELT (v, nunits - 1);
  for (i = nunits - 2; i >= 0; i--)
    if (RTVEC_ELT (v, i) != x)
      break;

  /* If the values are all the same, check to see if we can use one of the
     standard constant vectors.  */
  if (i == -1)
    {
      if (x == CONST0_RTX (inner))
        return CONST0_RTX (mode);
      else if (x == CONST1_RTX (inner))
        return CONST1_RTX (mode);
    }

  return gen_rtx_raw_CONST_VECTOR (mode, v);
}

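/* So building an all-zero vector hands back the shared constant, as a
   sketch (for a target where V4SImode exists):

       rtvec v = rtvec_alloc (4);
       int i;
       for (i = 0; i < 4; i++)
         RTVEC_ELT (v, i) = const0_rtx;
       gcc_assert (gen_rtx_CONST_VECTOR (V4SImode, v)
                   == CONST0_RTX (V4SImode));

   Sharing these singletons lets later passes compare vector constants
   with simple pointer equality.  */
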
/* Initialize global register information required by all functions.  */

void
init_emit_regs (void)
{
  int i;

  /* Reset register attributes.  */
  htab_empty (reg_attrs_htab);

  /* We need reg_raw_mode, so initialize the modes now.  */
  init_reg_modes_target ();

  /* Assign register numbers to the globally defined register rtx.  */
  pc_rtx = gen_rtx_PC (VOIDmode);
  cc0_rtx = gen_rtx_CC0 (VOIDmode);
  stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
  frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
  hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
  arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
  virtual_incoming_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
  virtual_stack_vars_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
  virtual_stack_dynamic_rtx =
    gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
  virtual_outgoing_args_rtx =
    gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
  virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);

  /* Initialize RTL for commonly used hard registers.  These are
     copied into regno_reg_rtx as we begin to compile each function.  */
  for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
    initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);

#ifdef RETURN_ADDRESS_POINTER_REGNUM
  return_address_pointer_rtx
    = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
#endif

  if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
    pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
  else
    pic_offset_table_rtx = NULL_RTX;
}

/* Create some permanent unique rtl objects shared between all functions.  */

void
init_emit_once (void)
{
  int i;
  enum machine_mode mode;
  enum machine_mode double_mode;

  /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
     hash tables.  */
  const_int_htab = htab_create_ggc (37, const_int_htab_hash,
                                    const_int_htab_eq, NULL);

  const_double_htab = htab_create_ggc (37, const_double_htab_hash,
                                       const_double_htab_eq, NULL);

  const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
                                      const_fixed_htab_eq, NULL);

  mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
                                    mem_attrs_htab_eq, NULL);
  reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
                                    reg_attrs_htab_eq, NULL);

  /* Compute the word and byte modes.  */

  byte_mode = VOIDmode;
  word_mode = VOIDmode;
  double_mode = VOIDmode;

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
          && byte_mode == VOIDmode)
        byte_mode = mode;

      if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
          && word_mode == VOIDmode)
        word_mode = mode;
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
          && double_mode == VOIDmode)
        double_mode = mode;
    }

  ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);

#ifdef INIT_EXPANDERS
  /* This is to initialize {init|mark|free}_machine_status before the first
     call to push_function_context_to.  This is needed by the Chill front
     end which calls push_function_context_to before the first call to
     init_function_start.  */
  INIT_EXPANDERS;
#endif

  /* Create the unique rtx's for certain rtx codes and operand values.  */

  /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
     tries to use these variables.  */
  for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
    const_int_rtx[i + MAX_SAVED_CONST_INT] =
      gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);

  if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
      && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
    const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
  else
    const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);

  REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
  REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);

  dconstm1 = dconst1;
  dconstm1.sign = 1;

  dconsthalf = dconst1;
  SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);

  for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
    {
      const REAL_VALUE_TYPE *const r =
        (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] =
          CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);

      const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);

      for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
           mode != VOIDmode;
           mode = GET_MODE_WIDER_MODE (mode))
        const_tiny_rtx[i][(int) mode] = GEN_INT (i);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      rtx inner = const_tiny_rtx[0][(int) GET_MODE_INNER (mode)];
      const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      FCONST0 (mode).data.high = 0;
      FCONST0 (mode).data.low = 0;
      FCONST0 (mode).mode = mode;
      const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST0 (mode), mode);

      /* We store the value 1.  */
      FCONST1 (mode).data.high = 0;
      FCONST1 (mode).data.low = 0;
      FCONST1 (mode).mode = mode;
      lshift_double (1, 0, GET_MODE_FBIT (mode),
                     2 * HOST_BITS_PER_WIDE_INT,
                     &FCONST1 (mode).data.low,
                     &FCONST1 (mode).data.high,
                     SIGNED_FIXED_POINT_MODE_P (mode));
      const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
                                      FCONST1 (mode), mode);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
       mode != VOIDmode;
       mode = GET_MODE_WIDER_MODE (mode))
    {
      const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
      const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
    }

  for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
    if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
      const_tiny_rtx[0][i] = const0_rtx;

  const_tiny_rtx[0][(int) BImode] = const0_rtx;
  if (STORE_FLAG_VALUE == 1)
    const_tiny_rtx[1][(int) BImode] = const1_rtx;
}

/* Produce an exact duplicate of insn INSN after AFTER, taking care to
   update any libcall regions if present.  */

rtx
emit_copy_of_insn_after (rtx insn, rtx after)
{
  rtx new_rtx, link;

  switch (GET_CODE (insn))
    {
    case INSN:
      new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case JUMP_INSN:
      new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case DEBUG_INSN:
      new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
      break;

    case CALL_INSN:
      new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
      if (CALL_INSN_FUNCTION_USAGE (insn))
        CALL_INSN_FUNCTION_USAGE (new_rtx)
          = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
      SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
      RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
      RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
      RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
        = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
      break;

    default:
      gcc_unreachable ();
    }

  /* Update LABEL_NUSES.  */
  mark_jump_label (PATTERN (new_rtx), new_rtx, 0);

  INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);

  /* If the old insn is frame related, then so is the new one.  This is
     primarily needed for IA-64 unwind info which marks epilogue insns,
     which may be duplicated by the basic block reordering code.  */
  RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);

  /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
     will make them.  REG_LABEL_TARGETs are created there too, but are
     supposed to be sticky, so we copy them.  */
  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
      {
        if (GET_CODE (link) == EXPR_LIST)
          add_reg_note (new_rtx, REG_NOTE_KIND (link),
                        copy_insn_1 (XEXP (link, 0)));
        else
          add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
      }

  INSN_CODE (new_rtx) = INSN_CODE (insn);
  return new_rtx;
}

static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];

rtx
gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
{
  if (hard_reg_clobbers[mode][regno])
    return hard_reg_clobbers[mode][regno];
  else
    return (hard_reg_clobbers[mode][regno] =
            gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
}

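/* Since the table is indexed by (mode, regno), repeated requests hand
   back the identical rtx, as a sketch (assuming hard register 0 is
   valid in word_mode on the target):

       rtx c1 = gen_hard_reg_clobber (word_mode, 0);
       rtx c2 = gen_hard_reg_clobber (word_mode, 0);
       gcc_assert (c1 == c2);

   The GTY((deletable)) marker lets the garbage collector drop the
   cache between compilations; entries are simply rebuilt on demand.  */
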
#include "gt-emit-rtl.h"