clean up and rename beginnings of a testsuite
[official-gcc.git] / gcc / emit-rtl.c
blob 267d63462f0a112a9f38e443f88fd5f8d88348fc
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010
5 Free Software Foundation, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* Middle-to-low level generation of rtx code and insns.
26 This file contains support functions for creating rtl expressions
27 and manipulating them in the doubly-linked chain of insns.
29 The patterns of the insns are created by machine-dependent
30 routines in insn-emit.c, which is generated automatically from
31 the machine description. These routines make the individual rtx's
32 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
33 which are automatically generated from rtl.def; what is machine
34 dependent is the kind of rtx's they make and what arguments they
35 use. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "diagnostic-core.h"
42 #include "toplev.h"
43 #include "rtl.h"
44 #include "tree.h"
45 #include "tm_p.h"
46 #include "flags.h"
47 #include "function.h"
48 #include "expr.h"
49 #include "regs.h"
50 #include "hard-reg-set.h"
51 #include "hashtab.h"
52 #include "insn-config.h"
53 #include "recog.h"
54 #include "bitmap.h"
55 #include "basic-block.h"
56 #include "ggc.h"
57 #include "debug.h"
58 #include "langhooks.h"
59 #include "tree-pass.h"
60 #include "df.h"
61 #include "params.h"
62 #include "target.h"
64 struct target_rtl default_target_rtl;
65 #if SWITCHABLE_TARGET
66 struct target_rtl *this_target_rtl = &default_target_rtl;
67 #endif
69 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
71 /* Commonly used modes. */
73 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
74 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
75 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
76 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
78 /* Datastructures maintained for currently processed function in RTL form. */
80 struct rtl_data x_rtl;
82 /* Indexed by pseudo register number, gives the rtx for that pseudo.
83 Allocated in parallel with regno_pointer_align.
84 FIXME: We could put it into emit_status struct, but gengtype is not able to deal
85 with length attribute nested in top level structures. */
87 rtx * regno_reg_rtx;
89 /* This is *not* reset after each function. It gives each CODE_LABEL
90 in the entire compilation a unique label number. */
92 static GTY(()) int label_num = 1;
94 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
95 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
96 record a copy of const[012]_rtx. */
98 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
100 rtx const_true_rtx;
102 REAL_VALUE_TYPE dconst0;
103 REAL_VALUE_TYPE dconst1;
104 REAL_VALUE_TYPE dconst2;
105 REAL_VALUE_TYPE dconstm1;
106 REAL_VALUE_TYPE dconsthalf;
108 /* Record fixed-point constant 0 and 1. */
109 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
110 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
112 /* We make one copy of (const_int C) where C is in
113 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
114 to save space during the compilation and simplify comparisons of
115 integers. */
117 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
119 /* A hash table storing CONST_INTs whose absolute value is greater
120 than MAX_SAVED_CONST_INT. */
122 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
123 htab_t const_int_htab;
125 /* A hash table storing memory attribute structures. */
126 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
127 htab_t mem_attrs_htab;
129 /* A hash table storing register attribute structures. */
130 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
131 htab_t reg_attrs_htab;
133 /* A hash table storing all CONST_DOUBLEs. */
134 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
135 htab_t const_double_htab;
137 /* A hash table storing all CONST_FIXEDs. */
138 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
139 htab_t const_fixed_htab;
141 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
142 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
143 #define last_location (crtl->emit.x_last_location)
144 #define first_label_num (crtl->emit.x_first_label_num)
146 static rtx make_call_insn_raw (rtx);
147 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
148 static void set_used_decls (tree);
149 static void mark_label_nuses (rtx);
150 static hashval_t const_int_htab_hash (const void *);
151 static int const_int_htab_eq (const void *, const void *);
152 static hashval_t const_double_htab_hash (const void *);
153 static int const_double_htab_eq (const void *, const void *);
154 static rtx lookup_const_double (rtx);
155 static hashval_t const_fixed_htab_hash (const void *);
156 static int const_fixed_htab_eq (const void *, const void *);
157 static rtx lookup_const_fixed (rtx);
158 static hashval_t mem_attrs_htab_hash (const void *);
159 static int mem_attrs_htab_eq (const void *, const void *);
160 static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
161 addr_space_t, enum machine_mode);
162 static hashval_t reg_attrs_htab_hash (const void *);
163 static int reg_attrs_htab_eq (const void *, const void *);
164 static reg_attrs *get_reg_attrs (tree, int);
165 static rtx gen_const_vector (enum machine_mode, int);
166 static void copy_rtx_if_shared_1 (rtx *orig);
168 /* Probability of the conditional branch currently being processed by try_split.
169 Set to -1 otherwise. */
170 int split_branch_probability = -1;
172 /* Returns a hash code for X (which is really a CONST_INT). */
174 static hashval_t
175 const_int_htab_hash (const void *x)
177 return (hashval_t) INTVAL ((const_rtx) x);
180 /* Returns nonzero if the value represented by X (which is really a
181 CONST_INT) is the same as that given by Y (which is really a
182 HOST_WIDE_INT *). */
184 static int
185 const_int_htab_eq (const void *x, const void *y)
187 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
190 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
191 static hashval_t
192 const_double_htab_hash (const void *x)
194 const_rtx const value = (const_rtx) x;
195 hashval_t h;
197 if (GET_MODE (value) == VOIDmode)
198 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
199 else
201 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
202 /* MODE is used in the comparison, so it should be in the hash. */
203 h ^= GET_MODE (value);
205 return h;
208 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
209 is the same as that represented by Y (really a CONST_DOUBLE). */
210 static int
211 const_double_htab_eq (const void *x, const void *y)
213 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
215 if (GET_MODE (a) != GET_MODE (b))
216 return 0;
217 if (GET_MODE (a) == VOIDmode)
218 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
219 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
220 else
221 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
222 CONST_DOUBLE_REAL_VALUE (b));
225 /* Returns a hash code for X (which is really a CONST_FIXED). */
227 static hashval_t
228 const_fixed_htab_hash (const void *x)
230 const_rtx const value = (const_rtx) x;
231 hashval_t h;
233 h = fixed_hash (CONST_FIXED_VALUE (value));
234 /* MODE is used in the comparison, so it should be in the hash. */
235 h ^= GET_MODE (value);
236 return h;
239 /* Returns nonzero if the value represented by X (really a CONST_FIXED)
240 is the same as that represented by Y (really a CONST_FIXED). */
242 static int
243 const_fixed_htab_eq (const void *x, const void *y)
245 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
247 if (GET_MODE (a) != GET_MODE (b))
248 return 0;
249 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
252 /* Returns a hash code for X (which is really a mem_attrs *). */
254 static hashval_t
255 mem_attrs_htab_hash (const void *x)
257 const mem_attrs *const p = (const mem_attrs *) x;
259 return (p->alias ^ (p->align * 1000)
260 ^ (p->addrspace * 4000)
261 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
262 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
263 ^ (size_t) iterative_hash_expr (p->expr, 0));
266 /* Returns nonzero if the value represented by X (which is really a
267 mem_attrs *) is the same as that given by Y (which is also really a
268 mem_attrs *). */
270 static int
271 mem_attrs_htab_eq (const void *x, const void *y)
273 const mem_attrs *const p = (const mem_attrs *) x;
274 const mem_attrs *const q = (const mem_attrs *) y;
276 return (p->alias == q->alias && p->offset == q->offset
277 && p->size == q->size && p->align == q->align
278 && p->addrspace == q->addrspace
279 && (p->expr == q->expr
280 || (p->expr != NULL_TREE && q->expr != NULL_TREE
281 && operand_equal_p (p->expr, q->expr, 0))));
284 /* Allocate a new mem_attrs structure and insert it into the hash table if
285 one identical to it is not already in the table. We are doing this for
286 MEM of mode MODE. */
288 static mem_attrs *
289 get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
290 unsigned int align, addr_space_t addrspace, enum machine_mode mode)
292 mem_attrs attrs;
293 void **slot;
295 /* If everything is the default, we can just return zero.
296 This must match what the corresponding MEM_* macros return when the
297 field is not present. */
298 if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
299 && (size == 0
300 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
301 && (STRICT_ALIGNMENT && mode != BLKmode
302 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
303 return 0;
305 attrs.alias = alias;
306 attrs.expr = expr;
307 attrs.offset = offset;
308 attrs.size = size;
309 attrs.align = align;
310 attrs.addrspace = addrspace;
312 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
313 if (*slot == 0)
315 *slot = ggc_alloc_mem_attrs ();
316 memcpy (*slot, &attrs, sizeof (mem_attrs));
319 return (mem_attrs *) *slot;
322 /* Returns a hash code for X (which is really a reg_attrs *). */
324 static hashval_t
325 reg_attrs_htab_hash (const void *x)
327 const reg_attrs *const p = (const reg_attrs *) x;
329 return ((p->offset * 1000) ^ (long) p->decl);
332 /* Returns nonzero if the value represented by X (which is really a
333 reg_attrs *) is the same as that given by Y (which is also really a
334 reg_attrs *). */
336 static int
337 reg_attrs_htab_eq (const void *x, const void *y)
339 const reg_attrs *const p = (const reg_attrs *) x;
340 const reg_attrs *const q = (const reg_attrs *) y;
342 return (p->decl == q->decl && p->offset == q->offset);
344 /* Allocate a new reg_attrs structure and insert it into the hash table if
345 one identical to it is not already in the table. */
348 static reg_attrs *
349 get_reg_attrs (tree decl, int offset)
351 reg_attrs attrs;
352 void **slot;
354 /* If everything is the default, we can just return zero. */
355 if (decl == 0 && offset == 0)
356 return 0;
358 attrs.decl = decl;
359 attrs.offset = offset;
361 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
362 if (*slot == 0)
364 *slot = ggc_alloc_reg_attrs ();
365 memcpy (*slot, &attrs, sizeof (reg_attrs));
368 return (reg_attrs *) *slot;
372 #if !HAVE_blockage
373 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
374 across this insn. */
377 gen_blockage (void)
379 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
380 MEM_VOLATILE_P (x) = true;
381 return x;
383 #endif
386 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
387 don't attempt to share with the various global pieces of rtl (such as
388 frame_pointer_rtx). */
391 gen_raw_REG (enum machine_mode mode, int regno)
393 rtx x = gen_rtx_raw_REG (mode, regno);
394 ORIGINAL_REGNO (x) = regno;
395 return x;
398 /* There are some RTL codes that require special attention; the generation
399 functions do the raw handling. If you add to this list, modify
400 special_rtx in gengenrtl.c as well. */
403 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
405 void **slot;
407 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
408 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
410 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
411 if (const_true_rtx && arg == STORE_FLAG_VALUE)
412 return const_true_rtx;
413 #endif
415 /* Look up the CONST_INT in the hash table. */
416 slot = htab_find_slot_with_hash (const_int_htab, &arg,
417 (hashval_t) arg, INSERT);
418 if (*slot == 0)
419 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
421 return (rtx) *slot;
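/* gen_rtx_CONST_INT above interns every CONST_INT it creates, so equal
   integer constants are shared and may be compared with pointer equality.
   A small sketch of what that buys callers (the values are arbitrary):

     GEN_INT (0) == const0_rtx            -- both are const_int_rtx[MAX_SAVED_CONST_INT]
     GEN_INT (12345) == GEN_INT (12345)   -- the second call finds the first
                                             entry in const_int_htab

   const0_rtx and friends come from rtl.h.  */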
425 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
427 return GEN_INT (trunc_int_for_mode (c, mode));
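/* A usage sketch contrasting gen_int_mode with plain GEN_INT, assuming a
   target whose QImode is 8 bits wide:

     rtx a = GEN_INT (240);               -- (const_int 240)
     rtx b = gen_int_mode (240, QImode);  -- (const_int -16): 0xf0 truncated
                                             to 8 bits and sign-extended

   gen_int_mode is therefore the safer entry point when the value in hand
   may not already be canonical for MODE.  */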
430 /* CONST_DOUBLEs might be created from pairs of integers, or from
431 REAL_VALUE_TYPEs. Also, their length is known only at run time,
432 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
434 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
435 hash table. If so, return its counterpart; otherwise add it
436 to the hash table and return it. */
437 static rtx
438 lookup_const_double (rtx real)
440 void **slot = htab_find_slot (const_double_htab, real, INSERT);
441 if (*slot == 0)
442 *slot = real;
444 return (rtx) *slot;
447 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
448 VALUE in mode MODE. */
450 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
452 rtx real = rtx_alloc (CONST_DOUBLE);
453 PUT_MODE (real, mode);
455 real->u.rv = value;
457 return lookup_const_double (real);
460 /* Determine whether FIXED, a CONST_FIXED, already exists in the
461 hash table. If so, return its counterpart; otherwise add it
462 to the hash table and return it. */
464 static rtx
465 lookup_const_fixed (rtx fixed)
467 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
468 if (*slot == 0)
469 *slot = fixed;
471 return (rtx) *slot;
474 /* Return a CONST_FIXED rtx for a fixed-point value specified by
475 VALUE in mode MODE. */
478 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
480 rtx fixed = rtx_alloc (CONST_FIXED);
481 PUT_MODE (fixed, mode);
483 fixed->u.fv = value;
485 return lookup_const_fixed (fixed);
488 /* Constructs double_int from rtx CST. */
490 double_int
491 rtx_to_double_int (const_rtx cst)
493 double_int r;
495 if (CONST_INT_P (cst))
496 r = shwi_to_double_int (INTVAL (cst));
497 else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
499 r.low = CONST_DOUBLE_LOW (cst);
500 r.high = CONST_DOUBLE_HIGH (cst);
502 else
503 gcc_unreachable ();
505 return r;
509 /* Return a CONST_DOUBLE or CONST_INT for a value specified as
510 a double_int. */
513 immed_double_int_const (double_int i, enum machine_mode mode)
515 return immed_double_const (i.low, i.high, mode);
518 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
519 of ints: I0 is the low-order word and I1 is the high-order word.
520 Do not use this routine for non-integer modes; convert to
521 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
524 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
526 rtx value;
527 unsigned int i;
529 /* There are the following cases (note that there are no modes with
530 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
532 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
533 gen_int_mode.
534 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
535 the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
536 of copies of the sign bit, and the signs of i0 and i1 are the same), then
537 we return a CONST_INT for i0.
538 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
539 if (mode != VOIDmode)
541 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
542 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
543 /* We can get a 0 for an error mark. */
544 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
545 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
547 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
548 return gen_int_mode (i0, mode);
550 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
553 /* If this integer fits in one word, return a CONST_INT. */
554 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
555 return GEN_INT (i0);
557 /* We use VOIDmode for integers. */
558 value = rtx_alloc (CONST_DOUBLE);
559 PUT_MODE (value, VOIDmode);
561 CONST_DOUBLE_LOW (value) = i0;
562 CONST_DOUBLE_HIGH (value) = i1;
564 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
565 XWINT (value, i) = 0;
567 return lookup_const_double (value);
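/* Worked examples of the three cases described above, assuming a 64-bit
   HOST_WIDE_INT and a 128-bit TImode (the modes are only illustrative):

     immed_double_const (5, 0, DImode)  -- case 1, yields (const_int 5)
     immed_double_const (5, 0, TImode)  -- case 2, still yields (const_int 5)
     immed_double_const (0, 1, TImode)  -- case 3, yields a VOIDmode
                                           CONST_DOUBLE with CONST_DOUBLE_LOW
                                           == 0 and CONST_DOUBLE_HIGH == 1,
                                           i.e. the value 2**64.  */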
571 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
573 /* In case the MD file explicitly references the frame pointer, have
574 all such references point to the same frame pointer. This is
575 used during frame pointer elimination to distinguish the explicit
576 references to these registers from pseudos that happened to be
577 assigned to them.
579 If we have eliminated the frame pointer or arg pointer, we will
580 be using it as a normal register, for example as a spill
581 register. In such cases, we might be accessing it in a mode that
582 is not Pmode and therefore cannot use the pre-allocated rtx.
584 Also don't do this when we are making new REGs in reload, since
585 we don't want to get confused with the real pointers. */
587 if (mode == Pmode && !reload_in_progress)
589 if (regno == FRAME_POINTER_REGNUM
590 && (!reload_completed || frame_pointer_needed))
591 return frame_pointer_rtx;
592 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
593 if (regno == HARD_FRAME_POINTER_REGNUM
594 && (!reload_completed || frame_pointer_needed))
595 return hard_frame_pointer_rtx;
596 #endif
597 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
598 if (regno == ARG_POINTER_REGNUM)
599 return arg_pointer_rtx;
600 #endif
601 #ifdef RETURN_ADDRESS_POINTER_REGNUM
602 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
603 return return_address_pointer_rtx;
604 #endif
605 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
606 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
607 return pic_offset_table_rtx;
608 if (regno == STACK_POINTER_REGNUM)
609 return stack_pointer_rtx;
612 #if 0
613 /* If the per-function register table has been set up, try to re-use
614 an existing entry in that table to avoid useless generation of RTL.
616 This code is disabled for now until we can fix the various backends
617 which depend on having non-shared hard registers in some cases. Long
618 term we want to re-enable this code as it can significantly cut down
619 on the amount of useless RTL that gets generated.
621 We'll also need to fix some code that runs after reload that wants to
622 set ORIGINAL_REGNO. */
624 if (cfun
625 && cfun->emit
626 && regno_reg_rtx
627 && regno < FIRST_PSEUDO_REGISTER
628 && reg_raw_mode[regno] == mode)
629 return regno_reg_rtx[regno];
630 #endif
632 return gen_raw_REG (mode, regno);
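/* One consequence of the sharing in gen_rtx_REG, sketched: outside of
   reload, asking for the frame pointer in Pmode hands back the single
   global rtx, so callers may rely on pointer identity:

     gen_rtx_REG (Pmode, FRAME_POINTER_REGNUM) == frame_pointer_rtx

   This holds while the frame pointer is still needed or reload has not yet
   completed; with any other mode a fresh REG is built instead.  */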
636 gen_rtx_MEM (enum machine_mode mode, rtx addr)
638 rtx rt = gen_rtx_raw_MEM (mode, addr);
640 /* This field is not cleared by the mere allocation of the rtx, so
641 we clear it here. */
642 MEM_ATTRS (rt) = 0;
644 return rt;
647 /* Generate a MEM referring to non-trapping constant memory. */
650 gen_const_mem (enum machine_mode mode, rtx addr)
652 rtx mem = gen_rtx_MEM (mode, addr);
653 MEM_READONLY_P (mem) = 1;
654 MEM_NOTRAP_P (mem) = 1;
655 return mem;
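/* A minimal sketch of what gen_const_mem buys callers (the symbol name is
   made up):

     rtx addr = gen_rtx_SYMBOL_REF (Pmode, "lookup_table");
     rtx mem  = gen_const_mem (SImode, addr);

   The resulting MEM has MEM_READONLY_P and MEM_NOTRAP_P set, so later
   passes may CSE it and move it across stores and potential trap sites,
   which a MEM from plain gen_rtx_MEM gives them no license to do.  */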
658 /* Generate a MEM referring to fixed portions of the frame, e.g., register
659 save areas. */
662 gen_frame_mem (enum machine_mode mode, rtx addr)
664 rtx mem = gen_rtx_MEM (mode, addr);
665 MEM_NOTRAP_P (mem) = 1;
666 set_mem_alias_set (mem, get_frame_alias_set ());
667 return mem;
670 /* Generate a MEM referring to a temporary use of the stack, not part
671 of the fixed stack frame. For example, something which is pushed
672 by a target splitter. */
674 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
676 rtx mem = gen_rtx_MEM (mode, addr);
677 MEM_NOTRAP_P (mem) = 1;
678 if (!cfun->calls_alloca)
679 set_mem_alias_set (mem, get_frame_alias_set ());
680 return mem;
683 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
684 this construct would be valid, and false otherwise. */
686 bool
687 validate_subreg (enum machine_mode omode, enum machine_mode imode,
688 const_rtx reg, unsigned int offset)
690 unsigned int isize = GET_MODE_SIZE (imode);
691 unsigned int osize = GET_MODE_SIZE (omode);
693 /* All subregs must be aligned. */
694 if (offset % osize != 0)
695 return false;
697 /* The subreg offset cannot be outside the inner object. */
698 if (offset >= isize)
699 return false;
701 /* ??? This should not be here. Temporarily continue to allow word_mode
702 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
703 Generally, backends are doing something sketchy but it'll take time to
704 fix them all. */
705 if (omode == word_mode)
707 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
708 is the culprit here, and not the backends. */
709 else if (osize >= UNITS_PER_WORD && isize >= osize)
711 /* Allow component subregs of complex and vector. Though given the below
712 extraction rules, it's not always clear what that means. */
713 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
714 && GET_MODE_INNER (imode) == omode)
716 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
717 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
718 represent this. It's questionable if this ought to be represented at
719 all -- why can't this all be hidden in post-reload splitters that make
720 arbitrary mode changes to the registers themselves. */
721 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
723 /* Subregs involving floating point modes are not allowed to
724 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
725 (subreg:SI (reg:DF) 0) isn't. */
726 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
728 if (isize != osize)
729 return false;
732 /* Paradoxical subregs must have offset zero. */
733 if (osize > isize)
734 return offset == 0;
736 /* This is a normal subreg. Verify that the offset is representable. */
738 /* For hard registers, we already have most of these rules collected in
739 subreg_offset_representable_p. */
740 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
742 unsigned int regno = REGNO (reg);
744 #ifdef CANNOT_CHANGE_MODE_CLASS
745 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
746 && GET_MODE_INNER (imode) == omode)
748 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
749 return false;
750 #endif
752 return subreg_offset_representable_p (regno, imode, offset, omode);
755 /* For pseudo registers, we want most of the same checks. Namely:
756 If the register is no larger than a word, the subreg must be lowpart.
757 If the register is larger than a word, the subreg must be the lowpart
758 of a subword. A subreg does *not* perform arbitrary bit extraction.
759 Given that we've already checked mode/offset alignment, we only have
760 to check subword subregs here. */
761 if (osize < UNITS_PER_WORD)
763 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
764 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
765 if (offset % UNITS_PER_WORD != low_off)
766 return false;
768 return true;
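/* A worked example of the subword check in validate_subreg, assuming a
   32-bit big-endian target (UNITS_PER_WORD == 4, word_mode == SImode):
   for (subreg:HI (reg:DI P) BYTE) with P a pseudo, osize == 2 is smaller
   than UNITS_PER_WORD, wmode is SImode, and
   subreg_lowpart_offset (HImode, SImode) == 2, so only BYTE == 2 and
   BYTE == 6 are accepted; each HImode piece must be the low part of one
   of the two words.  */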
772 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
774 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
775 return gen_rtx_raw_SUBREG (mode, reg, offset);
778 /* Generate a SUBREG representing the least-significant part of REG if MODE
779 is smaller than mode of REG, otherwise paradoxical SUBREG. */
782 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
784 enum machine_mode inmode;
786 inmode = GET_MODE (reg);
787 if (inmode == VOIDmode)
788 inmode = mode;
789 return gen_rtx_SUBREG (mode, reg,
790 subreg_lowpart_offset (mode, inmode));
794 /* Create an rtvec and store within it the RTXen passed in the arguments. */
796 rtvec
797 gen_rtvec (int n, ...)
799 int i;
800 rtvec rt_val;
801 va_list p;
803 va_start (p, n);
805 /* Don't allocate an empty rtvec... */
806 if (n == 0)
807 return NULL_RTVEC;
809 rt_val = rtvec_alloc (n);
811 for (i = 0; i < n; i++)
812 rt_val->elem[i] = va_arg (p, rtx);
814 va_end (p);
815 return rt_val;
818 rtvec
819 gen_rtvec_v (int n, rtx *argp)
821 int i;
822 rtvec rt_val;
824 /* Don't allocate an empty rtvec... */
825 if (n == 0)
826 return NULL_RTVEC;
828 rt_val = rtvec_alloc (n);
830 for (i = 0; i < n; i++)
831 rt_val->elem[i] = *argp++;
833 return rt_val;
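/* Both routines are typically used to feed rtx codes that take an rtvec
   operand; a sketch, with x0 and x1 standing for SETs built earlier:

     rtx par = gen_rtx_PARALLEL (VOIDmode, gen_rtvec (2, x0, x1));

   gen_rtvec_v builds the same thing from an array assembled separately.  */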
836 /* Return the number of bytes between the start of an OUTER_MODE
837 in-memory value and the start of an INNER_MODE in-memory value,
838 given that the former is a lowpart of the latter. It may be a
839 paradoxical lowpart, in which case the offset will be negative
840 on big-endian targets. */
843 byte_lowpart_offset (enum machine_mode outer_mode,
844 enum machine_mode inner_mode)
846 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
847 return subreg_lowpart_offset (outer_mode, inner_mode);
848 else
849 return -subreg_lowpart_offset (inner_mode, outer_mode);
852 /* Generate a REG rtx for a new pseudo register of mode MODE.
853 This pseudo is assigned the next sequential register number. */
856 gen_reg_rtx (enum machine_mode mode)
858 rtx val;
859 unsigned int align = GET_MODE_ALIGNMENT (mode);
861 gcc_assert (can_create_pseudo_p ());
863 /* If a virtual register with bigger mode alignment is generated,
864 increase stack alignment estimation because it might be spilled
865 to stack later. */
866 if (SUPPORTS_STACK_ALIGNMENT
867 && crtl->stack_alignment_estimated < align
868 && !crtl->stack_realign_processed)
870 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
871 if (crtl->stack_alignment_estimated < min_align)
872 crtl->stack_alignment_estimated = min_align;
875 if (generating_concat_p
876 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
877 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
879 /* For complex modes, don't make a single pseudo.
880 Instead, make a CONCAT of two pseudos.
881 This allows noncontiguous allocation of the real and imaginary parts,
882 which makes much better code. Besides, allocating DCmode
883 pseudos overstrains reload on some machines like the 386. */
884 rtx realpart, imagpart;
885 enum machine_mode partmode = GET_MODE_INNER (mode);
887 realpart = gen_reg_rtx (partmode);
888 imagpart = gen_reg_rtx (partmode);
889 return gen_rtx_CONCAT (mode, realpart, imagpart);
892 /* Make sure regno_pointer_align and regno_reg_rtx are large
893 enough to have an element for this pseudo reg number. */
895 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
897 int old_size = crtl->emit.regno_pointer_align_length;
898 char *tmp;
899 rtx *new1;
901 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
902 memset (tmp + old_size, 0, old_size);
903 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
905 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
906 memset (new1 + old_size, 0, old_size * sizeof (rtx));
907 regno_reg_rtx = new1;
909 crtl->emit.regno_pointer_align_length = old_size * 2;
912 val = gen_raw_REG (mode, reg_rtx_no);
913 regno_reg_rtx[reg_rtx_no++] = val;
914 return val;
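/* As a sketch, while generating_concat_p is set during expansion:

     gen_reg_rtx (DCmode)  -- yields (concat:DC (reg:DF P1) (reg:DF P2))
     gen_reg_rtx (DImode)  -- always yields a single fresh (reg:DI P)

   so complex values start life as two independently allocatable halves.  */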
917 /* Update NEW with the same attributes as REG, but with OFFSET added
918 to the REG_OFFSET. */
920 static void
921 update_reg_offset (rtx new_rtx, rtx reg, int offset)
923 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
924 REG_OFFSET (reg) + offset);
927 /* Generate a register with same attributes as REG, but with OFFSET
928 added to the REG_OFFSET. */
931 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
932 int offset)
934 rtx new_rtx = gen_rtx_REG (mode, regno);
936 update_reg_offset (new_rtx, reg, offset);
937 return new_rtx;
940 /* Generate a new pseudo-register with the same attributes as REG, but
941 with OFFSET added to the REG_OFFSET. */
944 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
946 rtx new_rtx = gen_reg_rtx (mode);
948 update_reg_offset (new_rtx, reg, offset);
949 return new_rtx;
952 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
953 new register is a (possibly paradoxical) lowpart of the old one. */
955 void
956 adjust_reg_mode (rtx reg, enum machine_mode mode)
958 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
959 PUT_MODE (reg, mode);
962 /* Copy REG's attributes from X, if X has any attributes. If REG and X
963 have different modes, REG is a (possibly paradoxical) lowpart of X. */
965 void
966 set_reg_attrs_from_value (rtx reg, rtx x)
968 int offset;
970 /* Hard registers can be reused for multiple purposes within the same
971 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
972 on them is wrong. */
973 if (HARD_REGISTER_P (reg))
974 return;
976 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
977 if (MEM_P (x))
979 if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
980 REG_ATTRS (reg)
981 = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
982 if (MEM_POINTER (x))
983 mark_reg_pointer (reg, 0);
985 else if (REG_P (x))
987 if (REG_ATTRS (x))
988 update_reg_offset (reg, x, offset);
989 if (REG_POINTER (x))
990 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
994 /* Generate a REG rtx for a new pseudo register, copying the mode
995 and attributes from X. */
998 gen_reg_rtx_and_attrs (rtx x)
1000 rtx reg = gen_reg_rtx (GET_MODE (x));
1001 set_reg_attrs_from_value (reg, x);
1002 return reg;
1005 /* Set the register attributes for registers contained in PARM_RTX.
1006 Use needed values from memory attributes of MEM. */
1008 void
1009 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1011 if (REG_P (parm_rtx))
1012 set_reg_attrs_from_value (parm_rtx, mem);
1013 else if (GET_CODE (parm_rtx) == PARALLEL)
1015 /* Check for a NULL entry in the first slot, used to indicate that the
1016 parameter goes both on the stack and in registers. */
1017 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1018 for (; i < XVECLEN (parm_rtx, 0); i++)
1020 rtx x = XVECEXP (parm_rtx, 0, i);
1021 if (REG_P (XEXP (x, 0)))
1022 REG_ATTRS (XEXP (x, 0))
1023 = get_reg_attrs (MEM_EXPR (mem),
1024 INTVAL (XEXP (x, 1)));
1029 /* Set the REG_ATTRS for registers in value X, given that X represents
1030 decl T. */
1032 void
1033 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1035 if (GET_CODE (x) == SUBREG)
1037 gcc_assert (subreg_lowpart_p (x));
1038 x = SUBREG_REG (x);
1040 if (REG_P (x))
1041 REG_ATTRS (x)
1042 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1043 DECL_MODE (t)));
1044 if (GET_CODE (x) == CONCAT)
1046 if (REG_P (XEXP (x, 0)))
1047 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1048 if (REG_P (XEXP (x, 1)))
1049 REG_ATTRS (XEXP (x, 1))
1050 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1052 if (GET_CODE (x) == PARALLEL)
1054 int i, start;
1056 /* Check for a NULL entry, used to indicate that the parameter goes
1057 both on the stack and in registers. */
1058 if (XEXP (XVECEXP (x, 0, 0), 0))
1059 start = 0;
1060 else
1061 start = 1;
1063 for (i = start; i < XVECLEN (x, 0); i++)
1065 rtx y = XVECEXP (x, 0, i);
1066 if (REG_P (XEXP (y, 0)))
1067 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1072 /* Assign the RTX X to declaration T. */
1074 void
1075 set_decl_rtl (tree t, rtx x)
1077 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1078 if (x)
1079 set_reg_attrs_for_decl_rtl (t, x);
1082 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1083 if the ABI requires the parameter to be passed by reference. */
1085 void
1086 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1088 DECL_INCOMING_RTL (t) = x;
1089 if (x && !by_reference_p)
1090 set_reg_attrs_for_decl_rtl (t, x);
1093 /* Identify REG (which may be a CONCAT) as a user register. */
1095 void
1096 mark_user_reg (rtx reg)
1098 if (GET_CODE (reg) == CONCAT)
1100 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1101 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1103 else
1105 gcc_assert (REG_P (reg));
1106 REG_USERVAR_P (reg) = 1;
1110 /* Identify REG as a probable pointer register and show its alignment
1111 as ALIGN, if nonzero. */
1113 void
1114 mark_reg_pointer (rtx reg, int align)
1116 if (! REG_POINTER (reg))
1118 REG_POINTER (reg) = 1;
1120 if (align)
1121 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1123 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1124 /* We can no longer be sure just how aligned this pointer is. */
1125 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1128 /* Return 1 plus largest pseudo reg number used in the current function. */
1131 max_reg_num (void)
1133 return reg_rtx_no;
1136 /* Return 1 + the largest label number used so far in the current function. */
1139 max_label_num (void)
1141 return label_num;
1144 /* Return first label number used in this function (if any were used). */
1147 get_first_label_num (void)
1149 return first_label_num;
1152 /* If the rtx for label was created during the expansion of a nested
1153 function, then first_label_num won't include this label number.
1154 Fix this now so that array indices work later. */
1156 void
1157 maybe_set_first_label_num (rtx x)
1159 if (CODE_LABEL_NUMBER (x) < first_label_num)
1160 first_label_num = CODE_LABEL_NUMBER (x);
1163 /* Return a value representing some low-order bits of X, where the number
1164 of low-order bits is given by MODE. Note that no conversion is done
1165 between floating-point and fixed-point values, rather, the bit
1166 representation is returned.
1168 This function handles the cases in common between gen_lowpart, below,
1169 and two variants in cse.c and combine.c. These are the cases that can
1170 be safely handled at all points in the compilation.
1172 If this is not a case we can handle, return 0. */
1175 gen_lowpart_common (enum machine_mode mode, rtx x)
1177 int msize = GET_MODE_SIZE (mode);
1178 int xsize;
1179 int offset = 0;
1180 enum machine_mode innermode;
1182 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1183 so we have to make one up. Yuk. */
1184 innermode = GET_MODE (x);
1185 if (CONST_INT_P (x)
1186 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1187 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1188 else if (innermode == VOIDmode)
1189 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1191 xsize = GET_MODE_SIZE (innermode);
1193 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1195 if (innermode == mode)
1196 return x;
1198 /* MODE must occupy no more words than the mode of X. */
1199 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1200 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1201 return 0;
1203 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1204 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1205 return 0;
1207 offset = subreg_lowpart_offset (mode, innermode);
1209 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1210 && (GET_MODE_CLASS (mode) == MODE_INT
1211 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1213 /* If we are getting the low-order part of something that has been
1214 sign- or zero-extended, we can either just use the object being
1215 extended or make a narrower extension. If we want an even smaller
1216 piece than the size of the object being extended, call ourselves
1217 recursively.
1219 This case is used mostly by combine and cse. */
1221 if (GET_MODE (XEXP (x, 0)) == mode)
1222 return XEXP (x, 0);
1223 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1224 return gen_lowpart_common (mode, XEXP (x, 0));
1225 else if (msize < xsize)
1226 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1228 else if (GET_CODE (x) == SUBREG || REG_P (x)
1229 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1230 || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
1231 return simplify_gen_subreg (mode, x, innermode, offset);
1233 /* Otherwise, we can't do this. */
1234 return 0;
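/* Two sketches of the extension case handled by gen_lowpart_common:

     gen_lowpart_common (SImode, (sign_extend:DI (reg:SI R)))
       returns (reg:SI R), the object being extended, directly;

     gen_lowpart_common (HImode, (zero_extend:DI (reg:SI R)))
       recurses and returns the HImode lowpart of (reg:SI R).  */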
1238 gen_highpart (enum machine_mode mode, rtx x)
1240 unsigned int msize = GET_MODE_SIZE (mode);
1241 rtx result;
1243 /* This case loses if X is a subreg. To catch bugs early,
1244 complain if an invalid MODE is used even in other cases. */
1245 gcc_assert (msize <= UNITS_PER_WORD
1246 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1248 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1249 subreg_highpart_offset (mode, GET_MODE (x)));
1250 gcc_assert (result);
1252 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1253 the target if we have a MEM. gen_highpart must return a valid operand,
1254 emitting code if necessary to do so. */
1255 if (MEM_P (result))
1257 result = validize_mem (result);
1258 gcc_assert (result);
1261 return result;
1264 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1265 be VOIDmode constant. */
1267 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1269 if (GET_MODE (exp) != VOIDmode)
1271 gcc_assert (GET_MODE (exp) == innermode);
1272 return gen_highpart (outermode, exp);
1274 return simplify_gen_subreg (outermode, exp, innermode,
1275 subreg_highpart_offset (outermode, innermode));
1278 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1280 unsigned int
1281 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1283 unsigned int offset = 0;
1284 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1286 if (difference > 0)
1288 if (WORDS_BIG_ENDIAN)
1289 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1290 if (BYTES_BIG_ENDIAN)
1291 offset += difference % UNITS_PER_WORD;
1294 return offset;
1297 /* Return offset in bytes to get OUTERMODE high part
1298 of the value in mode INNERMODE stored in memory in target format. */
1299 unsigned int
1300 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1302 unsigned int offset = 0;
1303 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1305 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1307 if (difference > 0)
1309 if (! WORDS_BIG_ENDIAN)
1310 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1311 if (! BYTES_BIG_ENDIAN)
1312 offset += difference % UNITS_PER_WORD;
1315 return offset;
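/* A worked example for both routines, with OUTERMODE = SImode (4 bytes)
   and INNERMODE = DImode (8 bytes):

     little-endian:  subreg_lowpart_offset => 0   subreg_highpart_offset => 4
     big-endian:     subreg_lowpart_offset => 4   subreg_highpart_offset => 0

   i.e. the lowpart is the piece that holds the least significant bytes in
   the target's memory layout.  */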
1318 /* Return 1 iff X, assumed to be a SUBREG,
1319 refers to the least significant part of its containing reg.
1320 If X is not a SUBREG, always return 1 (it is its own low part!). */
1323 subreg_lowpart_p (const_rtx x)
1325 if (GET_CODE (x) != SUBREG)
1326 return 1;
1327 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1328 return 0;
1330 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1331 == SUBREG_BYTE (x));
1334 /* Return subword OFFSET of operand OP.
1335 The word number, OFFSET, is interpreted as the word number starting
1336 at the low-order address. OFFSET 0 is the low-order word if not
1337 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1339 If we cannot extract the required word, we return zero. Otherwise,
1340 an rtx corresponding to the requested word will be returned.
1342 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1343 reload has completed, a valid address will always be returned. After
1344 reload, if a valid address cannot be returned, we return zero.
1346 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1347 it is the responsibility of the caller.
1349 MODE is the mode of OP in case it is a CONST_INT.
1351 ??? This is still rather broken for some cases. The problem for the
1352 moment is that all callers of this thing provide no 'goal mode' to
1353 tell us to work with. This exists because all callers were written
1354 in a word based SUBREG world.
1355 Now use of this function can be deprecated by simplify_subreg in most
1356 cases.
1360 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1362 if (mode == VOIDmode)
1363 mode = GET_MODE (op);
1365 gcc_assert (mode != VOIDmode);
1367 /* If OP is narrower than a word, fail. */
1368 if (mode != BLKmode
1369 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1370 return 0;
1372 /* If we want a word outside OP, return zero. */
1373 if (mode != BLKmode
1374 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1375 return const0_rtx;
1377 /* Form a new MEM at the requested address. */
1378 if (MEM_P (op))
1380 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1382 if (! validate_address)
1383 return new_rtx;
1385 else if (reload_completed)
1387 if (! strict_memory_address_addr_space_p (word_mode,
1388 XEXP (new_rtx, 0),
1389 MEM_ADDR_SPACE (op)))
1390 return 0;
1392 else
1393 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1396 /* Rest can be handled by simplify_subreg. */
1397 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1400 /* Similar to `operand_subword', but never return 0. If we can't
1401 extract the required subword, put OP into a register and try again.
1402 The second attempt must succeed. We always validate the address in
1403 this case.
1405 MODE is the mode of OP, in case it is CONST_INT. */
1408 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1410 rtx result = operand_subword (op, offset, 1, mode);
1412 if (result)
1413 return result;
1415 if (mode != BLKmode && mode != VOIDmode)
1417 /* If this is a register which cannot be accessed by words, copy it
1418 to a pseudo register. */
1419 if (REG_P (op))
1420 op = copy_to_reg (op);
1421 else
1422 op = force_reg (mode, op);
1425 result = operand_subword (op, offset, 1, mode);
1426 gcc_assert (result);
1428 return result;
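/* A typical use, splitting a DImode operand into its two words on a
   32-bit target (word 0 is the word at the lower address):

     rtx lo = operand_subword_force (op, 0, DImode);
     rtx hi = operand_subword_force (op, 1, DImode);

   operand_subword may instead return 0 in the hard cases; the _force
   variant copies OP into a pseudo first so the extraction always succeeds.  */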
1431 /* Returns 1 if the two MEM_EXPRs can be considered equal
1432 and 0 otherwise. */
1435 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1437 if (expr1 == expr2)
1438 return 1;
1440 if (! expr1 || ! expr2)
1441 return 0;
1443 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1444 return 0;
1446 return operand_equal_p (expr1, expr2, 0);
1449 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1450 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1451 -1 if not known. */
1454 get_mem_align_offset (rtx mem, unsigned int align)
1456 tree expr;
1457 unsigned HOST_WIDE_INT offset;
1459 /* This function can't use
1460 if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
1461 || !CONST_INT_P (MEM_OFFSET (mem))
1462 || (MAX (MEM_ALIGN (mem),
1463 get_object_alignment (MEM_EXPR (mem), align))
1464 < align))
1465 return -1;
1466 else
1467 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
1468 for two reasons:
1469 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1470 for <variable>. get_inner_reference doesn't handle it and
1471 even if it did, the alignment in that case needs to be determined
1472 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1473 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1474 isn't sufficiently aligned, the object it is in might be. */
1475 gcc_assert (MEM_P (mem));
1476 expr = MEM_EXPR (mem);
1477 if (expr == NULL_TREE
1478 || MEM_OFFSET (mem) == NULL_RTX
1479 || !CONST_INT_P (MEM_OFFSET (mem)))
1480 return -1;
1482 offset = INTVAL (MEM_OFFSET (mem));
1483 if (DECL_P (expr))
1485 if (DECL_ALIGN (expr) < align)
1486 return -1;
1488 else if (INDIRECT_REF_P (expr))
1490 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1491 return -1;
1493 else if (TREE_CODE (expr) == COMPONENT_REF)
1495 while (1)
1497 tree inner = TREE_OPERAND (expr, 0);
1498 tree field = TREE_OPERAND (expr, 1);
1499 tree byte_offset = component_ref_field_offset (expr);
1500 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1502 if (!byte_offset
1503 || !host_integerp (byte_offset, 1)
1504 || !host_integerp (bit_offset, 1))
1505 return -1;
1507 offset += tree_low_cst (byte_offset, 1);
1508 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1510 if (inner == NULL_TREE)
1512 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1513 < (unsigned int) align)
1514 return -1;
1515 break;
1517 else if (DECL_P (inner))
1519 if (DECL_ALIGN (inner) < align)
1520 return -1;
1521 break;
1523 else if (TREE_CODE (inner) != COMPONENT_REF)
1524 return -1;
1525 expr = inner;
1528 else
1529 return -1;
1531 return offset & ((align / BITS_PER_UNIT) - 1);
1534 /* Given REF (a MEM) and T, either the type of X or the expression
1535 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1536 if we are making a new object of this type. BITPOS is nonzero if
1537 there is an offset outstanding on T that will be applied later. */
1539 void
1540 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1541 HOST_WIDE_INT bitpos)
1543 alias_set_type alias = MEM_ALIAS_SET (ref);
1544 tree expr = MEM_EXPR (ref);
1545 rtx offset = MEM_OFFSET (ref);
1546 rtx size = MEM_SIZE (ref);
1547 unsigned int align = MEM_ALIGN (ref);
1548 HOST_WIDE_INT apply_bitpos = 0;
1549 tree type;
1551 /* It can happen that type_for_mode was given a mode for which there
1552 is no language-level type, in which case it returns NULL, which
1553 we can see here. */
1554 if (t == NULL_TREE)
1555 return;
1557 type = TYPE_P (t) ? t : TREE_TYPE (t);
1558 if (type == error_mark_node)
1559 return;
1561 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1562 wrong answer, as it assumes that DECL_RTL already has the right alias
1563 info. Callers should not set DECL_RTL until after the call to
1564 set_mem_attributes. */
1565 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1567 /* Get the alias set from the expression or type (perhaps using a
1568 front-end routine) and use it. */
1569 alias = get_alias_set (t);
1571 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1572 MEM_IN_STRUCT_P (ref)
1573 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
1574 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1576 /* If we are making an object of this type, or if this is a DECL, we know
1577 that it is a scalar if the type is not an aggregate. */
1578 if ((objectp || DECL_P (t))
1579 && ! AGGREGATE_TYPE_P (type)
1580 && TREE_CODE (type) != COMPLEX_TYPE)
1581 MEM_SCALAR_P (ref) = 1;
1583 /* We can set the alignment from the type if we are making an object,
1584 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1585 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1586 align = MAX (align, TYPE_ALIGN (type));
1588 else if (TREE_CODE (t) == MEM_REF)
1590 tree op0 = TREE_OPERAND (t, 0);
1591 if (TREE_CODE (op0) == ADDR_EXPR
1592 && (DECL_P (TREE_OPERAND (op0, 0))
1593 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
1595 if (DECL_P (TREE_OPERAND (op0, 0)))
1596 align = DECL_ALIGN (TREE_OPERAND (op0, 0));
1597 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1599 align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
1600 #ifdef CONSTANT_ALIGNMENT
1601 align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0), align);
1602 #endif
1604 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1606 unsigned HOST_WIDE_INT ioff
1607 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1608 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1609 align = MIN (aoff, align);
1612 else
1613 /* ??? This isn't fully correct, we can't set the alignment from the
1614 type in all cases. */
1615 align = MAX (align, TYPE_ALIGN (type));
1618 else if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1620 if (integer_zerop (TREE_OPERAND (t, 1)))
1621 /* We don't know anything about the alignment. */
1622 align = BITS_PER_UNIT;
1623 else
1624 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1627 /* If the size is known, we can set that. */
1628 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1629 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1631 /* If T is not a type, we may be able to deduce some more information about
1632 the expression. */
1633 if (! TYPE_P (t))
1635 tree base;
1636 bool align_computed = false;
1638 if (TREE_THIS_VOLATILE (t))
1639 MEM_VOLATILE_P (ref) = 1;
1641 /* Now remove any conversions: they don't change what the underlying
1642 object is. Likewise for SAVE_EXPR. */
1643 while (CONVERT_EXPR_P (t)
1644 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1645 || TREE_CODE (t) == SAVE_EXPR)
1646 t = TREE_OPERAND (t, 0);
1648 /* We may look through structure-like accesses for the purposes of
1649 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1650 base = t;
1651 while (TREE_CODE (base) == COMPONENT_REF
1652 || TREE_CODE (base) == REALPART_EXPR
1653 || TREE_CODE (base) == IMAGPART_EXPR
1654 || TREE_CODE (base) == BIT_FIELD_REF)
1655 base = TREE_OPERAND (base, 0);
1657 if (TREE_CODE (base) == MEM_REF
1658 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1659 base = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1660 if (DECL_P (base))
1662 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1663 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1664 else
1665 MEM_NOTRAP_P (ref) = 1;
1667 else
1668 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1670 base = get_base_address (base);
1671 if (base && DECL_P (base)
1672 && TREE_READONLY (base)
1673 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1674 MEM_READONLY_P (ref) = 1;
1676 /* If this expression uses its parent's alias set, mark it such
1677 that we won't change it. */
1678 if (component_uses_parent_alias_set (t))
1679 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1681 /* If this is a decl, set the attributes of the MEM from it. */
1682 if (DECL_P (t))
1684 expr = t;
1685 offset = const0_rtx;
1686 apply_bitpos = bitpos;
1687 size = (DECL_SIZE_UNIT (t)
1688 && host_integerp (DECL_SIZE_UNIT (t), 1)
1689 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1690 align = DECL_ALIGN (t);
1691 align_computed = true;
1694 /* If this is a constant, we know the alignment. */
1695 else if (CONSTANT_CLASS_P (t))
1697 align = TYPE_ALIGN (type);
1698 #ifdef CONSTANT_ALIGNMENT
1699 align = CONSTANT_ALIGNMENT (t, align);
1700 #endif
1701 align_computed = true;
1704 /* If this is a field reference and not a bit-field, record it. */
1705 /* ??? There is some information that can be gleaned from bit-fields,
1706 such as the word offset in the structure that might be modified.
1707 But skip it for now. */
1708 else if (TREE_CODE (t) == COMPONENT_REF
1709 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1711 expr = t;
1712 offset = const0_rtx;
1713 apply_bitpos = bitpos;
1714 /* ??? Any reason the field size would be different than
1715 the size we got from the type? */
1718 /* If this is an array reference, look for an outer field reference. */
1719 else if (TREE_CODE (t) == ARRAY_REF)
1721 tree off_tree = size_zero_node;
1722 /* We can't modify t, because we use it at the end of the
1723 function. */
1724 tree t2 = t;
1728 tree index = TREE_OPERAND (t2, 1);
1729 tree low_bound = array_ref_low_bound (t2);
1730 tree unit_size = array_ref_element_size (t2);
1732 /* We assume all arrays have sizes that are a multiple of a byte.
1733 First subtract the lower bound, if any, in the type of the
1734 index, then convert to sizetype and multiply by the size of
1735 the array element. */
1736 if (! integer_zerop (low_bound))
1737 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1738 index, low_bound);
1740 off_tree = size_binop (PLUS_EXPR,
1741 size_binop (MULT_EXPR,
1742 fold_convert (sizetype,
1743 index),
1744 unit_size),
1745 off_tree);
1746 t2 = TREE_OPERAND (t2, 0);
1748 while (TREE_CODE (t2) == ARRAY_REF);
1750 if (DECL_P (t2))
1752 expr = t2;
1753 offset = NULL;
1754 if (host_integerp (off_tree, 1))
1756 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1757 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1758 align = DECL_ALIGN (t2);
1759 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1760 align = aoff;
1761 align_computed = true;
1762 offset = GEN_INT (ioff);
1763 apply_bitpos = bitpos;
1766 else if (TREE_CODE (t2) == COMPONENT_REF)
1768 expr = t2;
1769 offset = NULL;
1770 if (host_integerp (off_tree, 1))
1772 offset = GEN_INT (tree_low_cst (off_tree, 1));
1773 apply_bitpos = bitpos;
1775 /* ??? Any reason the field size would be different than
1776 the size we got from the type? */
1779 /* If this is an indirect reference, record it. */
1780 else if (TREE_CODE (t) == MEM_REF
1781 || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1783 expr = t;
1784 offset = const0_rtx;
1785 apply_bitpos = bitpos;
1798 if (!align_computed && !INDIRECT_REF_P (t))
1800 unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
1801 align = MAX (align, obj_align);
1805 /* If we modified OFFSET based on T, then subtract the outstanding
1806 bit position offset. Similarly, increase the size of the accessed
1807 object to contain the negative offset. */
1808 if (apply_bitpos)
1810 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1811 if (size)
1812 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1815 /* Now set the attributes we computed above. */
1816 MEM_ATTRS (ref)
1817 = get_mem_attrs (alias, expr, offset, size, align,
1818 TYPE_ADDR_SPACE (type), GET_MODE (ref));
1820 /* If this is already known to be a scalar or aggregate, we are done. */
1821 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1822 return;
1824 /* If it is a reference into an aggregate, this is part of an aggregate.
1825 Otherwise we don't know. */
1826 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1827 || TREE_CODE (t) == ARRAY_RANGE_REF
1828 || TREE_CODE (t) == BIT_FIELD_REF)
1829 MEM_IN_STRUCT_P (ref) = 1;
1832 void
1833 set_mem_attributes (rtx ref, tree t, int objectp)
1835 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
1838 /* Set the alias set of MEM to SET. */
1840 void
1841 set_mem_alias_set (rtx mem, alias_set_type set)
1843 #ifdef ENABLE_CHECKING
1844 /* If the new and old alias sets don't conflict, something is wrong. */
1845 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1846 #endif
1848 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1849 MEM_SIZE (mem), MEM_ALIGN (mem),
1850 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1853 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1855 void
1856 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1858 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1859 MEM_OFFSET (mem), MEM_SIZE (mem),
1860 MEM_ALIGN (mem), addrspace, GET_MODE (mem));
1863 /* Set the alignment of MEM to ALIGN bits. */
1865 void
1866 set_mem_align (rtx mem, unsigned int align)
1868 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1869 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1870 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1873 /* Set the expr for MEM to EXPR. */
1875 void
1876 set_mem_expr (rtx mem, tree expr)
1878 MEM_ATTRS (mem)
1879 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1880 MEM_SIZE (mem), MEM_ALIGN (mem),
1881 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1884 /* Set the offset of MEM to OFFSET. */
1886 void
1887 set_mem_offset (rtx mem, rtx offset)
1889 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1890 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1891 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1894 /* Set the size of MEM to SIZE. */
1896 void
1897 set_mem_size (rtx mem, rtx size)
1899 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1900 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1901 MEM_ADDR_SPACE (mem), GET_MODE (mem));
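/* These setters rebuild the shared mem_attrs structure on each call, so
   they can simply be chained when a MEM is assembled by hand.  A sketch,
   with addr being some address rtx already in hand and 16/64 made-up
   values:

     rtx blk = gen_rtx_MEM (BLKmode, addr);
     set_mem_size (blk, GEN_INT (16));    -- size in bytes, as an rtx
     set_mem_align (blk, 64);             -- alignment in bits

   set_mem_alias_set, set_mem_expr and the rest follow the same pattern.  */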
1904 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1905 and its address changed to ADDR. (VOIDmode means don't change the mode.
1906 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1907 returned memory location is required to be valid. The memory
1908 attributes are not changed. */
1910 static rtx
1911 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1913 addr_space_t as;
1914 rtx new_rtx;
1916 gcc_assert (MEM_P (memref));
1917 as = MEM_ADDR_SPACE (memref);
1918 if (mode == VOIDmode)
1919 mode = GET_MODE (memref);
1920 if (addr == 0)
1921 addr = XEXP (memref, 0);
1922 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1923 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1924 return memref;
1926 if (validate)
1928 if (reload_in_progress || reload_completed)
1929 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1930 else
1931 addr = memory_address_addr_space (mode, addr, as);
1934 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1935 return memref;
1937 new_rtx = gen_rtx_MEM (mode, addr);
1938 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1939 return new_rtx;
1942 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1943 way we are changing MEMREF, so we only preserve the alias set. */
1946 change_address (rtx memref, enum machine_mode mode, rtx addr)
1948 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1949 enum machine_mode mmode = GET_MODE (new_rtx);
1950 unsigned int align;
1952 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1953 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1955 /* If there are no changes, just return the original memory reference. */
1956 if (new_rtx == memref)
1958 if (MEM_ATTRS (memref) == 0
1959 || (MEM_EXPR (memref) == NULL
1960 && MEM_OFFSET (memref) == NULL
1961 && MEM_SIZE (memref) == size
1962 && MEM_ALIGN (memref) == align))
1963 return new_rtx;
1965 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1966 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1969 MEM_ATTRS (new_rtx)
1970 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
1971 MEM_ADDR_SPACE (memref), mmode);
1973 return new_rtx;
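/* Illustrative usage (a sketch; MEM and NEW_ADDR are hypothetical):
   when an address has been rewritten so thoroughly that the old
   attributes can no longer be trusted, a caller would use

     mem = change_address (mem, VOIDmode, new_addr);

   which keeps the mode, alias set and address space but drops the expr
   and offset and recomputes size and alignment from the mode alone.  */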
1976 /* Return a memory reference like MEMREF, but with its mode changed
1977 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1978 nonzero, the memory address is forced to be valid.
1979 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1980 and the caller is responsible for adjusting the MEMREF base register. */
1983 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1984 int validate, int adjust)
1986 rtx addr = XEXP (memref, 0);
1987 rtx new_rtx;
1988 rtx memoffset = MEM_OFFSET (memref);
1989 rtx size = 0;
1990 unsigned int memalign = MEM_ALIGN (memref);
1991 addr_space_t as = MEM_ADDR_SPACE (memref);
1992 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
1993 int pbits;
1995 /* If there are no changes, just return the original memory reference. */
1996 if (mode == GET_MODE (memref) && !offset
1997 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1998 return memref;
2000 /* ??? Prefer to create garbage instead of creating shared rtl.
2001 This may happen even if offset is nonzero -- consider
2002 (plus (plus reg reg) const_int) -- so do this always. */
2003 addr = copy_rtx (addr);
2005 /* Convert a possibly large offset to a signed value within the
2006 range of the target address space. */
2007 pbits = GET_MODE_BITSIZE (address_mode);
2008 if (HOST_BITS_PER_WIDE_INT > pbits)
2010 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2011 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2012 >> shift);
2015 if (adjust)
2017 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2018 object, we can merge it into the LO_SUM. */
2019 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2020 && offset >= 0
2021 && (unsigned HOST_WIDE_INT) offset
2022 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2023 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2024 plus_constant (XEXP (addr, 1), offset));
2025 else
2026 addr = plus_constant (addr, offset);
2029 new_rtx = change_address_1 (memref, mode, addr, validate);
2031 /* If the address is a REG, change_address_1 rightfully returns memref,
2032 but this would destroy memref's MEM_ATTRS. */
2033 if (new_rtx == memref && offset != 0)
2034 new_rtx = copy_rtx (new_rtx);
2036 /* Compute the new values of the memory attributes due to this adjustment.
2037 We add the offsets and update the alignment. */
2038 if (memoffset)
2039 memoffset = GEN_INT (offset + INTVAL (memoffset));
2041 /* Compute the new alignment by taking the MIN of the alignment and the
2042 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2043 is zero. */
2044 if (offset != 0)
2045 memalign
2046 = MIN (memalign,
2047 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2049 /* We can compute the size in a number of ways. */
2050 if (GET_MODE (new_rtx) != BLKmode)
2051 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2052 else if (MEM_SIZE (memref))
2053 size = plus_constant (MEM_SIZE (memref), -offset);
2055 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2056 memoffset, size, memalign, as,
2057 GET_MODE (new_rtx));
2059 /* At some point, we should validate that this offset is within the object,
2060 if all the appropriate values are known. */
2061 return new_rtx;
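/* Illustrative usage (a sketch; MEM is hypothetical): most callers reach
   this routine through the adjust_address and adjust_address_nv
   convenience macros, e.g. to access the SImode word four bytes into an
   existing reference:

     rtx word1 = adjust_address (mem, SImode, 4);

   which passes VALIDATE and ADJUST as 1 so the address is both offset
   and revalidated, with the attributes updated as described above.  */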
2064 /* Return a memory reference like MEMREF, but with its mode changed
2065 to MODE and its address changed to ADDR, which is assumed to be
2066 MEMREF offset by OFFSET bytes. If VALIDATE is
2067 nonzero, the memory address is forced to be valid. */
2070 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2071 HOST_WIDE_INT offset, int validate)
2073 memref = change_address_1 (memref, VOIDmode, addr, validate);
2074 return adjust_address_1 (memref, mode, offset, validate, 0);
2077 /* Return a memory reference like MEMREF, but whose address is changed by
2078 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2079 known to be in OFFSET (possibly 1). */
2082 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2084 rtx new_rtx, addr = XEXP (memref, 0);
2085 addr_space_t as = MEM_ADDR_SPACE (memref);
2086 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2088 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2090 /* At this point we don't know _why_ the address is invalid. It
2091 could have secondary memory references, multiplies or anything.
2093 However, if we did go and rearrange things, we can wind up not
2094 being able to recognize the magic around pic_offset_table_rtx.
2095 This stuff is fragile, and is yet another example of why it is
2096 bad to expose PIC machinery too early. */
2097 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
2098 && GET_CODE (addr) == PLUS
2099 && XEXP (addr, 0) == pic_offset_table_rtx)
2101 addr = force_reg (GET_MODE (addr), addr);
2102 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2105 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2106 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2108 /* If there are no changes, just return the original memory reference. */
2109 if (new_rtx == memref)
2110 return new_rtx;
2112 /* Update the alignment to reflect the offset. Reset the offset, which
2113 we don't know. */
2114 MEM_ATTRS (new_rtx)
2115 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2116 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2117 as, GET_MODE (new_rtx));
2118 return new_rtx;
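/* Illustrative usage (a sketch; MEM and IDX are hypothetical): this is
   the routine to use when the offset is a run-time value, e.g. indexing
   by a register known to hold a multiple of four:

     rtx elt = offset_address (mem, idx, 4);

   POW2 only affects the MEM_ALIGN of the result, as shown above.  */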
2121 /* Return a memory reference like MEMREF, but with its address changed to
2122 ADDR. The caller is asserting that the actual piece of memory pointed
2123 to is the same, just the form of the address is being changed, such as
2124 by putting something into a register. */
2127 replace_equiv_address (rtx memref, rtx addr)
2129 /* change_address_1 copies the memory attribute structure without change
2130 and that's exactly what we want here. */
2131 update_temp_slot_address (XEXP (memref, 0), addr);
2132 return change_address_1 (memref, VOIDmode, addr, 1);
2135 /* Likewise, but the reference is not required to be valid. */
2138 replace_equiv_address_nv (rtx memref, rtx addr)
2140 return change_address_1 (memref, VOIDmode, addr, 0);
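/* Illustrative usage (a sketch; MEM is hypothetical): a common idiom is
   to legitimize a complicated address by copying it into a register
   while asserting that the same memory is still being addressed:

     rtx reg = force_reg (Pmode, XEXP (mem, 0));
     mem = replace_equiv_address (mem, reg);

   The _nv variant is used when the new address need not (yet) be
   valid.  */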
2143 /* Return a memory reference like MEMREF, but with its mode widened to
2144 MODE and offset by OFFSET. This would be used by targets that e.g.
2145 cannot issue QImode memory operations and have to use SImode memory
2146 operations plus masking logic. */
2149 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2151 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2152 tree expr = MEM_EXPR (new_rtx);
2153 rtx memoffset = MEM_OFFSET (new_rtx);
2154 unsigned int size = GET_MODE_SIZE (mode);
2156 /* If there are no changes, just return the original memory reference. */
2157 if (new_rtx == memref)
2158 return new_rtx;
2160 /* If we don't know what offset we were at within the expression, then
2161 we can't know if we've overstepped the bounds. */
2162 if (! memoffset)
2163 expr = NULL_TREE;
2165 while (expr)
2167 if (TREE_CODE (expr) == COMPONENT_REF)
2169 tree field = TREE_OPERAND (expr, 1);
2170 tree offset = component_ref_field_offset (expr);
2172 if (! DECL_SIZE_UNIT (field))
2174 expr = NULL_TREE;
2175 break;
2178 /* Is the field at least as large as the access? If so, ok,
2179 otherwise strip back to the containing structure. */
2180 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2181 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2182 && INTVAL (memoffset) >= 0)
2183 break;
2185 if (! host_integerp (offset, 1))
2187 expr = NULL_TREE;
2188 break;
2191 expr = TREE_OPERAND (expr, 0);
2192 memoffset
2193 = (GEN_INT (INTVAL (memoffset)
2194 + tree_low_cst (offset, 1)
2195 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2196 / BITS_PER_UNIT)));
2198 /* Similarly for the decl. */
2199 else if (DECL_P (expr)
2200 && DECL_SIZE_UNIT (expr)
2201 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2202 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2203 && (! memoffset || INTVAL (memoffset) >= 0))
2204 break;
2205 else
2207 /* The widened memory access overflows the expression, which means
2208 that it could alias another expression. Zap it. */
2209 expr = NULL_TREE;
2210 break;
2214 if (! expr)
2215 memoffset = NULL_RTX;
2217 /* The widened memory may alias other stuff, so zap the alias set. */
2218 /* ??? Maybe use get_alias_set on any remaining expression. */
2220 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2221 MEM_ALIGN (new_rtx),
2222 MEM_ADDR_SPACE (new_rtx), mode);
2224 return new_rtx;
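/* Illustrative usage (a sketch; MEM is hypothetical): a target that can
   only perform SImode loads might widen a QImode reference and mask the
   result afterwards:

     rtx wide = widen_memory_access (mem, SImode, 0);

   The alias set of the result is cleared above because the wider access
   may touch neighbouring objects.  */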
2227 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2228 static GTY(()) tree spill_slot_decl;
2230 tree
2231 get_spill_slot_decl (bool force_build_p)
2233 tree d = spill_slot_decl;
2234 rtx rd;
2236 if (d || !force_build_p)
2237 return d;
2239 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2240 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2241 DECL_ARTIFICIAL (d) = 1;
2242 DECL_IGNORED_P (d) = 1;
2243 TREE_USED (d) = 1;
2244 TREE_THIS_NOTRAP (d) = 1;
2245 spill_slot_decl = d;
2247 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2248 MEM_NOTRAP_P (rd) = 1;
2249 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2250 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
2251 SET_DECL_RTL (d, rd);
2253 return d;
2256 /* Given MEM, a result from assign_stack_local, fill in the memory
2257 attributes as appropriate for a register allocator spill slot.
2258 These slots are not aliasable by other memory. We arrange for
2259 them all to use a single MEM_EXPR, so that the aliasing code can
2260 work properly in the case of shared spill slots. */
2262 void
2263 set_mem_attrs_for_spill (rtx mem)
2265 alias_set_type alias;
2266 rtx addr, offset;
2267 tree expr;
2269 expr = get_spill_slot_decl (true);
2270 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2272 /* We expect the incoming memory to be of the form:
2273 (mem:MODE (plus (reg sfp) (const_int offset)))
2274 with perhaps the plus missing for offset = 0. */
2275 addr = XEXP (mem, 0);
2276 offset = const0_rtx;
2277 if (GET_CODE (addr) == PLUS
2278 && CONST_INT_P (XEXP (addr, 1)))
2279 offset = XEXP (addr, 1);
2281 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2282 MEM_SIZE (mem), MEM_ALIGN (mem),
2283 ADDR_SPACE_GENERIC, GET_MODE (mem));
2284 MEM_NOTRAP_P (mem) = 1;
2287 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2290 gen_label_rtx (void)
2292 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2293 NULL, label_num++, NULL);
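/* Illustrative usage (a sketch): a label is typically created well
   before it is placed in the insn stream:

     rtx label = gen_label_rtx ();
     ... emit a conditional branch to LABEL ...
     emit_label (label);
*/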
2296 /* For procedure integration. */
2298 /* Install new pointers to the first and last insns in the chain.
2299 Also, set cur_insn_uid to one higher than the last in use.
2300 Used for an inline-procedure after copying the insn chain. */
2302 void
2303 set_new_first_and_last_insn (rtx first, rtx last)
2305 rtx insn;
2307 set_first_insn (first);
2308 set_last_insn (last);
2309 cur_insn_uid = 0;
2311 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2313 int debug_count = 0;
2315 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2316 cur_debug_insn_uid = 0;
2318 for (insn = first; insn; insn = NEXT_INSN (insn))
2319 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2320 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2321 else
2323 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2324 if (DEBUG_INSN_P (insn))
2325 debug_count++;
2328 if (debug_count)
2329 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2330 else
2331 cur_debug_insn_uid++;
2333 else
2334 for (insn = first; insn; insn = NEXT_INSN (insn))
2335 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2337 cur_insn_uid++;
2340 /* Go through all the RTL insn bodies and copy any invalid shared
2341 structure. This routine should only be called once. */
2343 static void
2344 unshare_all_rtl_1 (rtx insn)
2346 /* Unshare just about everything else. */
2347 unshare_all_rtl_in_chain (insn);
2349 /* Make sure the addresses of stack slots found outside the insn chain
2350 (such as, in DECL_RTL of a variable) are not shared
2351 with the insn chain.
2353 This special care is necessary when the stack slot MEM does not
2354 actually appear in the insn chain. If it does appear, its address
2355 is unshared from all else at that point. */
2356 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2359 /* Go through all the RTL insn bodies and copy any invalid shared
2360 structure, again. This is a fairly expensive thing to do so it
2361 should be done sparingly. */
2363 void
2364 unshare_all_rtl_again (rtx insn)
2366 rtx p;
2367 tree decl;
2369 for (p = insn; p; p = NEXT_INSN (p))
2370 if (INSN_P (p))
2372 reset_used_flags (PATTERN (p));
2373 reset_used_flags (REG_NOTES (p));
2376 /* Make sure that virtual stack slots are not shared. */
2377 set_used_decls (DECL_INITIAL (cfun->decl));
2379 /* Make sure that virtual parameters are not shared. */
2380 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2381 set_used_flags (DECL_RTL (decl));
2383 reset_used_flags (stack_slot_list);
2385 unshare_all_rtl_1 (insn);
2388 unsigned int
2389 unshare_all_rtl (void)
2391 unshare_all_rtl_1 (get_insns ());
2392 return 0;
2395 struct rtl_opt_pass pass_unshare_all_rtl =
2398 RTL_PASS,
2399 "unshare", /* name */
2400 NULL, /* gate */
2401 unshare_all_rtl, /* execute */
2402 NULL, /* sub */
2403 NULL, /* next */
2404 0, /* static_pass_number */
2405 TV_NONE, /* tv_id */
2406 0, /* properties_required */
2407 0, /* properties_provided */
2408 0, /* properties_destroyed */
2409 0, /* todo_flags_start */
2410 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2415 /* Check that ORIG is not marked when it should not be and mark ORIG as in use.
2416 Recursively does the same for subexpressions. */
2418 static void
2419 verify_rtx_sharing (rtx orig, rtx insn)
2421 rtx x = orig;
2422 int i;
2423 enum rtx_code code;
2424 const char *format_ptr;
2426 if (x == 0)
2427 return;
2429 code = GET_CODE (x);
2431 /* These types may be freely shared. */
2433 switch (code)
2435 case REG:
2436 case DEBUG_EXPR:
2437 case VALUE:
2438 case CONST_INT:
2439 case CONST_DOUBLE:
2440 case CONST_FIXED:
2441 case CONST_VECTOR:
2442 case SYMBOL_REF:
2443 case LABEL_REF:
2444 case CODE_LABEL:
2445 case PC:
2446 case CC0:
2447 case SCRATCH:
2448 return;
2449 /* A SCRATCH must be shared because each one represents a distinct value. */
2450 case CLOBBER:
2451 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2452 return;
2453 break;
2455 case CONST:
2456 if (shared_const_p (orig))
2457 return;
2458 break;
2460 case MEM:
2461 /* A MEM is allowed to be shared if its address is constant. */
2462 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2463 || reload_completed || reload_in_progress)
2464 return;
2466 break;
2468 default:
2469 break;
2472 /* This rtx may not be shared. If it has already been seen,
2473 replace it with a copy of itself. */
2474 #ifdef ENABLE_CHECKING
2475 if (RTX_FLAG (x, used))
2477 error ("invalid rtl sharing found in the insn");
2478 debug_rtx (insn);
2479 error ("shared rtx");
2480 debug_rtx (x);
2481 internal_error ("internal consistency failure");
2483 #endif
2484 gcc_assert (!RTX_FLAG (x, used));
2486 RTX_FLAG (x, used) = 1;
2488 /* Now scan the subexpressions recursively. */
2490 format_ptr = GET_RTX_FORMAT (code);
2492 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2494 switch (*format_ptr++)
2496 case 'e':
2497 verify_rtx_sharing (XEXP (x, i), insn);
2498 break;
2500 case 'E':
2501 if (XVEC (x, i) != NULL)
2503 int j;
2504 int len = XVECLEN (x, i);
2506 for (j = 0; j < len; j++)
2508 /* We allow sharing of ASM_OPERANDS inside single
2509 instruction. */
2510 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2511 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2512 == ASM_OPERANDS))
2513 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2514 else
2515 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2518 break;
2521 return;
2524 /* Go through all the RTL insn bodies and check that there is no unexpected
2525 sharing in between the subexpressions. */
2527 DEBUG_FUNCTION void
2528 verify_rtl_sharing (void)
2530 rtx p;
2532 for (p = get_insns (); p; p = NEXT_INSN (p))
2533 if (INSN_P (p))
2535 reset_used_flags (PATTERN (p));
2536 reset_used_flags (REG_NOTES (p));
2537 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2539 int i;
2540 rtx q, sequence = PATTERN (p);
2542 for (i = 0; i < XVECLEN (sequence, 0); i++)
2544 q = XVECEXP (sequence, 0, i);
2545 gcc_assert (INSN_P (q));
2546 reset_used_flags (PATTERN (q));
2547 reset_used_flags (REG_NOTES (q));
2552 for (p = get_insns (); p; p = NEXT_INSN (p))
2553 if (INSN_P (p))
2555 verify_rtx_sharing (PATTERN (p), p);
2556 verify_rtx_sharing (REG_NOTES (p), p);
2560 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2561 Assumes the mark bits are cleared at entry. */
2563 void
2564 unshare_all_rtl_in_chain (rtx insn)
2566 for (; insn; insn = NEXT_INSN (insn))
2567 if (INSN_P (insn))
2569 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2570 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2574 /* Go through all virtual stack slots of a function and mark them as
2575 shared. We never replace the DECL_RTLs themselves with a copy,
2576 but expressions mentioned in a DECL_RTL cannot be shared with
2577 expressions in the instruction stream.
2579 Note that reload may convert pseudo registers into memories in-place.
2580 Pseudo registers are always shared, but MEMs never are. Thus if we
2581 reset the used flags on MEMs in the instruction stream, we must set
2582 them again on MEMs that appear in DECL_RTLs. */
2584 static void
2585 set_used_decls (tree blk)
2587 tree t;
2589 /* Mark decls. */
2590 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2591 if (DECL_RTL_SET_P (t))
2592 set_used_flags (DECL_RTL (t));
2594 /* Now process sub-blocks. */
2595 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2596 set_used_decls (t);
2599 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2600 Recursively does the same for subexpressions. Uses
2601 copy_rtx_if_shared_1 to reduce stack space. */
2604 copy_rtx_if_shared (rtx orig)
2606 copy_rtx_if_shared_1 (&orig);
2607 return orig;
2610 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2611 use. Recursively does the same for subexpressions. */
2613 static void
2614 copy_rtx_if_shared_1 (rtx *orig1)
2616 rtx x;
2617 int i;
2618 enum rtx_code code;
2619 rtx *last_ptr;
2620 const char *format_ptr;
2621 int copied = 0;
2622 int length;
2624 /* Repeat is used to turn tail-recursion into iteration. */
2625 repeat:
2626 x = *orig1;
2628 if (x == 0)
2629 return;
2631 code = GET_CODE (x);
2633 /* These types may be freely shared. */
2635 switch (code)
2637 case REG:
2638 case DEBUG_EXPR:
2639 case VALUE:
2640 case CONST_INT:
2641 case CONST_DOUBLE:
2642 case CONST_FIXED:
2643 case CONST_VECTOR:
2644 case SYMBOL_REF:
2645 case LABEL_REF:
2646 case CODE_LABEL:
2647 case PC:
2648 case CC0:
2649 case SCRATCH:
2650 /* A SCRATCH must be shared because each one represents a distinct value. */
2651 return;
2652 case CLOBBER:
2653 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2654 return;
2655 break;
2657 case CONST:
2658 if (shared_const_p (x))
2659 return;
2660 break;
2662 case DEBUG_INSN:
2663 case INSN:
2664 case JUMP_INSN:
2665 case CALL_INSN:
2666 case NOTE:
2667 case BARRIER:
2668 /* The chain of insns is not being copied. */
2669 return;
2671 default:
2672 break;
2675 /* This rtx may not be shared. If it has already been seen,
2676 replace it with a copy of itself. */
2678 if (RTX_FLAG (x, used))
2680 x = shallow_copy_rtx (x);
2681 copied = 1;
2683 RTX_FLAG (x, used) = 1;
2685 /* Now scan the subexpressions recursively.
2686 We can store any replaced subexpressions directly into X
2687 since we know X is not shared! Any vectors in X
2688 must be copied if X was copied. */
2690 format_ptr = GET_RTX_FORMAT (code);
2691 length = GET_RTX_LENGTH (code);
2692 last_ptr = NULL;
2694 for (i = 0; i < length; i++)
2696 switch (*format_ptr++)
2698 case 'e':
2699 if (last_ptr)
2700 copy_rtx_if_shared_1 (last_ptr);
2701 last_ptr = &XEXP (x, i);
2702 break;
2704 case 'E':
2705 if (XVEC (x, i) != NULL)
2707 int j;
2708 int len = XVECLEN (x, i);
2710 /* Copy the vector iff I copied the rtx and the length
2711 is nonzero. */
2712 if (copied && len > 0)
2713 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2715 /* Call recursively on all inside the vector. */
2716 for (j = 0; j < len; j++)
2718 if (last_ptr)
2719 copy_rtx_if_shared_1 (last_ptr);
2720 last_ptr = &XVECEXP (x, i, j);
2723 break;
2726 *orig1 = x;
2727 if (last_ptr)
2729 orig1 = last_ptr;
2730 goto repeat;
2732 return;
2735 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2736 to look for shared sub-parts. */
2738 void
2739 reset_used_flags (rtx x)
2741 int i, j;
2742 enum rtx_code code;
2743 const char *format_ptr;
2744 int length;
2746 /* Repeat is used to turn tail-recursion into iteration. */
2747 repeat:
2748 if (x == 0)
2749 return;
2751 code = GET_CODE (x);
2753 /* These types may be freely shared so we needn't do any resetting
2754 for them. */
2756 switch (code)
2758 case REG:
2759 case DEBUG_EXPR:
2760 case VALUE:
2761 case CONST_INT:
2762 case CONST_DOUBLE:
2763 case CONST_FIXED:
2764 case CONST_VECTOR:
2765 case SYMBOL_REF:
2766 case CODE_LABEL:
2767 case PC:
2768 case CC0:
2769 return;
2771 case DEBUG_INSN:
2772 case INSN:
2773 case JUMP_INSN:
2774 case CALL_INSN:
2775 case NOTE:
2776 case LABEL_REF:
2777 case BARRIER:
2778 /* The chain of insns is not being copied. */
2779 return;
2781 default:
2782 break;
2785 RTX_FLAG (x, used) = 0;
2787 format_ptr = GET_RTX_FORMAT (code);
2788 length = GET_RTX_LENGTH (code);
2790 for (i = 0; i < length; i++)
2792 switch (*format_ptr++)
2794 case 'e':
2795 if (i == length-1)
2797 x = XEXP (x, i);
2798 goto repeat;
2800 reset_used_flags (XEXP (x, i));
2801 break;
2803 case 'E':
2804 for (j = 0; j < XVECLEN (x, i); j++)
2805 reset_used_flags (XVECEXP (x, i, j));
2806 break;
2811 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2812 to look for shared sub-parts. */
2814 void
2815 set_used_flags (rtx x)
2817 int i, j;
2818 enum rtx_code code;
2819 const char *format_ptr;
2821 if (x == 0)
2822 return;
2824 code = GET_CODE (x);
2826 /* These types may be freely shared so we needn't do any resetting
2827 for them. */
2829 switch (code)
2831 case REG:
2832 case DEBUG_EXPR:
2833 case VALUE:
2834 case CONST_INT:
2835 case CONST_DOUBLE:
2836 case CONST_FIXED:
2837 case CONST_VECTOR:
2838 case SYMBOL_REF:
2839 case CODE_LABEL:
2840 case PC:
2841 case CC0:
2842 return;
2844 case DEBUG_INSN:
2845 case INSN:
2846 case JUMP_INSN:
2847 case CALL_INSN:
2848 case NOTE:
2849 case LABEL_REF:
2850 case BARRIER:
2851 /* The chain of insns is not being copied. */
2852 return;
2854 default:
2855 break;
2858 RTX_FLAG (x, used) = 1;
2860 format_ptr = GET_RTX_FORMAT (code);
2861 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2863 switch (*format_ptr++)
2865 case 'e':
2866 set_used_flags (XEXP (x, i));
2867 break;
2869 case 'E':
2870 for (j = 0; j < XVECLEN (x, i); j++)
2871 set_used_flags (XVECEXP (x, i, j));
2872 break;
2877 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2878 Return X or the rtx for the pseudo reg the value of X was copied into.
2879 OTHER must be valid as a SET_DEST. */
2882 make_safe_from (rtx x, rtx other)
2884 while (1)
2885 switch (GET_CODE (other))
2887 case SUBREG:
2888 other = SUBREG_REG (other);
2889 break;
2890 case STRICT_LOW_PART:
2891 case SIGN_EXTEND:
2892 case ZERO_EXTEND:
2893 other = XEXP (other, 0);
2894 break;
2895 default:
2896 goto done;
2898 done:
2899 if ((MEM_P (other)
2900 && ! CONSTANT_P (x)
2901 && !REG_P (x)
2902 && GET_CODE (x) != SUBREG)
2903 || (REG_P (other)
2904 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2905 || reg_mentioned_p (other, x))))
2907 rtx temp = gen_reg_rtx (GET_MODE (x));
2908 emit_move_insn (temp, x);
2909 return temp;
2911 return x;
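/* Illustrative usage (a sketch; X and TARGET are hypothetical):
   expansion code that is about to clobber TARGET can first protect an
   input that might mention it:

     x = make_safe_from (x, target);
     ... emit insns that set TARGET ...

   afterwards X is either unchanged or a fresh pseudo holding a copy.  */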
2914 /* Emission of insns (adding them to the doubly-linked list). */
2916 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2919 get_last_insn_anywhere (void)
2921 struct sequence_stack *stack;
2922 if (get_last_insn ())
2923 return get_last_insn ();
2924 for (stack = seq_stack; stack; stack = stack->next)
2925 if (stack->last != 0)
2926 return stack->last;
2927 return 0;
2930 /* Return the first nonnote insn emitted in current sequence or current
2931 function. This routine looks inside SEQUENCEs. */
2934 get_first_nonnote_insn (void)
2936 rtx insn = get_insns ();
2938 if (insn)
2940 if (NOTE_P (insn))
2941 for (insn = next_insn (insn);
2942 insn && NOTE_P (insn);
2943 insn = next_insn (insn))
2944 continue;
2945 else
2947 if (NONJUMP_INSN_P (insn)
2948 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2949 insn = XVECEXP (PATTERN (insn), 0, 0);
2953 return insn;
2956 /* Return the last nonnote insn emitted in current sequence or current
2957 function. This routine looks inside SEQUENCEs. */
2960 get_last_nonnote_insn (void)
2962 rtx insn = get_last_insn ();
2964 if (insn)
2966 if (NOTE_P (insn))
2967 for (insn = previous_insn (insn);
2968 insn && NOTE_P (insn);
2969 insn = previous_insn (insn))
2970 continue;
2971 else
2973 if (NONJUMP_INSN_P (insn)
2974 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2975 insn = XVECEXP (PATTERN (insn), 0,
2976 XVECLEN (PATTERN (insn), 0) - 1);
2980 return insn;
2983 /* Return the number of actual (non-debug) insns emitted in this
2984 function. */
2987 get_max_insn_count (void)
2989 int n = cur_insn_uid;
2991 /* The table size must be stable across -g, to avoid codegen
2992 differences due to debug insns, and not be affected by
2993 -fmin-insn-uid, to avoid excessive table size and to simplify
2994 debugging of -fcompare-debug failures. */
2995 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
2996 n -= cur_debug_insn_uid;
2997 else
2998 n -= MIN_NONDEBUG_INSN_UID;
3000 return n;
3004 /* Return the next insn. If it is a SEQUENCE, return the first insn
3005 of the sequence. */
3008 next_insn (rtx insn)
3010 if (insn)
3012 insn = NEXT_INSN (insn);
3013 if (insn && NONJUMP_INSN_P (insn)
3014 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3015 insn = XVECEXP (PATTERN (insn), 0, 0);
3018 return insn;
3021 /* Return the previous insn. If it is a SEQUENCE, return the last insn
3022 of the sequence. */
3025 previous_insn (rtx insn)
3027 if (insn)
3029 insn = PREV_INSN (insn);
3030 if (insn && NONJUMP_INSN_P (insn)
3031 && GET_CODE (PATTERN (insn)) == SEQUENCE)
3032 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
3035 return insn;
3038 /* Return the next insn after INSN that is not a NOTE. This routine does not
3039 look inside SEQUENCEs. */
3042 next_nonnote_insn (rtx insn)
3044 while (insn)
3046 insn = NEXT_INSN (insn);
3047 if (insn == 0 || !NOTE_P (insn))
3048 break;
3051 return insn;
3054 /* Return the next insn after INSN that is not a NOTE, but stop the
3055 search before we enter another basic block. This routine does not
3056 look inside SEQUENCEs. */
3059 next_nonnote_insn_bb (rtx insn)
3061 while (insn)
3063 insn = NEXT_INSN (insn);
3064 if (insn == 0 || !NOTE_P (insn))
3065 break;
3066 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3067 return NULL_RTX;
3070 return insn;
3073 /* Return the previous insn before INSN that is not a NOTE. This routine does
3074 not look inside SEQUENCEs. */
3077 prev_nonnote_insn (rtx insn)
3079 while (insn)
3081 insn = PREV_INSN (insn);
3082 if (insn == 0 || !NOTE_P (insn))
3083 break;
3086 return insn;
3089 /* Return the previous insn before INSN that is not a NOTE, but stop
3090 the search before we enter another basic block. This routine does
3091 not look inside SEQUENCEs. */
3094 prev_nonnote_insn_bb (rtx insn)
3096 while (insn)
3098 insn = PREV_INSN (insn);
3099 if (insn == 0 || !NOTE_P (insn))
3100 break;
3101 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3102 return NULL_RTX;
3105 return insn;
3108 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3109 routine does not look inside SEQUENCEs. */
3112 next_nondebug_insn (rtx insn)
3114 while (insn)
3116 insn = NEXT_INSN (insn);
3117 if (insn == 0 || !DEBUG_INSN_P (insn))
3118 break;
3121 return insn;
3124 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3125 This routine does not look inside SEQUENCEs. */
3128 prev_nondebug_insn (rtx insn)
3130 while (insn)
3132 insn = PREV_INSN (insn);
3133 if (insn == 0 || !DEBUG_INSN_P (insn))
3134 break;
3137 return insn;
3140 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3141 This routine does not look inside SEQUENCEs. */
3144 next_nonnote_nondebug_insn (rtx insn)
3146 while (insn)
3148 insn = NEXT_INSN (insn);
3149 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3150 break;
3153 return insn;
3156 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3157 This routine does not look inside SEQUENCEs. */
3160 prev_nonnote_nondebug_insn (rtx insn)
3162 while (insn)
3164 insn = PREV_INSN (insn);
3165 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3166 break;
3169 return insn;
3172 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3173 or 0, if there is none. This routine does not look inside
3174 SEQUENCEs. */
3177 next_real_insn (rtx insn)
3179 while (insn)
3181 insn = NEXT_INSN (insn);
3182 if (insn == 0 || INSN_P (insn))
3183 break;
3186 return insn;
3189 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3190 or 0, if there is none. This routine does not look inside
3191 SEQUENCEs. */
3194 prev_real_insn (rtx insn)
3196 while (insn)
3198 insn = PREV_INSN (insn);
3199 if (insn == 0 || INSN_P (insn))
3200 break;
3203 return insn;
3206 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3207 This routine does not look inside SEQUENCEs. */
3210 last_call_insn (void)
3212 rtx insn;
3214 for (insn = get_last_insn ();
3215 insn && !CALL_P (insn);
3216 insn = PREV_INSN (insn))
3219 return insn;
3222 /* Find the next insn after INSN that really does something. This routine
3223 does not look inside SEQUENCEs. After reload this also skips over
3224 standalone USE and CLOBBER insns. */
3227 active_insn_p (const_rtx insn)
3229 return (CALL_P (insn) || JUMP_P (insn)
3230 || (NONJUMP_INSN_P (insn)
3231 && (! reload_completed
3232 || (GET_CODE (PATTERN (insn)) != USE
3233 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3237 next_active_insn (rtx insn)
3239 while (insn)
3241 insn = NEXT_INSN (insn);
3242 if (insn == 0 || active_insn_p (insn))
3243 break;
3246 return insn;
3249 /* Find the last insn before INSN that really does something. This routine
3250 does not look inside SEQUENCEs. After reload this also skips over
3251 standalone USE and CLOBBER insns. */
3254 prev_active_insn (rtx insn)
3256 while (insn)
3258 insn = PREV_INSN (insn);
3259 if (insn == 0 || active_insn_p (insn))
3260 break;
3263 return insn;
3266 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3269 next_label (rtx insn)
3271 while (insn)
3273 insn = NEXT_INSN (insn);
3274 if (insn == 0 || LABEL_P (insn))
3275 break;
3278 return insn;
3281 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3284 prev_label (rtx insn)
3286 while (insn)
3288 insn = PREV_INSN (insn);
3289 if (insn == 0 || LABEL_P (insn))
3290 break;
3293 return insn;
3296 /* Return the last label to mark the same position as LABEL. Return null
3297 if LABEL itself is null. */
3300 skip_consecutive_labels (rtx label)
3302 rtx insn;
3304 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3305 if (LABEL_P (insn))
3306 label = insn;
3308 return label;
3311 #ifdef HAVE_cc0
3312 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3313 and REG_CC_USER notes so we can find it. */
3315 void
3316 link_cc0_insns (rtx insn)
3318 rtx user = next_nonnote_insn (insn);
3320 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3321 user = XVECEXP (PATTERN (user), 0, 0);
3323 add_reg_note (user, REG_CC_SETTER, insn);
3324 add_reg_note (insn, REG_CC_USER, user);
3327 /* Return the next insn that uses CC0 after INSN, which is assumed to
3328 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3329 applied to the result of this function should yield INSN).
3331 Normally, this is simply the next insn. However, if a REG_CC_USER note
3332 is present, it contains the insn that uses CC0.
3334 Return 0 if we can't find the insn. */
3337 next_cc0_user (rtx insn)
3339 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3341 if (note)
3342 return XEXP (note, 0);
3344 insn = next_nonnote_insn (insn);
3345 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3346 insn = XVECEXP (PATTERN (insn), 0, 0);
3348 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3349 return insn;
3351 return 0;
3354 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3355 note, it is the previous insn. */
3358 prev_cc0_setter (rtx insn)
3360 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3362 if (note)
3363 return XEXP (note, 0);
3365 insn = prev_nonnote_insn (insn);
3366 gcc_assert (sets_cc0_p (PATTERN (insn)));
3368 return insn;
3370 #endif
3372 #ifdef AUTO_INC_DEC
3373 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3375 static int
3376 find_auto_inc (rtx *xp, void *data)
3378 rtx x = *xp;
3379 rtx reg = (rtx) data;
3381 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3382 return 0;
3384 switch (GET_CODE (x))
3386 case PRE_DEC:
3387 case PRE_INC:
3388 case POST_DEC:
3389 case POST_INC:
3390 case PRE_MODIFY:
3391 case POST_MODIFY:
3392 if (rtx_equal_p (reg, XEXP (x, 0)))
3393 return 1;
3394 break;
3396 default:
3397 gcc_unreachable ();
3399 return -1;
3401 #endif
3403 /* Increment the label uses for all labels present in rtx. */
3405 static void
3406 mark_label_nuses (rtx x)
3408 enum rtx_code code;
3409 int i, j;
3410 const char *fmt;
3412 code = GET_CODE (x);
3413 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3414 LABEL_NUSES (XEXP (x, 0))++;
3416 fmt = GET_RTX_FORMAT (code);
3417 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3419 if (fmt[i] == 'e')
3420 mark_label_nuses (XEXP (x, i));
3421 else if (fmt[i] == 'E')
3422 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3423 mark_label_nuses (XVECEXP (x, i, j));
3428 /* Try splitting insns that can be split for better scheduling.
3429 PAT is the pattern which might split.
3430 TRIAL is the insn providing PAT.
3431 LAST is nonzero if we should return the last insn of the sequence produced.
3433 If this routine succeeds in splitting, it returns the first or last
3434 replacement insn depending on the value of LAST. Otherwise, it
3435 returns TRIAL. If the insn to be returned can be split, it will be. */
3438 try_split (rtx pat, rtx trial, int last)
3440 rtx before = PREV_INSN (trial);
3441 rtx after = NEXT_INSN (trial);
3442 int has_barrier = 0;
3443 rtx note, seq, tem;
3444 int probability;
3445 rtx insn_last, insn;
3446 int njumps = 0;
3448 /* We're not good at redistributing frame information. */
3449 if (RTX_FRAME_RELATED_P (trial))
3450 return trial;
3452 if (any_condjump_p (trial)
3453 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3454 split_branch_probability = INTVAL (XEXP (note, 0));
3455 probability = split_branch_probability;
3457 seq = split_insns (pat, trial);
3459 split_branch_probability = -1;
3461 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3462 We may need to handle this specially. */
3463 if (after && BARRIER_P (after))
3465 has_barrier = 1;
3466 after = NEXT_INSN (after);
3469 if (!seq)
3470 return trial;
3472 /* Avoid infinite loop if any insn of the result matches
3473 the original pattern. */
3474 insn_last = seq;
3475 while (1)
3477 if (INSN_P (insn_last)
3478 && rtx_equal_p (PATTERN (insn_last), pat))
3479 return trial;
3480 if (!NEXT_INSN (insn_last))
3481 break;
3482 insn_last = NEXT_INSN (insn_last);
3485 /* We will be adding the new sequence to the function. The splitters
3486 may have introduced invalid RTL sharing, so unshare the sequence now. */
3487 unshare_all_rtl_in_chain (seq);
3489 /* Mark labels. */
3490 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3492 if (JUMP_P (insn))
3494 mark_jump_label (PATTERN (insn), insn, 0);
3495 njumps++;
3496 if (probability != -1
3497 && any_condjump_p (insn)
3498 && !find_reg_note (insn, REG_BR_PROB, 0))
3500 /* We can preserve the REG_BR_PROB notes only if exactly
3501 one jump is created, otherwise the machine description
3502 is responsible for this step using the
3503 split_branch_probability variable. */
3504 gcc_assert (njumps == 1);
3505 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3510 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3511 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3512 if (CALL_P (trial))
3514 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3515 if (CALL_P (insn))
3517 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3518 while (*p)
3519 p = &XEXP (*p, 1);
3520 *p = CALL_INSN_FUNCTION_USAGE (trial);
3521 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3523 /* Update the debug information for the CALL_INSN. */
3524 if (flag_enable_icf_debug)
3525 (*debug_hooks->copy_call_info) (trial, insn);
3529 /* Copy notes, particularly those related to the CFG. */
3530 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3532 switch (REG_NOTE_KIND (note))
3534 case REG_EH_REGION:
3535 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3536 break;
3538 case REG_NORETURN:
3539 case REG_SETJMP:
3540 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3542 if (CALL_P (insn))
3543 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3545 break;
3547 case REG_NON_LOCAL_GOTO:
3548 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3550 if (JUMP_P (insn))
3551 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3553 break;
3555 #ifdef AUTO_INC_DEC
3556 case REG_INC:
3557 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3559 rtx reg = XEXP (note, 0);
3560 if (!FIND_REG_INC_NOTE (insn, reg)
3561 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3562 add_reg_note (insn, REG_INC, reg);
3564 break;
3565 #endif
3567 default:
3568 break;
3572 /* If there are LABELS inside the split insns increment the
3573 usage count so we don't delete the label. */
3574 if (INSN_P (trial))
3576 insn = insn_last;
3577 while (insn != NULL_RTX)
3579 /* JUMP_P insns have already been "marked" above. */
3580 if (NONJUMP_INSN_P (insn))
3581 mark_label_nuses (PATTERN (insn));
3583 insn = PREV_INSN (insn);
3587 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3589 delete_insn (trial);
3590 if (has_barrier)
3591 emit_barrier_after (tem);
3593 /* Recursively call try_split for each new insn created; by the
3594 time control returns here that insn will be fully split, so
3595 set LAST and continue from the insn after the one returned.
3596 We can't use next_active_insn here since AFTER may be a note.
3597 Ignore deleted insns, which can occur if not optimizing. */
3598 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3599 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3600 tem = try_split (PATTERN (tem), tem, 1);
3602 /* Return either the first or the last insn, depending on which was
3603 requested. */
3604 return last
3605 ? (after ? PREV_INSN (after) : get_last_insn ())
3606 : NEXT_INSN (before);
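/* Illustrative usage (a sketch): passes that split insns in place, such
   as split_all_insns, use a call of roughly this shape:

     insn = try_split (PATTERN (insn), insn, 1);

   With LAST nonzero this yields either the original insn (if no split
   applied) or the last insn of the replacement sequence.  */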
3609 /* Make and return an INSN rtx, initializing all its slots.
3610 Store PATTERN in the pattern slots. */
3613 make_insn_raw (rtx pattern)
3615 rtx insn;
3617 insn = rtx_alloc (INSN);
3619 INSN_UID (insn) = cur_insn_uid++;
3620 PATTERN (insn) = pattern;
3621 INSN_CODE (insn) = -1;
3622 REG_NOTES (insn) = NULL;
3623 INSN_LOCATOR (insn) = curr_insn_locator ();
3624 BLOCK_FOR_INSN (insn) = NULL;
3626 #ifdef ENABLE_RTL_CHECKING
3627 if (insn
3628 && INSN_P (insn)
3629 && (returnjump_p (insn)
3630 || (GET_CODE (insn) == SET
3631 && SET_DEST (insn) == pc_rtx)))
3633 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3634 debug_rtx (insn);
3636 #endif
3638 return insn;
3641 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3644 make_debug_insn_raw (rtx pattern)
3646 rtx insn;
3648 insn = rtx_alloc (DEBUG_INSN);
3649 INSN_UID (insn) = cur_debug_insn_uid++;
3650 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3651 INSN_UID (insn) = cur_insn_uid++;
3653 PATTERN (insn) = pattern;
3654 INSN_CODE (insn) = -1;
3655 REG_NOTES (insn) = NULL;
3656 INSN_LOCATOR (insn) = curr_insn_locator ();
3657 BLOCK_FOR_INSN (insn) = NULL;
3659 return insn;
3662 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3665 make_jump_insn_raw (rtx pattern)
3667 rtx insn;
3669 insn = rtx_alloc (JUMP_INSN);
3670 INSN_UID (insn) = cur_insn_uid++;
3672 PATTERN (insn) = pattern;
3673 INSN_CODE (insn) = -1;
3674 REG_NOTES (insn) = NULL;
3675 JUMP_LABEL (insn) = NULL;
3676 INSN_LOCATOR (insn) = curr_insn_locator ();
3677 BLOCK_FOR_INSN (insn) = NULL;
3679 return insn;
3682 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3684 static rtx
3685 make_call_insn_raw (rtx pattern)
3687 rtx insn;
3689 insn = rtx_alloc (CALL_INSN);
3690 INSN_UID (insn) = cur_insn_uid++;
3692 PATTERN (insn) = pattern;
3693 INSN_CODE (insn) = -1;
3694 REG_NOTES (insn) = NULL;
3695 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3696 INSN_LOCATOR (insn) = curr_insn_locator ();
3697 BLOCK_FOR_INSN (insn) = NULL;
3699 return insn;
3702 /* Add INSN to the end of the doubly-linked list.
3703 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3705 void
3706 add_insn (rtx insn)
3708 PREV_INSN (insn) = get_last_insn();
3709 NEXT_INSN (insn) = 0;
3711 if (NULL != get_last_insn())
3712 NEXT_INSN (get_last_insn ()) = insn;
3714 if (NULL == get_insns ())
3715 set_first_insn (insn);
3717 set_last_insn (insn);
3720 /* Add INSN into the doubly-linked list after insn AFTER. This and
3721 the next should be the only functions called to insert an insn once
3722 delay slots have been filled since only they know how to update a
3723 SEQUENCE. */
3725 void
3726 add_insn_after (rtx insn, rtx after, basic_block bb)
3728 rtx next = NEXT_INSN (after);
3730 gcc_assert (!optimize || !INSN_DELETED_P (after));
3732 NEXT_INSN (insn) = next;
3733 PREV_INSN (insn) = after;
3735 if (next)
3737 PREV_INSN (next) = insn;
3738 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3739 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3741 else if (get_last_insn () == after)
3742 set_last_insn (insn);
3743 else
3745 struct sequence_stack *stack = seq_stack;
3746 /* Scan all pending sequences too. */
3747 for (; stack; stack = stack->next)
3748 if (after == stack->last)
3750 stack->last = insn;
3751 break;
3754 gcc_assert (stack);
3757 if (!BARRIER_P (after)
3758 && !BARRIER_P (insn)
3759 && (bb = BLOCK_FOR_INSN (after)))
3761 set_block_for_insn (insn, bb);
3762 if (INSN_P (insn))
3763 df_insn_rescan (insn);
3764 /* Should not happen as first in the BB is always
3765 either NOTE or LABEL. */
3766 if (BB_END (bb) == after
3767 /* Avoid clobbering of structure when creating new BB. */
3768 && !BARRIER_P (insn)
3769 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3770 BB_END (bb) = insn;
3773 NEXT_INSN (after) = insn;
3774 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3776 rtx sequence = PATTERN (after);
3777 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3781 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3782 the previous should be the only functions called to insert an insn
3783 once delay slots have been filled since only they know how to
3784 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3785 bb from before. */
3787 void
3788 add_insn_before (rtx insn, rtx before, basic_block bb)
3790 rtx prev = PREV_INSN (before);
3792 gcc_assert (!optimize || !INSN_DELETED_P (before));
3794 PREV_INSN (insn) = prev;
3795 NEXT_INSN (insn) = before;
3797 if (prev)
3799 NEXT_INSN (prev) = insn;
3800 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3802 rtx sequence = PATTERN (prev);
3803 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3806 else if (get_insns () == before)
3807 set_first_insn (insn);
3808 else
3810 struct sequence_stack *stack = seq_stack;
3811 /* Scan all pending sequences too. */
3812 for (; stack; stack = stack->next)
3813 if (before == stack->first)
3815 stack->first = insn;
3816 break;
3819 gcc_assert (stack);
3822 if (!bb
3823 && !BARRIER_P (before)
3824 && !BARRIER_P (insn))
3825 bb = BLOCK_FOR_INSN (before);
3827 if (bb)
3829 set_block_for_insn (insn, bb);
3830 if (INSN_P (insn))
3831 df_insn_rescan (insn);
3832 /* Should not happen as first in the BB is always either NOTE or
3833 LABEL. */
3834 gcc_assert (BB_HEAD (bb) != insn
3835 /* Avoid clobbering of structure when creating new BB. */
3836 || BARRIER_P (insn)
3837 || NOTE_INSN_BASIC_BLOCK_P (insn));
3840 PREV_INSN (before) = insn;
3841 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3842 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3846 /* Replace INSN with a deleted instruction note. */
3848 void
3849 set_insn_deleted (rtx insn)
3851 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3852 PUT_CODE (insn, NOTE);
3853 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3857 /* Remove an insn from its doubly-linked list. This function knows how
3858 to handle sequences. */
3859 void
3860 remove_insn (rtx insn)
3862 rtx next = NEXT_INSN (insn);
3863 rtx prev = PREV_INSN (insn);
3864 basic_block bb;
3866 /* Later in the code, the block will be marked dirty. */
3867 df_insn_delete (NULL, INSN_UID (insn));
3869 if (prev)
3871 NEXT_INSN (prev) = next;
3872 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3874 rtx sequence = PATTERN (prev);
3875 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3878 else if (get_insns () == insn)
3880 if (next)
3881 PREV_INSN (next) = NULL;
3882 set_first_insn (next);
3884 else
3886 struct sequence_stack *stack = seq_stack;
3887 /* Scan all pending sequences too. */
3888 for (; stack; stack = stack->next)
3889 if (insn == stack->first)
3891 stack->first = next;
3892 break;
3895 gcc_assert (stack);
3898 if (next)
3900 PREV_INSN (next) = prev;
3901 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3902 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3904 else if (get_last_insn () == insn)
3905 set_last_insn (prev);
3906 else
3908 struct sequence_stack *stack = seq_stack;
3909 /* Scan all pending sequences too. */
3910 for (; stack; stack = stack->next)
3911 if (insn == stack->last)
3913 stack->last = prev;
3914 break;
3917 gcc_assert (stack);
3919 if (!BARRIER_P (insn)
3920 && (bb = BLOCK_FOR_INSN (insn)))
3922 if (INSN_P (insn))
3923 df_set_bb_dirty (bb);
3924 if (BB_HEAD (bb) == insn)
3926 /* Never ever delete the basic block note without deleting the whole
3927 basic block. */
3928 gcc_assert (!NOTE_P (insn));
3929 BB_HEAD (bb) = next;
3931 if (BB_END (bb) == insn)
3932 BB_END (bb) = prev;
3936 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3938 void
3939 add_function_usage_to (rtx call_insn, rtx call_fusage)
3941 gcc_assert (call_insn && CALL_P (call_insn));
3943 /* Put the register usage information on the CALL. If there is already
3944 some usage information, put ours at the end. */
3945 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3947 rtx link;
3949 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3950 link = XEXP (link, 1))
3953 XEXP (link, 1) = call_fusage;
3955 else
3956 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
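/* Illustrative usage (a sketch; CALL_INSN, REG and FUSAGE are
   hypothetical): the call expanders collect USE expressions for
   argument registers into a chain and then attach it:

     fusage = gen_rtx_EXPR_LIST (VOIDmode,
                                 gen_rtx_USE (VOIDmode, reg), fusage);
     ...
     add_function_usage_to (call_insn, fusage);
*/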
3959 /* Delete all insns made since FROM.
3960 FROM becomes the new last instruction. */
3962 void
3963 delete_insns_since (rtx from)
3965 if (from == 0)
3966 set_first_insn (0);
3967 else
3968 NEXT_INSN (from) = 0;
3969 set_last_insn (from);
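/* Illustrative usage (a sketch; the names are hypothetical): expanders
   often record a checkpoint, attempt to emit something, and back out on
   failure:

     rtx last = get_last_insn ();
     ... try to emit the insns ...
     if (failed)
       delete_insns_since (last);
*/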
3972 /* This function is deprecated; please use sequences instead.
3974 Move a consecutive bunch of insns to a different place in the chain.
3975 The insns to be moved are those between FROM and TO.
3976 They are moved to a new position after the insn AFTER.
3977 AFTER must not be FROM or TO or any insn in between.
3979 This function does not know about SEQUENCEs and hence should not be
3980 called after delay-slot filling has been done. */
3982 void
3983 reorder_insns_nobb (rtx from, rtx to, rtx after)
3985 /* Splice this bunch out of where it is now. */
3986 if (PREV_INSN (from))
3987 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3988 if (NEXT_INSN (to))
3989 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3990 if (get_last_insn () == to)
3991 set_last_insn (PREV_INSN (from));
3992 if (get_insns () == from)
3993 set_first_insn (NEXT_INSN (to));
3995 /* Make the new neighbors point to it and it to them. */
3996 if (NEXT_INSN (after))
3997 PREV_INSN (NEXT_INSN (after)) = to;
3999 NEXT_INSN (to) = NEXT_INSN (after);
4000 PREV_INSN (from) = after;
4001 NEXT_INSN (after) = from;
4002 if (after == get_last_insn())
4003 set_last_insn (to);
4006 /* Same as function above, but take care to update BB boundaries. */
4007 void
4008 reorder_insns (rtx from, rtx to, rtx after)
4010 rtx prev = PREV_INSN (from);
4011 basic_block bb, bb2;
4013 reorder_insns_nobb (from, to, after);
4015 if (!BARRIER_P (after)
4016 && (bb = BLOCK_FOR_INSN (after)))
4018 rtx x;
4019 df_set_bb_dirty (bb);
4021 if (!BARRIER_P (from)
4022 && (bb2 = BLOCK_FOR_INSN (from)))
4024 if (BB_END (bb2) == to)
4025 BB_END (bb2) = prev;
4026 df_set_bb_dirty (bb2);
4029 if (BB_END (bb) == after)
4030 BB_END (bb) = to;
4032 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
4033 if (!BARRIER_P (x))
4034 df_insn_change_bb (x, bb);
4039 /* Emit insn(s) of given code and pattern
4040 at a specified place within the doubly-linked list.
4042 All of the emit_foo global entry points accept an object
4043 X which is either an insn list or a PATTERN of a single
4044 instruction.
4046 There are thus a few canonical ways to generate code and
4047 emit it at a specific place in the instruction stream. For
4048 example, consider the instruction named SPOT and the fact that
4049 we would like to emit some instructions before SPOT. We might
4050 do it like this:
4052 start_sequence ();
4053 ... emit the new instructions ...
4054 insns_head = get_insns ();
4055 end_sequence ();
4057 emit_insn_before (insns_head, SPOT);
4059 It used to be common to generate SEQUENCE rtl instead, but that
4060 is a relic of the past which no longer occurs. The reason is that
4061 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4062 generated would almost certainly die right after it was created. */
4064 /* Make X be output before the instruction BEFORE. */
4067 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4069 rtx last = before;
4070 rtx insn;
4072 gcc_assert (before);
4074 if (x == NULL_RTX)
4075 return last;
4077 switch (GET_CODE (x))
4079 case DEBUG_INSN:
4080 case INSN:
4081 case JUMP_INSN:
4082 case CALL_INSN:
4083 case CODE_LABEL:
4084 case BARRIER:
4085 case NOTE:
4086 insn = x;
4087 while (insn)
4089 rtx next = NEXT_INSN (insn);
4090 add_insn_before (insn, before, bb);
4091 last = insn;
4092 insn = next;
4094 break;
4096 #ifdef ENABLE_RTL_CHECKING
4097 case SEQUENCE:
4098 gcc_unreachable ();
4099 break;
4100 #endif
4102 default:
4103 last = make_insn_raw (x);
4104 add_insn_before (last, before, bb);
4105 break;
4108 return last;
4111 /* Make an instruction with body X and code JUMP_INSN
4112 and output it before the instruction BEFORE. */
4115 emit_jump_insn_before_noloc (rtx x, rtx before)
4117 rtx insn, last = NULL_RTX;
4119 gcc_assert (before);
4121 switch (GET_CODE (x))
4123 case DEBUG_INSN:
4124 case INSN:
4125 case JUMP_INSN:
4126 case CALL_INSN:
4127 case CODE_LABEL:
4128 case BARRIER:
4129 case NOTE:
4130 insn = x;
4131 while (insn)
4133 rtx next = NEXT_INSN (insn);
4134 add_insn_before (insn, before, NULL);
4135 last = insn;
4136 insn = next;
4138 break;
4140 #ifdef ENABLE_RTL_CHECKING
4141 case SEQUENCE:
4142 gcc_unreachable ();
4143 break;
4144 #endif
4146 default:
4147 last = make_jump_insn_raw (x);
4148 add_insn_before (last, before, NULL);
4149 break;
4152 return last;
4155 /* Make an instruction with body X and code CALL_INSN
4156 and output it before the instruction BEFORE. */
4159 emit_call_insn_before_noloc (rtx x, rtx before)
4161 rtx last = NULL_RTX, insn;
4163 gcc_assert (before);
4165 switch (GET_CODE (x))
4167 case DEBUG_INSN:
4168 case INSN:
4169 case JUMP_INSN:
4170 case CALL_INSN:
4171 case CODE_LABEL:
4172 case BARRIER:
4173 case NOTE:
4174 insn = x;
4175 while (insn)
4177 rtx next = NEXT_INSN (insn);
4178 add_insn_before (insn, before, NULL);
4179 last = insn;
4180 insn = next;
4182 break;
4184 #ifdef ENABLE_RTL_CHECKING
4185 case SEQUENCE:
4186 gcc_unreachable ();
4187 break;
4188 #endif
4190 default:
4191 last = make_call_insn_raw (x);
4192 add_insn_before (last, before, NULL);
4193 break;
4196 return last;
4199 /* Make an instruction with body X and code DEBUG_INSN
4200 and output it before the instruction BEFORE. */
4203 emit_debug_insn_before_noloc (rtx x, rtx before)
4205 rtx last = NULL_RTX, insn;
4207 gcc_assert (before);
4209 switch (GET_CODE (x))
4211 case DEBUG_INSN:
4212 case INSN:
4213 case JUMP_INSN:
4214 case CALL_INSN:
4215 case CODE_LABEL:
4216 case BARRIER:
4217 case NOTE:
4218 insn = x;
4219 while (insn)
4221 rtx next = NEXT_INSN (insn);
4222 add_insn_before (insn, before, NULL);
4223 last = insn;
4224 insn = next;
4226 break;
4228 #ifdef ENABLE_RTL_CHECKING
4229 case SEQUENCE:
4230 gcc_unreachable ();
4231 break;
4232 #endif
4234 default:
4235 last = make_debug_insn_raw (x);
4236 add_insn_before (last, before, NULL);
4237 break;
4240 return last;
4243 /* Make an insn of code BARRIER
4244 and output it before the insn BEFORE. */
4247 emit_barrier_before (rtx before)
4249 rtx insn = rtx_alloc (BARRIER);
4251 INSN_UID (insn) = cur_insn_uid++;
4253 add_insn_before (insn, before, NULL);
4254 return insn;
4257 /* Emit the label LABEL before the insn BEFORE. */
4260 emit_label_before (rtx label, rtx before)
4262 /* This can be called twice for the same label as a result of the
4263 confusion that follows a syntax error! So make it harmless. */
4264 if (INSN_UID (label) == 0)
4266 INSN_UID (label) = cur_insn_uid++;
4267 add_insn_before (label, before, NULL);
4270 return label;
4273 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4276 emit_note_before (enum insn_note subtype, rtx before)
4278 rtx note = rtx_alloc (NOTE);
4279 INSN_UID (note) = cur_insn_uid++;
4280 NOTE_KIND (note) = subtype;
4281 BLOCK_FOR_INSN (note) = NULL;
4282 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4284 add_insn_before (note, before, NULL);
4285 return note;
4288 /* Helper for emit_insn_after, handles lists of instructions
4289 efficiently. */
4291 static rtx
4292 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4294 rtx last;
4295 rtx after_after;
4296 if (!bb && !BARRIER_P (after))
4297 bb = BLOCK_FOR_INSN (after);
4299 if (bb)
4301 df_set_bb_dirty (bb);
4302 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4303 if (!BARRIER_P (last))
4305 set_block_for_insn (last, bb);
4306 df_insn_rescan (last);
4308 if (!BARRIER_P (last))
4310 set_block_for_insn (last, bb);
4311 df_insn_rescan (last);
4313 if (BB_END (bb) == after)
4314 BB_END (bb) = last;
4316 else
4317 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4318 continue;
4320 after_after = NEXT_INSN (after);
4322 NEXT_INSN (after) = first;
4323 PREV_INSN (first) = after;
4324 NEXT_INSN (last) = after_after;
4325 if (after_after)
4326 PREV_INSN (after_after) = last;
4328 if (after == get_last_insn())
4329 set_last_insn (last);
4331 return last;
4334 /* Make X be output after the insn AFTER and set the BB of insn. If
4335 BB is NULL, an attempt is made to infer the BB from AFTER. */
4338 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4340 rtx last = after;
4342 gcc_assert (after);
4344 if (x == NULL_RTX)
4345 return last;
4347 switch (GET_CODE (x))
4349 case DEBUG_INSN:
4350 case INSN:
4351 case JUMP_INSN:
4352 case CALL_INSN:
4353 case CODE_LABEL:
4354 case BARRIER:
4355 case NOTE:
4356 last = emit_insn_after_1 (x, after, bb);
4357 break;
4359 #ifdef ENABLE_RTL_CHECKING
4360 case SEQUENCE:
4361 gcc_unreachable ();
4362 break;
4363 #endif
4365 default:
4366 last = make_insn_raw (x);
4367 add_insn_after (last, after, bb);
4368 break;
4371 return last;
4375 /* Make an insn of code JUMP_INSN with body X
4376 and output it after the insn AFTER. */
4379 emit_jump_insn_after_noloc (rtx x, rtx after)
4381 rtx last;
4383 gcc_assert (after);
4385 switch (GET_CODE (x))
4387 case DEBUG_INSN:
4388 case INSN:
4389 case JUMP_INSN:
4390 case CALL_INSN:
4391 case CODE_LABEL:
4392 case BARRIER:
4393 case NOTE:
4394 last = emit_insn_after_1 (x, after, NULL);
4395 break;
4397 #ifdef ENABLE_RTL_CHECKING
4398 case SEQUENCE:
4399 gcc_unreachable ();
4400 break;
4401 #endif
4403 default:
4404 last = make_jump_insn_raw (x);
4405 add_insn_after (last, after, NULL);
4406 break;
4409 return last;
4412 /* Make an instruction with body X and code CALL_INSN
4413 and output it after the instruction AFTER. */
4416 emit_call_insn_after_noloc (rtx x, rtx after)
4418 rtx last;
4420 gcc_assert (after);
4422 switch (GET_CODE (x))
4424 case DEBUG_INSN:
4425 case INSN:
4426 case JUMP_INSN:
4427 case CALL_INSN:
4428 case CODE_LABEL:
4429 case BARRIER:
4430 case NOTE:
4431 last = emit_insn_after_1 (x, after, NULL);
4432 break;
4434 #ifdef ENABLE_RTL_CHECKING
4435 case SEQUENCE:
4436 gcc_unreachable ();
4437 break;
4438 #endif
4440 default:
4441 last = make_call_insn_raw (x);
4442 add_insn_after (last, after, NULL);
4443 break;
4446 return last;
4449 /* Make an instruction with body X and code DEBUG_INSN
4450 and output it after the instruction AFTER. */
4453 emit_debug_insn_after_noloc (rtx x, rtx after)
4455 rtx last;
4457 gcc_assert (after);
4459 switch (GET_CODE (x))
4461 case DEBUG_INSN:
4462 case INSN:
4463 case JUMP_INSN:
4464 case CALL_INSN:
4465 case CODE_LABEL:
4466 case BARRIER:
4467 case NOTE:
4468 last = emit_insn_after_1 (x, after, NULL);
4469 break;
4471 #ifdef ENABLE_RTL_CHECKING
4472 case SEQUENCE:
4473 gcc_unreachable ();
4474 break;
4475 #endif
4477 default:
4478 last = make_debug_insn_raw (x);
4479 add_insn_after (last, after, NULL);
4480 break;
4483 return last;
4486 /* Make an insn of code BARRIER
4487 and output it after the insn AFTER. */
4490 emit_barrier_after (rtx after)
4492 rtx insn = rtx_alloc (BARRIER);
4494 INSN_UID (insn) = cur_insn_uid++;
4496 add_insn_after (insn, after, NULL);
4497 return insn;
4500 /* Emit the label LABEL after the insn AFTER. */
4503 emit_label_after (rtx label, rtx after)
4505 /* This can be called twice for the same label
4506 as a result of the confusion that follows a syntax error!
4507 So make it harmless. */
4508 if (INSN_UID (label) == 0)
4510 INSN_UID (label) = cur_insn_uid++;
4511 add_insn_after (label, after, NULL);
4514 return label;
4517 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4520 emit_note_after (enum insn_note subtype, rtx after)
4522 rtx note = rtx_alloc (NOTE);
4523 INSN_UID (note) = cur_insn_uid++;
4524 NOTE_KIND (note) = subtype;
4525 BLOCK_FOR_INSN (note) = NULL;
4526 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4527 add_insn_after (note, after, NULL);
4528 return note;
4531 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4533 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4535 rtx last = emit_insn_after_noloc (pattern, after, NULL);
4537 if (pattern == NULL_RTX || !loc)
4538 return last;
4540 after = NEXT_INSN (after);
4541 while (1)
4543 if (active_insn_p (after) && !INSN_LOCATOR (after))
4544 INSN_LOCATOR (after) = loc;
4545 if (after == last)
4546 break;
4547 after = NEXT_INSN (after);
4549 return last;
4552 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4554 emit_insn_after (rtx pattern, rtx after)
4556 rtx prev = after;
4558 while (DEBUG_INSN_P (prev))
4559 prev = PREV_INSN (prev);
4561 if (INSN_P (prev))
4562 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4563 else
4564 return emit_insn_after_noloc (pattern, after, NULL);
4567 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4569 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4571 rtx last = emit_jump_insn_after_noloc (pattern, after);
4573 if (pattern == NULL_RTX || !loc)
4574 return last;
4576 after = NEXT_INSN (after);
4577 while (1)
4579 if (active_insn_p (after) && !INSN_LOCATOR (after))
4580 INSN_LOCATOR (after) = loc;
4581 if (after == last)
4582 break;
4583 after = NEXT_INSN (after);
4585 return last;
4588 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4590 emit_jump_insn_after (rtx pattern, rtx after)
4592 rtx prev = after;
4594 while (DEBUG_INSN_P (prev))
4595 prev = PREV_INSN (prev);
4597 if (INSN_P (prev))
4598 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4599 else
4600 return emit_jump_insn_after_noloc (pattern, after);
4603 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4605 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4607 rtx last = emit_call_insn_after_noloc (pattern, after);
4609 if (pattern == NULL_RTX || !loc)
4610 return last;
4612 after = NEXT_INSN (after);
4613 while (1)
4615 if (active_insn_p (after) && !INSN_LOCATOR (after))
4616 INSN_LOCATOR (after) = loc;
4617 if (after == last)
4618 break;
4619 after = NEXT_INSN (after);
4621 return last;
4624 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4626 emit_call_insn_after (rtx pattern, rtx after)
4628 rtx prev = after;
4630 while (DEBUG_INSN_P (prev))
4631 prev = PREV_INSN (prev);
4633 if (INSN_P (prev))
4634 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4635 else
4636 return emit_call_insn_after_noloc (pattern, after);
4639 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4641 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4643 rtx last = emit_debug_insn_after_noloc (pattern, after);
4645 if (pattern == NULL_RTX || !loc)
4646 return last;
4648 after = NEXT_INSN (after);
4649 while (1)
4651 if (active_insn_p (after) && !INSN_LOCATOR (after))
4652 INSN_LOCATOR (after) = loc;
4653 if (after == last)
4654 break;
4655 after = NEXT_INSN (after);
4657 return last;
4660 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4662 emit_debug_insn_after (rtx pattern, rtx after)
4664 if (INSN_P (after))
4665 return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4666 else
4667 return emit_debug_insn_after_noloc (pattern, after);
4670 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4672 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4674 rtx first = PREV_INSN (before);
4675 rtx last = emit_insn_before_noloc (pattern, before, NULL);
4677 if (pattern == NULL_RTX || !loc)
4678 return last;
4680 if (!first)
4681 first = get_insns ();
4682 else
4683 first = NEXT_INSN (first);
4684 while (1)
4686 if (active_insn_p (first) && !INSN_LOCATOR (first))
4687 INSN_LOCATOR (first) = loc;
4688 if (first == last)
4689 break;
4690 first = NEXT_INSN (first);
4692 return last;
4695 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4697 emit_insn_before (rtx pattern, rtx before)
4699 rtx next = before;
4701 while (DEBUG_INSN_P (next))
4702 next = PREV_INSN (next);
4704 if (INSN_P (next))
4705 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4706 else
4707 return emit_insn_before_noloc (pattern, before, NULL);
4710 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4712 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4714 rtx first = PREV_INSN (before);
4715 rtx last = emit_jump_insn_before_noloc (pattern, before);
4717 if (pattern == NULL_RTX || !loc)
4718 return last;
4720 first = NEXT_INSN (first);
4721 while (1)
4723 if (active_insn_p (first) && !INSN_LOCATOR (first))
4724 INSN_LOCATOR (first) = loc;
4725 if (first == last)
4726 break;
4727 first = NEXT_INSN (first);
4729 return last;
4732 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4734 emit_jump_insn_before (rtx pattern, rtx before)
4736 rtx next = before;
4738 while (DEBUG_INSN_P (next))
4739 next = PREV_INSN (next);
4741 if (INSN_P (next))
4742 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4743 else
4744 return emit_jump_insn_before_noloc (pattern, before);
4747 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4749 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4751 rtx first = PREV_INSN (before);
4752 rtx last = emit_call_insn_before_noloc (pattern, before);
4754 if (pattern == NULL_RTX || !loc)
4755 return last;
4757 first = NEXT_INSN (first);
4758 while (1)
4760 if (active_insn_p (first) && !INSN_LOCATOR (first))
4761 INSN_LOCATOR (first) = loc;
4762 if (first == last)
4763 break;
4764 first = NEXT_INSN (first);
4766 return last;
4769 /* Like emit_call_insn_before_noloc,
4770 but set INSN_LOCATOR according to BEFORE. */
4772 emit_call_insn_before (rtx pattern, rtx before)
4774 rtx next = before;
4776 while (DEBUG_INSN_P (next))
4777 next = PREV_INSN (next);
4779 if (INSN_P (next))
4780 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4781 else
4782 return emit_call_insn_before_noloc (pattern, before);
4785 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4787 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4789 rtx first = PREV_INSN (before);
4790 rtx last = emit_debug_insn_before_noloc (pattern, before);
4792 if (pattern == NULL_RTX || !loc)
4793 return last;
4795 first = NEXT_INSN (first);
4796 while (1)
4798 if (active_insn_p (first) && !INSN_LOCATOR (first))
4799 INSN_LOCATOR (first) = loc;
4800 if (first == last)
4801 break;
4802 first = NEXT_INSN (first);
4804 return last;
4807 /* Like emit_debug_insn_before_noloc,
4808 but set INSN_LOCATOR according to BEFORE. */
4810 emit_debug_insn_before (rtx pattern, rtx before)
4812 if (INSN_P (before))
4813 return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4814 else
4815 return emit_debug_insn_before_noloc (pattern, before);
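
/* A short sketch of the locator-aware wrappers above: emit_insn_after
   (and its before/jump/call/debug siblings) copy INSN_LOCATOR from the
   neighbouring insn, skipping debug insns, so newly emitted code keeps a
   sensible source location.  The helper name example_copy_value_after is
   hypothetical.  */

static rtx
example_copy_value_after (rtx after, rtx src)
{
  /* Emit "(set (reg) src)" right after AFTER; the new insn inherits
     AFTER's locator instead of being left without one.  */
  rtx reg = gen_reg_rtx (GET_MODE (src));
  return emit_insn_after (gen_rtx_SET (VOIDmode, reg, src), after);
}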
4818 /* Take X and emit it at the end of the doubly-linked
4819 INSN list.
4821 Returns the last insn emitted. */
4824 emit_insn (rtx x)
4826 rtx last = get_last_insn();
4827 rtx insn;
4829 if (x == NULL_RTX)
4830 return last;
4832 switch (GET_CODE (x))
4834 case DEBUG_INSN:
4835 case INSN:
4836 case JUMP_INSN:
4837 case CALL_INSN:
4838 case CODE_LABEL:
4839 case BARRIER:
4840 case NOTE:
4841 insn = x;
4842 while (insn)
4844 rtx next = NEXT_INSN (insn);
4845 add_insn (insn);
4846 last = insn;
4847 insn = next;
4849 break;
4851 #ifdef ENABLE_RTL_CHECKING
4852 case SEQUENCE:
4853 gcc_unreachable ();
4854 break;
4855 #endif
4857 default:
4858 last = make_insn_raw (x);
4859 add_insn (last);
4860 break;
4863 return last;
4866 /* Make an insn of code DEBUG_INSN with pattern X
4867 and add it to the end of the doubly-linked list. */
4870 emit_debug_insn (rtx x)
4872 rtx last = get_last_insn();
4873 rtx insn;
4875 if (x == NULL_RTX)
4876 return last;
4878 switch (GET_CODE (x))
4880 case DEBUG_INSN:
4881 case INSN:
4882 case JUMP_INSN:
4883 case CALL_INSN:
4884 case CODE_LABEL:
4885 case BARRIER:
4886 case NOTE:
4887 insn = x;
4888 while (insn)
4890 rtx next = NEXT_INSN (insn);
4891 add_insn (insn);
4892 last = insn;
4893 insn = next;
4895 break;
4897 #ifdef ENABLE_RTL_CHECKING
4898 case SEQUENCE:
4899 gcc_unreachable ();
4900 break;
4901 #endif
4903 default:
4904 last = make_debug_insn_raw (x);
4905 add_insn (last);
4906 break;
4909 return last;
4912 /* Make an insn of code JUMP_INSN with pattern X
4913 and add it to the end of the doubly-linked list. */
4916 emit_jump_insn (rtx x)
4918 rtx last = NULL_RTX, insn;
4920 switch (GET_CODE (x))
4922 case DEBUG_INSN:
4923 case INSN:
4924 case JUMP_INSN:
4925 case CALL_INSN:
4926 case CODE_LABEL:
4927 case BARRIER:
4928 case NOTE:
4929 insn = x;
4930 while (insn)
4932 rtx next = NEXT_INSN (insn);
4933 add_insn (insn);
4934 last = insn;
4935 insn = next;
4937 break;
4939 #ifdef ENABLE_RTL_CHECKING
4940 case SEQUENCE:
4941 gcc_unreachable ();
4942 break;
4943 #endif
4945 default:
4946 last = make_jump_insn_raw (x);
4947 add_insn (last);
4948 break;
4951 return last;
4954 /* Make an insn of code CALL_INSN with pattern X
4955 and add it to the end of the doubly-linked list. */
4958 emit_call_insn (rtx x)
4960 rtx insn;
4962 switch (GET_CODE (x))
4964 case DEBUG_INSN:
4965 case INSN:
4966 case JUMP_INSN:
4967 case CALL_INSN:
4968 case CODE_LABEL:
4969 case BARRIER:
4970 case NOTE:
4971 insn = emit_insn (x);
4972 break;
4974 #ifdef ENABLE_RTL_CHECKING
4975 case SEQUENCE:
4976 gcc_unreachable ();
4977 break;
4978 #endif
4980 default:
4981 insn = make_call_insn_raw (x);
4982 add_insn (insn);
4983 break;
4986 return insn;
4989 /* Add the label LABEL to the end of the doubly-linked list. */
4992 emit_label (rtx label)
4994 /* This can be called twice for the same label
4995 as a result of the confusion that follows a syntax error!
4996 So make it harmless. */
4997 if (INSN_UID (label) == 0)
4999 INSN_UID (label) = cur_insn_uid++;
5000 add_insn (label);
5002 return label;
5005 /* Make an insn of code BARRIER
5006 and add it to the end of the doubly-linked list. */
5009 emit_barrier (void)
5011 rtx barrier = rtx_alloc (BARRIER);
5012 INSN_UID (barrier) = cur_insn_uid++;
5013 add_insn (barrier);
5014 return barrier;
5017 /* Emit a copy of note ORIG. */
5020 emit_note_copy (rtx orig)
5022 rtx note;
5024 note = rtx_alloc (NOTE);
5026 INSN_UID (note) = cur_insn_uid++;
5027 NOTE_DATA (note) = NOTE_DATA (orig);
5028 NOTE_KIND (note) = NOTE_KIND (orig);
5029 BLOCK_FOR_INSN (note) = NULL;
5030 add_insn (note);
5032 return note;
5035 /* Make an insn of code NOTE of kind KIND
5036 and add it to the end of the doubly-linked list. */
5039 emit_note (enum insn_note kind)
5041 rtx note;
5043 note = rtx_alloc (NOTE);
5044 INSN_UID (note) = cur_insn_uid++;
5045 NOTE_KIND (note) = kind;
5046 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
5047 BLOCK_FOR_INSN (note) = NULL;
5048 add_insn (note);
5049 return note;
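
/* A sketch of emitting at the end of the doubly-linked list with the
   functions above.  An unconditional jump is followed by a barrier,
   since control never falls through it.  The helper name
   example_emit_jump_to_new_label is hypothetical.  */

static void
example_emit_jump_to_new_label (void)
{
  rtx label = gen_label_rtx ();
  rtx jump = emit_jump_insn (gen_rtx_SET (VOIDmode, pc_rtx,
					  gen_rtx_LABEL_REF (VOIDmode, label)));
  JUMP_LABEL (jump) = label;
  LABEL_NUSES (label)++;
  emit_barrier ();
  emit_label (label);
}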
5052 /* Emit a clobber of lvalue X. */
5055 emit_clobber (rtx x)
5057 /* CONCATs should not appear in the insn stream. */
5058 if (GET_CODE (x) == CONCAT)
5060 emit_clobber (XEXP (x, 0));
5061 return emit_clobber (XEXP (x, 1));
5063 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5066 /* Return a sequence of insns to clobber lvalue X. */
5069 gen_clobber (rtx x)
5071 rtx seq;
5073 start_sequence ();
5074 emit_clobber (x);
5075 seq = get_insns ();
5076 end_sequence ();
5077 return seq;
5080 /* Emit a use of rvalue X. */
5083 emit_use (rtx x)
5085 /* CONCATs should not appear in the insn stream. */
5086 if (GET_CODE (x) == CONCAT)
5088 emit_use (XEXP (x, 0));
5089 return emit_use (XEXP (x, 1));
5091 return emit_insn (gen_rtx_USE (VOIDmode, x));
5094 /* Return a sequence of insns to use rvalue X. */
5097 gen_use (rtx x)
5099 rtx seq;
5101 start_sequence ();
5102 emit_use (x);
5103 seq = get_insns ();
5104 end_sequence ();
5105 return seq;
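
/* A small sketch of the clobber/use helpers above: mark REG as written
   by something opaque, and later as still needed, so data-flow passes
   neither assume a known value nor delete earlier sets.  The helper name
   example_clobber_then_use is hypothetical.  */

static void
example_clobber_then_use (rtx reg)
{
  emit_clobber (reg);	/* (clobber REG): its value becomes undefined.  */
  emit_use (reg);	/* (use REG): keep REG live up to this point.  */
}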
5108 /* Cause next statement to emit a line note even if the line number
5109 has not changed. */
5111 void
5112 force_next_line_note (void)
5114 last_location = -1;
5117 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5118 note of this type already exists, remove it first. */
5121 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5123 rtx note = find_reg_note (insn, kind, NULL_RTX);
5125 switch (kind)
5127 case REG_EQUAL:
5128 case REG_EQUIV:
5129 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5130 has multiple sets (some callers assume single_set
5131 means the insn only has one set, when in fact it
5132 means the insn only has one *useful* set). */
5133 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5135 gcc_assert (!note);
5136 return NULL_RTX;
5139 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5140 It serves no useful purpose and breaks eliminate_regs. */
5141 if (GET_CODE (datum) == ASM_OPERANDS)
5142 return NULL_RTX;
5144 if (note)
5146 XEXP (note, 0) = datum;
5147 df_notes_rescan (insn);
5148 return note;
5150 break;
5152 default:
5153 if (note)
5155 XEXP (note, 0) = datum;
5156 return note;
5158 break;
5161 add_reg_note (insn, kind, datum);
5163 switch (kind)
5165 case REG_EQUAL:
5166 case REG_EQUIV:
5167 df_notes_rescan (insn);
5168 break;
5169 default:
5170 break;
5173 return REG_NOTES (insn);
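
/* A sketch of set_unique_reg_note: record that INSN's destination is
   known to equal a compile-time constant.  The function itself refuses
   to attach REG_EQUAL/REG_EQUIV to multi-set insns or to ASM_OPERANDS
   sources.  The helper name example_note_known_value is hypothetical.  */

static void
example_note_known_value (rtx insn, HOST_WIDE_INT value)
{
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (value));
}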
5176 /* Return an indication of which type of insn should have X as a body.
5177 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5179 static enum rtx_code
5180 classify_insn (rtx x)
5182 if (LABEL_P (x))
5183 return CODE_LABEL;
5184 if (GET_CODE (x) == CALL)
5185 return CALL_INSN;
5186 if (GET_CODE (x) == RETURN)
5187 return JUMP_INSN;
5188 if (GET_CODE (x) == SET)
5190 if (SET_DEST (x) == pc_rtx)
5191 return JUMP_INSN;
5192 else if (GET_CODE (SET_SRC (x)) == CALL)
5193 return CALL_INSN;
5194 else
5195 return INSN;
5197 if (GET_CODE (x) == PARALLEL)
5199 int j;
5200 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5201 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5202 return CALL_INSN;
5203 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5204 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5205 return JUMP_INSN;
5206 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5207 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5208 return CALL_INSN;
5210 return INSN;
5213 /* Emit the rtl pattern X as an appropriate kind of insn.
5214 If X is a label, it is simply added into the insn chain. */
5217 emit (rtx x)
5219 enum rtx_code code = classify_insn (x);
5221 switch (code)
5223 case CODE_LABEL:
5224 return emit_label (x);
5225 case INSN:
5226 return emit_insn (x);
5227 case JUMP_INSN:
5229 rtx insn = emit_jump_insn (x);
5230 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5231 return emit_barrier ();
5232 return insn;
5234 case CALL_INSN:
5235 return emit_call_insn (x);
5236 case DEBUG_INSN:
5237 return emit_debug_insn (x);
5238 default:
5239 gcc_unreachable ();
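
/* A sketch of the dispatcher above: emit () classifies the pattern
   itself, so generic code need not know in advance whether it is
   emitting an ordinary insn, a jump, a call or a label.  The helper name
   example_emit_mixed is hypothetical.  */

static void
example_emit_mixed (rtx dest, rtx src)
{
  /* A plain SET is classified as an INSN ...  */
  emit (gen_rtx_SET (VOIDmode, dest, src));
  /* ... while a CODE_LABEL is simply added to the chain.  */
  emit (gen_label_rtx ());
}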
5243 /* Space for free sequence stack entries. */
5244 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5246 /* Begin emitting insns to a sequence. If this sequence will contain
5247 something that might cause the compiler to pop arguments to function
5248 calls (because those pops have previously been deferred; see
5249 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5250 before calling this function. That will ensure that the deferred
5251 pops are not accidentally emitted in the middle of this sequence. */
5253 void
5254 start_sequence (void)
5256 struct sequence_stack *tem;
5258 if (free_sequence_stack != NULL)
5260 tem = free_sequence_stack;
5261 free_sequence_stack = tem->next;
5263 else
5264 tem = ggc_alloc_sequence_stack ();
5266 tem->next = seq_stack;
5267 tem->first = get_insns ();
5268 tem->last = get_last_insn ();
5270 seq_stack = tem;
5272 set_first_insn (0);
5273 set_last_insn (0);
5276 /* Set up the insn chain starting with FIRST as the current sequence,
5277 saving the previously current one. See the documentation for
5278 start_sequence for more information about how to use this function. */
5280 void
5281 push_to_sequence (rtx first)
5283 rtx last;
5285 start_sequence ();
5287 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5289 set_first_insn (first);
5290 set_last_insn (last);
5293 /* Like push_to_sequence, but take the last insn as an argument to avoid
5294 looping through the list. */
5296 void
5297 push_to_sequence2 (rtx first, rtx last)
5299 start_sequence ();
5301 set_first_insn (first);
5302 set_last_insn (last);
5305 /* Set up the outer-level insn chain
5306 as the current sequence, saving the previously current one. */
5308 void
5309 push_topmost_sequence (void)
5311 struct sequence_stack *stack, *top = NULL;
5313 start_sequence ();
5315 for (stack = seq_stack; stack; stack = stack->next)
5316 top = stack;
5318 set_first_insn (top->first);
5319 set_last_insn (top->last);
5322 /* After emitting to the outer-level insn chain, update the outer-level
5323 insn chain, and restore the previous saved state. */
5325 void
5326 pop_topmost_sequence (void)
5328 struct sequence_stack *stack, *top = NULL;
5330 for (stack = seq_stack; stack; stack = stack->next)
5331 top = stack;
5333 top->first = get_insns ();
5334 top->last = get_last_insn ();
5336 end_sequence ();
5339 /* After emitting to a sequence, restore previous saved state.
5341 To get the contents of the sequence just made, you must call
5342 `get_insns' *before* calling here.
5344 If the compiler might have deferred popping arguments while
5345 generating this sequence, and this sequence will not be immediately
5346 inserted into the instruction stream, use do_pending_stack_adjust
5347 before calling get_insns. That will ensure that the deferred
5348 pops are inserted into this sequence, and not into some random
5349 location in the instruction stream. See INHIBIT_DEFER_POP for more
5350 information about deferred popping of arguments. */
5352 void
5353 end_sequence (void)
5355 struct sequence_stack *tem = seq_stack;
5357 set_first_insn (tem->first);
5358 set_last_insn (tem->last);
5359 seq_stack = tem->next;
5361 memset (tem, 0, sizeof (*tem));
5362 tem->next = free_sequence_stack;
5363 free_sequence_stack = tem;
5366 /* Return 1 if currently emitting into a sequence. */
5369 in_sequence_p (void)
5371 return seq_stack != 0;
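
/* The canonical use of the sequence machinery above, sketched: build a
   group of insns off to the side, fetch them with get_insns before
   end_sequence, then splice them into the main chain.  The helper name
   example_build_and_insert_sequence is hypothetical.  */

static void
example_build_and_insert_sequence (rtx before)
{
  rtx seq;

  start_sequence ();
  emit_move_insn (gen_reg_rtx (word_mode), const0_rtx);
  /* ... emit further insns here ...  */
  seq = get_insns ();		/* Must precede end_sequence.  */
  end_sequence ();

  /* emit_insn_before accepts the head of an insn list and links the
     whole list in before BEFORE.  */
  emit_insn_before (seq, before);
}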
5374 /* Put the various virtual registers into REGNO_REG_RTX. */
5376 static void
5377 init_virtual_regs (void)
5379 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5380 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5381 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5382 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5383 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5387 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5388 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5389 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5390 static int copy_insn_n_scratches;
5392 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5393 copied an ASM_OPERANDS.
5394 In that case, it is the original input-operand vector. */
5395 static rtvec orig_asm_operands_vector;
5397 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5398 copied an ASM_OPERANDS.
5399 In that case, it is the copied input-operand vector. */
5400 static rtvec copy_asm_operands_vector;
5402 /* Likewise for the constraints vector. */
5403 static rtvec orig_asm_constraints_vector;
5404 static rtvec copy_asm_constraints_vector;
5406 /* Recursively create a new copy of an rtx for copy_insn.
5407 This function differs from copy_rtx in that it handles SCRATCHes and
5408 ASM_OPERANDs properly.
5409 Normally, this function is not used directly; use copy_insn as front end.
5410 However, you could first copy an insn pattern with copy_insn and then use
5411 this function afterwards to properly copy any REG_NOTEs containing
5412 SCRATCHes. */
5415 copy_insn_1 (rtx orig)
5417 rtx copy;
5418 int i, j;
5419 RTX_CODE code;
5420 const char *format_ptr;
5422 if (orig == NULL)
5423 return NULL;
5425 code = GET_CODE (orig);
5427 switch (code)
5429 case REG:
5430 case CONST_INT:
5431 case CONST_DOUBLE:
5432 case CONST_FIXED:
5433 case CONST_VECTOR:
5434 case SYMBOL_REF:
5435 case CODE_LABEL:
5436 case PC:
5437 case CC0:
5438 return orig;
5439 case CLOBBER:
5440 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5441 return orig;
5442 break;
5444 case SCRATCH:
5445 for (i = 0; i < copy_insn_n_scratches; i++)
5446 if (copy_insn_scratch_in[i] == orig)
5447 return copy_insn_scratch_out[i];
5448 break;
5450 case CONST:
5451 if (shared_const_p (orig))
5452 return orig;
5453 break;
5455 /* A MEM with a constant address is not sharable. The problem is that
5456 the constant address may need to be reloaded. If the mem is shared,
5457 then reloading one copy of this mem will cause all copies to appear
5458 to have been reloaded. */
5460 default:
5461 break;
5464 /* Copy the various flags, fields, and other information. We assume
5465 that all fields need copying, and then clear the fields that should
5466 not be copied. That is the sensible default behavior, and forces
5467 us to explicitly document why we are *not* copying a flag. */
5468 copy = shallow_copy_rtx (orig);
5470 /* We do not copy the USED flag, which is used as a mark bit during
5471 walks over the RTL. */
5472 RTX_FLAG (copy, used) = 0;
5474 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5475 if (INSN_P (orig))
5477 RTX_FLAG (copy, jump) = 0;
5478 RTX_FLAG (copy, call) = 0;
5479 RTX_FLAG (copy, frame_related) = 0;
5482 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5484 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5485 switch (*format_ptr++)
5487 case 'e':
5488 if (XEXP (orig, i) != NULL)
5489 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5490 break;
5492 case 'E':
5493 case 'V':
5494 if (XVEC (orig, i) == orig_asm_constraints_vector)
5495 XVEC (copy, i) = copy_asm_constraints_vector;
5496 else if (XVEC (orig, i) == orig_asm_operands_vector)
5497 XVEC (copy, i) = copy_asm_operands_vector;
5498 else if (XVEC (orig, i) != NULL)
5500 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5501 for (j = 0; j < XVECLEN (copy, i); j++)
5502 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5504 break;
5506 case 't':
5507 case 'w':
5508 case 'i':
5509 case 's':
5510 case 'S':
5511 case 'u':
5512 case '0':
5513 /* These are left unchanged. */
5514 break;
5516 default:
5517 gcc_unreachable ();
5520 if (code == SCRATCH)
5522 i = copy_insn_n_scratches++;
5523 gcc_assert (i < MAX_RECOG_OPERANDS);
5524 copy_insn_scratch_in[i] = orig;
5525 copy_insn_scratch_out[i] = copy;
5527 else if (code == ASM_OPERANDS)
5529 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5530 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5531 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5532 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5535 return copy;
5538 /* Create a new copy of an rtx.
5539 This function differs from copy_rtx in that it handles SCRATCHes and
5540 ASM_OPERANDs properly.
5541 INSN doesn't really have to be a full INSN; it could be just the
5542 pattern. */
5544 copy_insn (rtx insn)
5546 copy_insn_n_scratches = 0;
5547 orig_asm_operands_vector = 0;
5548 orig_asm_constraints_vector = 0;
5549 copy_asm_operands_vector = 0;
5550 copy_asm_constraints_vector = 0;
5551 return copy_insn_1 (insn);
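
/* A sketch of copy_insn: duplicate an existing insn's pattern so the
   copy gets fresh SCRATCHes and correctly shared ASM_OPERANDS vectors,
   then emit the duplicate elsewhere.  The helper name
   example_duplicate_pattern_after is hypothetical.  */

static rtx
example_duplicate_pattern_after (rtx insn, rtx after)
{
  return emit_insn_after (copy_insn (PATTERN (insn)), after);
}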
5554 /* Initialize data structures and variables in this file
5555 before generating rtl for each function. */
5557 void
5558 init_emit (void)
5560 set_first_insn (NULL);
5561 set_last_insn (NULL);
5562 if (MIN_NONDEBUG_INSN_UID)
5563 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5564 else
5565 cur_insn_uid = 1;
5566 cur_debug_insn_uid = 1;
5567 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5568 last_location = UNKNOWN_LOCATION;
5569 first_label_num = label_num;
5570 seq_stack = NULL;
5572 /* Init the tables that describe all the pseudo regs. */
5574 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5576 crtl->emit.regno_pointer_align
5577 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5579 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5581 /* Put copies of all the hard registers into regno_reg_rtx. */
5582 memcpy (regno_reg_rtx,
5583 initial_regno_reg_rtx,
5584 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5586 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5587 init_virtual_regs ();
5589 /* Indicate that the virtual registers and stack locations are
5590 all pointers. */
5591 REG_POINTER (stack_pointer_rtx) = 1;
5592 REG_POINTER (frame_pointer_rtx) = 1;
5593 REG_POINTER (hard_frame_pointer_rtx) = 1;
5594 REG_POINTER (arg_pointer_rtx) = 1;
5596 REG_POINTER (virtual_incoming_args_rtx) = 1;
5597 REG_POINTER (virtual_stack_vars_rtx) = 1;
5598 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5599 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5600 REG_POINTER (virtual_cfa_rtx) = 1;
5602 #ifdef STACK_BOUNDARY
5603 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5604 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5605 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5606 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5608 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5609 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5610 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5611 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5612 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5613 #endif
5615 #ifdef INIT_EXPANDERS
5616 INIT_EXPANDERS;
5617 #endif
5620 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5622 static rtx
5623 gen_const_vector (enum machine_mode mode, int constant)
5625 rtx tem;
5626 rtvec v;
5627 int units, i;
5628 enum machine_mode inner;
5630 units = GET_MODE_NUNITS (mode);
5631 inner = GET_MODE_INNER (mode);
5633 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5635 v = rtvec_alloc (units);
5637 /* We need to call this function after we set the scalar const_tiny_rtx
5638 entries. */
5639 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5641 for (i = 0; i < units; ++i)
5642 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5644 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5645 return tem;
5648 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5649 all elements are zero, and the one vector when all elements are one. */
5651 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5653 enum machine_mode inner = GET_MODE_INNER (mode);
5654 int nunits = GET_MODE_NUNITS (mode);
5655 rtx x;
5656 int i;
5658 /* Check to see if all of the elements have the same value. */
5659 x = RTVEC_ELT (v, nunits - 1);
5660 for (i = nunits - 2; i >= 0; i--)
5661 if (RTVEC_ELT (v, i) != x)
5662 break;
5664 /* If the values are all the same, check to see if we can use one of the
5665 standard constant vectors. */
5666 if (i == -1)
5668 if (x == CONST0_RTX (inner))
5669 return CONST0_RTX (mode);
5670 else if (x == CONST1_RTX (inner))
5671 return CONST1_RTX (mode);
5674 return gen_rtx_raw_CONST_VECTOR (mode, v);
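
/* A sketch of gen_rtx_CONST_VECTOR: when every element is the shared
   zero constant, the shared CONST0_RTX (MODE) object is returned rather
   than a fresh CONST_VECTOR.  The helper name example_build_zero_vector
   is hypothetical.  */

static rtx
example_build_zero_vector (enum machine_mode mode)
{
  int i, units = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (units);

  for (i = 0; i < units; i++)
    RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));

  return gen_rtx_CONST_VECTOR (mode, v);	/* Equals CONST0_RTX (mode).  */
}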
5677 /* Initialize global register information required by all functions. */
5679 void
5680 init_emit_regs (void)
5682 int i;
5684 /* Reset register attributes. */
5685 htab_empty (reg_attrs_htab);
5687 /* We need reg_raw_mode, so initialize the modes now. */
5688 init_reg_modes_target ();
5690 /* Assign register numbers to the globally defined register rtx. */
5691 pc_rtx = gen_rtx_PC (VOIDmode);
5692 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5693 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5694 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5695 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5696 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5697 virtual_incoming_args_rtx =
5698 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5699 virtual_stack_vars_rtx =
5700 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5701 virtual_stack_dynamic_rtx =
5702 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5703 virtual_outgoing_args_rtx =
5704 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5705 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5707 /* Initialize RTL for commonly used hard registers. These are
5708 copied into regno_reg_rtx as we begin to compile each function. */
5709 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5710 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5712 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5713 return_address_pointer_rtx
5714 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5715 #endif
5717 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5718 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5719 else
5720 pic_offset_table_rtx = NULL_RTX;
5723 /* Create some permanent unique rtl objects shared between all functions. */
5725 void
5726 init_emit_once (void)
5728 int i;
5729 enum machine_mode mode;
5730 enum machine_mode double_mode;
5732 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5733 hash tables. */
5734 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5735 const_int_htab_eq, NULL);
5737 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5738 const_double_htab_eq, NULL);
5740 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5741 const_fixed_htab_eq, NULL);
5743 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5744 mem_attrs_htab_eq, NULL);
5745 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5746 reg_attrs_htab_eq, NULL);
5748 /* Compute the word and byte modes. */
5750 byte_mode = VOIDmode;
5751 word_mode = VOIDmode;
5752 double_mode = VOIDmode;
5754 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5755 mode != VOIDmode;
5756 mode = GET_MODE_WIDER_MODE (mode))
5758 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5759 && byte_mode == VOIDmode)
5760 byte_mode = mode;
5762 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5763 && word_mode == VOIDmode)
5764 word_mode = mode;
5767 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5768 mode != VOIDmode;
5769 mode = GET_MODE_WIDER_MODE (mode))
5771 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5772 && double_mode == VOIDmode)
5773 double_mode = mode;
5776 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5778 #ifdef INIT_EXPANDERS
5779 /* This is to initialize {init|mark|free}_machine_status before the first
5780 call to push_function_context_to. This is needed by the Chill front
5781 end which calls push_function_context_to before the first call to
5782 init_function_start. */
5783 INIT_EXPANDERS;
5784 #endif
5786 /* Create the unique rtx's for certain rtx codes and operand values. */
5788 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5789 tries to use these variables. */
5790 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5791 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5792 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5794 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5795 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5796 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5797 else
5798 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5800 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5801 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5802 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5804 dconstm1 = dconst1;
5805 dconstm1.sign = 1;
5807 dconsthalf = dconst1;
5808 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5810 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5812 const REAL_VALUE_TYPE *const r =
5813 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5815 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5816 mode != VOIDmode;
5817 mode = GET_MODE_WIDER_MODE (mode))
5818 const_tiny_rtx[i][(int) mode] =
5819 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5821 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5822 mode != VOIDmode;
5823 mode = GET_MODE_WIDER_MODE (mode))
5824 const_tiny_rtx[i][(int) mode] =
5825 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5827 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5829 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5830 mode != VOIDmode;
5831 mode = GET_MODE_WIDER_MODE (mode))
5832 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5834 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5835 mode != VOIDmode;
5836 mode = GET_MODE_WIDER_MODE (mode))
5837 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5840 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5841 mode != VOIDmode;
5842 mode = GET_MODE_WIDER_MODE (mode))
5844 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5845 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5848 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5849 mode != VOIDmode;
5850 mode = GET_MODE_WIDER_MODE (mode))
5852 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5853 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5856 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5857 mode != VOIDmode;
5858 mode = GET_MODE_WIDER_MODE (mode))
5860 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5861 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5864 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5865 mode != VOIDmode;
5866 mode = GET_MODE_WIDER_MODE (mode))
5868 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5869 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5872 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5873 mode != VOIDmode;
5874 mode = GET_MODE_WIDER_MODE (mode))
5876 FCONST0(mode).data.high = 0;
5877 FCONST0(mode).data.low = 0;
5878 FCONST0(mode).mode = mode;
5879 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5880 FCONST0 (mode), mode);
5883 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5884 mode != VOIDmode;
5885 mode = GET_MODE_WIDER_MODE (mode))
5887 FCONST0(mode).data.high = 0;
5888 FCONST0(mode).data.low = 0;
5889 FCONST0(mode).mode = mode;
5890 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5891 FCONST0 (mode), mode);
5894 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5895 mode != VOIDmode;
5896 mode = GET_MODE_WIDER_MODE (mode))
5898 FCONST0(mode).data.high = 0;
5899 FCONST0(mode).data.low = 0;
5900 FCONST0(mode).mode = mode;
5901 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5902 FCONST0 (mode), mode);
5904 /* We store the value 1. */
5905 FCONST1(mode).data.high = 0;
5906 FCONST1(mode).data.low = 0;
5907 FCONST1(mode).mode = mode;
5908 lshift_double (1, 0, GET_MODE_FBIT (mode),
5909 2 * HOST_BITS_PER_WIDE_INT,
5910 &FCONST1(mode).data.low,
5911 &FCONST1(mode).data.high,
5912 SIGNED_FIXED_POINT_MODE_P (mode));
5913 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5914 FCONST1 (mode), mode);
5917 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5918 mode != VOIDmode;
5919 mode = GET_MODE_WIDER_MODE (mode))
5921 FCONST0(mode).data.high = 0;
5922 FCONST0(mode).data.low = 0;
5923 FCONST0(mode).mode = mode;
5924 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5925 FCONST0 (mode), mode);
5927 /* We store the value 1. */
5928 FCONST1(mode).data.high = 0;
5929 FCONST1(mode).data.low = 0;
5930 FCONST1(mode).mode = mode;
5931 lshift_double (1, 0, GET_MODE_FBIT (mode),
5932 2 * HOST_BITS_PER_WIDE_INT,
5933 &FCONST1(mode).data.low,
5934 &FCONST1(mode).data.high,
5935 SIGNED_FIXED_POINT_MODE_P (mode));
5936 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5937 FCONST1 (mode), mode);
5940 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5941 mode != VOIDmode;
5942 mode = GET_MODE_WIDER_MODE (mode))
5944 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5947 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5948 mode != VOIDmode;
5949 mode = GET_MODE_WIDER_MODE (mode))
5951 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5954 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5955 mode != VOIDmode;
5956 mode = GET_MODE_WIDER_MODE (mode))
5958 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5959 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5962 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5963 mode != VOIDmode;
5964 mode = GET_MODE_WIDER_MODE (mode))
5966 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5967 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5970 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5971 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5972 const_tiny_rtx[0][i] = const0_rtx;
5974 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5975 if (STORE_FLAG_VALUE == 1)
5976 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5979 /* Produce an exact duplicate of insn INSN after AFTER,
5980 taking care to update libcall regions if present. */
5983 emit_copy_of_insn_after (rtx insn, rtx after)
5985 rtx new_rtx, link;
5987 switch (GET_CODE (insn))
5989 case INSN:
5990 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5991 break;
5993 case JUMP_INSN:
5994 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5995 break;
5997 case DEBUG_INSN:
5998 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5999 break;
6001 case CALL_INSN:
6002 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
6003 if (CALL_INSN_FUNCTION_USAGE (insn))
6004 CALL_INSN_FUNCTION_USAGE (new_rtx)
6005 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
6006 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
6007 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
6008 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
6009 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
6010 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
6011 break;
6013 default:
6014 gcc_unreachable ();
6017 /* Update LABEL_NUSES. */
6018 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
6020 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
6022 /* If the old insn is frame related, then so is the new one. This is
6023 primarily needed for IA-64 unwind info which marks epilogue insns,
6024 which may be duplicated by the basic block reordering code. */
6025 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
6027 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
6028 will make them. REG_LABEL_TARGETs are created there too, but are
6029 supposed to be sticky, so we copy them. */
6030 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
6031 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
6033 if (GET_CODE (link) == EXPR_LIST)
6034 add_reg_note (new_rtx, REG_NOTE_KIND (link),
6035 copy_insn_1 (XEXP (link, 0)));
6036 else
6037 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
6040 INSN_CODE (new_rtx) = INSN_CODE (insn);
6041 return new_rtx;
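
/* A sketch of emit_copy_of_insn_after: replicate a whole insn --
   pattern, most REG_NOTES, CALL_INSN_FUNCTION_USAGE and the
   frame-related flag -- for instance when a pass duplicates a block
   tail.  The helper name example_duplicate_insn is hypothetical.  */

static rtx
example_duplicate_insn (rtx insn)
{
  /* Place the duplicate immediately after the original.  */
  return emit_copy_of_insn_after (insn, insn);
}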
6044 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6046 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6048 if (hard_reg_clobbers[mode][regno])
6049 return hard_reg_clobbers[mode][regno];
6050 else
6051 return (hard_reg_clobbers[mode][regno] =
6052 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
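
/* A sketch of gen_hard_reg_clobber: the (mode, regno) pairs are cached
   above, so repeated requests return the identical shared CLOBBER rtx.
   The helper name example_clobber_hard_reg and the use of hard register
   0 are hypothetical.  */

static void
example_clobber_hard_reg (void)
{
  emit_insn (gen_hard_reg_clobber (word_mode, 0));
}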
6055 #include "gt-emit-rtl.h"