gcc/emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
4 2010
5 Free Software Foundation, Inc.
7 This file is part of GCC.
9 GCC is free software; you can redistribute it and/or modify it under
10 the terms of the GNU General Public License as published by the Free
11 Software Foundation; either version 3, or (at your option) any later
12 version.
14 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
15 WARRANTY; without even the implied warranty of MERCHANTABILITY or
16 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 for more details.
19 You should have received a copy of the GNU General Public License
20 along with GCC; see the file COPYING3. If not see
21 <http://www.gnu.org/licenses/>. */
24 /* Middle-to-low level generation of rtx code and insns.
26 This file contains support functions for creating rtl expressions
27 and manipulating them in the doubly-linked chain of insns.
29 The patterns of the insns are created by machine-dependent
30 routines in insn-emit.c, which is generated automatically from
31 the machine description. These routines make the individual rtx's
32 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
33 which are automatically generated from rtl.def; what is machine
34 dependent is the kind of rtx's they make and what arguments they
35 use. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "diagnostic-core.h"
42 #include "rtl.h"
43 #include "tree.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "function.h"
47 #include "expr.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "hashtab.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "bitmap.h"
54 #include "basic-block.h"
55 #include "ggc.h"
56 #include "debug.h"
57 #include "langhooks.h"
58 #include "tree-pass.h"
59 #include "df.h"
60 #include "params.h"
61 #include "target.h"
63 struct target_rtl default_target_rtl;
64 #if SWITCHABLE_TARGET
65 struct target_rtl *this_target_rtl = &default_target_rtl;
66 #endif
68 #define initial_regno_reg_rtx (this_target_rtl->x_initial_regno_reg_rtx)
70 /* Commonly used modes. */
72 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
73 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
74 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
75 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
77 /* Data structures maintained for the currently processed function in RTL form. */
79 struct rtl_data x_rtl;
81 /* Indexed by pseudo register number, gives the rtx for that pseudo.
82 Allocated in parallel with regno_pointer_align.
83 FIXME: We could put it into the emit_status struct, but gengtype is not able to deal
84 with a length attribute nested in top-level structures. */
86 rtx * regno_reg_rtx;
88 /* This is *not* reset after each function. It gives each CODE_LABEL
89 in the entire compilation a unique label number. */
91 static GTY(()) int label_num = 1;
93 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
94 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
95 record a copy of const[012]_rtx. */
97 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
99 rtx const_true_rtx;
101 REAL_VALUE_TYPE dconst0;
102 REAL_VALUE_TYPE dconst1;
103 REAL_VALUE_TYPE dconst2;
104 REAL_VALUE_TYPE dconstm1;
105 REAL_VALUE_TYPE dconsthalf;
107 /* Record fixed-point constant 0 and 1. */
108 FIXED_VALUE_TYPE fconst0[MAX_FCONST0];
109 FIXED_VALUE_TYPE fconst1[MAX_FCONST1];
111 /* We make one copy of (const_int C) where C is in
112 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
113 to save space during the compilation and simplify comparisons of
114 integers. */
116 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
118 /* A hash table storing CONST_INTs whose absolute value is greater
119 than MAX_SAVED_CONST_INT. */
121 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
122 htab_t const_int_htab;
124 /* A hash table storing memory attribute structures. */
125 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
126 htab_t mem_attrs_htab;
128 /* A hash table storing register attribute structures. */
129 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
130 htab_t reg_attrs_htab;
132 /* A hash table storing all CONST_DOUBLEs. */
133 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
134 htab_t const_double_htab;
136 /* A hash table storing all CONST_FIXEDs. */
137 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
138 htab_t const_fixed_htab;
140 #define cur_insn_uid (crtl->emit.x_cur_insn_uid)
141 #define cur_debug_insn_uid (crtl->emit.x_cur_debug_insn_uid)
142 #define last_location (crtl->emit.x_last_location)
143 #define first_label_num (crtl->emit.x_first_label_num)
145 static rtx make_call_insn_raw (rtx);
146 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
147 static void set_used_decls (tree);
148 static void mark_label_nuses (rtx);
149 static hashval_t const_int_htab_hash (const void *);
150 static int const_int_htab_eq (const void *, const void *);
151 static hashval_t const_double_htab_hash (const void *);
152 static int const_double_htab_eq (const void *, const void *);
153 static rtx lookup_const_double (rtx);
154 static hashval_t const_fixed_htab_hash (const void *);
155 static int const_fixed_htab_eq (const void *, const void *);
156 static rtx lookup_const_fixed (rtx);
157 static hashval_t mem_attrs_htab_hash (const void *);
158 static int mem_attrs_htab_eq (const void *, const void *);
159 static mem_attrs *get_mem_attrs (alias_set_type, tree, rtx, rtx, unsigned int,
160 addr_space_t, enum machine_mode);
161 static hashval_t reg_attrs_htab_hash (const void *);
162 static int reg_attrs_htab_eq (const void *, const void *);
163 static reg_attrs *get_reg_attrs (tree, int);
164 static rtx gen_const_vector (enum machine_mode, int);
165 static void copy_rtx_if_shared_1 (rtx *orig);
167 /* Probability of the conditional branch currently processed by try_split.
168 Set to -1 otherwise. */
169 int split_branch_probability = -1;
171 /* Returns a hash code for X (which is really a CONST_INT). */
173 static hashval_t
174 const_int_htab_hash (const void *x)
176 return (hashval_t) INTVAL ((const_rtx) x);
179 /* Returns nonzero if the value represented by X (which is really a
180 CONST_INT) is the same as that given by Y (which is really a
181 HOST_WIDE_INT *). */
183 static int
184 const_int_htab_eq (const void *x, const void *y)
186 return (INTVAL ((const_rtx) x) == *((const HOST_WIDE_INT *) y));
189 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
190 static hashval_t
191 const_double_htab_hash (const void *x)
193 const_rtx const value = (const_rtx) x;
194 hashval_t h;
196 if (GET_MODE (value) == VOIDmode)
197 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
198 else
200 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
201 /* MODE is used in the comparison, so it should be in the hash. */
202 h ^= GET_MODE (value);
204 return h;
207 /* Returns nonzero if the value represented by X (really a ...)
208 is the same as that represented by Y (really a ...) */
209 static int
210 const_double_htab_eq (const void *x, const void *y)
212 const_rtx const a = (const_rtx)x, b = (const_rtx)y;
214 if (GET_MODE (a) != GET_MODE (b))
215 return 0;
216 if (GET_MODE (a) == VOIDmode)
217 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
218 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
219 else
220 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
221 CONST_DOUBLE_REAL_VALUE (b));
224 /* Returns a hash code for X (which is really a CONST_FIXED). */
226 static hashval_t
227 const_fixed_htab_hash (const void *x)
229 const_rtx const value = (const_rtx) x;
230 hashval_t h;
232 h = fixed_hash (CONST_FIXED_VALUE (value));
233 /* MODE is used in the comparison, so it should be in the hash. */
234 h ^= GET_MODE (value);
235 return h;
238 /* Returns nonzero if the value represented by X (really a ...)
239 is the same as that represented by Y (really a ...). */
241 static int
242 const_fixed_htab_eq (const void *x, const void *y)
244 const_rtx const a = (const_rtx) x, b = (const_rtx) y;
246 if (GET_MODE (a) != GET_MODE (b))
247 return 0;
248 return fixed_identical (CONST_FIXED_VALUE (a), CONST_FIXED_VALUE (b));
251 /* Returns a hash code for X (which is really a mem_attrs *). */
253 static hashval_t
254 mem_attrs_htab_hash (const void *x)
256 const mem_attrs *const p = (const mem_attrs *) x;
258 return (p->alias ^ (p->align * 1000)
259 ^ (p->addrspace * 4000)
260 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
261 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
262 ^ (size_t) iterative_hash_expr (p->expr, 0));
265 /* Returns nonzero if the value represented by X (which is really a
266 mem_attrs *) is the same as that given by Y (which is also really a
267 mem_attrs *). */
269 static int
270 mem_attrs_htab_eq (const void *x, const void *y)
272 const mem_attrs *const p = (const mem_attrs *) x;
273 const mem_attrs *const q = (const mem_attrs *) y;
275 return (p->alias == q->alias && p->offset == q->offset
276 && p->size == q->size && p->align == q->align
277 && p->addrspace == q->addrspace
278 && (p->expr == q->expr
279 || (p->expr != NULL_TREE && q->expr != NULL_TREE
280 && operand_equal_p (p->expr, q->expr, 0))));
283 /* Allocate a new mem_attrs structure and insert it into the hash table if
284 one identical to it is not already in the table. We are doing this for
285 MEM of mode MODE. */
287 static mem_attrs *
288 get_mem_attrs (alias_set_type alias, tree expr, rtx offset, rtx size,
289 unsigned int align, addr_space_t addrspace, enum machine_mode mode)
291 mem_attrs attrs;
292 void **slot;
294 /* If everything is the default, we can just return zero.
295 This must match what the corresponding MEM_* macros return when the
296 field is not present. */
297 if (alias == 0 && expr == 0 && offset == 0 && addrspace == 0
298 && (size == 0
299 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
300 && (STRICT_ALIGNMENT && mode != BLKmode
301 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
302 return 0;
304 attrs.alias = alias;
305 attrs.expr = expr;
306 attrs.offset = offset;
307 attrs.size = size;
308 attrs.align = align;
309 attrs.addrspace = addrspace;
311 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
312 if (*slot == 0)
314 *slot = ggc_alloc_mem_attrs ();
315 memcpy (*slot, &attrs, sizeof (mem_attrs));
318 return (mem_attrs *) *slot;
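/* An illustrative sketch, kept under #if 0 (exposition only): how the
   hash-consing above looks from a caller's side.  The attribute setters
   defined later (set_mem_align and friends) all funnel through
   get_mem_attrs, so MEMs that end up with identical attribute tuples are
   expected to share a single GC-managed mem_attrs record.  ADDR1 and ADDR2
   are hypothetical address rtxes.  */
#if 0
static void
example_mem_attrs_sharing (rtx addr1, rtx addr2)
{
  rtx m1 = gen_rtx_MEM (SImode, addr1);
  rtx m2 = gen_rtx_MEM (SImode, addr2);

  /* Give both MEMs the same, non-default alignment.  */
  set_mem_align (m1, 64);
  set_mem_align (m2, 64);

  /* Both requests hash to the same slot in mem_attrs_htab, so the two MEMs
     normally end up with MEM_ATTRS (m1) == MEM_ATTRS (m2).  */
}
#endif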
321 /* Returns a hash code for X (which is really a reg_attrs *). */
323 static hashval_t
324 reg_attrs_htab_hash (const void *x)
326 const reg_attrs *const p = (const reg_attrs *) x;
328 return ((p->offset * 1000) ^ (long) p->decl);
331 /* Returns nonzero if the value represented by X (which is really a
332 reg_attrs *) is the same as that given by Y (which is also really a
333 reg_attrs *). */
335 static int
336 reg_attrs_htab_eq (const void *x, const void *y)
338 const reg_attrs *const p = (const reg_attrs *) x;
339 const reg_attrs *const q = (const reg_attrs *) y;
341 return (p->decl == q->decl && p->offset == q->offset);
343 /* Allocate a new reg_attrs structure and insert it into the hash table if
344 one identical to it is not already in the table. */
347 static reg_attrs *
348 get_reg_attrs (tree decl, int offset)
350 reg_attrs attrs;
351 void **slot;
353 /* If everything is the default, we can just return zero. */
354 if (decl == 0 && offset == 0)
355 return 0;
357 attrs.decl = decl;
358 attrs.offset = offset;
360 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
361 if (*slot == 0)
363 *slot = ggc_alloc_reg_attrs ();
364 memcpy (*slot, &attrs, sizeof (reg_attrs));
367 return (reg_attrs *) *slot;
371 #if !HAVE_blockage
372 /* Generate an empty ASM_INPUT, which is used to block attempts to schedule
373 across this insn. */
376 gen_blockage (void)
378 rtx x = gen_rtx_ASM_INPUT (VOIDmode, "");
379 MEM_VOLATILE_P (x) = true;
380 return x;
382 #endif
385 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
386 don't attempt to share with the various global pieces of rtl (such as
387 frame_pointer_rtx). */
390 gen_raw_REG (enum machine_mode mode, int regno)
392 rtx x = gen_rtx_raw_REG (mode, regno);
393 ORIGINAL_REGNO (x) = regno;
394 return x;
397 /* There are some RTL codes that require special attention; the generation
398 functions do the raw handling. If you add to this list, modify
399 special_rtx in gengenrtl.c as well. */
402 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
404 void **slot;
406 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
407 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
409 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
410 if (const_true_rtx && arg == STORE_FLAG_VALUE)
411 return const_true_rtx;
412 #endif
414 /* Look up the CONST_INT in the hash table. */
415 slot = htab_find_slot_with_hash (const_int_htab, &arg,
416 (hashval_t) arg, INSERT);
417 if (*slot == 0)
418 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
420 return (rtx) *slot;
424 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
426 return GEN_INT (trunc_int_for_mode (c, mode));
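/* An illustrative sketch, kept under #if 0 (exposition only): what the
   CONST_INT cache and gen_int_mode buy a caller.  Equal integer constants
   are a single shared object, so pointer equality is enough to compare
   them, and gen_int_mode truncates its argument to MODE first.  */
#if 0
static void
example_const_int_sharing (void)
{
  /* Small values come straight from const_int_rtx[].  */
  gcc_assert (GEN_INT (0) == const0_rtx);

  /* 300 does not fit in QImode; trunc_int_for_mode keeps the low 8 bits
     (and sign-extends), so this is the shared (const_int 44).  */
  gcc_assert (gen_int_mode (300, QImode) == GEN_INT (44));

  /* Values outside the small cache are uniquified through const_int_htab,
     so pointer equality still holds.  */
  gcc_assert (GEN_INT (1000) == GEN_INT (1000));
}
#endif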
429 /* CONST_DOUBLEs might be created from pairs of integers, or from
430 REAL_VALUE_TYPEs. Also, their length is known only at run time,
431 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
433 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
434 hash table. If so, return its counterpart; otherwise add it
435 to the hash table and return it. */
436 static rtx
437 lookup_const_double (rtx real)
439 void **slot = htab_find_slot (const_double_htab, real, INSERT);
440 if (*slot == 0)
441 *slot = real;
443 return (rtx) *slot;
446 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
447 VALUE in mode MODE. */
449 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
451 rtx real = rtx_alloc (CONST_DOUBLE);
452 PUT_MODE (real, mode);
454 real->u.rv = value;
456 return lookup_const_double (real);
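/* An illustrative sketch, kept under #if 0 (exposition only): building a
   floating-point CONST_DOUBLE and relying on the hash-consing above.
   real_from_string is the real.c routine that parses a decimal string
   into a REAL_VALUE_TYPE.  */
#if 0
static void
example_const_double (void)
{
  REAL_VALUE_TYPE r;
  rtx x, y;

  real_from_string (&r, "2.5");
  x = const_double_from_real_value (r, DFmode);
  y = const_double_from_real_value (r, DFmode);

  /* lookup_const_double finds the existing entry the second time, so the
     very same rtx object comes back.  */
  gcc_assert (x == y && GET_MODE (x) == DFmode);
}
#endif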
459 /* Determine whether FIXED, a CONST_FIXED, already exists in the
460 hash table. If so, return its counterpart; otherwise add it
461 to the hash table and return it. */
463 static rtx
464 lookup_const_fixed (rtx fixed)
466 void **slot = htab_find_slot (const_fixed_htab, fixed, INSERT);
467 if (*slot == 0)
468 *slot = fixed;
470 return (rtx) *slot;
473 /* Return a CONST_FIXED rtx for a fixed-point value specified by
474 VALUE in mode MODE. */
477 const_fixed_from_fixed_value (FIXED_VALUE_TYPE value, enum machine_mode mode)
479 rtx fixed = rtx_alloc (CONST_FIXED);
480 PUT_MODE (fixed, mode);
482 fixed->u.fv = value;
484 return lookup_const_fixed (fixed);
487 /* Constructs double_int from rtx CST. */
489 double_int
490 rtx_to_double_int (const_rtx cst)
492 double_int r;
494 if (CONST_INT_P (cst))
495 r = shwi_to_double_int (INTVAL (cst));
496 else if (CONST_DOUBLE_P (cst) && GET_MODE (cst) == VOIDmode)
498 r.low = CONST_DOUBLE_LOW (cst);
499 r.high = CONST_DOUBLE_HIGH (cst);
501 else
502 gcc_unreachable ();
504 return r;
508 /* Return a CONST_DOUBLE or CONST_INT for a value specified as
509 a double_int. */
512 immed_double_int_const (double_int i, enum machine_mode mode)
514 return immed_double_const (i.low, i.high, mode);
517 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
518 of ints: I0 is the low-order word and I1 is the high-order word.
519 Do not use this routine for non-integer modes; convert to
520 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
523 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
525 rtx value;
526 unsigned int i;
528 /* There are the following cases (note that there are no modes with
529 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
531 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
532 gen_int_mode.
533 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
534 the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
535 from copies of the sign bit, and sign of i0 and i1 are the same), then
536 we return a CONST_INT for i0.
537 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
538 if (mode != VOIDmode)
540 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
541 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
542 /* We can get a 0 for an error mark. */
543 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
544 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
546 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
547 return gen_int_mode (i0, mode);
549 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
552 /* If this integer fits in one word, return a CONST_INT. */
553 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
554 return GEN_INT (i0);
556 /* We use VOIDmode for integers. */
557 value = rtx_alloc (CONST_DOUBLE);
558 PUT_MODE (value, VOIDmode);
560 CONST_DOUBLE_LOW (value) = i0;
561 CONST_DOUBLE_HIGH (value) = i1;
563 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
564 XWINT (value, i) = 0;
566 return lookup_const_double (value);
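/* An illustrative sketch, kept under #if 0 (exposition only): the three
   cases described in the comment above, shown mostly with VOIDmode, the
   usual mode for double-word integer constants.  */
#if 0
static void
example_immed_double_const (void)
{
  rtx x;

  /* Case 2: the high word is just sign copies of the low word, so a plain
     CONST_INT suffices.  */
  gcc_assert (immed_double_const (-1, -1, VOIDmode) == constm1_rtx);

  /* Case 3: the value really needs two words, so a VOIDmode CONST_DOUBLE
     is built (and uniquified through const_double_htab).  */
  x = immed_double_const (0, 1, VOIDmode);
  gcc_assert (GET_CODE (x) == CONST_DOUBLE
              && CONST_DOUBLE_LOW (x) == 0
              && CONST_DOUBLE_HIGH (x) == 1);

  /* Case 1: for a narrow mode such as SImode the value is simply truncated
     with gen_int_mode, yielding an ordinary CONST_INT.  */
  gcc_assert (CONST_INT_P (immed_double_const (5, 0, SImode)));
}
#endif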
570 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
572 /* In case the MD file explicitly references the frame pointer, have
573 all such references point to the same frame pointer. This is
574 used during frame pointer elimination to distinguish the explicit
575 references to these registers from pseudos that happened to be
576 assigned to them.
578 If we have eliminated the frame pointer or arg pointer, we will
579 be using it as a normal register, for example as a spill
580 register. In such cases, we might be accessing it in a mode that
581 is not Pmode and therefore cannot use the pre-allocated rtx.
583 Also don't do this when we are making new REGs in reload, since
584 we don't want to get confused with the real pointers. */
586 if (mode == Pmode && !reload_in_progress)
588 if (regno == FRAME_POINTER_REGNUM
589 && (!reload_completed || frame_pointer_needed))
590 return frame_pointer_rtx;
591 #if !HARD_FRAME_POINTER_IS_FRAME_POINTER
592 if (regno == HARD_FRAME_POINTER_REGNUM
593 && (!reload_completed || frame_pointer_needed))
594 return hard_frame_pointer_rtx;
595 #endif
596 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && !HARD_FRAME_POINTER_IS_ARG_POINTER
597 if (regno == ARG_POINTER_REGNUM)
598 return arg_pointer_rtx;
599 #endif
600 #ifdef RETURN_ADDRESS_POINTER_REGNUM
601 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
602 return return_address_pointer_rtx;
603 #endif
604 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
605 && PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
606 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
607 return pic_offset_table_rtx;
608 if (regno == STACK_POINTER_REGNUM)
609 return stack_pointer_rtx;
612 #if 0
613 /* If the per-function register table has been set up, try to re-use
614 an existing entry in that table to avoid useless generation of RTL.
616 This code is disabled for now until we can fix the various backends
617 which depend on having non-shared hard registers in some cases. Long
618 term we want to re-enable this code as it can significantly cut down
619 on the amount of useless RTL that gets generated.
621 We'll also need to fix some code that runs after reload that wants to
622 set ORIGINAL_REGNO. */
624 if (cfun
625 && cfun->emit
626 && regno_reg_rtx
627 && regno < FIRST_PSEUDO_REGISTER
628 && reg_raw_mode[regno] == mode)
629 return regno_reg_rtx[regno];
630 #endif
632 return gen_raw_REG (mode, regno);
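/* An illustrative sketch, kept under #if 0 (exposition only): the sharing
   behaviour described above.  Outside of reload, asking for the stack
   pointer in Pmode hands back the single global stack_pointer_rtx, while
   any other (mode, regno) pair gets a fresh object from gen_raw_REG.  */
#if 0
static void
example_gen_rtx_REG (void)
{
  /* The well-known pointer registers are shared...  */
  gcc_assert (gen_rtx_REG (Pmode, STACK_POINTER_REGNUM) == stack_pointer_rtx);

  /* ...but a hard register in some other mode is not, so two requests give
     two distinct rtxes.  */
  gcc_assert (gen_rtx_REG (QImode, 0) != gen_rtx_REG (QImode, 0));
}
#endif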
636 gen_rtx_MEM (enum machine_mode mode, rtx addr)
638 rtx rt = gen_rtx_raw_MEM (mode, addr);
640 /* This field is not cleared by the mere allocation of the rtx, so
641 we clear it here. */
642 MEM_ATTRS (rt) = 0;
644 return rt;
647 /* Generate a memory referring to non-trapping constant memory. */
650 gen_const_mem (enum machine_mode mode, rtx addr)
652 rtx mem = gen_rtx_MEM (mode, addr);
653 MEM_READONLY_P (mem) = 1;
654 MEM_NOTRAP_P (mem) = 1;
655 return mem;
658 /* Generate a MEM referring to fixed portions of the frame, e.g., register
659 save areas. */
662 gen_frame_mem (enum machine_mode mode, rtx addr)
664 rtx mem = gen_rtx_MEM (mode, addr);
665 MEM_NOTRAP_P (mem) = 1;
666 set_mem_alias_set (mem, get_frame_alias_set ());
667 return mem;
670 /* Generate a MEM referring to a temporary use of the stack, not part
671 of the fixed stack frame. For example, something which is pushed
672 by a target splitter. */
674 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
676 rtx mem = gen_rtx_MEM (mode, addr);
677 MEM_NOTRAP_P (mem) = 1;
678 if (!cfun->calls_alloca)
679 set_mem_alias_set (mem, get_frame_alias_set ());
680 return mem;
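/* An illustrative sketch, kept under #if 0 (exposition only): what the
   convenience generators above set up, for a hypothetical address ADDR.  */
#if 0
static void
example_mem_generators (rtx addr)
{
  rtx c = gen_const_mem (SImode, addr);
  rtx f = gen_frame_mem (SImode, addr);

  /* Constant memory is flagged read-only and non-trapping.  */
  gcc_assert (MEM_READONLY_P (c) && MEM_NOTRAP_P (c));

  /* Frame memory is non-trapping and lives in the frame alias set, which
     keeps it apart from user-visible memory.  */
  gcc_assert (MEM_NOTRAP_P (f)
              && MEM_ALIAS_SET (f) == get_frame_alias_set ());
}
#endif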
683 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
684 this construct would be valid, and false otherwise. */
686 bool
687 validate_subreg (enum machine_mode omode, enum machine_mode imode,
688 const_rtx reg, unsigned int offset)
690 unsigned int isize = GET_MODE_SIZE (imode);
691 unsigned int osize = GET_MODE_SIZE (omode);
693 /* All subregs must be aligned. */
694 if (offset % osize != 0)
695 return false;
697 /* The subreg offset cannot be outside the inner object. */
698 if (offset >= isize)
699 return false;
701 /* ??? This should not be here. Temporarily continue to allow word_mode
702 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
703 Generally, backends are doing something sketchy but it'll take time to
704 fix them all. */
705 if (omode == word_mode)
707 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
708 is the culprit here, and not the backends. */
709 else if (osize >= UNITS_PER_WORD && isize >= osize)
711 /* Allow component subregs of complex and vector. Though given the below
712 extraction rules, it's not always clear what that means. */
713 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
714 && GET_MODE_INNER (imode) == omode)
716 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
717 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
718 represent this. It's questionable if this ought to be represented at
719 all -- why can't this all be hidden in post-reload splitters that make
720 arbitrary mode changes to the registers themselves. */
721 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
723 /* Subregs involving floating point modes are not allowed to
724 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
725 (subreg:SI (reg:DF) 0) isn't. */
726 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
728 if (isize != osize)
729 return false;
732 /* Paradoxical subregs must have offset zero. */
733 if (osize > isize)
734 return offset == 0;
736 /* This is a normal subreg. Verify that the offset is representable. */
738 /* For hard registers, we already have most of these rules collected in
739 subreg_offset_representable_p. */
740 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
742 unsigned int regno = REGNO (reg);
744 #ifdef CANNOT_CHANGE_MODE_CLASS
745 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
746 && GET_MODE_INNER (imode) == omode)
748 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
749 return false;
750 #endif
752 return subreg_offset_representable_p (regno, imode, offset, omode);
755 /* For pseudo registers, we want most of the same checks. Namely:
756 If the register is no larger than a word, the subreg must be the lowpart.
757 If the register is larger than a word, the subreg must be the lowpart
758 of a subword. A subreg does *not* perform arbitrary bit extraction.
759 Given that we've already checked mode/offset alignment, we only have
760 to check subword subregs here. */
761 if (osize < UNITS_PER_WORD)
763 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
764 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
765 if (offset % UNITS_PER_WORD != low_off)
766 return false;
768 return true;
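/* An illustrative sketch, kept under #if 0 (exposition only): a few
   concrete answers from validate_subreg, assuming REG is a DImode pseudo.
   The first two checks are target-independent; the last is phrased for a
   little-endian target, where byte offset 0 is the lowpart.  */
#if 0
static void
example_validate_subreg (rtx reg)
{
  /* Offset 2 is not a multiple of the 4-byte outer size: rejected.  */
  gcc_assert (!validate_subreg (SImode, DImode, reg, 2));

  /* Offset 8 points past the end of the 8-byte inner value: rejected.  */
  gcc_assert (!validate_subreg (SImode, DImode, reg, 8));

  /* On a little-endian target the SImode lowpart of a DImode pseudo sits
     at offset 0, so (subreg:SI (reg:DI) 0) is fine.  */
  gcc_assert (validate_subreg (SImode, DImode, reg, 0));
}
#endif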
772 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
774 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
775 return gen_rtx_raw_SUBREG (mode, reg, offset);
778 /* Generate a SUBREG representing the least-significant part of REG if MODE
779 is smaller than mode of REG, otherwise paradoxical SUBREG. */
782 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
784 enum machine_mode inmode;
786 inmode = GET_MODE (reg);
787 if (inmode == VOIDmode)
788 inmode = mode;
789 return gen_rtx_SUBREG (mode, reg,
790 subreg_lowpart_offset (mode, inmode));
794 /* Create an rtvec and store within it the RTXen passed in the arguments. */
796 rtvec
797 gen_rtvec (int n, ...)
799 int i;
800 rtvec rt_val;
801 va_list p;
803 va_start (p, n);
805 /* Don't allocate an empty rtvec... */
806 if (n == 0)
807 return NULL_RTVEC;
809 rt_val = rtvec_alloc (n);
811 for (i = 0; i < n; i++)
812 rt_val->elem[i] = va_arg (p, rtx);
814 va_end (p);
815 return rt_val;
818 rtvec
819 gen_rtvec_v (int n, rtx *argp)
821 int i;
822 rtvec rt_val;
824 /* Don't allocate an empty rtvec... */
825 if (n == 0)
826 return NULL_RTVEC;
828 rt_val = rtvec_alloc (n);
830 for (i = 0; i < n; i++)
831 rt_val->elem[i] = *argp++;
833 return rt_val;
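/* An illustrative sketch, kept under #if 0 (exposition only): typical use
   of the two constructors above, e.g. when wrapping several side-by-side
   patterns into a PARALLEL.  X and Y are hypothetical rtxes.  */
#if 0
static rtx
example_gen_rtvec (rtx x, rtx y)
{
  rtvec v = gen_rtvec (2, x, y);

  gcc_assert (GET_NUM_ELEM (v) == 2 && RTVEC_ELT (v, 0) == x);

  /* The rtvec itself carries no rtx code; it is normally wrapped in
     something like a PARALLEL.  */
  return gen_rtx_PARALLEL (VOIDmode, v);
}
#endif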
836 /* Return the number of bytes between the start of an OUTER_MODE
837 in-memory value and the start of an INNER_MODE in-memory value,
838 given that the former is a lowpart of the latter. It may be a
839 paradoxical lowpart, in which case the offset will be negative
840 on big-endian targets. */
843 byte_lowpart_offset (enum machine_mode outer_mode,
844 enum machine_mode inner_mode)
846 if (GET_MODE_SIZE (outer_mode) < GET_MODE_SIZE (inner_mode))
847 return subreg_lowpart_offset (outer_mode, inner_mode);
848 else
849 return -subreg_lowpart_offset (inner_mode, outer_mode);
852 /* Generate a REG rtx for a new pseudo register of mode MODE.
853 This pseudo is assigned the next sequential register number. */
856 gen_reg_rtx (enum machine_mode mode)
858 rtx val;
859 unsigned int align = GET_MODE_ALIGNMENT (mode);
861 gcc_assert (can_create_pseudo_p ());
863 /* If a virtual register with bigger mode alignment is generated,
864 increase stack alignment estimation because it might be spilled
865 to stack later. */
866 if (SUPPORTS_STACK_ALIGNMENT
867 && crtl->stack_alignment_estimated < align
868 && !crtl->stack_realign_processed)
870 unsigned int min_align = MINIMUM_ALIGNMENT (NULL, mode, align);
871 if (crtl->stack_alignment_estimated < min_align)
872 crtl->stack_alignment_estimated = min_align;
875 if (generating_concat_p
876 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
877 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
879 /* For complex modes, don't make a single pseudo.
880 Instead, make a CONCAT of two pseudos.
881 This allows noncontiguous allocation of the real and imaginary parts,
882 which makes much better code. Besides, allocating DCmode
883 pseudos overstrains reload on some machines like the 386. */
884 rtx realpart, imagpart;
885 enum machine_mode partmode = GET_MODE_INNER (mode);
887 realpart = gen_reg_rtx (partmode);
888 imagpart = gen_reg_rtx (partmode);
889 return gen_rtx_CONCAT (mode, realpart, imagpart);
892 /* Make sure regno_pointer_align, and regno_reg_rtx are large
893 enough to have an element for this pseudo reg number. */
895 if (reg_rtx_no == crtl->emit.regno_pointer_align_length)
897 int old_size = crtl->emit.regno_pointer_align_length;
898 char *tmp;
899 rtx *new1;
901 tmp = XRESIZEVEC (char, crtl->emit.regno_pointer_align, old_size * 2);
902 memset (tmp + old_size, 0, old_size);
903 crtl->emit.regno_pointer_align = (unsigned char *) tmp;
905 new1 = GGC_RESIZEVEC (rtx, regno_reg_rtx, old_size * 2);
906 memset (new1 + old_size, 0, old_size * sizeof (rtx));
907 regno_reg_rtx = new1;
909 crtl->emit.regno_pointer_align_length = old_size * 2;
912 val = gen_raw_REG (mode, reg_rtx_no);
913 regno_reg_rtx[reg_rtx_no++] = val;
914 return val;
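/* An illustrative sketch, kept under #if 0 (exposition only): the CONCAT
   special case above.  During expansion, while generating_concat_p is
   nonzero, a complex-mode pseudo is really a pair of scalar pseudos.  */
#if 0
static void
example_gen_reg_rtx (void)
{
  rtx r = gen_reg_rtx (SImode);
  rtx c = gen_reg_rtx (DCmode);		/* complex double */

  gcc_assert (REG_P (r));

  /* (concat:DC (reg:DF) (reg:DF)): the real and imaginary parts can later
     be allocated independently.  */
  gcc_assert (GET_CODE (c) == CONCAT
              && GET_MODE (XEXP (c, 0)) == DFmode);
}
#endif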
917 /* Update NEW with the same attributes as REG, but with OFFSET added
918 to the REG_OFFSET. */
920 static void
921 update_reg_offset (rtx new_rtx, rtx reg, int offset)
923 REG_ATTRS (new_rtx) = get_reg_attrs (REG_EXPR (reg),
924 REG_OFFSET (reg) + offset);
927 /* Generate a register with same attributes as REG, but with OFFSET
928 added to the REG_OFFSET. */
931 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
932 int offset)
934 rtx new_rtx = gen_rtx_REG (mode, regno);
936 update_reg_offset (new_rtx, reg, offset);
937 return new_rtx;
940 /* Generate a new pseudo-register with the same attributes as REG, but
941 with OFFSET added to the REG_OFFSET. */
944 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
946 rtx new_rtx = gen_reg_rtx (mode);
948 update_reg_offset (new_rtx, reg, offset);
949 return new_rtx;
952 /* Adjust REG in-place so that it has mode MODE. It is assumed that the
953 new register is a (possibly paradoxical) lowpart of the old one. */
955 void
956 adjust_reg_mode (rtx reg, enum machine_mode mode)
958 update_reg_offset (reg, reg, byte_lowpart_offset (mode, GET_MODE (reg)));
959 PUT_MODE (reg, mode);
962 /* Copy REG's attributes from X, if X has any attributes. If REG and X
963 have different modes, REG is a (possibly paradoxical) lowpart of X. */
965 void
966 set_reg_attrs_from_value (rtx reg, rtx x)
968 int offset;
970 /* Hard registers can be reused for multiple purposes within the same
971 function, so setting REG_ATTRS, REG_POINTER and REG_POINTER_ALIGN
972 on them is wrong. */
973 if (HARD_REGISTER_P (reg))
974 return;
976 offset = byte_lowpart_offset (GET_MODE (reg), GET_MODE (x));
977 if (MEM_P (x))
979 if (MEM_OFFSET (x) && CONST_INT_P (MEM_OFFSET (x)))
980 REG_ATTRS (reg)
981 = get_reg_attrs (MEM_EXPR (x), INTVAL (MEM_OFFSET (x)) + offset);
982 if (MEM_POINTER (x))
983 mark_reg_pointer (reg, 0);
985 else if (REG_P (x))
987 if (REG_ATTRS (x))
988 update_reg_offset (reg, x, offset);
989 if (REG_POINTER (x))
990 mark_reg_pointer (reg, REGNO_POINTER_ALIGN (REGNO (x)));
994 /* Generate a REG rtx for a new pseudo register, copying the mode
995 and attributes from X. */
998 gen_reg_rtx_and_attrs (rtx x)
1000 rtx reg = gen_reg_rtx (GET_MODE (x));
1001 set_reg_attrs_from_value (reg, x);
1002 return reg;
1005 /* Set the register attributes for registers contained in PARM_RTX.
1006 Use needed values from memory attributes of MEM. */
1008 void
1009 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
1011 if (REG_P (parm_rtx))
1012 set_reg_attrs_from_value (parm_rtx, mem);
1013 else if (GET_CODE (parm_rtx) == PARALLEL)
1015 /* Check for a NULL entry in the first slot, used to indicate that the
1016 parameter goes both on the stack and in registers. */
1017 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
1018 for (; i < XVECLEN (parm_rtx, 0); i++)
1020 rtx x = XVECEXP (parm_rtx, 0, i);
1021 if (REG_P (XEXP (x, 0)))
1022 REG_ATTRS (XEXP (x, 0))
1023 = get_reg_attrs (MEM_EXPR (mem),
1024 INTVAL (XEXP (x, 1)));
1029 /* Set the REG_ATTRS for registers in value X, given that X represents
1030 decl T. */
1032 void
1033 set_reg_attrs_for_decl_rtl (tree t, rtx x)
1035 if (GET_CODE (x) == SUBREG)
1037 gcc_assert (subreg_lowpart_p (x));
1038 x = SUBREG_REG (x);
1040 if (REG_P (x))
1041 REG_ATTRS (x)
1042 = get_reg_attrs (t, byte_lowpart_offset (GET_MODE (x),
1043 DECL_MODE (t)));
1044 if (GET_CODE (x) == CONCAT)
1046 if (REG_P (XEXP (x, 0)))
1047 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1048 if (REG_P (XEXP (x, 1)))
1049 REG_ATTRS (XEXP (x, 1))
1050 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1052 if (GET_CODE (x) == PARALLEL)
1054 int i, start;
1056 /* Check for a NULL entry, used to indicate that the parameter goes
1057 both on the stack and in registers. */
1058 if (XEXP (XVECEXP (x, 0, 0), 0))
1059 start = 0;
1060 else
1061 start = 1;
1063 for (i = start; i < XVECLEN (x, 0); i++)
1065 rtx y = XVECEXP (x, 0, i);
1066 if (REG_P (XEXP (y, 0)))
1067 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1072 /* Assign the RTX X to declaration T. */
1074 void
1075 set_decl_rtl (tree t, rtx x)
1077 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
1078 if (x)
1079 set_reg_attrs_for_decl_rtl (t, x);
1082 /* Assign the RTX X to parameter declaration T. BY_REFERENCE_P is true
1083 if the ABI requires the parameter to be passed by reference. */
1085 void
1086 set_decl_incoming_rtl (tree t, rtx x, bool by_reference_p)
1088 DECL_INCOMING_RTL (t) = x;
1089 if (x && !by_reference_p)
1090 set_reg_attrs_for_decl_rtl (t, x);
1093 /* Identify REG (which may be a CONCAT) as a user register. */
1095 void
1096 mark_user_reg (rtx reg)
1098 if (GET_CODE (reg) == CONCAT)
1100 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1101 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1103 else
1105 gcc_assert (REG_P (reg));
1106 REG_USERVAR_P (reg) = 1;
1110 /* Identify REG as a probable pointer register and show its alignment
1111 as ALIGN, if nonzero. */
1113 void
1114 mark_reg_pointer (rtx reg, int align)
1116 if (! REG_POINTER (reg))
1118 REG_POINTER (reg) = 1;
1120 if (align)
1121 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1123 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1124 /* We can no longer be sure just how aligned this pointer is. */
1125 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1128 /* Return 1 plus largest pseudo reg number used in the current function. */
1131 max_reg_num (void)
1133 return reg_rtx_no;
1136 /* Return 1 + the largest label number used so far in the current function. */
1139 max_label_num (void)
1141 return label_num;
1144 /* Return first label number used in this function (if any were used). */
1147 get_first_label_num (void)
1149 return first_label_num;
1152 /* If the rtx for label was created during the expansion of a nested
1153 function, then first_label_num won't include this label number.
1154 Fix this now so that array indices work later. */
1156 void
1157 maybe_set_first_label_num (rtx x)
1159 if (CODE_LABEL_NUMBER (x) < first_label_num)
1160 first_label_num = CODE_LABEL_NUMBER (x);
1163 /* Return a value representing some low-order bits of X, where the number
1164 of low-order bits is given by MODE. Note that no conversion is done
1165 between floating-point and fixed-point values, rather, the bit
1166 representation is returned.
1168 This function handles the cases in common between gen_lowpart, below,
1169 and two variants in cse.c and combine.c. These are the cases that can
1170 be safely handled at all points in the compilation.
1172 If this is not a case we can handle, return 0. */
1175 gen_lowpart_common (enum machine_mode mode, rtx x)
1177 int msize = GET_MODE_SIZE (mode);
1178 int xsize;
1179 int offset = 0;
1180 enum machine_mode innermode;
1182 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1183 so we have to make one up. Yuk. */
1184 innermode = GET_MODE (x);
1185 if (CONST_INT_P (x)
1186 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1187 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1188 else if (innermode == VOIDmode)
1189 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1191 xsize = GET_MODE_SIZE (innermode);
1193 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1195 if (innermode == mode)
1196 return x;
1198 /* MODE must occupy no more words than the mode of X. */
1199 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1200 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1201 return 0;
1203 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1204 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1205 return 0;
1207 offset = subreg_lowpart_offset (mode, innermode);
1209 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1210 && (GET_MODE_CLASS (mode) == MODE_INT
1211 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1213 /* If we are getting the low-order part of something that has been
1214 sign- or zero-extended, we can either just use the object being
1215 extended or make a narrower extension. If we want an even smaller
1216 piece than the size of the object being extended, call ourselves
1217 recursively.
1219 This case is used mostly by combine and cse. */
1221 if (GET_MODE (XEXP (x, 0)) == mode)
1222 return XEXP (x, 0);
1223 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1224 return gen_lowpart_common (mode, XEXP (x, 0));
1225 else if (msize < xsize)
1226 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1228 else if (GET_CODE (x) == SUBREG || REG_P (x)
1229 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1230 || GET_CODE (x) == CONST_DOUBLE || CONST_INT_P (x))
1231 return simplify_gen_subreg (mode, x, innermode, offset);
1233 /* Otherwise, we can't do this. */
1234 return 0;
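/* An illustrative sketch, kept under #if 0 (exposition only): the
   sign/zero-extension shortcut above.  REG is assumed to be an SImode
   pseudo; the constant example is phrased for a little-endian target.  */
#if 0
static void
example_gen_lowpart_common (rtx reg)
{
  rtx ext = gen_rtx_SIGN_EXTEND (DImode, reg);

  /* Asking for the SImode lowpart of (sign_extend:DI (reg:SI)) just hands
     back the original register.  */
  gcc_assert (gen_lowpart_common (SImode, ext) == reg);

  /* For a CONST_INT the work is delegated to simplify_gen_subreg, which
     folds the subreg: the QImode lowpart of 0x1234 is (const_int 0x34).  */
  gcc_assert (gen_lowpart_common (QImode, GEN_INT (0x1234))
              == GEN_INT (0x34));
}
#endif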
1238 gen_highpart (enum machine_mode mode, rtx x)
1240 unsigned int msize = GET_MODE_SIZE (mode);
1241 rtx result;
1243 /* This case loses if X is a subreg. To catch bugs early,
1244 complain if an invalid MODE is used even in other cases. */
1245 gcc_assert (msize <= UNITS_PER_WORD
1246 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1248 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1249 subreg_highpart_offset (mode, GET_MODE (x)));
1250 gcc_assert (result);
1252 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1253 the target if we have a MEM. gen_highpart must return a valid operand,
1254 emitting code if necessary to do so. */
1255 if (MEM_P (result))
1257 result = validize_mem (result);
1258 gcc_assert (result);
1261 return result;
1264 /* Like gen_highpart, but accept the mode of the EXP operand in case EXP
1265 can be a VOIDmode constant. */
1267 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1269 if (GET_MODE (exp) != VOIDmode)
1271 gcc_assert (GET_MODE (exp) == innermode);
1272 return gen_highpart (outermode, exp);
1274 return simplify_gen_subreg (outermode, exp, innermode,
1275 subreg_highpart_offset (outermode, innermode));
1278 /* Return the SUBREG_BYTE for an OUTERMODE lowpart of an INNERMODE value. */
1280 unsigned int
1281 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1283 unsigned int offset = 0;
1284 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1286 if (difference > 0)
1288 if (WORDS_BIG_ENDIAN)
1289 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1290 if (BYTES_BIG_ENDIAN)
1291 offset += difference % UNITS_PER_WORD;
1294 return offset;
1297 /* Return offset in bytes to get OUTERMODE high part
1298 of the value in mode INNERMODE stored in memory in target format. */
1299 unsigned int
1300 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1302 unsigned int offset = 0;
1303 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1305 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1307 if (difference > 0)
1309 if (! WORDS_BIG_ENDIAN)
1310 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1311 if (! BYTES_BIG_ENDIAN)
1312 offset += difference % UNITS_PER_WORD;
1315 return offset;
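/* An illustrative worked example (exposition only): byte offsets for the
   SImode low and high parts of a DImode value, i.e. DIFFERENCE = 8 - 4 = 4
   bytes to distribute, on targets where word and byte order agree:

     little-endian:  subreg_lowpart_offset (SImode, DImode) == 0
                     subreg_highpart_offset (SImode, DImode) == 4
     big-endian:     subreg_lowpart_offset (SImode, DImode) == 4
                     subreg_highpart_offset (SImode, DImode) == 0

   The two functions are mirror images: between them they always account
   for the full DIFFERENCE, whichever end of the value the target stores
   first.  */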
1318 /* Return 1 iff X, assumed to be a SUBREG,
1319 refers to the least significant part of its containing reg.
1320 If X is not a SUBREG, always return 1 (it is its own low part!). */
1323 subreg_lowpart_p (const_rtx x)
1325 if (GET_CODE (x) != SUBREG)
1326 return 1;
1327 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1328 return 0;
1330 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1331 == SUBREG_BYTE (x));
1334 /* Return subword OFFSET of operand OP.
1335 The word number, OFFSET, is interpreted as the word number starting
1336 at the low-order address. OFFSET 0 is the low-order word if not
1337 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1339 If we cannot extract the required word, we return zero. Otherwise,
1340 an rtx corresponding to the requested word will be returned.
1342 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1343 reload has completed, a valid address will always be returned. After
1344 reload, if a valid address cannot be returned, we return zero.
1346 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1347 it is the responsibility of the caller.
1349 MODE is the mode of OP in case it is a CONST_INT.
1351 ??? This is still rather broken for some cases. The problem for the
1352 moment is that all callers of this thing provide no 'goal mode' to
1353 tell us to work with. This exists because all callers were written
1354 in a word based SUBREG world.
1355 Now use of this function can be deprecated by simplify_subreg in most
1356 cases.
1360 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1362 if (mode == VOIDmode)
1363 mode = GET_MODE (op);
1365 gcc_assert (mode != VOIDmode);
1367 /* If OP is narrower than a word, fail. */
1368 if (mode != BLKmode
1369 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1370 return 0;
1372 /* If we want a word outside OP, return zero. */
1373 if (mode != BLKmode
1374 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1375 return const0_rtx;
1377 /* Form a new MEM at the requested address. */
1378 if (MEM_P (op))
1380 rtx new_rtx = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1382 if (! validate_address)
1383 return new_rtx;
1385 else if (reload_completed)
1387 if (! strict_memory_address_addr_space_p (word_mode,
1388 XEXP (new_rtx, 0),
1389 MEM_ADDR_SPACE (op)))
1390 return 0;
1392 else
1393 return replace_equiv_address (new_rtx, XEXP (new_rtx, 0));
1396 /* Rest can be handled by simplify_subreg. */
1397 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
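/* An illustrative sketch, kept under #if 0 (exposition only): pulling the
   two word_mode halves out of a DImode value on a hypothetical 32-bit
   target (UNITS_PER_WORD == 4, word_mode == SImode), assuming REG is a
   DImode pseudo.  */
#if 0
static void
example_operand_subword (rtx reg)
{
  /* Words 0 and 1 of the register become SImode subregs at byte offsets 0
     and 4; which of the two is the numerically low half depends on
     WORDS_BIG_ENDIAN.  */
  rtx w0 = operand_subword (reg, 0, 0, DImode);
  rtx w1 = operand_subword (reg, 1, 0, DImode);

  gcc_assert (GET_MODE (w0) == word_mode && GET_MODE (w1) == word_mode);

  /* Asking for a word beyond the value yields const0_rtx...  */
  gcc_assert (operand_subword (reg, 2, 0, DImode) == const0_rtx);

  /* ...and operand_subword_force is the variant to use when a zero return
     is not acceptable.  */
  gcc_assert (operand_subword_force (reg, 1, DImode) != 0);
}
#endif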
1400 /* Similar to `operand_subword', but never return 0. If we can't
1401 extract the required subword, put OP into a register and try again.
1402 The second attempt must succeed. We always validate the address in
1403 this case.
1405 MODE is the mode of OP, in case it is CONST_INT. */
1408 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1410 rtx result = operand_subword (op, offset, 1, mode);
1412 if (result)
1413 return result;
1415 if (mode != BLKmode && mode != VOIDmode)
1417 /* If this is a register which can not be accessed by words, copy it
1418 to a pseudo register. */
1419 if (REG_P (op))
1420 op = copy_to_reg (op);
1421 else
1422 op = force_reg (mode, op);
1425 result = operand_subword (op, offset, 1, mode);
1426 gcc_assert (result);
1428 return result;
1431 /* Returns 1 if the two MEM_EXPRs EXPR1 and EXPR2 can be considered equal,
1432 and 0 otherwise. */
1435 mem_expr_equal_p (const_tree expr1, const_tree expr2)
1437 if (expr1 == expr2)
1438 return 1;
1440 if (! expr1 || ! expr2)
1441 return 0;
1443 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1444 return 0;
1446 return operand_equal_p (expr1, expr2, 0);
1449 /* Return OFFSET if XEXP (MEM, 0) - OFFSET is known to be ALIGN
1450 bits aligned for 0 <= OFFSET < ALIGN / BITS_PER_UNIT, or
1451 -1 if not known. */
1454 get_mem_align_offset (rtx mem, unsigned int align)
1456 tree expr;
1457 unsigned HOST_WIDE_INT offset;
1459 /* This function can't use
1460 if (!MEM_EXPR (mem) || !MEM_OFFSET (mem)
1461 || !CONST_INT_P (MEM_OFFSET (mem))
1462 || (MAX (MEM_ALIGN (mem),
1463 get_object_alignment (MEM_EXPR (mem), align))
1464 < align))
1465 return -1;
1466 else
1467 return (- INTVAL (MEM_OFFSET (mem))) & (align / BITS_PER_UNIT - 1);
1468 for two reasons:
1469 - COMPONENT_REFs in MEM_EXPR can have NULL first operand,
1470 for <variable>. get_inner_reference doesn't handle it and
1471 even if it did, the alignment in that case needs to be determined
1472 from DECL_FIELD_CONTEXT's TYPE_ALIGN.
1473 - it would do suboptimal job for COMPONENT_REFs, even if MEM_EXPR
1474 isn't sufficiently aligned, the object it is in might be. */
1475 gcc_assert (MEM_P (mem));
1476 expr = MEM_EXPR (mem);
1477 if (expr == NULL_TREE
1478 || MEM_OFFSET (mem) == NULL_RTX
1479 || !CONST_INT_P (MEM_OFFSET (mem)))
1480 return -1;
1482 offset = INTVAL (MEM_OFFSET (mem));
1483 if (DECL_P (expr))
1485 if (DECL_ALIGN (expr) < align)
1486 return -1;
1488 else if (INDIRECT_REF_P (expr))
1490 if (TYPE_ALIGN (TREE_TYPE (expr)) < (unsigned int) align)
1491 return -1;
1493 else if (TREE_CODE (expr) == COMPONENT_REF)
1495 while (1)
1497 tree inner = TREE_OPERAND (expr, 0);
1498 tree field = TREE_OPERAND (expr, 1);
1499 tree byte_offset = component_ref_field_offset (expr);
1500 tree bit_offset = DECL_FIELD_BIT_OFFSET (field);
1502 if (!byte_offset
1503 || !host_integerp (byte_offset, 1)
1504 || !host_integerp (bit_offset, 1))
1505 return -1;
1507 offset += tree_low_cst (byte_offset, 1);
1508 offset += tree_low_cst (bit_offset, 1) / BITS_PER_UNIT;
1510 if (inner == NULL_TREE)
1512 if (TYPE_ALIGN (DECL_FIELD_CONTEXT (field))
1513 < (unsigned int) align)
1514 return -1;
1515 break;
1517 else if (DECL_P (inner))
1519 if (DECL_ALIGN (inner) < align)
1520 return -1;
1521 break;
1523 else if (TREE_CODE (inner) != COMPONENT_REF)
1524 return -1;
1525 expr = inner;
1528 else
1529 return -1;
1531 return offset & ((align / BITS_PER_UNIT) - 1);
1534 /* Given REF (a MEM) and T, either the type of X or the expression
1535 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1536 if we are making a new object of this type. BITPOS is nonzero if
1537 there is an offset outstanding on T that will be applied later. */
1539 void
1540 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1541 HOST_WIDE_INT bitpos)
1543 alias_set_type alias = MEM_ALIAS_SET (ref);
1544 tree expr = MEM_EXPR (ref);
1545 rtx offset = MEM_OFFSET (ref);
1546 rtx size = MEM_SIZE (ref);
1547 unsigned int align = MEM_ALIGN (ref);
1548 HOST_WIDE_INT apply_bitpos = 0;
1549 tree type;
1551 /* It can happen that type_for_mode was given a mode for which there
1552 is no language-level type. In which case it returns NULL, which
1553 we can see here. */
1554 if (t == NULL_TREE)
1555 return;
1557 type = TYPE_P (t) ? t : TREE_TYPE (t);
1558 if (type == error_mark_node)
1559 return;
1561 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1562 wrong answer, as it assumes that DECL_RTL already has the right alias
1563 info. Callers should not set DECL_RTL until after the call to
1564 set_mem_attributes. */
1565 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1567 /* Get the alias set from the expression or type (perhaps using a
1568 front-end routine) and use it. */
1569 alias = get_alias_set (t);
1571 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1572 MEM_IN_STRUCT_P (ref)
1573 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
1574 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1576 /* If we are making an object of this type, or if this is a DECL, we know
1577 that it is a scalar if the type is not an aggregate. */
1578 if ((objectp || DECL_P (t))
1579 && ! AGGREGATE_TYPE_P (type)
1580 && TREE_CODE (type) != COMPLEX_TYPE)
1581 MEM_SCALAR_P (ref) = 1;
1583 /* We can set the alignment from the type if we are making an object,
1584 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1585 if (objectp || TREE_CODE (t) == INDIRECT_REF || TYPE_ALIGN_OK (type))
1586 align = MAX (align, TYPE_ALIGN (type));
1588 else if (TREE_CODE (t) == MEM_REF)
1590 tree op0 = TREE_OPERAND (t, 0);
1591 if (TREE_CODE (op0) == ADDR_EXPR
1592 && (DECL_P (TREE_OPERAND (op0, 0))
1593 || CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))))
1595 if (DECL_P (TREE_OPERAND (op0, 0)))
1596 align = DECL_ALIGN (TREE_OPERAND (op0, 0));
1597 else if (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0)))
1599 align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (op0, 0)));
1600 #ifdef CONSTANT_ALIGNMENT
1601 align = CONSTANT_ALIGNMENT (TREE_OPERAND (op0, 0), align);
1602 #endif
1604 if (TREE_INT_CST_LOW (TREE_OPERAND (t, 1)) != 0)
1606 unsigned HOST_WIDE_INT ioff
1607 = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
1608 unsigned HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1609 align = MIN (aoff, align);
1612 else
1613 /* ??? This isn't fully correct, we can't set the alignment from the
1614 type in all cases. */
1615 align = MAX (align, TYPE_ALIGN (type));
1618 else if (TREE_CODE (t) == TARGET_MEM_REF)
1619 /* ??? This isn't fully correct, we can't set the alignment from the
1620 type in all cases. */
1621 align = MAX (align, TYPE_ALIGN (type));
1623 /* If the size is known, we can set that. */
1624 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1625 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1627 /* If T is not a type, we may be able to deduce some more information about
1628 the expression. */
1629 if (! TYPE_P (t))
1631 tree base;
1632 bool align_computed = false;
1634 if (TREE_THIS_VOLATILE (t))
1635 MEM_VOLATILE_P (ref) = 1;
1637 /* Now remove any conversions: they don't change what the underlying
1638 object is. Likewise for SAVE_EXPR. */
1639 while (CONVERT_EXPR_P (t)
1640 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1641 || TREE_CODE (t) == SAVE_EXPR)
1642 t = TREE_OPERAND (t, 0);
1644 /* We may look through structure-like accesses for the purposes of
1645 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1646 base = t;
1647 while (TREE_CODE (base) == COMPONENT_REF
1648 || TREE_CODE (base) == REALPART_EXPR
1649 || TREE_CODE (base) == IMAGPART_EXPR
1650 || TREE_CODE (base) == BIT_FIELD_REF)
1651 base = TREE_OPERAND (base, 0);
1653 if (TREE_CODE (base) == MEM_REF
1654 && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
1655 base = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
1656 if (DECL_P (base))
1658 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1659 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1660 else
1661 MEM_NOTRAP_P (ref) = 1;
1663 else if (TREE_CODE (base) == INDIRECT_REF
1664 || TREE_CODE (base) == MEM_REF
1665 || TREE_CODE (base) == TARGET_MEM_REF
1666 || TREE_CODE (base) == ARRAY_REF
1667 || TREE_CODE (base) == ARRAY_RANGE_REF)
1668 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1670 base = get_base_address (base);
1671 if (base && DECL_P (base)
1672 && TREE_READONLY (base)
1673 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1674 MEM_READONLY_P (ref) = 1;
1676 /* If this expression uses its parent's alias set, mark it such
1677 that we won't change it. */
1678 if (component_uses_parent_alias_set (t))
1679 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1681 /* If this is a decl, set the attributes of the MEM from it. */
1682 if (DECL_P (t))
1684 expr = t;
1685 offset = const0_rtx;
1686 apply_bitpos = bitpos;
1687 size = (DECL_SIZE_UNIT (t)
1688 && host_integerp (DECL_SIZE_UNIT (t), 1)
1689 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1690 align = DECL_ALIGN (t);
1691 align_computed = true;
1694 /* If this is a constant, we know the alignment. */
1695 else if (CONSTANT_CLASS_P (t))
1697 align = TYPE_ALIGN (type);
1698 #ifdef CONSTANT_ALIGNMENT
1699 align = CONSTANT_ALIGNMENT (t, align);
1700 #endif
1701 align_computed = true;
1704 /* If this is a field reference and not a bit-field, record it. */
1705 /* ??? There is some information that can be gleaned from bit-fields,
1706 such as the word offset in the structure that might be modified.
1707 But skip it for now. */
1708 else if (TREE_CODE (t) == COMPONENT_REF
1709 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1711 expr = t;
1712 offset = const0_rtx;
1713 apply_bitpos = bitpos;
1714 /* ??? Any reason the field size would be different than
1715 the size we got from the type? */
1718 /* If this is an array reference, look for an outer field reference. */
1719 else if (TREE_CODE (t) == ARRAY_REF)
1721 tree off_tree = size_zero_node;
1722 /* We can't modify t, because we use it at the end of the
1723 function. */
1724 tree t2 = t;
1728 tree index = TREE_OPERAND (t2, 1);
1729 tree low_bound = array_ref_low_bound (t2);
1730 tree unit_size = array_ref_element_size (t2);
1732 /* We assume all arrays have sizes that are a multiple of a byte.
1733 First subtract the lower bound, if any, in the type of the
1734 index, then convert to sizetype and multiply by the size of
1735 the array element. */
1736 if (! integer_zerop (low_bound))
1737 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1738 index, low_bound);
1740 off_tree = size_binop (PLUS_EXPR,
1741 size_binop (MULT_EXPR,
1742 fold_convert (sizetype,
1743 index),
1744 unit_size),
1745 off_tree);
1746 t2 = TREE_OPERAND (t2, 0);
1748 while (TREE_CODE (t2) == ARRAY_REF);
1750 if (DECL_P (t2))
1752 expr = t2;
1753 offset = NULL;
1754 if (host_integerp (off_tree, 1))
1756 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1757 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1758 align = DECL_ALIGN (t2);
1759 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1760 align = aoff;
1761 align_computed = true;
1762 offset = GEN_INT (ioff);
1763 apply_bitpos = bitpos;
1766 else if (TREE_CODE (t2) == COMPONENT_REF)
1768 expr = t2;
1769 offset = NULL;
1770 if (host_integerp (off_tree, 1))
1772 offset = GEN_INT (tree_low_cst (off_tree, 1));
1773 apply_bitpos = bitpos;
1775 /* ??? Any reason the field size would be different than
1776 the size we got from the type? */
1779 /* If this is an indirect reference, record it. */
1780 else if (TREE_CODE (t) == MEM_REF)
1782 expr = t;
1783 offset = const0_rtx;
1784 apply_bitpos = bitpos;
1788 /* If this is an indirect reference, record it. */
1789 else if (TREE_CODE (t) == MEM_REF
1790 || TREE_CODE (t) == TARGET_MEM_REF)
1792 expr = t;
1793 offset = const0_rtx;
1794 apply_bitpos = bitpos;
1797 if (!align_computed && !INDIRECT_REF_P (t))
1799 unsigned int obj_align = get_object_alignment (t, BIGGEST_ALIGNMENT);
1800 align = MAX (align, obj_align);
1804 /* If we modified OFFSET based on T, then subtract the outstanding
1805 bit position offset. Similarly, increase the size of the accessed
1806 object to contain the negative offset. */
1807 if (apply_bitpos)
1809 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1810 if (size)
1811 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1814 /* Now set the attributes we computed above. */
1815 MEM_ATTRS (ref)
1816 = get_mem_attrs (alias, expr, offset, size, align,
1817 TYPE_ADDR_SPACE (type), GET_MODE (ref));
1819 /* If this is already known to be a scalar or aggregate, we are done. */
1820 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1821 return;
1823 /* If it is a reference into an aggregate, this is part of an aggregate.
1824 Otherwise we don't know. */
1825 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1826 || TREE_CODE (t) == ARRAY_RANGE_REF
1827 || TREE_CODE (t) == BIT_FIELD_REF)
1828 MEM_IN_STRUCT_P (ref) = 1;
1831 void
1832 set_mem_attributes (rtx ref, tree t, int objectp)
1834 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
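/* An illustrative sketch, kept under #if 0 (exposition only): the usual
   pattern when expanding a reference to a decl.  Build the MEM first, then
   let set_mem_attributes derive the alias set, MEM_EXPR, size and
   alignment from the tree.  DECL and ADDR are hypothetical.  */
#if 0
static rtx
example_set_mem_attributes (tree decl, rtx addr)
{
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);

  /* OBJECTP == 1 says this MEM is the object DECL itself, so its type's
     alignment and size may be used directly.  */
  set_mem_attributes (mem, decl, 1);
  return mem;
}
#endif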
1837 /* Set the alias set of MEM to SET. */
1839 void
1840 set_mem_alias_set (rtx mem, alias_set_type set)
1842 /* If the new and old alias sets don't conflict, something is wrong. */
1843 gcc_checking_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1845 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1846 MEM_SIZE (mem), MEM_ALIGN (mem),
1847 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1850 /* Set the address space of MEM to ADDRSPACE (target-defined). */
1852 void
1853 set_mem_addr_space (rtx mem, addr_space_t addrspace)
1855 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1856 MEM_OFFSET (mem), MEM_SIZE (mem),
1857 MEM_ALIGN (mem), addrspace, GET_MODE (mem));
1860 /* Set the alignment of MEM to ALIGN bits. */
1862 void
1863 set_mem_align (rtx mem, unsigned int align)
1865 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1866 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1867 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1870 /* Set the expr for MEM to EXPR. */
1872 void
1873 set_mem_expr (rtx mem, tree expr)
1875 MEM_ATTRS (mem)
1876 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1877 MEM_SIZE (mem), MEM_ALIGN (mem),
1878 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1881 /* Set the offset of MEM to OFFSET. */
1883 void
1884 set_mem_offset (rtx mem, rtx offset)
1886 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1887 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1888 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1891 /* Set the size of MEM to SIZE. */
1893 void
1894 set_mem_size (rtx mem, rtx size)
1896 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1897 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1898 MEM_ADDR_SPACE (mem), GET_MODE (mem));
1901 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1902 and its address changed to ADDR. (VOIDmode means don't change the mode.
1903 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1904 returned memory location is required to be valid. The memory
1905 attributes are not changed. */
1907 static rtx
1908 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1910 addr_space_t as;
1911 rtx new_rtx;
1913 gcc_assert (MEM_P (memref));
1914 as = MEM_ADDR_SPACE (memref);
1915 if (mode == VOIDmode)
1916 mode = GET_MODE (memref);
1917 if (addr == 0)
1918 addr = XEXP (memref, 0);
1919 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1920 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1921 return memref;
1923 if (validate)
1925 if (reload_in_progress || reload_completed)
1926 gcc_assert (memory_address_addr_space_p (mode, addr, as));
1927 else
1928 addr = memory_address_addr_space (mode, addr, as);
1931 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1932 return memref;
1934 new_rtx = gen_rtx_MEM (mode, addr);
1935 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1936 return new_rtx;
1939 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1940 way we are changing MEMREF, so we only preserve the alias set. */
1943 change_address (rtx memref, enum machine_mode mode, rtx addr)
1945 rtx new_rtx = change_address_1 (memref, mode, addr, 1), size;
1946 enum machine_mode mmode = GET_MODE (new_rtx);
1947 unsigned int align;
1949 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1950 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1952 /* If there are no changes, just return the original memory reference. */
1953 if (new_rtx == memref)
1955 if (MEM_ATTRS (memref) == 0
1956 || (MEM_EXPR (memref) == NULL
1957 && MEM_OFFSET (memref) == NULL
1958 && MEM_SIZE (memref) == size
1959 && MEM_ALIGN (memref) == align))
1960 return new_rtx;
1962 new_rtx = gen_rtx_MEM (mmode, XEXP (memref, 0));
1963 MEM_COPY_ATTRIBUTES (new_rtx, memref);
1966 MEM_ATTRS (new_rtx)
1967 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align,
1968 MEM_ADDR_SPACE (memref), mmode);
1970 return new_rtx;
1973 /* Return a memory reference like MEMREF, but with its mode changed
1974 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1975 nonzero, the memory address is forced to be valid.
1976 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1977 and the caller is responsible for adjusting the MEMREF base register. */
1980 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1981 int validate, int adjust)
1983 rtx addr = XEXP (memref, 0);
1984 rtx new_rtx;
1985 rtx memoffset = MEM_OFFSET (memref);
1986 rtx size = 0;
1987 unsigned int memalign = MEM_ALIGN (memref);
1988 addr_space_t as = MEM_ADDR_SPACE (memref);
1989 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
1990 int pbits;
1992 /* If there are no changes, just return the original memory reference. */
1993 if (mode == GET_MODE (memref) && !offset
1994 && (!validate || memory_address_addr_space_p (mode, addr, as)))
1995 return memref;
1997 /* ??? Prefer to create garbage instead of creating shared rtl.
1998 This may happen even if offset is nonzero -- consider
1999 (plus (plus reg reg) const_int) -- so do this always. */
2000 addr = copy_rtx (addr);
2002 /* Convert a possibly large offset to a signed value within the
2003 range of the target address space. */
2004 pbits = GET_MODE_BITSIZE (address_mode);
2005 if (HOST_BITS_PER_WIDE_INT > pbits)
2007 int shift = HOST_BITS_PER_WIDE_INT - pbits;
2008 offset = (((HOST_WIDE_INT) ((unsigned HOST_WIDE_INT) offset << shift))
2009 >> shift);
2012 if (adjust)
2014 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
2015 object, we can merge it into the LO_SUM. */
2016 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
2017 && offset >= 0
2018 && (unsigned HOST_WIDE_INT) offset
2019 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
2020 addr = gen_rtx_LO_SUM (address_mode, XEXP (addr, 0),
2021 plus_constant (XEXP (addr, 1), offset));
2022 else
2023 addr = plus_constant (addr, offset);
2026 new_rtx = change_address_1 (memref, mode, addr, validate);
2028 /* If the address is a REG, change_address_1 rightfully returns memref,
2029 but this would destroy memref's MEM_ATTRS. */
2030 if (new_rtx == memref && offset != 0)
2031 new_rtx = copy_rtx (new_rtx);
2033 /* Compute the new values of the memory attributes due to this adjustment.
2034 We add the offsets and update the alignment. */
2035 if (memoffset)
2036 memoffset = GEN_INT (offset + INTVAL (memoffset));
2038 /* Compute the new alignment by taking the MIN of the alignment and the
2039 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
2040 is zero. */
2041 if (offset != 0)
2042 memalign
2043 = MIN (memalign,
2044 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
2046 /* We can compute the size in a number of ways. */
2047 if (GET_MODE (new_rtx) != BLKmode)
2048 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new_rtx)));
2049 else if (MEM_SIZE (memref))
2050 size = plus_constant (MEM_SIZE (memref), -offset);
2052 MEM_ATTRS (new_rtx) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
2053 memoffset, size, memalign, as,
2054 GET_MODE (new_rtx));
2056 /* At some point, we should validate that this offset is within the object,
2057 if all the appropriate values are known. */
2058 return new_rtx;
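/* Usage sketch: adjust_address_1 is normally reached through the
   adjust_address and adjust_address_nv wrappers declared in expr.h.  A
   common use is splitting a multi-word reference into word-sized
   pieces; OP below is a hypothetical DImode MEM on a 32-bit target:

	rtx word0 = adjust_address (op, SImode, 0);
	rtx word1 = adjust_address (op, SImode, 4);

   Both results keep OP's MEM_EXPR and alias set, while MEM_OFFSET,
   MEM_SIZE and MEM_ALIGN are recomputed as above.  */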
2061 /* Return a memory reference like MEMREF, but with its mode changed
2062 to MODE and its address changed to ADDR, which is assumed to be
2063 MEMREF offset by OFFSET bytes. If VALIDATE is
2064 nonzero, the memory address is forced to be valid. */
2067 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
2068 HOST_WIDE_INT offset, int validate)
2070 memref = change_address_1 (memref, VOIDmode, addr, validate);
2071 return adjust_address_1 (memref, mode, offset, validate, 0);
2074 /* Return a memory reference like MEMREF, but whose address is changed by
2075 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
2076 known to be in OFFSET (possibly 1). */
2079 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
2081 rtx new_rtx, addr = XEXP (memref, 0);
2082 addr_space_t as = MEM_ADDR_SPACE (memref);
2083 enum machine_mode address_mode = targetm.addr_space.address_mode (as);
2085 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2087 /* At this point we don't know _why_ the address is invalid. It
2088 could have secondary memory references, multiplies or anything.
2090 However, if we did go and rearrange things, we can wind up not
2091 being able to recognize the magic around pic_offset_table_rtx.
2092 This stuff is fragile, and is yet another example of why it is
2093 bad to expose PIC machinery too early. */
2094 if (! memory_address_addr_space_p (GET_MODE (memref), new_rtx, as)
2095 && GET_CODE (addr) == PLUS
2096 && XEXP (addr, 0) == pic_offset_table_rtx)
2098 addr = force_reg (GET_MODE (addr), addr);
2099 new_rtx = simplify_gen_binary (PLUS, address_mode, addr, offset);
2102 update_temp_slot_address (XEXP (memref, 0), new_rtx);
2103 new_rtx = change_address_1 (memref, VOIDmode, new_rtx, 1);
2105 /* If there are no changes, just return the original memory reference. */
2106 if (new_rtx == memref)
2107 return new_rtx;
2109 /* Update the alignment to reflect the offset. Reset the offset, which
2110 we don't know. */
2111 MEM_ATTRS (new_rtx)
2112 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
2113 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
2114 as, GET_MODE (new_rtx));
2115 return new_rtx;
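/* Usage sketch: offset_address is the variable-offset counterpart of
   adjust_address.  For an array whose index is only known as an rtx
   (ARRAY_MEM and IDX are hypothetical, element size 4 bytes):

	rtx byte_off = simplify_gen_binary (MULT, Pmode, idx, GEN_INT (4));
	rtx elt = offset_address (array_mem, byte_off, 4);

   The POW2 argument records the known power-of-two factor of the
   offset and only affects the conservative MEM_ALIGN computed above;
   the offset itself is left unknown in the new attributes.  */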
2118 /* Return a memory reference like MEMREF, but with its address changed to
2119 ADDR. The caller is asserting that the actual piece of memory pointed
2120 to is the same, just the form of the address is being changed, such as
2121 by putting something into a register. */
2124 replace_equiv_address (rtx memref, rtx addr)
2126 /* change_address_1 copies the memory attribute structure without change
2127 and that's exactly what we want here. */
2128 update_temp_slot_address (XEXP (memref, 0), addr);
2129 return change_address_1 (memref, VOIDmode, addr, 1);
2132 /* Likewise, but the reference is not required to be valid. */
2135 replace_equiv_address_nv (rtx memref, rtx addr)
2137 return change_address_1 (memref, VOIDmode, addr, 0);
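/* Usage sketch: the typical reason to call replace_equiv_address is
   that an address has just been forced into a register and the MEM's
   attributes must survive the rewrite (MEM here is hypothetical):

	rtx addr = force_reg (Pmode, XEXP (mem, 0));
	mem = replace_equiv_address (mem, addr);

   replace_equiv_address_nv does the same without requiring the new
   address to be valid yet.  */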
2140 /* Return a memory reference like MEMREF, but with its mode widened to
2141 MODE and offset by OFFSET. This would be used by targets that e.g.
2142 cannot issue QImode memory operations and have to use SImode memory
2143 operations plus masking logic. */
2146 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2148 rtx new_rtx = adjust_address_1 (memref, mode, offset, 1, 1);
2149 tree expr = MEM_EXPR (new_rtx);
2150 rtx memoffset = MEM_OFFSET (new_rtx);
2151 unsigned int size = GET_MODE_SIZE (mode);
2153 /* If there are no changes, just return the original memory reference. */
2154 if (new_rtx == memref)
2155 return new_rtx;
2157 /* If we don't know what offset we were at within the expression, then
2158 we can't know if we've overstepped the bounds. */
2159 if (! memoffset)
2160 expr = NULL_TREE;
2162 while (expr)
2164 if (TREE_CODE (expr) == COMPONENT_REF)
2166 tree field = TREE_OPERAND (expr, 1);
2167 tree offset = component_ref_field_offset (expr);
2169 if (! DECL_SIZE_UNIT (field))
2171 expr = NULL_TREE;
2172 break;
2175 /* Is the field at least as large as the access? If so, ok,
2176 otherwise strip back to the containing structure. */
2177 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2178 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2179 && INTVAL (memoffset) >= 0)
2180 break;
2182 if (! host_integerp (offset, 1))
2184 expr = NULL_TREE;
2185 break;
2188 expr = TREE_OPERAND (expr, 0);
2189 memoffset
2190 = (GEN_INT (INTVAL (memoffset)
2191 + tree_low_cst (offset, 1)
2192 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2193 / BITS_PER_UNIT)));
2195 /* Similarly for the decl. */
2196 else if (DECL_P (expr)
2197 && DECL_SIZE_UNIT (expr)
2198 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2199 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2200 && (! memoffset || INTVAL (memoffset) >= 0))
2201 break;
2202 else
2204 /* The widened memory access overflows the expression, which means
2205 that it could alias another expression. Zap it. */
2206 expr = NULL_TREE;
2207 break;
2211 if (! expr)
2212 memoffset = NULL_RTX;
2214 /* The widened memory may alias other stuff, so zap the alias set. */
2215 /* ??? Maybe use get_alias_set on any remaining expression. */
2217 MEM_ATTRS (new_rtx) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2218 MEM_ALIGN (new_rtx),
2219 MEM_ADDR_SPACE (new_rtx), mode);
2221 return new_rtx;
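/* Usage sketch: a target that cannot issue byte-sized memory
   operations might widen the access and mask afterwards (QIMEM is a
   hypothetical QImode MEM):

	rtx wide = widen_memory_access (qimem, SImode, 0);

   As the code above shows, the result's alias set is cleared, and its
   MEM_EXPR and MEM_OFFSET survive only while the widened access
   provably stays inside the original object.  */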
2224 /* A fake decl that is used as the MEM_EXPR of spill slots. */
2225 static GTY(()) tree spill_slot_decl;
2227 tree
2228 get_spill_slot_decl (bool force_build_p)
2230 tree d = spill_slot_decl;
2231 rtx rd;
2233 if (d || !force_build_p)
2234 return d;
2236 d = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
2237 VAR_DECL, get_identifier ("%sfp"), void_type_node);
2238 DECL_ARTIFICIAL (d) = 1;
2239 DECL_IGNORED_P (d) = 1;
2240 TREE_USED (d) = 1;
2241 spill_slot_decl = d;
2243 rd = gen_rtx_MEM (BLKmode, frame_pointer_rtx);
2244 MEM_NOTRAP_P (rd) = 1;
2245 MEM_ATTRS (rd) = get_mem_attrs (new_alias_set (), d, const0_rtx,
2246 NULL_RTX, 0, ADDR_SPACE_GENERIC, BLKmode);
2247 SET_DECL_RTL (d, rd);
2249 return d;
2252 /* Given MEM, a result from assign_stack_local, fill in the memory
2253 attributes as appropriate for a register allocator spill slot.
2254 These slots are not aliasable by other memory. We arrange for
2255 them all to use a single MEM_EXPR, so that the aliasing code can
2256 work properly in the case of shared spill slots. */
2258 void
2259 set_mem_attrs_for_spill (rtx mem)
2261 alias_set_type alias;
2262 rtx addr, offset;
2263 tree expr;
2265 expr = get_spill_slot_decl (true);
2266 alias = MEM_ALIAS_SET (DECL_RTL (expr));
2268 /* We expect the incoming memory to be of the form:
2269 (mem:MODE (plus (reg sfp) (const_int offset)))
2270 with perhaps the plus missing for offset = 0. */
2271 addr = XEXP (mem, 0);
2272 offset = const0_rtx;
2273 if (GET_CODE (addr) == PLUS
2274 && CONST_INT_P (XEXP (addr, 1)))
2275 offset = XEXP (addr, 1);
2277 MEM_ATTRS (mem) = get_mem_attrs (alias, expr, offset,
2278 MEM_SIZE (mem), MEM_ALIGN (mem),
2279 ADDR_SPACE_GENERIC, GET_MODE (mem));
2280 MEM_NOTRAP_P (mem) = 1;
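/* Usage sketch: roughly what the register allocator does when it
   creates a spill slot (MODE, SIZE and ALIGN are placeholders):

	rtx slot = assign_stack_local (mode, size, align);
	set_mem_attrs_for_spill (slot);

   Afterwards every spill slot shares the single %sfp MEM_EXPR built by
   get_spill_slot_decl, so the alias machinery treats them uniformly.  */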
2283 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2286 gen_label_rtx (void)
2288 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2289 NULL, label_num++, NULL);
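/* Usage sketch: the CODE_LABEL returned here is not part of the insn
   chain until it is emitted, so the usual pattern is:

	rtx label = gen_label_rtx ();
	... emit a conditional branch targeting LABEL ...
	emit_label (label);

   (The elided branch emission depends on the target.)  */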
2292 /* For procedure integration. */
2294 /* Install new pointers to the first and last insns in the chain.
2295 Also, set cur_insn_uid to one higher than the last in use.
2296 Used for an inline-procedure after copying the insn chain. */
2298 void
2299 set_new_first_and_last_insn (rtx first, rtx last)
2301 rtx insn;
2303 set_first_insn (first);
2304 set_last_insn (last);
2305 cur_insn_uid = 0;
2307 if (MIN_NONDEBUG_INSN_UID || MAY_HAVE_DEBUG_INSNS)
2309 int debug_count = 0;
2311 cur_insn_uid = MIN_NONDEBUG_INSN_UID - 1;
2312 cur_debug_insn_uid = 0;
2314 for (insn = first; insn; insn = NEXT_INSN (insn))
2315 if (INSN_UID (insn) < MIN_NONDEBUG_INSN_UID)
2316 cur_debug_insn_uid = MAX (cur_debug_insn_uid, INSN_UID (insn));
2317 else
2319 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2320 if (DEBUG_INSN_P (insn))
2321 debug_count++;
2324 if (debug_count)
2325 cur_debug_insn_uid = MIN_NONDEBUG_INSN_UID + debug_count;
2326 else
2327 cur_debug_insn_uid++;
2329 else
2330 for (insn = first; insn; insn = NEXT_INSN (insn))
2331 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2333 cur_insn_uid++;
2336 /* Go through all the RTL insn bodies and copy any invalid shared
2337 structure. This routine should only be called once. */
2339 static void
2340 unshare_all_rtl_1 (rtx insn)
2342 /* Unshare just about everything else. */
2343 unshare_all_rtl_in_chain (insn);
2345 /* Make sure the addresses of stack slots found outside the insn chain
2346 (such as, in DECL_RTL of a variable) are not shared
2347 with the insn chain.
2349 This special care is necessary when the stack slot MEM does not
2350 actually appear in the insn chain. If it does appear, its address
2351 is unshared from all else at that point. */
2352 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2355 /* Go through all the RTL insn bodies and copy any invalid shared
2356 structure, again. This is a fairly expensive thing to do so it
2357 should be done sparingly. */
2359 void
2360 unshare_all_rtl_again (rtx insn)
2362 rtx p;
2363 tree decl;
2365 for (p = insn; p; p = NEXT_INSN (p))
2366 if (INSN_P (p))
2368 reset_used_flags (PATTERN (p));
2369 reset_used_flags (REG_NOTES (p));
2372 /* Make sure that virtual stack slots are not shared. */
2373 set_used_decls (DECL_INITIAL (cfun->decl));
2375 /* Make sure that virtual parameters are not shared. */
2376 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = DECL_CHAIN (decl))
2377 set_used_flags (DECL_RTL (decl));
2379 reset_used_flags (stack_slot_list);
2381 unshare_all_rtl_1 (insn);
2384 unsigned int
2385 unshare_all_rtl (void)
2387 unshare_all_rtl_1 (get_insns ());
2388 return 0;
2391 struct rtl_opt_pass pass_unshare_all_rtl =
2394 RTL_PASS,
2395 "unshare", /* name */
2396 NULL, /* gate */
2397 unshare_all_rtl, /* execute */
2398 NULL, /* sub */
2399 NULL, /* next */
2400 0, /* static_pass_number */
2401 TV_NONE, /* tv_id */
2402 0, /* properties_required */
2403 0, /* properties_provided */
2404 0, /* properties_destroyed */
2405 0, /* todo_flags_start */
2406 TODO_dump_func | TODO_verify_rtl_sharing /* todo_flags_finish */
2411 /* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2412 Recursively does the same for subexpressions. */
2414 static void
2415 verify_rtx_sharing (rtx orig, rtx insn)
2417 rtx x = orig;
2418 int i;
2419 enum rtx_code code;
2420 const char *format_ptr;
2422 if (x == 0)
2423 return;
2425 code = GET_CODE (x);
2427 /* These types may be freely shared. */
2429 switch (code)
2431 case REG:
2432 case DEBUG_EXPR:
2433 case VALUE:
2434 case CONST_INT:
2435 case CONST_DOUBLE:
2436 case CONST_FIXED:
2437 case CONST_VECTOR:
2438 case SYMBOL_REF:
2439 case LABEL_REF:
2440 case CODE_LABEL:
2441 case PC:
2442 case CC0:
2443 case SCRATCH:
2444 return;
2445 /* SCRATCH rtxes must be shared because they represent distinct values. */
2446 case CLOBBER:
2447 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2448 return;
2449 break;
2451 case CONST:
2452 if (shared_const_p (orig))
2453 return;
2454 break;
2456 case MEM:
2457 /* A MEM is allowed to be shared if its address is constant. */
2458 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2459 || reload_completed || reload_in_progress)
2460 return;
2462 break;
2464 default:
2465 break;
2468 /* This rtx may not be shared. If it has already been seen,
2469 replace it with a copy of itself. */
2470 #ifdef ENABLE_CHECKING
2471 if (RTX_FLAG (x, used))
2473 error ("invalid rtl sharing found in the insn");
2474 debug_rtx (insn);
2475 error ("shared rtx");
2476 debug_rtx (x);
2477 internal_error ("internal consistency failure");
2479 #endif
2480 gcc_assert (!RTX_FLAG (x, used));
2482 RTX_FLAG (x, used) = 1;
2484 /* Now scan the subexpressions recursively. */
2486 format_ptr = GET_RTX_FORMAT (code);
2488 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2490 switch (*format_ptr++)
2492 case 'e':
2493 verify_rtx_sharing (XEXP (x, i), insn);
2494 break;
2496 case 'E':
2497 if (XVEC (x, i) != NULL)
2499 int j;
2500 int len = XVECLEN (x, i);
2502 for (j = 0; j < len; j++)
2504 /* We allow sharing of ASM_OPERANDS inside single
2505 instruction. */
2506 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2507 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2508 == ASM_OPERANDS))
2509 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2510 else
2511 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2514 break;
2517 return;
2520 /* Go through all the RTL insn bodies and check that there is no unexpected
2521 sharing in between the subexpressions. */
2523 DEBUG_FUNCTION void
2524 verify_rtl_sharing (void)
2526 rtx p;
2528 timevar_push (TV_VERIFY_RTL_SHARING);
2530 for (p = get_insns (); p; p = NEXT_INSN (p))
2531 if (INSN_P (p))
2533 reset_used_flags (PATTERN (p));
2534 reset_used_flags (REG_NOTES (p));
2535 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2537 int i;
2538 rtx q, sequence = PATTERN (p);
2540 for (i = 0; i < XVECLEN (sequence, 0); i++)
2542 q = XVECEXP (sequence, 0, i);
2543 gcc_assert (INSN_P (q));
2544 reset_used_flags (PATTERN (q));
2545 reset_used_flags (REG_NOTES (q));
2550 for (p = get_insns (); p; p = NEXT_INSN (p))
2551 if (INSN_P (p))
2553 verify_rtx_sharing (PATTERN (p), p);
2554 verify_rtx_sharing (REG_NOTES (p), p);
2557 timevar_pop (TV_VERIFY_RTL_SHARING);
2560 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2561 Assumes the mark bits are cleared at entry. */
2563 void
2564 unshare_all_rtl_in_chain (rtx insn)
2566 for (; insn; insn = NEXT_INSN (insn))
2567 if (INSN_P (insn))
2569 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2570 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2574 /* Go through all virtual stack slots of a function and mark them as
2575 shared. We never replace the DECL_RTLs themselves with a copy,
2576 but expressions mentioned in a DECL_RTL cannot be shared with
2577 expressions in the instruction stream.
2579 Note that reload may convert pseudo registers into memories in-place.
2580 Pseudo registers are always shared, but MEMs never are. Thus if we
2581 reset the used flags on MEMs in the instruction stream, we must set
2582 them again on MEMs that appear in DECL_RTLs. */
2584 static void
2585 set_used_decls (tree blk)
2587 tree t;
2589 /* Mark decls. */
2590 for (t = BLOCK_VARS (blk); t; t = DECL_CHAIN (t))
2591 if (DECL_RTL_SET_P (t))
2592 set_used_flags (DECL_RTL (t));
2594 /* Now process sub-blocks. */
2595 for (t = BLOCK_SUBBLOCKS (blk); t; t = BLOCK_CHAIN (t))
2596 set_used_decls (t);
2599 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2600 Recursively does the same for subexpressions. Uses
2601 copy_rtx_if_shared_1 to reduce stack space. */
2604 copy_rtx_if_shared (rtx orig)
2606 copy_rtx_if_shared_1 (&orig);
2607 return orig;
2610 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2611 use. Recursively does the same for subexpressions. */
2613 static void
2614 copy_rtx_if_shared_1 (rtx *orig1)
2616 rtx x;
2617 int i;
2618 enum rtx_code code;
2619 rtx *last_ptr;
2620 const char *format_ptr;
2621 int copied = 0;
2622 int length;
2624 /* Repeat is used to turn tail-recursion into iteration. */
2625 repeat:
2626 x = *orig1;
2628 if (x == 0)
2629 return;
2631 code = GET_CODE (x);
2633 /* These types may be freely shared. */
2635 switch (code)
2637 case REG:
2638 case DEBUG_EXPR:
2639 case VALUE:
2640 case CONST_INT:
2641 case CONST_DOUBLE:
2642 case CONST_FIXED:
2643 case CONST_VECTOR:
2644 case SYMBOL_REF:
2645 case LABEL_REF:
2646 case CODE_LABEL:
2647 case PC:
2648 case CC0:
2649 case SCRATCH:
2650 /* SCRATCH rtxes must be shared because they represent distinct values. */
2651 return;
2652 case CLOBBER:
2653 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2654 return;
2655 break;
2657 case CONST:
2658 if (shared_const_p (x))
2659 return;
2660 break;
2662 case DEBUG_INSN:
2663 case INSN:
2664 case JUMP_INSN:
2665 case CALL_INSN:
2666 case NOTE:
2667 case BARRIER:
2668 /* The chain of insns is not being copied. */
2669 return;
2671 default:
2672 break;
2675 /* This rtx may not be shared. If it has already been seen,
2676 replace it with a copy of itself. */
2678 if (RTX_FLAG (x, used))
2680 x = shallow_copy_rtx (x);
2681 copied = 1;
2683 RTX_FLAG (x, used) = 1;
2685 /* Now scan the subexpressions recursively.
2686 We can store any replaced subexpressions directly into X
2687 since we know X is not shared! Any vectors in X
2688 must be copied if X was copied. */
2690 format_ptr = GET_RTX_FORMAT (code);
2691 length = GET_RTX_LENGTH (code);
2692 last_ptr = NULL;
2694 for (i = 0; i < length; i++)
2696 switch (*format_ptr++)
2698 case 'e':
2699 if (last_ptr)
2700 copy_rtx_if_shared_1 (last_ptr);
2701 last_ptr = &XEXP (x, i);
2702 break;
2704 case 'E':
2705 if (XVEC (x, i) != NULL)
2707 int j;
2708 int len = XVECLEN (x, i);
2710 /* Copy the vector iff I copied the rtx and the length
2711 is nonzero. */
2712 if (copied && len > 0)
2713 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2715 /* Call recursively on all inside the vector. */
2716 for (j = 0; j < len; j++)
2718 if (last_ptr)
2719 copy_rtx_if_shared_1 (last_ptr);
2720 last_ptr = &XVECEXP (x, i, j);
2723 break;
2726 *orig1 = x;
2727 if (last_ptr)
2729 orig1 = last_ptr;
2730 goto repeat;
2732 return;
2735 /* Set the USED bit in X and its non-shareable subparts to FLAG. */
2737 static void
2738 mark_used_flags (rtx x, int flag)
2740 int i, j;
2741 enum rtx_code code;
2742 const char *format_ptr;
2743 int length;
2745 /* Repeat is used to turn tail-recursion into iteration. */
2746 repeat:
2747 if (x == 0)
2748 return;
2750 code = GET_CODE (x);
2752 /* These types may be freely shared so we needn't do any resetting
2753 for them. */
2755 switch (code)
2757 case REG:
2758 case DEBUG_EXPR:
2759 case VALUE:
2760 case CONST_INT:
2761 case CONST_DOUBLE:
2762 case CONST_FIXED:
2763 case CONST_VECTOR:
2764 case SYMBOL_REF:
2765 case CODE_LABEL:
2766 case PC:
2767 case CC0:
2768 return;
2770 case DEBUG_INSN:
2771 case INSN:
2772 case JUMP_INSN:
2773 case CALL_INSN:
2774 case NOTE:
2775 case LABEL_REF:
2776 case BARRIER:
2777 /* The chain of insns is not being copied. */
2778 return;
2780 default:
2781 break;
2784 RTX_FLAG (x, used) = flag;
2786 format_ptr = GET_RTX_FORMAT (code);
2787 length = GET_RTX_LENGTH (code);
2789 for (i = 0; i < length; i++)
2791 switch (*format_ptr++)
2793 case 'e':
2794 if (i == length-1)
2796 x = XEXP (x, i);
2797 goto repeat;
2799 mark_used_flags (XEXP (x, i), flag);
2800 break;
2802 case 'E':
2803 for (j = 0; j < XVECLEN (x, i); j++)
2804 mark_used_flags (XVECEXP (x, i, j), flag);
2805 break;
2810 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2811 to look for shared sub-parts. */
2813 void
2814 reset_used_flags (rtx x)
2816 mark_used_flags (x, 0);
2819 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2820 to look for shared sub-parts. */
2822 void
2823 set_used_flags (rtx x)
2825 mark_used_flags (x, 1);
2828 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2829 Return X or the rtx for the pseudo reg the value of X was copied into.
2830 OTHER must be valid as a SET_DEST. */
2833 make_safe_from (rtx x, rtx other)
2835 while (1)
2836 switch (GET_CODE (other))
2838 case SUBREG:
2839 other = SUBREG_REG (other);
2840 break;
2841 case STRICT_LOW_PART:
2842 case SIGN_EXTEND:
2843 case ZERO_EXTEND:
2844 other = XEXP (other, 0);
2845 break;
2846 default:
2847 goto done;
2849 done:
2850 if ((MEM_P (other)
2851 && ! CONSTANT_P (x)
2852 && !REG_P (x)
2853 && GET_CODE (x) != SUBREG)
2854 || (REG_P (other)
2855 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2856 || reg_mentioned_p (other, x))))
2858 rtx temp = gen_reg_rtx (GET_MODE (x));
2859 emit_move_insn (temp, x);
2860 return temp;
2862 return x;
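/* Usage sketch: make_safe_from is used during expansion when a value
   must survive an intermediate store into a target (VALUE and TARGET
   are placeholders):

	value = make_safe_from (value, target);
	... emit code that clobbers TARGET ...
	emit_move_insn (target, value);

   If VALUE might be affected by the store, it is first copied into a
   fresh pseudo; otherwise it is returned unchanged.  */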
2865 /* Emission of insns (adding them to the doubly-linked list). */
2867 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2870 get_last_insn_anywhere (void)
2872 struct sequence_stack *stack;
2873 if (get_last_insn ())
2874 return get_last_insn ();
2875 for (stack = seq_stack; stack; stack = stack->next)
2876 if (stack->last != 0)
2877 return stack->last;
2878 return 0;
2881 /* Return the first nonnote insn emitted in current sequence or current
2882 function. This routine looks inside SEQUENCEs. */
2885 get_first_nonnote_insn (void)
2887 rtx insn = get_insns ();
2889 if (insn)
2891 if (NOTE_P (insn))
2892 for (insn = next_insn (insn);
2893 insn && NOTE_P (insn);
2894 insn = next_insn (insn))
2895 continue;
2896 else
2898 if (NONJUMP_INSN_P (insn)
2899 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2900 insn = XVECEXP (PATTERN (insn), 0, 0);
2904 return insn;
2907 /* Return the last nonnote insn emitted in current sequence or current
2908 function. This routine looks inside SEQUENCEs. */
2911 get_last_nonnote_insn (void)
2913 rtx insn = get_last_insn ();
2915 if (insn)
2917 if (NOTE_P (insn))
2918 for (insn = previous_insn (insn);
2919 insn && NOTE_P (insn);
2920 insn = previous_insn (insn))
2921 continue;
2922 else
2924 if (NONJUMP_INSN_P (insn)
2925 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2926 insn = XVECEXP (PATTERN (insn), 0,
2927 XVECLEN (PATTERN (insn), 0) - 1);
2931 return insn;
2934 /* Return the number of actual (non-debug) insns emitted in this
2935 function. */
2938 get_max_insn_count (void)
2940 int n = cur_insn_uid;
2942 /* The table size must be stable across -g, to avoid codegen
2943 differences due to debug insns, and not be affected by
2944 -fmin-insn-uid, to avoid excessive table size and to simplify
2945 debugging of -fcompare-debug failures. */
2946 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
2947 n -= cur_debug_insn_uid;
2948 else
2949 n -= MIN_NONDEBUG_INSN_UID;
2951 return n;
2955 /* Return the next insn. If it is a SEQUENCE, return the first insn
2956 of the sequence. */
2959 next_insn (rtx insn)
2961 if (insn)
2963 insn = NEXT_INSN (insn);
2964 if (insn && NONJUMP_INSN_P (insn)
2965 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2966 insn = XVECEXP (PATTERN (insn), 0, 0);
2969 return insn;
2972 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2973 of the sequence. */
2976 previous_insn (rtx insn)
2978 if (insn)
2980 insn = PREV_INSN (insn);
2981 if (insn && NONJUMP_INSN_P (insn)
2982 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2983 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2986 return insn;
2989 /* Return the next insn after INSN that is not a NOTE. This routine does not
2990 look inside SEQUENCEs. */
2993 next_nonnote_insn (rtx insn)
2995 while (insn)
2997 insn = NEXT_INSN (insn);
2998 if (insn == 0 || !NOTE_P (insn))
2999 break;
3002 return insn;
3005 /* Return the next insn after INSN that is not a NOTE, but stop the
3006 search before we enter another basic block. This routine does not
3007 look inside SEQUENCEs. */
3010 next_nonnote_insn_bb (rtx insn)
3012 while (insn)
3014 insn = NEXT_INSN (insn);
3015 if (insn == 0 || !NOTE_P (insn))
3016 break;
3017 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3018 return NULL_RTX;
3021 return insn;
3024 /* Return the previous insn before INSN that is not a NOTE. This routine does
3025 not look inside SEQUENCEs. */
3028 prev_nonnote_insn (rtx insn)
3030 while (insn)
3032 insn = PREV_INSN (insn);
3033 if (insn == 0 || !NOTE_P (insn))
3034 break;
3037 return insn;
3040 /* Return the previous insn before INSN that is not a NOTE, but stop
3041 the search before we enter another basic block. This routine does
3042 not look inside SEQUENCEs. */
3045 prev_nonnote_insn_bb (rtx insn)
3047 while (insn)
3049 insn = PREV_INSN (insn);
3050 if (insn == 0 || !NOTE_P (insn))
3051 break;
3052 if (NOTE_INSN_BASIC_BLOCK_P (insn))
3053 return NULL_RTX;
3056 return insn;
3059 /* Return the next insn after INSN that is not a DEBUG_INSN. This
3060 routine does not look inside SEQUENCEs. */
3063 next_nondebug_insn (rtx insn)
3065 while (insn)
3067 insn = NEXT_INSN (insn);
3068 if (insn == 0 || !DEBUG_INSN_P (insn))
3069 break;
3072 return insn;
3075 /* Return the previous insn before INSN that is not a DEBUG_INSN.
3076 This routine does not look inside SEQUENCEs. */
3079 prev_nondebug_insn (rtx insn)
3081 while (insn)
3083 insn = PREV_INSN (insn);
3084 if (insn == 0 || !DEBUG_INSN_P (insn))
3085 break;
3088 return insn;
3091 /* Return the next insn after INSN that is not a NOTE nor DEBUG_INSN.
3092 This routine does not look inside SEQUENCEs. */
3095 next_nonnote_nondebug_insn (rtx insn)
3097 while (insn)
3099 insn = NEXT_INSN (insn);
3100 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3101 break;
3104 return insn;
3107 /* Return the previous insn before INSN that is not a NOTE nor DEBUG_INSN.
3108 This routine does not look inside SEQUENCEs. */
3111 prev_nonnote_nondebug_insn (rtx insn)
3113 while (insn)
3115 insn = PREV_INSN (insn);
3116 if (insn == 0 || (!NOTE_P (insn) && !DEBUG_INSN_P (insn)))
3117 break;
3120 return insn;
3123 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
3124 or 0, if there is none. This routine does not look inside
3125 SEQUENCEs. */
3128 next_real_insn (rtx insn)
3130 while (insn)
3132 insn = NEXT_INSN (insn);
3133 if (insn == 0 || INSN_P (insn))
3134 break;
3137 return insn;
3140 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
3141 or 0, if there is none. This routine does not look inside
3142 SEQUENCEs. */
3145 prev_real_insn (rtx insn)
3147 while (insn)
3149 insn = PREV_INSN (insn);
3150 if (insn == 0 || INSN_P (insn))
3151 break;
3154 return insn;
3157 /* Return the last CALL_INSN in the current list, or 0 if there is none.
3158 This routine does not look inside SEQUENCEs. */
3161 last_call_insn (void)
3163 rtx insn;
3165 for (insn = get_last_insn ();
3166 insn && !CALL_P (insn);
3167 insn = PREV_INSN (insn))
3170 return insn;
3173 /* Find the next insn after INSN that really does something. This routine
3174 does not look inside SEQUENCEs. After reload this also skips over
3175 standalone USE and CLOBBER insns. */
3178 active_insn_p (const_rtx insn)
3180 return (CALL_P (insn) || JUMP_P (insn)
3181 || (NONJUMP_INSN_P (insn)
3182 && (! reload_completed
3183 || (GET_CODE (PATTERN (insn)) != USE
3184 && GET_CODE (PATTERN (insn)) != CLOBBER))));
3188 next_active_insn (rtx insn)
3190 while (insn)
3192 insn = NEXT_INSN (insn);
3193 if (insn == 0 || active_insn_p (insn))
3194 break;
3197 return insn;
3200 /* Find the last insn before INSN that really does something. This routine
3201 does not look inside SEQUENCEs. After reload this also skips over
3202 standalone USE and CLOBBER insns. */
3205 prev_active_insn (rtx insn)
3207 while (insn)
3209 insn = PREV_INSN (insn);
3210 if (insn == 0 || active_insn_p (insn))
3211 break;
3214 return insn;
3217 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
3220 next_label (rtx insn)
3222 while (insn)
3224 insn = NEXT_INSN (insn);
3225 if (insn == 0 || LABEL_P (insn))
3226 break;
3229 return insn;
3232 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
3235 prev_label (rtx insn)
3237 while (insn)
3239 insn = PREV_INSN (insn);
3240 if (insn == 0 || LABEL_P (insn))
3241 break;
3244 return insn;
3247 /* Return the last label to mark the same position as LABEL. Return null
3248 if LABEL itself is null. */
3251 skip_consecutive_labels (rtx label)
3253 rtx insn;
3255 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
3256 if (LABEL_P (insn))
3257 label = insn;
3259 return label;
3262 #ifdef HAVE_cc0
3263 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
3264 and REG_CC_USER notes so we can find it. */
3266 void
3267 link_cc0_insns (rtx insn)
3269 rtx user = next_nonnote_insn (insn);
3271 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3272 user = XVECEXP (PATTERN (user), 0, 0);
3274 add_reg_note (user, REG_CC_SETTER, insn);
3275 add_reg_note (insn, REG_CC_USER, user);
3278 /* Return the next insn that uses CC0 after INSN, which is assumed to
3279 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3280 applied to the result of this function should yield INSN).
3282 Normally, this is simply the next insn. However, if a REG_CC_USER note
3283 is present, it contains the insn that uses CC0.
3285 Return 0 if we can't find the insn. */
3288 next_cc0_user (rtx insn)
3290 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3292 if (note)
3293 return XEXP (note, 0);
3295 insn = next_nonnote_insn (insn);
3296 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3297 insn = XVECEXP (PATTERN (insn), 0, 0);
3299 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3300 return insn;
3302 return 0;
3305 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3306 note, it is the previous insn. */
3309 prev_cc0_setter (rtx insn)
3311 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3313 if (note)
3314 return XEXP (note, 0);
3316 insn = prev_nonnote_insn (insn);
3317 gcc_assert (sets_cc0_p (PATTERN (insn)));
3319 return insn;
3321 #endif
3323 #ifdef AUTO_INC_DEC
3324 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3326 static int
3327 find_auto_inc (rtx *xp, void *data)
3329 rtx x = *xp;
3330 rtx reg = (rtx) data;
3332 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3333 return 0;
3335 switch (GET_CODE (x))
3337 case PRE_DEC:
3338 case PRE_INC:
3339 case POST_DEC:
3340 case POST_INC:
3341 case PRE_MODIFY:
3342 case POST_MODIFY:
3343 if (rtx_equal_p (reg, XEXP (x, 0)))
3344 return 1;
3345 break;
3347 default:
3348 gcc_unreachable ();
3350 return -1;
3352 #endif
3354 /* Increment the label uses for all labels present in rtx. */
3356 static void
3357 mark_label_nuses (rtx x)
3359 enum rtx_code code;
3360 int i, j;
3361 const char *fmt;
3363 code = GET_CODE (x);
3364 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3365 LABEL_NUSES (XEXP (x, 0))++;
3367 fmt = GET_RTX_FORMAT (code);
3368 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3370 if (fmt[i] == 'e')
3371 mark_label_nuses (XEXP (x, i));
3372 else if (fmt[i] == 'E')
3373 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3374 mark_label_nuses (XVECEXP (x, i, j));
3379 /* Try splitting insns that can be split for better scheduling.
3380 PAT is the pattern which might split.
3381 TRIAL is the insn providing PAT.
3382 LAST is nonzero if we should return the last insn of the sequence produced.
3384 If this routine succeeds in splitting, it returns the first or last
3385 replacement insn depending on the value of LAST. Otherwise, it
3386 returns TRIAL. If the insn to be returned can be split, it will be. */
3389 try_split (rtx pat, rtx trial, int last)
3391 rtx before = PREV_INSN (trial);
3392 rtx after = NEXT_INSN (trial);
3393 int has_barrier = 0;
3394 rtx note, seq, tem;
3395 int probability;
3396 rtx insn_last, insn;
3397 int njumps = 0;
3399 /* We're not good at redistributing frame information. */
3400 if (RTX_FRAME_RELATED_P (trial))
3401 return trial;
3403 if (any_condjump_p (trial)
3404 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3405 split_branch_probability = INTVAL (XEXP (note, 0));
3406 probability = split_branch_probability;
3408 seq = split_insns (pat, trial);
3410 split_branch_probability = -1;
3412 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3413 We may need to handle this specially. */
3414 if (after && BARRIER_P (after))
3416 has_barrier = 1;
3417 after = NEXT_INSN (after);
3420 if (!seq)
3421 return trial;
3423 /* Avoid infinite loop if any insn of the result matches
3424 the original pattern. */
3425 insn_last = seq;
3426 while (1)
3428 if (INSN_P (insn_last)
3429 && rtx_equal_p (PATTERN (insn_last), pat))
3430 return trial;
3431 if (!NEXT_INSN (insn_last))
3432 break;
3433 insn_last = NEXT_INSN (insn_last);
3436 /* We will be adding the new sequence to the function. The splitters
3437 may have introduced invalid RTL sharing, so unshare the sequence now. */
3438 unshare_all_rtl_in_chain (seq);
3440 /* Mark labels. */
3441 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3443 if (JUMP_P (insn))
3445 mark_jump_label (PATTERN (insn), insn, 0);
3446 njumps++;
3447 if (probability != -1
3448 && any_condjump_p (insn)
3449 && !find_reg_note (insn, REG_BR_PROB, 0))
3451 /* We can preserve the REG_BR_PROB notes only if exactly
3452 one jump is created, otherwise the machine description
3453 is responsible for this step using
3454 split_branch_probability variable. */
3455 gcc_assert (njumps == 1);
3456 add_reg_note (insn, REG_BR_PROB, GEN_INT (probability));
3461 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3462 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3463 if (CALL_P (trial))
3465 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3466 if (CALL_P (insn))
3468 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3469 while (*p)
3470 p = &XEXP (*p, 1);
3471 *p = CALL_INSN_FUNCTION_USAGE (trial);
3472 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3474 /* Update the debug information for the CALL_INSN. */
3475 if (flag_enable_icf_debug)
3476 (*debug_hooks->copy_call_info) (trial, insn);
3480 /* Copy notes, particularly those related to the CFG. */
3481 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3483 switch (REG_NOTE_KIND (note))
3485 case REG_EH_REGION:
3486 copy_reg_eh_region_note_backward (note, insn_last, NULL);
3487 break;
3489 case REG_NORETURN:
3490 case REG_SETJMP:
3491 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3493 if (CALL_P (insn))
3494 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3496 break;
3498 case REG_NON_LOCAL_GOTO:
3499 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3501 if (JUMP_P (insn))
3502 add_reg_note (insn, REG_NOTE_KIND (note), XEXP (note, 0));
3504 break;
3506 #ifdef AUTO_INC_DEC
3507 case REG_INC:
3508 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3510 rtx reg = XEXP (note, 0);
3511 if (!FIND_REG_INC_NOTE (insn, reg)
3512 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3513 add_reg_note (insn, REG_INC, reg);
3515 break;
3516 #endif
3518 default:
3519 break;
3523 /* If there are LABELS inside the split insns increment the
3524 usage count so we don't delete the label. */
3525 if (INSN_P (trial))
3527 insn = insn_last;
3528 while (insn != NULL_RTX)
3530 /* JUMP_P insns have already been "marked" above. */
3531 if (NONJUMP_INSN_P (insn))
3532 mark_label_nuses (PATTERN (insn));
3534 insn = PREV_INSN (insn);
3538 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3540 delete_insn (trial);
3541 if (has_barrier)
3542 emit_barrier_after (tem);
3544 /* Recursively call try_split for each new insn created; by the
3545 time control returns here that insn will be fully split, so
3546 set LAST and continue from the insn after the one returned.
3547 We can't use next_active_insn here since AFTER may be a note.
3548 Ignore deleted insns, which can occur if not optimizing. */
3549 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3550 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3551 tem = try_split (PATTERN (tem), tem, 1);
3553 /* Return either the first or the last insn, depending on which was
3554 requested. */
3555 return last
3556 ? (after ? PREV_INSN (after) : get_last_insn ())
3557 : NEXT_INSN (before);
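/* Usage sketch: the splitting passes invoke this on one insn at a
   time, roughly:

	rtx last = try_split (PATTERN (insn), insn, 1);

   If nothing was split, INSN itself comes back; otherwise LAST is the
   final insn of the replacement sequence and INSN has been deleted.  */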
3560 /* Make and return an INSN rtx, initializing all its slots.
3561 Store PATTERN in the pattern slots. */
3564 make_insn_raw (rtx pattern)
3566 rtx insn;
3568 insn = rtx_alloc (INSN);
3570 INSN_UID (insn) = cur_insn_uid++;
3571 PATTERN (insn) = pattern;
3572 INSN_CODE (insn) = -1;
3573 REG_NOTES (insn) = NULL;
3574 INSN_LOCATOR (insn) = curr_insn_locator ();
3575 BLOCK_FOR_INSN (insn) = NULL;
3577 #ifdef ENABLE_RTL_CHECKING
3578 if (insn
3579 && INSN_P (insn)
3580 && (returnjump_p (insn)
3581 || (GET_CODE (insn) == SET
3582 && SET_DEST (insn) == pc_rtx)))
3584 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3585 debug_rtx (insn);
3587 #endif
3589 return insn;
3592 /* Like `make_insn_raw' but make a DEBUG_INSN instead of an insn. */
3595 make_debug_insn_raw (rtx pattern)
3597 rtx insn;
3599 insn = rtx_alloc (DEBUG_INSN);
3600 INSN_UID (insn) = cur_debug_insn_uid++;
3601 if (cur_debug_insn_uid > MIN_NONDEBUG_INSN_UID)
3602 INSN_UID (insn) = cur_insn_uid++;
3604 PATTERN (insn) = pattern;
3605 INSN_CODE (insn) = -1;
3606 REG_NOTES (insn) = NULL;
3607 INSN_LOCATOR (insn) = curr_insn_locator ();
3608 BLOCK_FOR_INSN (insn) = NULL;
3610 return insn;
3613 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3616 make_jump_insn_raw (rtx pattern)
3618 rtx insn;
3620 insn = rtx_alloc (JUMP_INSN);
3621 INSN_UID (insn) = cur_insn_uid++;
3623 PATTERN (insn) = pattern;
3624 INSN_CODE (insn) = -1;
3625 REG_NOTES (insn) = NULL;
3626 JUMP_LABEL (insn) = NULL;
3627 INSN_LOCATOR (insn) = curr_insn_locator ();
3628 BLOCK_FOR_INSN (insn) = NULL;
3630 return insn;
3633 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3635 static rtx
3636 make_call_insn_raw (rtx pattern)
3638 rtx insn;
3640 insn = rtx_alloc (CALL_INSN);
3641 INSN_UID (insn) = cur_insn_uid++;
3643 PATTERN (insn) = pattern;
3644 INSN_CODE (insn) = -1;
3645 REG_NOTES (insn) = NULL;
3646 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3647 INSN_LOCATOR (insn) = curr_insn_locator ();
3648 BLOCK_FOR_INSN (insn) = NULL;
3650 return insn;
3653 /* Add INSN to the end of the doubly-linked list.
3654 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3656 void
3657 add_insn (rtx insn)
3659 PREV_INSN (insn) = get_last_insn();
3660 NEXT_INSN (insn) = 0;
3662 if (NULL != get_last_insn())
3663 NEXT_INSN (get_last_insn ()) = insn;
3665 if (NULL == get_insns ())
3666 set_first_insn (insn);
3668 set_last_insn (insn);
3671 /* Add INSN into the doubly-linked list after insn AFTER. This and
3672 the next should be the only functions called to insert an insn once
3673 delay slots have been filled since only they know how to update a
3674 SEQUENCE. */
3676 void
3677 add_insn_after (rtx insn, rtx after, basic_block bb)
3679 rtx next = NEXT_INSN (after);
3681 gcc_assert (!optimize || !INSN_DELETED_P (after));
3683 NEXT_INSN (insn) = next;
3684 PREV_INSN (insn) = after;
3686 if (next)
3688 PREV_INSN (next) = insn;
3689 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3690 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3692 else if (get_last_insn () == after)
3693 set_last_insn (insn);
3694 else
3696 struct sequence_stack *stack = seq_stack;
3697 /* Scan all pending sequences too. */
3698 for (; stack; stack = stack->next)
3699 if (after == stack->last)
3701 stack->last = insn;
3702 break;
3705 gcc_assert (stack);
3708 if (!BARRIER_P (after)
3709 && !BARRIER_P (insn)
3710 && (bb = BLOCK_FOR_INSN (after)))
3712 set_block_for_insn (insn, bb);
3713 if (INSN_P (insn))
3714 df_insn_rescan (insn);
3715 /* Should not happen as first in the BB is always
3716 either NOTE or LABEL. */
3717 if (BB_END (bb) == after
3718 /* Avoid clobbering of structure when creating new BB. */
3719 && !BARRIER_P (insn)
3720 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3721 BB_END (bb) = insn;
3724 NEXT_INSN (after) = insn;
3725 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3727 rtx sequence = PATTERN (after);
3728 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3732 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3733 the previous should be the only functions called to insert an insn
3734 once delay slots have been filled since only they know how to
3735 update a SEQUENCE. If BB is NULL, an attempt is made to infer the
3736 BB from BEFORE. */
3738 void
3739 add_insn_before (rtx insn, rtx before, basic_block bb)
3741 rtx prev = PREV_INSN (before);
3743 gcc_assert (!optimize || !INSN_DELETED_P (before));
3745 PREV_INSN (insn) = prev;
3746 NEXT_INSN (insn) = before;
3748 if (prev)
3750 NEXT_INSN (prev) = insn;
3751 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3753 rtx sequence = PATTERN (prev);
3754 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3757 else if (get_insns () == before)
3758 set_first_insn (insn);
3759 else
3761 struct sequence_stack *stack = seq_stack;
3762 /* Scan all pending sequences too. */
3763 for (; stack; stack = stack->next)
3764 if (before == stack->first)
3766 stack->first = insn;
3767 break;
3770 gcc_assert (stack);
3773 if (!bb
3774 && !BARRIER_P (before)
3775 && !BARRIER_P (insn))
3776 bb = BLOCK_FOR_INSN (before);
3778 if (bb)
3780 set_block_for_insn (insn, bb);
3781 if (INSN_P (insn))
3782 df_insn_rescan (insn);
3783 /* Should not happen as first in the BB is always either NOTE or
3784 LABEL. */
3785 gcc_assert (BB_HEAD (bb) != insn
3786 /* Avoid clobbering of structure when creating new BB. */
3787 || BARRIER_P (insn)
3788 || NOTE_INSN_BASIC_BLOCK_P (insn));
3791 PREV_INSN (before) = insn;
3792 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3793 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3797 /* Replace INSN with a deleted instruction note. */
3799 void
3800 set_insn_deleted (rtx insn)
3802 df_insn_delete (BLOCK_FOR_INSN (insn), INSN_UID (insn));
3803 PUT_CODE (insn, NOTE);
3804 NOTE_KIND (insn) = NOTE_INSN_DELETED;
3808 /* Remove an insn from its doubly-linked list. This function knows how
3809 to handle sequences. */
3810 void
3811 remove_insn (rtx insn)
3813 rtx next = NEXT_INSN (insn);
3814 rtx prev = PREV_INSN (insn);
3815 basic_block bb;
3817 /* Later in the code, the block will be marked dirty. */
3818 df_insn_delete (NULL, INSN_UID (insn));
3820 if (prev)
3822 NEXT_INSN (prev) = next;
3823 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3825 rtx sequence = PATTERN (prev);
3826 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3829 else if (get_insns () == insn)
3831 if (next)
3832 PREV_INSN (next) = NULL;
3833 set_first_insn (next);
3835 else
3837 struct sequence_stack *stack = seq_stack;
3838 /* Scan all pending sequences too. */
3839 for (; stack; stack = stack->next)
3840 if (insn == stack->first)
3842 stack->first = next;
3843 break;
3846 gcc_assert (stack);
3849 if (next)
3851 PREV_INSN (next) = prev;
3852 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3853 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3855 else if (get_last_insn () == insn)
3856 set_last_insn (prev);
3857 else
3859 struct sequence_stack *stack = seq_stack;
3860 /* Scan all pending sequences too. */
3861 for (; stack; stack = stack->next)
3862 if (insn == stack->last)
3864 stack->last = prev;
3865 break;
3868 gcc_assert (stack);
3870 if (!BARRIER_P (insn)
3871 && (bb = BLOCK_FOR_INSN (insn)))
3873 if (NONDEBUG_INSN_P (insn))
3874 df_set_bb_dirty (bb);
3875 if (BB_HEAD (bb) == insn)
3877 /* Never ever delete the basic block note without deleting the whole
3878 basic block. */
3879 gcc_assert (!NOTE_P (insn));
3880 BB_HEAD (bb) = next;
3882 if (BB_END (bb) == insn)
3883 BB_END (bb) = prev;
3887 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3889 void
3890 add_function_usage_to (rtx call_insn, rtx call_fusage)
3892 gcc_assert (call_insn && CALL_P (call_insn));
3894 /* Put the register usage information on the CALL. If there is already
3895 some usage information, put ours at the end. */
3896 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3898 rtx link;
3900 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3901 link = XEXP (link, 1))
3904 XEXP (link, 1) = call_fusage;
3906 else
3907 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
3910 /* Delete all insns made since FROM.
3911 FROM becomes the new last instruction. */
3913 void
3914 delete_insns_since (rtx from)
3916 if (from == 0)
3917 set_first_insn (0);
3918 else
3919 NEXT_INSN (from) = 0;
3920 set_last_insn (from);
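/* Usage sketch: delete_insns_since supports the speculative-expansion
   idiom used throughout the expander (expand_something is a
   placeholder for any emitting helper):

	rtx last = get_last_insn ();
	rtx result = expand_something (...);
	if (result == NULL_RTX)
	  delete_insns_since (last);

   Everything emitted after LAST is discarded and LAST becomes the last
   insn again.  */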
3923 /* This function is deprecated, please use sequences instead.
3925 Move a consecutive bunch of insns to a different place in the chain.
3926 The insns to be moved are those between FROM and TO.
3927 They are moved to a new position after the insn AFTER.
3928 AFTER must not be FROM or TO or any insn in between.
3930 This function does not know about SEQUENCEs and hence should not be
3931 called after delay-slot filling has been done. */
3933 void
3934 reorder_insns_nobb (rtx from, rtx to, rtx after)
3936 #ifdef ENABLE_CHECKING
3937 rtx x;
3938 for (x = from; x != to; x = NEXT_INSN (x))
3939 gcc_assert (after != x);
3940 gcc_assert (after != to);
3941 #endif
3943 /* Splice this bunch out of where it is now. */
3944 if (PREV_INSN (from))
3945 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3946 if (NEXT_INSN (to))
3947 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3948 if (get_last_insn () == to)
3949 set_last_insn (PREV_INSN (from));
3950 if (get_insns () == from)
3951 set_first_insn (NEXT_INSN (to));
3953 /* Make the new neighbors point to it and it to them. */
3954 if (NEXT_INSN (after))
3955 PREV_INSN (NEXT_INSN (after)) = to;
3957 NEXT_INSN (to) = NEXT_INSN (after);
3958 PREV_INSN (from) = after;
3959 NEXT_INSN (after) = from;
3960 if (after == get_last_insn())
3961 set_last_insn (to);
3964 /* Same as function above, but take care to update BB boundaries. */
3965 void
3966 reorder_insns (rtx from, rtx to, rtx after)
3968 rtx prev = PREV_INSN (from);
3969 basic_block bb, bb2;
3971 reorder_insns_nobb (from, to, after);
3973 if (!BARRIER_P (after)
3974 && (bb = BLOCK_FOR_INSN (after)))
3976 rtx x;
3977 df_set_bb_dirty (bb);
3979 if (!BARRIER_P (from)
3980 && (bb2 = BLOCK_FOR_INSN (from)))
3982 if (BB_END (bb2) == to)
3983 BB_END (bb2) = prev;
3984 df_set_bb_dirty (bb2);
3987 if (BB_END (bb) == after)
3988 BB_END (bb) = to;
3990 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3991 if (!BARRIER_P (x))
3992 df_insn_change_bb (x, bb);
3997 /* Emit insn(s) of given code and pattern
3998 at a specified place within the doubly-linked list.
4000 All of the emit_foo global entry points accept an object
4001 X which is either an insn list or a PATTERN of a single
4002 instruction.
4004 There are thus a few canonical ways to generate code and
4005 emit it at a specific place in the instruction stream. For
4006 example, consider the instruction named SPOT and the fact that
4007 we would like to emit some instructions before SPOT. We might
4008 do it like this:
4010 start_sequence ();
4011 ... emit the new instructions ...
4012 insns_head = get_insns ();
4013 end_sequence ();
4015 emit_insn_before (insns_head, SPOT);
4017 It used to be common to generate SEQUENCE rtl instead, but that
4018 is a relic of the past which no longer occurs. The reason is that
4019 SEQUENCE rtl results in much fragmented RTL memory since the SEQUENCE
4020 generated would almost certainly die right after it was created. */
4022 /* Make X be output before the instruction BEFORE. */
4025 emit_insn_before_noloc (rtx x, rtx before, basic_block bb)
4027 rtx last = before;
4028 rtx insn;
4030 gcc_assert (before);
4032 if (x == NULL_RTX)
4033 return last;
4035 switch (GET_CODE (x))
4037 case DEBUG_INSN:
4038 case INSN:
4039 case JUMP_INSN:
4040 case CALL_INSN:
4041 case CODE_LABEL:
4042 case BARRIER:
4043 case NOTE:
4044 insn = x;
4045 while (insn)
4047 rtx next = NEXT_INSN (insn);
4048 add_insn_before (insn, before, bb);
4049 last = insn;
4050 insn = next;
4052 break;
4054 #ifdef ENABLE_RTL_CHECKING
4055 case SEQUENCE:
4056 gcc_unreachable ();
4057 break;
4058 #endif
4060 default:
4061 last = make_insn_raw (x);
4062 add_insn_before (last, before, bb);
4063 break;
4066 return last;
4069 /* Make an instruction with body X and code JUMP_INSN
4070 and output it before the instruction BEFORE. */
4073 emit_jump_insn_before_noloc (rtx x, rtx before)
4075 rtx insn, last = NULL_RTX;
4077 gcc_assert (before);
4079 switch (GET_CODE (x))
4081 case DEBUG_INSN:
4082 case INSN:
4083 case JUMP_INSN:
4084 case CALL_INSN:
4085 case CODE_LABEL:
4086 case BARRIER:
4087 case NOTE:
4088 insn = x;
4089 while (insn)
4091 rtx next = NEXT_INSN (insn);
4092 add_insn_before (insn, before, NULL);
4093 last = insn;
4094 insn = next;
4096 break;
4098 #ifdef ENABLE_RTL_CHECKING
4099 case SEQUENCE:
4100 gcc_unreachable ();
4101 break;
4102 #endif
4104 default:
4105 last = make_jump_insn_raw (x);
4106 add_insn_before (last, before, NULL);
4107 break;
4110 return last;
4113 /* Make an instruction with body X and code CALL_INSN
4114 and output it before the instruction BEFORE. */
4117 emit_call_insn_before_noloc (rtx x, rtx before)
4119 rtx last = NULL_RTX, insn;
4121 gcc_assert (before);
4123 switch (GET_CODE (x))
4125 case DEBUG_INSN:
4126 case INSN:
4127 case JUMP_INSN:
4128 case CALL_INSN:
4129 case CODE_LABEL:
4130 case BARRIER:
4131 case NOTE:
4132 insn = x;
4133 while (insn)
4135 rtx next = NEXT_INSN (insn);
4136 add_insn_before (insn, before, NULL);
4137 last = insn;
4138 insn = next;
4140 break;
4142 #ifdef ENABLE_RTL_CHECKING
4143 case SEQUENCE:
4144 gcc_unreachable ();
4145 break;
4146 #endif
4148 default:
4149 last = make_call_insn_raw (x);
4150 add_insn_before (last, before, NULL);
4151 break;
4154 return last;
4157 /* Make an instruction with body X and code DEBUG_INSN
4158 and output it before the instruction BEFORE. */
4161 emit_debug_insn_before_noloc (rtx x, rtx before)
4163 rtx last = NULL_RTX, insn;
4165 gcc_assert (before);
4167 switch (GET_CODE (x))
4169 case DEBUG_INSN:
4170 case INSN:
4171 case JUMP_INSN:
4172 case CALL_INSN:
4173 case CODE_LABEL:
4174 case BARRIER:
4175 case NOTE:
4176 insn = x;
4177 while (insn)
4179 rtx next = NEXT_INSN (insn);
4180 add_insn_before (insn, before, NULL);
4181 last = insn;
4182 insn = next;
4184 break;
4186 #ifdef ENABLE_RTL_CHECKING
4187 case SEQUENCE:
4188 gcc_unreachable ();
4189 break;
4190 #endif
4192 default:
4193 last = make_debug_insn_raw (x);
4194 add_insn_before (last, before, NULL);
4195 break;
4198 return last;
4201 /* Make an insn of code BARRIER
4202 and output it before the insn BEFORE. */
4205 emit_barrier_before (rtx before)
4207 rtx insn = rtx_alloc (BARRIER);
4209 INSN_UID (insn) = cur_insn_uid++;
4211 add_insn_before (insn, before, NULL);
4212 return insn;
4215 /* Emit the label LABEL before the insn BEFORE. */
4218 emit_label_before (rtx label, rtx before)
4220 /* This can be called twice for the same label as a result of the
4221 confusion that follows a syntax error! So make it harmless. */
4222 if (INSN_UID (label) == 0)
4224 INSN_UID (label) = cur_insn_uid++;
4225 add_insn_before (label, before, NULL);
4228 return label;
4231 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
4234 emit_note_before (enum insn_note subtype, rtx before)
4236 rtx note = rtx_alloc (NOTE);
4237 INSN_UID (note) = cur_insn_uid++;
4238 NOTE_KIND (note) = subtype;
4239 BLOCK_FOR_INSN (note) = NULL;
4240 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4242 add_insn_before (note, before, NULL);
4243 return note;
4246 /* Helper for emit_insn_after, handles lists of instructions
4247 efficiently. */
4249 static rtx
4250 emit_insn_after_1 (rtx first, rtx after, basic_block bb)
4252 rtx last;
4253 rtx after_after;
4254 if (!bb && !BARRIER_P (after))
4255 bb = BLOCK_FOR_INSN (after);
4257 if (bb)
4259 df_set_bb_dirty (bb);
4260 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4261 if (!BARRIER_P (last))
4263 set_block_for_insn (last, bb);
4264 df_insn_rescan (last);
4266 if (!BARRIER_P (last))
4268 set_block_for_insn (last, bb);
4269 df_insn_rescan (last);
4271 if (BB_END (bb) == after)
4272 BB_END (bb) = last;
4274 else
4275 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
4276 continue;
4278 after_after = NEXT_INSN (after);
4280 NEXT_INSN (after) = first;
4281 PREV_INSN (first) = after;
4282 NEXT_INSN (last) = after_after;
4283 if (after_after)
4284 PREV_INSN (after_after) = last;
4286 if (after == get_last_insn())
4287 set_last_insn (last);
4289 return last;
4292 /* Make X be output after the insn AFTER and set the BB of insn. If
4293 BB is NULL, an attempt is made to infer the BB from AFTER. */
4296 emit_insn_after_noloc (rtx x, rtx after, basic_block bb)
4298 rtx last = after;
4300 gcc_assert (after);
4302 if (x == NULL_RTX)
4303 return last;
4305 switch (GET_CODE (x))
4307 case DEBUG_INSN:
4308 case INSN:
4309 case JUMP_INSN:
4310 case CALL_INSN:
4311 case CODE_LABEL:
4312 case BARRIER:
4313 case NOTE:
4314 last = emit_insn_after_1 (x, after, bb);
4315 break;
4317 #ifdef ENABLE_RTL_CHECKING
4318 case SEQUENCE:
4319 gcc_unreachable ();
4320 break;
4321 #endif
4323 default:
4324 last = make_insn_raw (x);
4325 add_insn_after (last, after, bb);
4326 break;
4329 return last;
4333 /* Make an insn of code JUMP_INSN with body X
4334 and output it after the insn AFTER. */
4337 emit_jump_insn_after_noloc (rtx x, rtx after)
4339 rtx last;
4341 gcc_assert (after);
4343 switch (GET_CODE (x))
4345 case DEBUG_INSN:
4346 case INSN:
4347 case JUMP_INSN:
4348 case CALL_INSN:
4349 case CODE_LABEL:
4350 case BARRIER:
4351 case NOTE:
4352 last = emit_insn_after_1 (x, after, NULL);
4353 break;
4355 #ifdef ENABLE_RTL_CHECKING
4356 case SEQUENCE:
4357 gcc_unreachable ();
4358 break;
4359 #endif
4361 default:
4362 last = make_jump_insn_raw (x);
4363 add_insn_after (last, after, NULL);
4364 break;
4367 return last;
4370 /* Make an instruction with body X and code CALL_INSN
4371 and output it after the instruction AFTER. */
4374 emit_call_insn_after_noloc (rtx x, rtx after)
4376 rtx last;
4378 gcc_assert (after);
4380 switch (GET_CODE (x))
4382 case DEBUG_INSN:
4383 case INSN:
4384 case JUMP_INSN:
4385 case CALL_INSN:
4386 case CODE_LABEL:
4387 case BARRIER:
4388 case NOTE:
4389 last = emit_insn_after_1 (x, after, NULL);
4390 break;
4392 #ifdef ENABLE_RTL_CHECKING
4393 case SEQUENCE:
4394 gcc_unreachable ();
4395 break;
4396 #endif
4398 default:
4399 last = make_call_insn_raw (x);
4400 add_insn_after (last, after, NULL);
4401 break;
4404 return last;
4407 /* Make an instruction with body X and code DEBUG_INSN
4408 and output it after the instruction AFTER. */
4411 emit_debug_insn_after_noloc (rtx x, rtx after)
4413 rtx last;
4415 gcc_assert (after);
4417 switch (GET_CODE (x))
4419 case DEBUG_INSN:
4420 case INSN:
4421 case JUMP_INSN:
4422 case CALL_INSN:
4423 case CODE_LABEL:
4424 case BARRIER:
4425 case NOTE:
4426 last = emit_insn_after_1 (x, after, NULL);
4427 break;
4429 #ifdef ENABLE_RTL_CHECKING
4430 case SEQUENCE:
4431 gcc_unreachable ();
4432 break;
4433 #endif
4435 default:
4436 last = make_debug_insn_raw (x);
4437 add_insn_after (last, after, NULL);
4438 break;
4441 return last;
4444 /* Make an insn of code BARRIER
4445 and output it after the insn AFTER. */
4448 emit_barrier_after (rtx after)
4450 rtx insn = rtx_alloc (BARRIER);
4452 INSN_UID (insn) = cur_insn_uid++;
4454 add_insn_after (insn, after, NULL);
4455 return insn;
4458 /* Emit the label LABEL after the insn AFTER. */
4461 emit_label_after (rtx label, rtx after)
4463 /* This can be called twice for the same label
4464 as a result of the confusion that follows a syntax error!
4465 So make it harmless. */
4466 if (INSN_UID (label) == 0)
4468 INSN_UID (label) = cur_insn_uid++;
4469 add_insn_after (label, after, NULL);
4472 return label;
4475 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4478 emit_note_after (enum insn_note subtype, rtx after)
4480 rtx note = rtx_alloc (NOTE);
4481 INSN_UID (note) = cur_insn_uid++;
4482 NOTE_KIND (note) = subtype;
4483 BLOCK_FOR_INSN (note) = NULL;
4484 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4485 add_insn_after (note, after, NULL);
4486 return note;
4489 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4491 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4493 rtx last = emit_insn_after_noloc (pattern, after, NULL);
4495 if (pattern == NULL_RTX || !loc)
4496 return last;
4498 after = NEXT_INSN (after);
4499 while (1)
4501 if (active_insn_p (after) && !INSN_LOCATOR (after))
4502 INSN_LOCATOR (after) = loc;
4503 if (after == last)
4504 break;
4505 after = NEXT_INSN (after);
4507 return last;
4510 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4512 emit_insn_after (rtx pattern, rtx after)
4514 rtx prev = after;
4516 while (DEBUG_INSN_P (prev))
4517 prev = PREV_INSN (prev);
4519 if (INSN_P (prev))
4520 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4521 else
4522 return emit_insn_after_noloc (pattern, after, NULL);
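/* Illustrative sketch, not part of this file: a typical caller of
   emit_insn_after materializes a value right after an existing insn and
   lets the new insn inherit that insn's INSN_LOCATOR.  The pseudo, mode
   and constant below are arbitrary example choices.  */

static void
example_emit_after (rtx after_insn)
{
  rtx tmp = gen_reg_rtx (word_mode);   /* Fresh pseudo register.  */

  /* Emit "tmp = 42" after AFTER_INSN; emit_insn_after_setloc copies the
     locator of AFTER_INSN onto the new insn.  */
  emit_insn_after (gen_rtx_SET (VOIDmode, tmp, GEN_INT (42)), after_insn);
}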
4525 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4527 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4529 rtx last = emit_jump_insn_after_noloc (pattern, after);
4531 if (pattern == NULL_RTX || !loc)
4532 return last;
4534 after = NEXT_INSN (after);
4535 while (1)
4537 if (active_insn_p (after) && !INSN_LOCATOR (after))
4538 INSN_LOCATOR (after) = loc;
4539 if (after == last)
4540 break;
4541 after = NEXT_INSN (after);
4543 return last;
4546 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4548 emit_jump_insn_after (rtx pattern, rtx after)
4550 rtx prev = after;
4552 while (DEBUG_INSN_P (prev))
4553 prev = PREV_INSN (prev);
4555 if (INSN_P (prev))
4556 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4557 else
4558 return emit_jump_insn_after_noloc (pattern, after);
4561 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4563 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4565 rtx last = emit_call_insn_after_noloc (pattern, after);
4567 if (pattern == NULL_RTX || !loc)
4568 return last;
4570 after = NEXT_INSN (after);
4571 while (1)
4573 if (active_insn_p (after) && !INSN_LOCATOR (after))
4574 INSN_LOCATOR (after) = loc;
4575 if (after == last)
4576 break;
4577 after = NEXT_INSN (after);
4579 return last;
4582 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4584 emit_call_insn_after (rtx pattern, rtx after)
4586 rtx prev = after;
4588 while (DEBUG_INSN_P (prev))
4589 prev = PREV_INSN (prev);
4591 if (INSN_P (prev))
4592 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (prev));
4593 else
4594 return emit_call_insn_after_noloc (pattern, after);
4597 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4599 emit_debug_insn_after_setloc (rtx pattern, rtx after, int loc)
4601 rtx last = emit_debug_insn_after_noloc (pattern, after);
4603 if (pattern == NULL_RTX || !loc)
4604 return last;
4606 after = NEXT_INSN (after);
4607 while (1)
4609 if (active_insn_p (after) && !INSN_LOCATOR (after))
4610 INSN_LOCATOR (after) = loc;
4611 if (after == last)
4612 break;
4613 after = NEXT_INSN (after);
4615 return last;
4618 /* Like emit_debug_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4620 emit_debug_insn_after (rtx pattern, rtx after)
4622 if (INSN_P (after))
4623 return emit_debug_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4624 else
4625 return emit_debug_insn_after_noloc (pattern, after);
4628 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4630 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4632 rtx first = PREV_INSN (before);
4633 rtx last = emit_insn_before_noloc (pattern, before, NULL);
4635 if (pattern == NULL_RTX || !loc)
4636 return last;
4638 if (!first)
4639 first = get_insns ();
4640 else
4641 first = NEXT_INSN (first);
4642 while (1)
4644 if (active_insn_p (first) && !INSN_LOCATOR (first))
4645 INSN_LOCATOR (first) = loc;
4646 if (first == last)
4647 break;
4648 first = NEXT_INSN (first);
4650 return last;
4653 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4655 emit_insn_before (rtx pattern, rtx before)
4657 rtx next = before;
4659 while (DEBUG_INSN_P (next))
4660 next = PREV_INSN (next);
4662 if (INSN_P (next))
4663 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4664 else
4665 return emit_insn_before_noloc (pattern, before, NULL);
4668 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4670 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4672 rtx first = PREV_INSN (before);
4673 rtx last = emit_jump_insn_before_noloc (pattern, before);
4675 if (pattern == NULL_RTX)
4676 return last;
4678 first = NEXT_INSN (first);
4679 while (1)
4681 if (active_insn_p (first) && !INSN_LOCATOR (first))
4682 INSN_LOCATOR (first) = loc;
4683 if (first == last)
4684 break;
4685 first = NEXT_INSN (first);
4687 return last;
4690 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4692 emit_jump_insn_before (rtx pattern, rtx before)
4694 rtx next = before;
4696 while (DEBUG_INSN_P (next))
4697 next = PREV_INSN (next);
4699 if (INSN_P (next))
4700 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4701 else
4702 return emit_jump_insn_before_noloc (pattern, before);
4705 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4707 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4709 rtx first = PREV_INSN (before);
4710 rtx last = emit_call_insn_before_noloc (pattern, before);
4712 if (pattern == NULL_RTX)
4713 return last;
4715 first = NEXT_INSN (first);
4716 while (1)
4718 if (active_insn_p (first) && !INSN_LOCATOR (first))
4719 INSN_LOCATOR (first) = loc;
4720 if (first == last)
4721 break;
4722 first = NEXT_INSN (first);
4724 return last;
4727 /* Like emit_call_insn_before_noloc,
4728 but set INSN_LOCATOR according to BEFORE. */
4730 emit_call_insn_before (rtx pattern, rtx before)
4732 rtx next = before;
4734 while (DEBUG_INSN_P (next))
4735 next = PREV_INSN (next);
4737 if (INSN_P (next))
4738 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (next));
4739 else
4740 return emit_call_insn_before_noloc (pattern, before);
4743 /* Like emit_debug_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4745 emit_debug_insn_before_setloc (rtx pattern, rtx before, int loc)
4747 rtx first = PREV_INSN (before);
4748 rtx last = emit_debug_insn_before_noloc (pattern, before);
4750 if (pattern == NULL_RTX)
4751 return last;
4753 first = NEXT_INSN (first);
4754 while (1)
4756 if (active_insn_p (first) && !INSN_LOCATOR (first))
4757 INSN_LOCATOR (first) = loc;
4758 if (first == last)
4759 break;
4760 first = NEXT_INSN (first);
4762 return last;
4765 /* Like emit_debug_insn_before_noloc,
4766 but set INSN_LOCATOR according to BEFORE. */
4768 emit_debug_insn_before (rtx pattern, rtx before)
4770 if (INSN_P (before))
4771 return emit_debug_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4772 else
4773 return emit_debug_insn_before_noloc (pattern, before);
4776 /* Take X and emit it at the end of the doubly-linked
4777 INSN list.
4779 Returns the last insn emitted. */
4782 emit_insn (rtx x)
4784 rtx last = get_last_insn();
4785 rtx insn;
4787 if (x == NULL_RTX)
4788 return last;
4790 switch (GET_CODE (x))
4792 case DEBUG_INSN:
4793 case INSN:
4794 case JUMP_INSN:
4795 case CALL_INSN:
4796 case CODE_LABEL:
4797 case BARRIER:
4798 case NOTE:
4799 insn = x;
4800 while (insn)
4802 rtx next = NEXT_INSN (insn);
4803 add_insn (insn);
4804 last = insn;
4805 insn = next;
4807 break;
4809 #ifdef ENABLE_RTL_CHECKING
4810 case SEQUENCE:
4811 gcc_unreachable ();
4812 break;
4813 #endif
4815 default:
4816 last = make_insn_raw (x);
4817 add_insn (last);
4818 break;
4821 return last;
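/* Illustrative sketch, not part of this file: emit_insn accepts either a
   bare pattern, which it wraps in a fresh INSN, or an already-built chain
   of insns, which it splices onto the end of the stream.  The pseudo and
   patterns below are arbitrary examples; the sketch assumes it runs while
   RTL for the current function is being generated.  */

static void
example_emit_insn (void)
{
  rtx dst = gen_reg_rtx (word_mode);

  /* A bare SET pattern: emit_insn wraps it via make_insn_raw.  */
  emit_insn (gen_rtx_SET (VOIDmode, dst, const0_rtx));

  /* A pre-built chain (here the one-insn sequence returned by
     gen_clobber) is added insn by insn instead.  */
  emit_insn (gen_clobber (dst));
}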
4824 /* Make an insn of code DEBUG_INSN with pattern X
4825 and add it to the end of the doubly-linked list. */
4828 emit_debug_insn (rtx x)
4830 rtx last = get_last_insn();
4831 rtx insn;
4833 if (x == NULL_RTX)
4834 return last;
4836 switch (GET_CODE (x))
4838 case DEBUG_INSN:
4839 case INSN:
4840 case JUMP_INSN:
4841 case CALL_INSN:
4842 case CODE_LABEL:
4843 case BARRIER:
4844 case NOTE:
4845 insn = x;
4846 while (insn)
4848 rtx next = NEXT_INSN (insn);
4849 add_insn (insn);
4850 last = insn;
4851 insn = next;
4853 break;
4855 #ifdef ENABLE_RTL_CHECKING
4856 case SEQUENCE:
4857 gcc_unreachable ();
4858 break;
4859 #endif
4861 default:
4862 last = make_debug_insn_raw (x);
4863 add_insn (last);
4864 break;
4867 return last;
4870 /* Make an insn of code JUMP_INSN with pattern X
4871 and add it to the end of the doubly-linked list. */
4874 emit_jump_insn (rtx x)
4876 rtx last = NULL_RTX, insn;
4878 switch (GET_CODE (x))
4880 case DEBUG_INSN:
4881 case INSN:
4882 case JUMP_INSN:
4883 case CALL_INSN:
4884 case CODE_LABEL:
4885 case BARRIER:
4886 case NOTE:
4887 insn = x;
4888 while (insn)
4890 rtx next = NEXT_INSN (insn);
4891 add_insn (insn);
4892 last = insn;
4893 insn = next;
4895 break;
4897 #ifdef ENABLE_RTL_CHECKING
4898 case SEQUENCE:
4899 gcc_unreachable ();
4900 break;
4901 #endif
4903 default:
4904 last = make_jump_insn_raw (x);
4905 add_insn (last);
4906 break;
4909 return last;
4912 /* Make an insn of code CALL_INSN with pattern X
4913 and add it to the end of the doubly-linked list. */
4916 emit_call_insn (rtx x)
4918 rtx insn;
4920 switch (GET_CODE (x))
4922 case DEBUG_INSN:
4923 case INSN:
4924 case JUMP_INSN:
4925 case CALL_INSN:
4926 case CODE_LABEL:
4927 case BARRIER:
4928 case NOTE:
4929 insn = emit_insn (x);
4930 break;
4932 #ifdef ENABLE_RTL_CHECKING
4933 case SEQUENCE:
4934 gcc_unreachable ();
4935 break;
4936 #endif
4938 default:
4939 insn = make_call_insn_raw (x);
4940 add_insn (insn);
4941 break;
4944 return insn;
4947 /* Add the label LABEL to the end of the doubly-linked list. */
4950 emit_label (rtx label)
4952 /* This can be called twice for the same label
4953 as a result of the confusion that follows a syntax error!
4954 So make it harmless. */
4955 if (INSN_UID (label) == 0)
4957 INSN_UID (label) = cur_insn_uid++;
4958 add_insn (label);
4960 return label;
4963 /* Make an insn of code BARRIER
4964 and add it to the end of the doubly-linked list. */
4967 emit_barrier (void)
4969 rtx barrier = rtx_alloc (BARRIER);
4970 INSN_UID (barrier) = cur_insn_uid++;
4971 add_insn (barrier);
4972 return barrier;
4975 /* Emit a copy of note ORIG. */
4978 emit_note_copy (rtx orig)
4980 rtx note;
4982 note = rtx_alloc (NOTE);
4984 INSN_UID (note) = cur_insn_uid++;
4985 NOTE_DATA (note) = NOTE_DATA (orig);
4986 NOTE_KIND (note) = NOTE_KIND (orig);
4987 BLOCK_FOR_INSN (note) = NULL;
4988 add_insn (note);
4990 return note;
4993 /* Make an insn of code NOTE with kind KIND
4994 and add it to the end of the doubly-linked list. */
4997 emit_note (enum insn_note kind)
4999 rtx note;
5001 note = rtx_alloc (NOTE);
5002 INSN_UID (note) = cur_insn_uid++;
5003 NOTE_KIND (note) = kind;
5004 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
5005 BLOCK_FOR_INSN (note) = NULL;
5006 add_insn (note);
5007 return note;
5010 /* Emit a clobber of lvalue X. */
5013 emit_clobber (rtx x)
5015 /* CONCATs should not appear in the insn stream. */
5016 if (GET_CODE (x) == CONCAT)
5018 emit_clobber (XEXP (x, 0));
5019 return emit_clobber (XEXP (x, 1));
5021 return emit_insn (gen_rtx_CLOBBER (VOIDmode, x));
5024 /* Return a sequence of insns to clobber lvalue X. */
5027 gen_clobber (rtx x)
5029 rtx seq;
5031 start_sequence ();
5032 emit_clobber (x);
5033 seq = get_insns ();
5034 end_sequence ();
5035 return seq;
5038 /* Emit a use of rvalue X. */
5041 emit_use (rtx x)
5043 /* CONCATs should not appear in the insn stream. */
5044 if (GET_CODE (x) == CONCAT)
5046 emit_use (XEXP (x, 0));
5047 return emit_use (XEXP (x, 1));
5049 return emit_insn (gen_rtx_USE (VOIDmode, x));
5052 /* Return a sequence of insns to use rvalue X. */
5055 gen_use (rtx x)
5057 rtx seq;
5059 start_sequence ();
5060 emit_use (x);
5061 seq = get_insns ();
5062 end_sequence ();
5063 return seq;
5066 /* Cause next statement to emit a line note even if the line number
5067 has not changed. */
5069 void
5070 force_next_line_note (void)
5072 last_location = -1;
5075 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
5076 note of this type already exists, remove it first. */
5079 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
5081 rtx note = find_reg_note (insn, kind, NULL_RTX);
5083 switch (kind)
5085 case REG_EQUAL:
5086 case REG_EQUIV:
5087 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
5088 has multiple sets (some callers assume single_set
5089 means the insn only has one set, when in fact it
5090 means the insn only has one * useful * set). */
5091 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
5093 gcc_assert (!note);
5094 return NULL_RTX;
5097 /* Don't add ASM_OPERAND REG_EQUAL/REG_EQUIV notes.
5098 It serves no useful purpose and breaks eliminate_regs. */
5099 if (GET_CODE (datum) == ASM_OPERANDS)
5100 return NULL_RTX;
5102 if (note)
5104 XEXP (note, 0) = datum;
5105 df_notes_rescan (insn);
5106 return note;
5108 break;
5110 default:
5111 if (note)
5113 XEXP (note, 0) = datum;
5114 return note;
5116 break;
5119 add_reg_note (insn, kind, datum);
5121 switch (kind)
5123 case REG_EQUAL:
5124 case REG_EQUIV:
5125 df_notes_rescan (insn);
5126 break;
5127 default:
5128 break;
5131 return REG_NOTES (insn);
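/* Illustrative sketch, not part of this file: a pass that has computed
   the value produced by INSN can record it with a REG_EQUAL note;
   set_unique_reg_note replaces any stale note of the same kind and keeps
   the dataflow note information up to date.  */

static void
example_record_equal_value (rtx insn, rtx value)
{
  /* Only meaningful when INSN has a single useful set; otherwise
     set_unique_reg_note refuses to add the note anyway.  */
  if (single_set (insn))
    set_unique_reg_note (insn, REG_EQUAL, copy_rtx (value));
}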
5134 /* Return an indication of which type of insn should have X as a body.
5135 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
5137 static enum rtx_code
5138 classify_insn (rtx x)
5140 if (LABEL_P (x))
5141 return CODE_LABEL;
5142 if (GET_CODE (x) == CALL)
5143 return CALL_INSN;
5144 if (GET_CODE (x) == RETURN)
5145 return JUMP_INSN;
5146 if (GET_CODE (x) == SET)
5148 if (SET_DEST (x) == pc_rtx)
5149 return JUMP_INSN;
5150 else if (GET_CODE (SET_SRC (x)) == CALL)
5151 return CALL_INSN;
5152 else
5153 return INSN;
5155 if (GET_CODE (x) == PARALLEL)
5157 int j;
5158 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
5159 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
5160 return CALL_INSN;
5161 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5162 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
5163 return JUMP_INSN;
5164 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
5165 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
5166 return CALL_INSN;
5168 return INSN;
5171 /* Emit the rtl pattern X as an appropriate kind of insn.
5172 If X is a label, it is simply added into the insn chain. */
5175 emit (rtx x)
5177 enum rtx_code code = classify_insn (x);
5179 switch (code)
5181 case CODE_LABEL:
5182 return emit_label (x);
5183 case INSN:
5184 return emit_insn (x);
5185 case JUMP_INSN:
5187 rtx insn = emit_jump_insn (x);
5188 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
5189 return emit_barrier ();
5190 return insn;
5192 case CALL_INSN:
5193 return emit_call_insn (x);
5194 case DEBUG_INSN:
5195 return emit_debug_insn (x);
5196 default:
5197 gcc_unreachable ();
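/* Illustrative sketch, not part of this file: emit lets a caller hand
   over a pattern without choosing the insn class itself; classify_insn
   derives CODE_LABEL, INSN, JUMP_INSN or CALL_INSN from the pattern's
   shape.  LABEL is assumed to be a CODE_LABEL already in the chain, and
   a real caller would also set JUMP_LABEL on the resulting jump.  */

static void
example_emit_classified (rtx label)
{
  /* A plain SET is emitted as an ordinary INSN.  */
  emit (gen_rtx_SET (VOIDmode, gen_reg_rtx (word_mode), const1_rtx));

  /* A SET whose destination is pc_rtx is emitted as a JUMP_INSN, and
     emit adds a BARRIER after the unconditional jump.  */
  emit (gen_rtx_SET (VOIDmode, pc_rtx, gen_rtx_LABEL_REF (VOIDmode, label)));
}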
5201 /* Space for free sequence stack entries. */
5202 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
5204 /* Begin emitting insns to a sequence. If this sequence will contain
5205 something that might cause the compiler to pop arguments to function
5206 calls (because those pops have previously been deferred; see
5207 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
5208 before calling this function. That will ensure that the deferred
5209 pops are not accidentally emitted in the middle of this sequence. */
5211 void
5212 start_sequence (void)
5214 struct sequence_stack *tem;
5216 if (free_sequence_stack != NULL)
5218 tem = free_sequence_stack;
5219 free_sequence_stack = tem->next;
5221 else
5222 tem = ggc_alloc_sequence_stack ();
5224 tem->next = seq_stack;
5225 tem->first = get_insns ();
5226 tem->last = get_last_insn ();
5228 seq_stack = tem;
5230 set_first_insn (0);
5231 set_last_insn (0);
5234 /* Set up the insn chain starting with FIRST as the current sequence,
5235 saving the previously current one. See the documentation for
5236 start_sequence for more information about how to use this function. */
5238 void
5239 push_to_sequence (rtx first)
5241 rtx last;
5243 start_sequence ();
5245 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
5247 set_first_insn (first);
5248 set_last_insn (last);
5251 /* Like push_to_sequence, but take the last insn as an argument to avoid
5252 looping through the list. */
5254 void
5255 push_to_sequence2 (rtx first, rtx last)
5257 start_sequence ();
5259 set_first_insn (first);
5260 set_last_insn (last);
5263 /* Set up the outer-level insn chain
5264 as the current sequence, saving the previously current one. */
5266 void
5267 push_topmost_sequence (void)
5269 struct sequence_stack *stack, *top = NULL;
5271 start_sequence ();
5273 for (stack = seq_stack; stack; stack = stack->next)
5274 top = stack;
5276 set_first_insn (top->first);
5277 set_last_insn (top->last);
5280 /* After emitting to the outer-level insn chain, update the outer-level
5281 insn chain, and restore the previous saved state. */
5283 void
5284 pop_topmost_sequence (void)
5286 struct sequence_stack *stack, *top = NULL;
5288 for (stack = seq_stack; stack; stack = stack->next)
5289 top = stack;
5291 top->first = get_insns ();
5292 top->last = get_last_insn ();
5294 end_sequence ();
5297 /* After emitting to a sequence, restore previous saved state.
5299 To get the contents of the sequence just made, you must call
5300 `get_insns' *before* calling here.
5302 If the compiler might have deferred popping arguments while
5303 generating this sequence, and this sequence will not be immediately
5304 inserted into the instruction stream, use do_pending_stack_adjust
5305 before calling get_insns. That will ensure that the deferred
5306 pops are inserted into this sequence, and not into some random
5307 location in the instruction stream. See INHIBIT_DEFER_POP for more
5308 information about deferred popping of arguments. */
5310 void
5311 end_sequence (void)
5313 struct sequence_stack *tem = seq_stack;
5315 set_first_insn (tem->first);
5316 set_last_insn (tem->last);
5317 seq_stack = tem->next;
5319 memset (tem, 0, sizeof (*tem));
5320 tem->next = free_sequence_stack;
5321 free_sequence_stack = tem;
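/* Illustrative sketch of the sequence protocol described above, not part
   of this file: collect insns on a detached chain, read the chain with
   get_insns before calling end_sequence, then insert it wherever it is
   needed.  The pseudo, mode and constants are arbitrary examples.  */

static void
example_build_and_insert_sequence (rtx where)
{
  rtx seq, tmp = gen_reg_rtx (word_mode);

  start_sequence ();
  emit_insn (gen_rtx_SET (VOIDmode, tmp, const0_rtx));
  emit_insn (gen_rtx_SET (VOIDmode, tmp, GEN_INT (1)));
  seq = get_insns ();   /* Must be read before end_sequence.  */
  end_sequence ();

  /* The saved chain is spliced in front of WHERE.  */
  emit_insn_before (seq, where);
}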
5324 /* Return 1 if currently emitting into a sequence. */
5327 in_sequence_p (void)
5329 return seq_stack != 0;
5332 /* Put the various virtual registers into REGNO_REG_RTX. */
5334 static void
5335 init_virtual_regs (void)
5337 regno_reg_rtx[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
5338 regno_reg_rtx[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
5339 regno_reg_rtx[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
5340 regno_reg_rtx[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
5341 regno_reg_rtx[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
5342 regno_reg_rtx[VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM]
5343 = virtual_preferred_stack_boundary_rtx;
5347 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
5348 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
5349 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
5350 static int copy_insn_n_scratches;
5352 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5353 copied an ASM_OPERANDS.
5354 In that case, it is the original input-operand vector. */
5355 static rtvec orig_asm_operands_vector;
5357 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
5358 copied an ASM_OPERANDS.
5359 In that case, it is the copied input-operand vector. */
5360 static rtvec copy_asm_operands_vector;
5362 /* Likewise for the constraints vector. */
5363 static rtvec orig_asm_constraints_vector;
5364 static rtvec copy_asm_constraints_vector;
5366 /* Recursively create a new copy of an rtx for copy_insn.
5367 This function differs from copy_rtx in that it handles SCRATCHes and
5368 ASM_OPERANDs properly.
5369 Normally, this function is not used directly; use copy_insn as front end.
5370 However, you could first copy an insn pattern with copy_insn and then use
5371 this function afterwards to properly copy any REG_NOTEs containing
5372 SCRATCHes. */
5375 copy_insn_1 (rtx orig)
5377 rtx copy;
5378 int i, j;
5379 RTX_CODE code;
5380 const char *format_ptr;
5382 if (orig == NULL)
5383 return NULL;
5385 code = GET_CODE (orig);
5387 switch (code)
5389 case REG:
5390 case CONST_INT:
5391 case CONST_DOUBLE:
5392 case CONST_FIXED:
5393 case CONST_VECTOR:
5394 case SYMBOL_REF:
5395 case CODE_LABEL:
5396 case PC:
5397 case CC0:
5398 return orig;
5399 case CLOBBER:
5400 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
5401 return orig;
5402 break;
5404 case SCRATCH:
5405 for (i = 0; i < copy_insn_n_scratches; i++)
5406 if (copy_insn_scratch_in[i] == orig)
5407 return copy_insn_scratch_out[i];
5408 break;
5410 case CONST:
5411 if (shared_const_p (orig))
5412 return orig;
5413 break;
5415 /* A MEM with a constant address is not sharable. The problem is that
5416 the constant address may need to be reloaded. If the mem is shared,
5417 then reloading one copy of this mem will cause all copies to appear
5418 to have been reloaded. */
5420 default:
5421 break;
5424 /* Copy the various flags, fields, and other information. We assume
5425 that all fields need copying, and then clear the fields that should
5426 not be copied. That is the sensible default behavior, and forces
5427 us to explicitly document why we are *not* copying a flag. */
5428 copy = shallow_copy_rtx (orig);
5430 /* We do not copy the USED flag, which is used as a mark bit during
5431 walks over the RTL. */
5432 RTX_FLAG (copy, used) = 0;
5434 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
5435 if (INSN_P (orig))
5437 RTX_FLAG (copy, jump) = 0;
5438 RTX_FLAG (copy, call) = 0;
5439 RTX_FLAG (copy, frame_related) = 0;
5442 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
5444 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
5445 switch (*format_ptr++)
5447 case 'e':
5448 if (XEXP (orig, i) != NULL)
5449 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
5450 break;
5452 case 'E':
5453 case 'V':
5454 if (XVEC (orig, i) == orig_asm_constraints_vector)
5455 XVEC (copy, i) = copy_asm_constraints_vector;
5456 else if (XVEC (orig, i) == orig_asm_operands_vector)
5457 XVEC (copy, i) = copy_asm_operands_vector;
5458 else if (XVEC (orig, i) != NULL)
5460 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
5461 for (j = 0; j < XVECLEN (copy, i); j++)
5462 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
5464 break;
5466 case 't':
5467 case 'w':
5468 case 'i':
5469 case 's':
5470 case 'S':
5471 case 'u':
5472 case '0':
5473 /* These are left unchanged. */
5474 break;
5476 default:
5477 gcc_unreachable ();
5480 if (code == SCRATCH)
5482 i = copy_insn_n_scratches++;
5483 gcc_assert (i < MAX_RECOG_OPERANDS);
5484 copy_insn_scratch_in[i] = orig;
5485 copy_insn_scratch_out[i] = copy;
5487 else if (code == ASM_OPERANDS)
5489 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
5490 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
5491 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
5492 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
5495 return copy;
5498 /* Create a new copy of an rtx.
5499 This function differs from copy_rtx in that it handles SCRATCHes and
5500 ASM_OPERANDs properly.
5501 INSN doesn't really have to be a full INSN; it could be just the
5502 pattern. */
5504 copy_insn (rtx insn)
5506 copy_insn_n_scratches = 0;
5507 orig_asm_operands_vector = 0;
5508 orig_asm_constraints_vector = 0;
5509 copy_asm_operands_vector = 0;
5510 copy_asm_constraints_vector = 0;
5511 return copy_insn_1 (insn);
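/* Illustrative sketch of the usage pattern described above copy_insn_1,
   not part of this file: duplicate an insn's pattern with copy_insn, then
   run copy_insn_1 over each REG_NOTE so that SCRATCHes shared between the
   pattern and the notes remain shared in the copy.  */

static rtx
example_copy_pattern_and_notes (rtx insn, rtx *notes_out)
{
  rtx pat = copy_insn (PATTERN (insn));
  rtx link, notes = NULL_RTX;

  for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
    notes = alloc_reg_note (REG_NOTE_KIND (link),
			    GET_CODE (link) == EXPR_LIST
			    ? copy_insn_1 (XEXP (link, 0))
			    : XEXP (link, 0),
			    notes);

  *notes_out = notes;
  return pat;
}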
5514 /* Initialize data structures and variables in this file
5515 before generating rtl for each function. */
5517 void
5518 init_emit (void)
5520 set_first_insn (NULL);
5521 set_last_insn (NULL);
5522 if (MIN_NONDEBUG_INSN_UID)
5523 cur_insn_uid = MIN_NONDEBUG_INSN_UID;
5524 else
5525 cur_insn_uid = 1;
5526 cur_debug_insn_uid = 1;
5527 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
5528 last_location = UNKNOWN_LOCATION;
5529 first_label_num = label_num;
5530 seq_stack = NULL;
5532 /* Init the tables that describe all the pseudo regs. */
5534 crtl->emit.regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
5536 crtl->emit.regno_pointer_align
5537 = XCNEWVEC (unsigned char, crtl->emit.regno_pointer_align_length);
5539 regno_reg_rtx = ggc_alloc_vec_rtx (crtl->emit.regno_pointer_align_length);
5541 /* Put copies of all the hard registers into regno_reg_rtx. */
5542 memcpy (regno_reg_rtx,
5543 initial_regno_reg_rtx,
5544 FIRST_PSEUDO_REGISTER * sizeof (rtx));
5546 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
5547 init_virtual_regs ();
5549 /* Indicate that the virtual registers and stack locations are
5550 all pointers. */
5551 REG_POINTER (stack_pointer_rtx) = 1;
5552 REG_POINTER (frame_pointer_rtx) = 1;
5553 REG_POINTER (hard_frame_pointer_rtx) = 1;
5554 REG_POINTER (arg_pointer_rtx) = 1;
5556 REG_POINTER (virtual_incoming_args_rtx) = 1;
5557 REG_POINTER (virtual_stack_vars_rtx) = 1;
5558 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
5559 REG_POINTER (virtual_outgoing_args_rtx) = 1;
5560 REG_POINTER (virtual_cfa_rtx) = 1;
5562 #ifdef STACK_BOUNDARY
5563 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
5564 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5565 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
5566 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
5568 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
5569 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
5570 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
5571 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
5572 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
5573 #endif
5575 #ifdef INIT_EXPANDERS
5576 INIT_EXPANDERS;
5577 #endif
5580 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
5582 static rtx
5583 gen_const_vector (enum machine_mode mode, int constant)
5585 rtx tem;
5586 rtvec v;
5587 int units, i;
5588 enum machine_mode inner;
5590 units = GET_MODE_NUNITS (mode);
5591 inner = GET_MODE_INNER (mode);
5593 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
5595 v = rtvec_alloc (units);
5597 /* We need to call this function after we set the scalar const_tiny_rtx
5598 entries. */
5599 gcc_assert (const_tiny_rtx[constant][(int) inner]);
5601 for (i = 0; i < units; ++i)
5602 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
5604 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
5605 return tem;
5608 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
5609 all elements are zero, and the one vector when all elements are one. */
5611 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
5613 enum machine_mode inner = GET_MODE_INNER (mode);
5614 int nunits = GET_MODE_NUNITS (mode);
5615 rtx x;
5616 int i;
5618 /* Check to see if all of the elements have the same value. */
5619 x = RTVEC_ELT (v, nunits - 1);
5620 for (i = nunits - 2; i >= 0; i--)
5621 if (RTVEC_ELT (v, i) != x)
5622 break;
5624 /* If the values are all the same, check to see if we can use one of the
5625 standard constant vectors. */
5626 if (i == -1)
5628 if (x == CONST0_RTX (inner))
5629 return CONST0_RTX (mode);
5630 else if (x == CONST1_RTX (inner))
5631 return CONST1_RTX (mode);
5634 return gen_rtx_raw_CONST_VECTOR (mode, v);
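/* Illustrative sketch, not part of this file: build an element vector for
   a caller-supplied vector MODE and let gen_rtx_CONST_VECTOR fold the
   all-equal cases to the shared CONST0_RTX / CONST1_RTX entries.  ELT is
   assumed to be a valid constant of the inner mode.  */

static rtx
example_splat_const_vector (enum machine_mode mode, rtx elt)
{
  int i, n = GET_MODE_NUNITS (mode);
  rtvec v = rtvec_alloc (n);

  for (i = 0; i < n; i++)
    RTVEC_ELT (v, i) = elt;

  /* Returns CONST0_RTX (MODE) when ELT is the zero of the inner mode.  */
  return gen_rtx_CONST_VECTOR (mode, v);
}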
5637 /* Initialize global register information required by all functions. */
5639 void
5640 init_emit_regs (void)
5642 int i;
5644 /* Reset register attributes */
5645 htab_empty (reg_attrs_htab);
5647 /* We need reg_raw_mode, so initialize the modes now. */
5648 init_reg_modes_target ();
5650 /* Assign register numbers to the globally defined register rtx. */
5651 pc_rtx = gen_rtx_PC (VOIDmode);
5652 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5653 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5654 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5655 hard_frame_pointer_rtx = gen_raw_REG (Pmode, HARD_FRAME_POINTER_REGNUM);
5656 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5657 virtual_incoming_args_rtx =
5658 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5659 virtual_stack_vars_rtx =
5660 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5661 virtual_stack_dynamic_rtx =
5662 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5663 virtual_outgoing_args_rtx =
5664 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5665 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5666 virtual_preferred_stack_boundary_rtx =
5667 gen_raw_REG (Pmode, VIRTUAL_PREFERRED_STACK_BOUNDARY_REGNUM);
5669 /* Initialize RTL for commonly used hard registers. These are
5670 copied into regno_reg_rtx as we begin to compile each function. */
5671 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5672 initial_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5674 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5675 return_address_pointer_rtx
5676 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5677 #endif
5679 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5680 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5681 else
5682 pic_offset_table_rtx = NULL_RTX;
5685 /* Create some permanent unique rtl objects shared between all functions. */
5687 void
5688 init_emit_once (void)
5690 int i;
5691 enum machine_mode mode;
5692 enum machine_mode double_mode;
5694 /* Initialize the CONST_INT, CONST_DOUBLE, CONST_FIXED, and memory attribute
5695 hash tables. */
5696 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5697 const_int_htab_eq, NULL);
5699 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5700 const_double_htab_eq, NULL);
5702 const_fixed_htab = htab_create_ggc (37, const_fixed_htab_hash,
5703 const_fixed_htab_eq, NULL);
5705 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5706 mem_attrs_htab_eq, NULL);
5707 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5708 reg_attrs_htab_eq, NULL);
5710 /* Compute the word and byte modes. */
5712 byte_mode = VOIDmode;
5713 word_mode = VOIDmode;
5714 double_mode = VOIDmode;
5716 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5717 mode != VOIDmode;
5718 mode = GET_MODE_WIDER_MODE (mode))
5720 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5721 && byte_mode == VOIDmode)
5722 byte_mode = mode;
5724 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5725 && word_mode == VOIDmode)
5726 word_mode = mode;
5729 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5730 mode != VOIDmode;
5731 mode = GET_MODE_WIDER_MODE (mode))
5733 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5734 && double_mode == VOIDmode)
5735 double_mode = mode;
5738 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5740 #ifdef INIT_EXPANDERS
5741 /* This is to initialize {init|mark|free}_machine_status before the first
5742 call to push_function_context_to. This is needed by the Chill front
5743 end which calls push_function_context_to before the first call to
5744 init_function_start. */
5745 INIT_EXPANDERS;
5746 #endif
5748 /* Create the unique rtx's for certain rtx codes and operand values. */
5750 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5751 tries to use these variables. */
5752 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5753 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5754 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5756 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5757 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5758 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5759 else
5760 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5762 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5763 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5764 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5766 dconstm1 = dconst1;
5767 dconstm1.sign = 1;
5769 dconsthalf = dconst1;
5770 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5772 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5774 const REAL_VALUE_TYPE *const r =
5775 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5777 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5778 mode != VOIDmode;
5779 mode = GET_MODE_WIDER_MODE (mode))
5780 const_tiny_rtx[i][(int) mode] =
5781 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5783 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5784 mode != VOIDmode;
5785 mode = GET_MODE_WIDER_MODE (mode))
5786 const_tiny_rtx[i][(int) mode] =
5787 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5789 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5791 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5792 mode != VOIDmode;
5793 mode = GET_MODE_WIDER_MODE (mode))
5794 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5796 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5797 mode != VOIDmode;
5798 mode = GET_MODE_WIDER_MODE (mode))
5799 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5802 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_INT);
5803 mode != VOIDmode;
5804 mode = GET_MODE_WIDER_MODE (mode))
5806 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5807 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5810 for (mode = GET_CLASS_NARROWEST_MODE (MODE_COMPLEX_FLOAT);
5811 mode != VOIDmode;
5812 mode = GET_MODE_WIDER_MODE (mode))
5814 rtx inner = const_tiny_rtx[0][(int)GET_MODE_INNER (mode)];
5815 const_tiny_rtx[0][(int) mode] = gen_rtx_CONCAT (mode, inner, inner);
5818 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5819 mode != VOIDmode;
5820 mode = GET_MODE_WIDER_MODE (mode))
5822 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5823 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5826 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5827 mode != VOIDmode;
5828 mode = GET_MODE_WIDER_MODE (mode))
5830 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5831 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5834 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FRACT);
5835 mode != VOIDmode;
5836 mode = GET_MODE_WIDER_MODE (mode))
5838 FCONST0(mode).data.high = 0;
5839 FCONST0(mode).data.low = 0;
5840 FCONST0(mode).mode = mode;
5841 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5842 FCONST0 (mode), mode);
5845 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UFRACT);
5846 mode != VOIDmode;
5847 mode = GET_MODE_WIDER_MODE (mode))
5849 FCONST0(mode).data.high = 0;
5850 FCONST0(mode).data.low = 0;
5851 FCONST0(mode).mode = mode;
5852 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5853 FCONST0 (mode), mode);
5856 for (mode = GET_CLASS_NARROWEST_MODE (MODE_ACCUM);
5857 mode != VOIDmode;
5858 mode = GET_MODE_WIDER_MODE (mode))
5860 FCONST0(mode).data.high = 0;
5861 FCONST0(mode).data.low = 0;
5862 FCONST0(mode).mode = mode;
5863 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5864 FCONST0 (mode), mode);
5866 /* We store the value 1. */
5867 FCONST1(mode).data.high = 0;
5868 FCONST1(mode).data.low = 0;
5869 FCONST1(mode).mode = mode;
5870 lshift_double (1, 0, GET_MODE_FBIT (mode),
5871 2 * HOST_BITS_PER_WIDE_INT,
5872 &FCONST1(mode).data.low,
5873 &FCONST1(mode).data.high,
5874 SIGNED_FIXED_POINT_MODE_P (mode));
5875 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5876 FCONST1 (mode), mode);
5879 for (mode = GET_CLASS_NARROWEST_MODE (MODE_UACCUM);
5880 mode != VOIDmode;
5881 mode = GET_MODE_WIDER_MODE (mode))
5883 FCONST0(mode).data.high = 0;
5884 FCONST0(mode).data.low = 0;
5885 FCONST0(mode).mode = mode;
5886 const_tiny_rtx[0][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5887 FCONST0 (mode), mode);
5889 /* We store the value 1. */
5890 FCONST1(mode).data.high = 0;
5891 FCONST1(mode).data.low = 0;
5892 FCONST1(mode).mode = mode;
5893 lshift_double (1, 0, GET_MODE_FBIT (mode),
5894 2 * HOST_BITS_PER_WIDE_INT,
5895 &FCONST1(mode).data.low,
5896 &FCONST1(mode).data.high,
5897 SIGNED_FIXED_POINT_MODE_P (mode));
5898 const_tiny_rtx[1][(int) mode] = CONST_FIXED_FROM_FIXED_VALUE (
5899 FCONST1 (mode), mode);
5902 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FRACT);
5903 mode != VOIDmode;
5904 mode = GET_MODE_WIDER_MODE (mode))
5906 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5909 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UFRACT);
5910 mode != VOIDmode;
5911 mode = GET_MODE_WIDER_MODE (mode))
5913 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5916 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_ACCUM);
5917 mode != VOIDmode;
5918 mode = GET_MODE_WIDER_MODE (mode))
5920 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5921 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5924 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_UACCUM);
5925 mode != VOIDmode;
5926 mode = GET_MODE_WIDER_MODE (mode))
5928 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5929 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5932 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5933 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5934 const_tiny_rtx[0][i] = const0_rtx;
5936 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5937 if (STORE_FLAG_VALUE == 1)
5938 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5941 /* Produce an exact duplicate of insn INSN after AFTER,
5942 taking care to update any libcall regions if present. */
5945 emit_copy_of_insn_after (rtx insn, rtx after)
5947 rtx new_rtx, link;
5949 switch (GET_CODE (insn))
5951 case INSN:
5952 new_rtx = emit_insn_after (copy_insn (PATTERN (insn)), after);
5953 break;
5955 case JUMP_INSN:
5956 new_rtx = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5957 break;
5959 case DEBUG_INSN:
5960 new_rtx = emit_debug_insn_after (copy_insn (PATTERN (insn)), after);
5961 break;
5963 case CALL_INSN:
5964 new_rtx = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5965 if (CALL_INSN_FUNCTION_USAGE (insn))
5966 CALL_INSN_FUNCTION_USAGE (new_rtx)
5967 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5968 SIBLING_CALL_P (new_rtx) = SIBLING_CALL_P (insn);
5969 RTL_CONST_CALL_P (new_rtx) = RTL_CONST_CALL_P (insn);
5970 RTL_PURE_CALL_P (new_rtx) = RTL_PURE_CALL_P (insn);
5971 RTL_LOOPING_CONST_OR_PURE_CALL_P (new_rtx)
5972 = RTL_LOOPING_CONST_OR_PURE_CALL_P (insn);
5973 break;
5975 default:
5976 gcc_unreachable ();
5979 /* Update LABEL_NUSES. */
5980 mark_jump_label (PATTERN (new_rtx), new_rtx, 0);
5982 INSN_LOCATOR (new_rtx) = INSN_LOCATOR (insn);
5984 /* If the old insn is frame related, then so is the new one. This is
5985 primarily needed for IA-64 unwind info which marks epilogue insns,
5986 which may be duplicated by the basic block reordering code. */
5987 RTX_FRAME_RELATED_P (new_rtx) = RTX_FRAME_RELATED_P (insn);
5989 /* Copy all REG_NOTES except REG_LABEL_OPERAND since mark_jump_label
5990 will make them. REG_LABEL_TARGETs are created there too, but are
5991 supposed to be sticky, so we copy them. */
5992 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5993 if (REG_NOTE_KIND (link) != REG_LABEL_OPERAND)
5995 if (GET_CODE (link) == EXPR_LIST)
5996 add_reg_note (new_rtx, REG_NOTE_KIND (link),
5997 copy_insn_1 (XEXP (link, 0)));
5998 else
5999 add_reg_note (new_rtx, REG_NOTE_KIND (link), XEXP (link, 0));
6002 INSN_CODE (new_rtx) = INSN_CODE (insn);
6003 return new_rtx;
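/* Illustrative sketch, not part of this file: code-duplication passes
   typically walk a range of insns and append copies after a given point,
   relying on emit_copy_of_insn_after to preserve the notes,
   CALL_INSN_FUNCTION_USAGE and the frame-related flag of each insn.  */

static rtx
example_duplicate_insn_range (rtx from, rtx to, rtx after)
{
  rtx insn;

  for (insn = from; ; insn = NEXT_INSN (insn))
    {
      if (INSN_P (insn))
	after = emit_copy_of_insn_after (insn, after);
      if (insn == to)
	break;
    }

  return after;
}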
6006 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
6008 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
6010 if (hard_reg_clobbers[mode][regno])
6011 return hard_reg_clobbers[mode][regno];
6012 else
6013 return (hard_reg_clobbers[mode][regno] =
6014 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
6017 #include "gt-emit-rtl.h"