gcc/emit-rtl.c
1 /* Emit RTL for the GCC expander.
2 Copyright (C) 1987, 1988, 1992, 1993, 1994, 1995, 1996, 1997, 1998,
3 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007
4 Free Software Foundation, Inc.
6 This file is part of GCC.
8 GCC is free software; you can redistribute it and/or modify it under
9 the terms of the GNU General Public License as published by the Free
10 Software Foundation; either version 2, or (at your option) any later
11 version.
13 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
14 WARRANTY; without even the implied warranty of MERCHANTABILITY or
15 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 for more details.
18 You should have received a copy of the GNU General Public License
19 along with GCC; see the file COPYING. If not, write to the Free
20 Software Foundation, 51 Franklin Street, Fifth Floor, Boston, MA
21 02110-1301, USA. */
24 /* Middle-to-low level generation of rtx code and insns.
26 This file contains support functions for creating rtl expressions
27 and manipulating them in the doubly-linked chain of insns.
29 The patterns of the insns are created by machine-dependent
30 routines in insn-emit.c, which is generated automatically from
31 the machine description. These routines make the individual rtx's
32 of the pattern with `gen_rtx_fmt_ee' and others in genrtl.[ch],
33 which are automatically generated from rtl.def; what is machine
34 dependent is the kind of rtx's they make and what arguments they
35 use. */
37 #include "config.h"
38 #include "system.h"
39 #include "coretypes.h"
40 #include "tm.h"
41 #include "toplev.h"
42 #include "rtl.h"
43 #include "tree.h"
44 #include "tm_p.h"
45 #include "flags.h"
46 #include "function.h"
47 #include "expr.h"
48 #include "regs.h"
49 #include "hard-reg-set.h"
50 #include "hashtab.h"
51 #include "insn-config.h"
52 #include "recog.h"
53 #include "real.h"
54 #include "bitmap.h"
55 #include "basic-block.h"
56 #include "ggc.h"
57 #include "debug.h"
58 #include "langhooks.h"
59 #include "tree-pass.h"
61 /* Commonly used modes. */
63 enum machine_mode byte_mode; /* Mode whose width is BITS_PER_UNIT. */
64 enum machine_mode word_mode; /* Mode whose width is BITS_PER_WORD. */
65 enum machine_mode double_mode; /* Mode whose width is DOUBLE_TYPE_SIZE. */
66 enum machine_mode ptr_mode; /* Mode whose width is POINTER_SIZE. */
69 /* This is *not* reset after each function. It gives each CODE_LABEL
70 in the entire compilation a unique label number. */
72 static GTY(()) int label_num = 1;
74 /* Nonzero means do not generate NOTEs for source line numbers. */
76 static int no_line_numbers;
78 /* Commonly used rtx's, so that we only need space for one copy.
79 These are initialized once for the entire compilation.
80 All of these are unique; no other rtx-object will be equal to any
81 of these. */
83 rtx global_rtl[GR_MAX];
85 /* Commonly used RTL for hard registers. These objects are not necessarily
86 unique, so we allocate them separately from global_rtl. They are
87 initialized once per compilation unit, then copied into regno_reg_rtx
88 at the beginning of each function. */
89 static GTY(()) rtx static_regno_reg_rtx[FIRST_PSEUDO_REGISTER];
91 /* We record floating-point CONST_DOUBLEs in each floating-point mode for
92 the values of 0, 1, and 2. For the integer entries and VOIDmode, we
93 record a copy of const[012]_rtx. */
95 rtx const_tiny_rtx[3][(int) MAX_MACHINE_MODE];
97 rtx const_true_rtx;
99 REAL_VALUE_TYPE dconst0;
100 REAL_VALUE_TYPE dconst1;
101 REAL_VALUE_TYPE dconst2;
102 REAL_VALUE_TYPE dconst3;
103 REAL_VALUE_TYPE dconst10;
104 REAL_VALUE_TYPE dconstm1;
105 REAL_VALUE_TYPE dconstm2;
106 REAL_VALUE_TYPE dconsthalf;
107 REAL_VALUE_TYPE dconstthird;
108 REAL_VALUE_TYPE dconstsqrt2;
109 REAL_VALUE_TYPE dconste;
111 /* All references to the following fixed hard registers go through
112 these unique rtl objects. On machines where the frame-pointer and
113 arg-pointer are the same register, they use the same unique object.
115 After register allocation, other rtl objects which used to be pseudo-regs
116 may be clobbered to refer to the frame-pointer register.
117 But references that were originally to the frame-pointer can be
118 distinguished from the others because they contain frame_pointer_rtx.
120 When to use frame_pointer_rtx and hard_frame_pointer_rtx is a little
121 tricky: until register elimination has taken place hard_frame_pointer_rtx
122 should be used if it is being set, and frame_pointer_rtx otherwise. After
123 register elimination hard_frame_pointer_rtx should always be used.
 124 On machines where the two registers are the same (most machines), these
 125 rtxs are the same.
127 In an inline procedure, the stack and frame pointer rtxs may not be
128 used for anything else. */
129 rtx static_chain_rtx; /* (REG:Pmode STATIC_CHAIN_REGNUM) */
130 rtx static_chain_incoming_rtx; /* (REG:Pmode STATIC_CHAIN_INCOMING_REGNUM) */
131 rtx pic_offset_table_rtx; /* (REG:Pmode PIC_OFFSET_TABLE_REGNUM) */
133 /* This is used to implement __builtin_return_address for some machines.
134 See for instance the MIPS port. */
135 rtx return_address_pointer_rtx; /* (REG:Pmode RETURN_ADDRESS_POINTER_REGNUM) */
137 /* We make one copy of (const_int C) where C is in
138 [- MAX_SAVED_CONST_INT, MAX_SAVED_CONST_INT]
139 to save space during the compilation and simplify comparisons of
140 integers. */
142 rtx const_int_rtx[MAX_SAVED_CONST_INT * 2 + 1];
144 /* A hash table storing CONST_INTs whose absolute value is greater
145 than MAX_SAVED_CONST_INT. */
147 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
148 htab_t const_int_htab;
150 /* A hash table storing memory attribute structures. */
151 static GTY ((if_marked ("ggc_marked_p"), param_is (struct mem_attrs)))
152 htab_t mem_attrs_htab;
154 /* A hash table storing register attribute structures. */
155 static GTY ((if_marked ("ggc_marked_p"), param_is (struct reg_attrs)))
156 htab_t reg_attrs_htab;
158 /* A hash table storing all CONST_DOUBLEs. */
159 static GTY ((if_marked ("ggc_marked_p"), param_is (struct rtx_def)))
160 htab_t const_double_htab;
162 #define first_insn (cfun->emit->x_first_insn)
163 #define last_insn (cfun->emit->x_last_insn)
164 #define cur_insn_uid (cfun->emit->x_cur_insn_uid)
165 #define last_location (cfun->emit->x_last_location)
166 #define first_label_num (cfun->emit->x_first_label_num)
168 static rtx make_call_insn_raw (rtx);
169 static rtx change_address_1 (rtx, enum machine_mode, rtx, int);
170 static void reset_used_decls (tree);
171 static void mark_label_nuses (rtx);
172 static hashval_t const_int_htab_hash (const void *);
173 static int const_int_htab_eq (const void *, const void *);
174 static hashval_t const_double_htab_hash (const void *);
175 static int const_double_htab_eq (const void *, const void *);
176 static rtx lookup_const_double (rtx);
177 static hashval_t mem_attrs_htab_hash (const void *);
178 static int mem_attrs_htab_eq (const void *, const void *);
179 static mem_attrs *get_mem_attrs (HOST_WIDE_INT, tree, rtx, rtx, unsigned int,
180 enum machine_mode);
181 static hashval_t reg_attrs_htab_hash (const void *);
182 static int reg_attrs_htab_eq (const void *, const void *);
183 static reg_attrs *get_reg_attrs (tree, int);
184 static tree component_ref_for_mem_expr (tree);
185 static rtx gen_const_vector (enum machine_mode, int);
186 static void copy_rtx_if_shared_1 (rtx *orig);
 188 /* Probability of the conditional branch currently being processed by try_split.
189 Set to -1 otherwise. */
190 int split_branch_probability = -1;
 192 /* Returns a hash code for X (which is really a CONST_INT). */
194 static hashval_t
195 const_int_htab_hash (const void *x)
197 return (hashval_t) INTVAL ((rtx) x);
200 /* Returns nonzero if the value represented by X (which is really a
201 CONST_INT) is the same as that given by Y (which is really a
202 HOST_WIDE_INT *). */
204 static int
205 const_int_htab_eq (const void *x, const void *y)
207 return (INTVAL ((rtx) x) == *((const HOST_WIDE_INT *) y));
210 /* Returns a hash code for X (which is really a CONST_DOUBLE). */
211 static hashval_t
212 const_double_htab_hash (const void *x)
214 rtx value = (rtx) x;
215 hashval_t h;
217 if (GET_MODE (value) == VOIDmode)
218 h = CONST_DOUBLE_LOW (value) ^ CONST_DOUBLE_HIGH (value);
219 else
221 h = real_hash (CONST_DOUBLE_REAL_VALUE (value));
222 /* MODE is used in the comparison, so it should be in the hash. */
223 h ^= GET_MODE (value);
225 return h;
 228 /* Returns nonzero if the value represented by X (really a CONST_DOUBLE)
 229 is the same as that represented by Y (really a CONST_DOUBLE). */
230 static int
231 const_double_htab_eq (const void *x, const void *y)
233 rtx a = (rtx)x, b = (rtx)y;
235 if (GET_MODE (a) != GET_MODE (b))
236 return 0;
237 if (GET_MODE (a) == VOIDmode)
238 return (CONST_DOUBLE_LOW (a) == CONST_DOUBLE_LOW (b)
239 && CONST_DOUBLE_HIGH (a) == CONST_DOUBLE_HIGH (b));
240 else
241 return real_identical (CONST_DOUBLE_REAL_VALUE (a),
242 CONST_DOUBLE_REAL_VALUE (b));
 245 /* Returns a hash code for X (which is really a mem_attrs *). */
247 static hashval_t
248 mem_attrs_htab_hash (const void *x)
250 mem_attrs *p = (mem_attrs *) x;
252 return (p->alias ^ (p->align * 1000)
253 ^ ((p->offset ? INTVAL (p->offset) : 0) * 50000)
254 ^ ((p->size ? INTVAL (p->size) : 0) * 2500000)
255 ^ (size_t) iterative_hash_expr (p->expr, 0));
258 /* Returns nonzero if the value represented by X (which is really a
259 mem_attrs *) is the same as that given by Y (which is also really a
260 mem_attrs *). */
262 static int
263 mem_attrs_htab_eq (const void *x, const void *y)
265 mem_attrs *p = (mem_attrs *) x;
266 mem_attrs *q = (mem_attrs *) y;
268 return (p->alias == q->alias && p->offset == q->offset
269 && p->size == q->size && p->align == q->align
270 && (p->expr == q->expr
271 || (p->expr != NULL_TREE && q->expr != NULL_TREE
272 && operand_equal_p (p->expr, q->expr, 0))));
275 /* Allocate a new mem_attrs structure and insert it into the hash table if
276 one identical to it is not already in the table. We are doing this for
277 MEM of mode MODE. */
279 static mem_attrs *
280 get_mem_attrs (HOST_WIDE_INT alias, tree expr, rtx offset, rtx size,
281 unsigned int align, enum machine_mode mode)
283 mem_attrs attrs;
284 void **slot;
286 /* If everything is the default, we can just return zero.
287 This must match what the corresponding MEM_* macros return when the
288 field is not present. */
289 if (alias == 0 && expr == 0 && offset == 0
290 && (size == 0
291 || (mode != BLKmode && GET_MODE_SIZE (mode) == INTVAL (size)))
292 && (STRICT_ALIGNMENT && mode != BLKmode
293 ? align == GET_MODE_ALIGNMENT (mode) : align == BITS_PER_UNIT))
294 return 0;
296 attrs.alias = alias;
297 attrs.expr = expr;
298 attrs.offset = offset;
299 attrs.size = size;
300 attrs.align = align;
302 slot = htab_find_slot (mem_attrs_htab, &attrs, INSERT);
303 if (*slot == 0)
305 *slot = ggc_alloc (sizeof (mem_attrs));
306 memcpy (*slot, &attrs, sizeof (mem_attrs));
309 return *slot;
 312 /* Returns a hash code for X (which is really a reg_attrs *). */
314 static hashval_t
315 reg_attrs_htab_hash (const void *x)
317 reg_attrs *p = (reg_attrs *) x;
319 return ((p->offset * 1000) ^ (long) p->decl);
322 /* Returns nonzero if the value represented by X (which is really a
323 reg_attrs *) is the same as that given by Y (which is also really a
324 reg_attrs *). */
326 static int
327 reg_attrs_htab_eq (const void *x, const void *y)
329 reg_attrs *p = (reg_attrs *) x;
330 reg_attrs *q = (reg_attrs *) y;
332 return (p->decl == q->decl && p->offset == q->offset);
 334 /* Allocate a new reg_attrs structure and insert it into the hash table if
 335 one identical to it is not already in the table. */
338 static reg_attrs *
339 get_reg_attrs (tree decl, int offset)
341 reg_attrs attrs;
342 void **slot;
344 /* If everything is the default, we can just return zero. */
345 if (decl == 0 && offset == 0)
346 return 0;
348 attrs.decl = decl;
349 attrs.offset = offset;
351 slot = htab_find_slot (reg_attrs_htab, &attrs, INSERT);
352 if (*slot == 0)
354 *slot = ggc_alloc (sizeof (reg_attrs));
355 memcpy (*slot, &attrs, sizeof (reg_attrs));
358 return *slot;
361 /* Generate a new REG rtx. Make sure ORIGINAL_REGNO is set properly, and
362 don't attempt to share with the various global pieces of rtl (such as
363 frame_pointer_rtx). */
366 gen_raw_REG (enum machine_mode mode, int regno)
368 rtx x = gen_rtx_raw_REG (mode, regno);
369 ORIGINAL_REGNO (x) = regno;
370 return x;
373 /* There are some RTL codes that require special attention; the generation
374 functions do the raw handling. If you add to this list, modify
375 special_rtx in gengenrtl.c as well. */
378 gen_rtx_CONST_INT (enum machine_mode mode ATTRIBUTE_UNUSED, HOST_WIDE_INT arg)
380 void **slot;
382 if (arg >= - MAX_SAVED_CONST_INT && arg <= MAX_SAVED_CONST_INT)
383 return const_int_rtx[arg + MAX_SAVED_CONST_INT];
385 #if STORE_FLAG_VALUE != 1 && STORE_FLAG_VALUE != -1
386 if (const_true_rtx && arg == STORE_FLAG_VALUE)
387 return const_true_rtx;
388 #endif
390 /* Look up the CONST_INT in the hash table. */
391 slot = htab_find_slot_with_hash (const_int_htab, &arg,
392 (hashval_t) arg, INSERT);
393 if (*slot == 0)
394 *slot = gen_rtx_raw_CONST_INT (VOIDmode, arg);
396 return (rtx) *slot;
400 gen_int_mode (HOST_WIDE_INT c, enum machine_mode mode)
402 return GEN_INT (trunc_int_for_mode (c, mode));
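
/* Editor's note: an illustrative sketch, not part of the original
   emit-rtl.c.  It shows how the CONST_INT cache above behaves; only the
   wrapper function name is made up, everything it calls is defined or
   declared in this file or rtl.h.  */
#if 0
static void
example_const_int_sharing (void)
{
  /* Small constants come from the const_int_rtx cache, so equal values
     are pointer-identical.  */
  rtx a = GEN_INT (2);
  gcc_assert (a == const2_rtx && a == GEN_INT (2));

  /* gen_int_mode first truncates C to MODE, so an out-of-range value is
     canonicalized: 0xff in QImode becomes (const_int -1).  */
  gcc_assert (gen_int_mode (0xff, QImode) == constm1_rtx);
}
#endif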
405 /* CONST_DOUBLEs might be created from pairs of integers, or from
406 REAL_VALUE_TYPEs. Also, their length is known only at run time,
407 so we cannot use gen_rtx_raw_CONST_DOUBLE. */
409 /* Determine whether REAL, a CONST_DOUBLE, already exists in the
410 hash table. If so, return its counterpart; otherwise add it
411 to the hash table and return it. */
412 static rtx
413 lookup_const_double (rtx real)
415 void **slot = htab_find_slot (const_double_htab, real, INSERT);
416 if (*slot == 0)
417 *slot = real;
419 return (rtx) *slot;
422 /* Return a CONST_DOUBLE rtx for a floating-point value specified by
423 VALUE in mode MODE. */
425 const_double_from_real_value (REAL_VALUE_TYPE value, enum machine_mode mode)
427 rtx real = rtx_alloc (CONST_DOUBLE);
428 PUT_MODE (real, mode);
430 real->u.rv = value;
432 return lookup_const_double (real);
435 /* Return a CONST_DOUBLE or CONST_INT for a value specified as a pair
436 of ints: I0 is the low-order word and I1 is the high-order word.
437 Do not use this routine for non-integer modes; convert to
438 REAL_VALUE_TYPE and use CONST_DOUBLE_FROM_REAL_VALUE. */
441 immed_double_const (HOST_WIDE_INT i0, HOST_WIDE_INT i1, enum machine_mode mode)
443 rtx value;
444 unsigned int i;
446 /* There are the following cases (note that there are no modes with
447 HOST_BITS_PER_WIDE_INT < GET_MODE_BITSIZE (mode) < 2 * HOST_BITS_PER_WIDE_INT):
449 1) If GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT, then we use
450 gen_int_mode.
451 2) GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT, but the value of
452 the integer fits into HOST_WIDE_INT anyway (i.e., i1 consists only
 453 of copies of the sign bit, and the signs of i0 and i1 are the same), then
454 we return a CONST_INT for i0.
455 3) Otherwise, we create a CONST_DOUBLE for i0 and i1. */
456 if (mode != VOIDmode)
458 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT
459 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT
460 /* We can get a 0 for an error mark. */
461 || GET_MODE_CLASS (mode) == MODE_VECTOR_INT
462 || GET_MODE_CLASS (mode) == MODE_VECTOR_FLOAT);
464 if (GET_MODE_BITSIZE (mode) <= HOST_BITS_PER_WIDE_INT)
465 return gen_int_mode (i0, mode);
467 gcc_assert (GET_MODE_BITSIZE (mode) == 2 * HOST_BITS_PER_WIDE_INT);
470 /* If this integer fits in one word, return a CONST_INT. */
471 if ((i1 == 0 && i0 >= 0) || (i1 == ~0 && i0 < 0))
472 return GEN_INT (i0);
474 /* We use VOIDmode for integers. */
475 value = rtx_alloc (CONST_DOUBLE);
476 PUT_MODE (value, VOIDmode);
478 CONST_DOUBLE_LOW (value) = i0;
479 CONST_DOUBLE_HIGH (value) = i1;
481 for (i = 2; i < (sizeof CONST_DOUBLE_FORMAT - 1); i++)
482 XWINT (value, i) = 0;
484 return lookup_const_double (value);
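
/* Editor's note: an illustrative sketch, not part of the original file,
   walking through the three cases listed in the comment above.  The
   wrapper function is hypothetical.  */
#if 0
static void
example_immed_double_const (void)
{
  /* Case 1: the mode fits in a HOST_WIDE_INT, so gen_int_mode is used.  */
  rtx a = immed_double_const (42, 0, SImode);     /* (const_int 42)  */

  /* Case 2: the high word is just the sign-extension of the low word, so
     a plain CONST_INT is still returned.  */
  rtx b = immed_double_const (-1, -1, VOIDmode);  /* (const_int -1)  */

  /* Case 3: a genuine double-word value gets a VOIDmode CONST_DOUBLE,
     interned through lookup_const_double.  */
  rtx c = immed_double_const (0, 1, VOIDmode);

  gcc_assert (GET_CODE (a) == CONST_INT
              && GET_CODE (b) == CONST_INT
              && GET_CODE (c) == CONST_DOUBLE);
}
#endif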
488 gen_rtx_REG (enum machine_mode mode, unsigned int regno)
490 /* In case the MD file explicitly references the frame pointer, have
491 all such references point to the same frame pointer. This is
492 used during frame pointer elimination to distinguish the explicit
493 references to these registers from pseudos that happened to be
494 assigned to them.
496 If we have eliminated the frame pointer or arg pointer, we will
497 be using it as a normal register, for example as a spill
498 register. In such cases, we might be accessing it in a mode that
499 is not Pmode and therefore cannot use the pre-allocated rtx.
501 Also don't do this when we are making new REGs in reload, since
502 we don't want to get confused with the real pointers. */
504 if (mode == Pmode && !reload_in_progress)
506 if (regno == FRAME_POINTER_REGNUM
507 && (!reload_completed || frame_pointer_needed))
508 return frame_pointer_rtx;
509 #if FRAME_POINTER_REGNUM != HARD_FRAME_POINTER_REGNUM
510 if (regno == HARD_FRAME_POINTER_REGNUM
511 && (!reload_completed || frame_pointer_needed))
512 return hard_frame_pointer_rtx;
513 #endif
514 #if FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM && HARD_FRAME_POINTER_REGNUM != ARG_POINTER_REGNUM
515 if (regno == ARG_POINTER_REGNUM)
516 return arg_pointer_rtx;
517 #endif
518 #ifdef RETURN_ADDRESS_POINTER_REGNUM
519 if (regno == RETURN_ADDRESS_POINTER_REGNUM)
520 return return_address_pointer_rtx;
521 #endif
522 if (regno == (unsigned) PIC_OFFSET_TABLE_REGNUM
523 && fixed_regs[PIC_OFFSET_TABLE_REGNUM])
524 return pic_offset_table_rtx;
525 if (regno == STACK_POINTER_REGNUM)
526 return stack_pointer_rtx;
529 #if 0
530 /* If the per-function register table has been set up, try to re-use
531 an existing entry in that table to avoid useless generation of RTL.
533 This code is disabled for now until we can fix the various backends
534 which depend on having non-shared hard registers in some cases. Long
535 term we want to re-enable this code as it can significantly cut down
536 on the amount of useless RTL that gets generated.
538 We'll also need to fix some code that runs after reload that wants to
539 set ORIGINAL_REGNO. */
541 if (cfun
542 && cfun->emit
543 && regno_reg_rtx
544 && regno < FIRST_PSEUDO_REGISTER
545 && reg_raw_mode[regno] == mode)
546 return regno_reg_rtx[regno];
547 #endif
549 return gen_raw_REG (mode, regno);
553 gen_rtx_MEM (enum machine_mode mode, rtx addr)
555 rtx rt = gen_rtx_raw_MEM (mode, addr);
557 /* This field is not cleared by the mere allocation of the rtx, so
558 we clear it here. */
559 MEM_ATTRS (rt) = 0;
561 return rt;
564 /* Generate a memory referring to non-trapping constant memory. */
567 gen_const_mem (enum machine_mode mode, rtx addr)
569 rtx mem = gen_rtx_MEM (mode, addr);
570 MEM_READONLY_P (mem) = 1;
571 MEM_NOTRAP_P (mem) = 1;
572 return mem;
575 /* Generate a MEM referring to fixed portions of the frame, e.g., register
576 save areas. */
579 gen_frame_mem (enum machine_mode mode, rtx addr)
581 rtx mem = gen_rtx_MEM (mode, addr);
582 MEM_NOTRAP_P (mem) = 1;
583 set_mem_alias_set (mem, get_frame_alias_set ());
584 return mem;
587 /* Generate a MEM referring to a temporary use of the stack, not part
588 of the fixed stack frame. For example, something which is pushed
589 by a target splitter. */
591 gen_tmp_stack_mem (enum machine_mode mode, rtx addr)
593 rtx mem = gen_rtx_MEM (mode, addr);
594 MEM_NOTRAP_P (mem) = 1;
595 if (!current_function_calls_alloca)
596 set_mem_alias_set (mem, get_frame_alias_set ());
597 return mem;
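
/* Editor's note: an illustrative sketch, not part of the original file.
   It contrasts the MEM constructors above; the address choice is
   arbitrary and the wrapper function is hypothetical.  */
#if 0
static void
example_mem_constructors (void)
{
  rtx addr = stack_pointer_rtx;

  rtx plain = gen_rtx_MEM (SImode, addr);     /* no attribute bits preset     */
  rtx cst   = gen_const_mem (SImode, addr);   /* read-only and non-trapping   */
  rtx frame = gen_frame_mem (SImode, addr);   /* non-trapping, frame alias set */

  gcc_assert (!MEM_READONLY_P (plain)
              && MEM_READONLY_P (cst) && MEM_NOTRAP_P (cst)
              && MEM_NOTRAP_P (frame));
}
#endif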
600 /* We want to create (subreg:OMODE (obj:IMODE) OFFSET). Return true if
601 this construct would be valid, and false otherwise. */
603 bool
604 validate_subreg (enum machine_mode omode, enum machine_mode imode,
605 rtx reg, unsigned int offset)
607 unsigned int isize = GET_MODE_SIZE (imode);
608 unsigned int osize = GET_MODE_SIZE (omode);
610 /* All subregs must be aligned. */
611 if (offset % osize != 0)
612 return false;
614 /* The subreg offset cannot be outside the inner object. */
615 if (offset >= isize)
616 return false;
618 /* ??? This should not be here. Temporarily continue to allow word_mode
619 subregs of anything. The most common offender is (subreg:SI (reg:DF)).
620 Generally, backends are doing something sketchy but it'll take time to
621 fix them all. */
622 if (omode == word_mode)
624 /* ??? Similarly, e.g. with (subreg:DF (reg:TI)). Though store_bit_field
625 is the culprit here, and not the backends. */
626 else if (osize >= UNITS_PER_WORD && isize >= osize)
628 /* Allow component subregs of complex and vector. Though given the below
629 extraction rules, it's not always clear what that means. */
630 else if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
631 && GET_MODE_INNER (imode) == omode)
633 /* ??? x86 sse code makes heavy use of *paradoxical* vector subregs,
634 i.e. (subreg:V4SF (reg:SF) 0). This surely isn't the cleanest way to
635 represent this. It's questionable if this ought to be represented at
636 all -- why can't this all be hidden in post-reload splitters that make
 637 arbitrary mode changes to the registers themselves. */
638 else if (VECTOR_MODE_P (omode) && GET_MODE_INNER (omode) == imode)
640 /* Subregs involving floating point modes are not allowed to
641 change size. Therefore (subreg:DI (reg:DF) 0) is fine, but
642 (subreg:SI (reg:DF) 0) isn't. */
643 else if (FLOAT_MODE_P (imode) || FLOAT_MODE_P (omode))
645 if (isize != osize)
646 return false;
649 /* Paradoxical subregs must have offset zero. */
650 if (osize > isize)
651 return offset == 0;
653 /* This is a normal subreg. Verify that the offset is representable. */
655 /* For hard registers, we already have most of these rules collected in
656 subreg_offset_representable_p. */
657 if (reg && REG_P (reg) && HARD_REGISTER_P (reg))
659 unsigned int regno = REGNO (reg);
661 #ifdef CANNOT_CHANGE_MODE_CLASS
662 if ((COMPLEX_MODE_P (imode) || VECTOR_MODE_P (imode))
663 && GET_MODE_INNER (imode) == omode)
665 else if (REG_CANNOT_CHANGE_MODE_P (regno, imode, omode))
666 return false;
667 #endif
669 return subreg_offset_representable_p (regno, imode, offset, omode);
672 /* For pseudo registers, we want most of the same checks. Namely:
 673 If the register is no larger than a word, the subreg must be the lowpart.
674 If the register is larger than a word, the subreg must be the lowpart
675 of a subword. A subreg does *not* perform arbitrary bit extraction.
676 Given that we've already checked mode/offset alignment, we only have
677 to check subword subregs here. */
678 if (osize < UNITS_PER_WORD)
680 enum machine_mode wmode = isize > UNITS_PER_WORD ? word_mode : imode;
681 unsigned int low_off = subreg_lowpart_offset (omode, wmode);
682 if (offset % UNITS_PER_WORD != low_off)
683 return false;
685 return true;
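
/* Editor's note: an illustrative sketch, not part of the original file,
   showing a few consequences of the rules above.  The wrapper function
   is hypothetical; the asserted results should hold regardless of the
   target's word size or endianness.  */
#if 0
static void
example_validate_subreg (void)
{
  /* An offset that is not a multiple of the outer size is rejected:
     (subreg:SI (reg:DI) 1) is never valid.  */
  gcc_assert (!validate_subreg (SImode, DImode, NULL_RTX, 1));

  /* An offset at or beyond the end of the inner object is rejected.  */
  gcc_assert (!validate_subreg (SImode, DImode, NULL_RTX, 8));

  /* Same-size punning between float and integer modes is allowed:
     (subreg:DI (reg:DF) 0) is fine.  */
  gcc_assert (validate_subreg (DImode, DFmode, NULL_RTX, 0));
}
#endif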
689 gen_rtx_SUBREG (enum machine_mode mode, rtx reg, int offset)
691 gcc_assert (validate_subreg (mode, GET_MODE (reg), reg, offset));
692 return gen_rtx_raw_SUBREG (mode, reg, offset);
695 /* Generate a SUBREG representing the least-significant part of REG if MODE
696 is smaller than mode of REG, otherwise paradoxical SUBREG. */
699 gen_lowpart_SUBREG (enum machine_mode mode, rtx reg)
701 enum machine_mode inmode;
703 inmode = GET_MODE (reg);
704 if (inmode == VOIDmode)
705 inmode = mode;
706 return gen_rtx_SUBREG (mode, reg,
707 subreg_lowpart_offset (mode, inmode));
710 /* gen_rtvec (n, [rt1, ..., rtn])
712 ** This routine creates an rtvec and stores within it the
713 ** pointers to rtx's which are its arguments.
716 /*VARARGS1*/
717 rtvec
718 gen_rtvec (int n, ...)
720 int i, save_n;
721 rtx *vector;
722 va_list p;
724 va_start (p, n);
726 if (n == 0)
727 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
729 vector = alloca (n * sizeof (rtx));
731 for (i = 0; i < n; i++)
732 vector[i] = va_arg (p, rtx);
734 /* The definition of VA_* in K&R C causes `n' to go out of scope. */
735 save_n = n;
736 va_end (p);
738 return gen_rtvec_v (save_n, vector);
741 rtvec
742 gen_rtvec_v (int n, rtx *argp)
744 int i;
745 rtvec rt_val;
747 if (n == 0)
748 return NULL_RTVEC; /* Don't allocate an empty rtvec... */
750 rt_val = rtvec_alloc (n); /* Allocate an rtvec... */
752 for (i = 0; i < n; i++)
753 rt_val->elem[i] = *argp++;
755 return rt_val;
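
/* Editor's note: an illustrative sketch, not part of the original file.
   GET_NUM_ELEM and RTVEC_ELT come from rtl.h; the wrapper function is
   hypothetical.  */
#if 0
static void
example_gen_rtvec (void)
{
  /* Build a two-element vector of rtxs; PARALLELs and ASM_OPERANDS are
     typical consumers of such vectors.  */
  rtvec v = gen_rtvec (2, const0_rtx, const1_rtx);
  gcc_assert (GET_NUM_ELEM (v) == 2 && RTVEC_ELT (v, 0) == const0_rtx);

  /* An empty request does not allocate anything.  */
  gcc_assert (gen_rtvec (0) == NULL_RTVEC);
}
#endif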
758 /* Generate a REG rtx for a new pseudo register of mode MODE.
759 This pseudo is assigned the next sequential register number. */
762 gen_reg_rtx (enum machine_mode mode)
764 struct function *f = cfun;
765 rtx val;
767 /* Don't let anything called after initial flow analysis create new
768 registers. */
769 gcc_assert (!no_new_pseudos);
771 if (generating_concat_p
772 && (GET_MODE_CLASS (mode) == MODE_COMPLEX_FLOAT
773 || GET_MODE_CLASS (mode) == MODE_COMPLEX_INT))
775 /* For complex modes, don't make a single pseudo.
776 Instead, make a CONCAT of two pseudos.
777 This allows noncontiguous allocation of the real and imaginary parts,
778 which makes much better code. Besides, allocating DCmode
779 pseudos overstrains reload on some machines like the 386. */
780 rtx realpart, imagpart;
781 enum machine_mode partmode = GET_MODE_INNER (mode);
783 realpart = gen_reg_rtx (partmode);
784 imagpart = gen_reg_rtx (partmode);
785 return gen_rtx_CONCAT (mode, realpart, imagpart);
 788 /* Make sure regno_pointer_align and regno_reg_rtx are large
789 enough to have an element for this pseudo reg number. */
791 if (reg_rtx_no == f->emit->regno_pointer_align_length)
793 int old_size = f->emit->regno_pointer_align_length;
794 char *new;
795 rtx *new1;
797 new = ggc_realloc (f->emit->regno_pointer_align, old_size * 2);
798 memset (new + old_size, 0, old_size);
799 f->emit->regno_pointer_align = (unsigned char *) new;
801 new1 = ggc_realloc (f->emit->x_regno_reg_rtx,
802 old_size * 2 * sizeof (rtx));
803 memset (new1 + old_size, 0, old_size * sizeof (rtx));
804 regno_reg_rtx = new1;
806 f->emit->regno_pointer_align_length = old_size * 2;
809 val = gen_raw_REG (mode, reg_rtx_no);
810 regno_reg_rtx[reg_rtx_no++] = val;
811 return val;
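
/* Editor's note: an illustrative sketch, not part of the original file.
   The CONCAT behaviour below only applies while generating_concat_p is
   nonzero (as it is during expansion); the wrapper function is
   hypothetical.  */
#if 0
static void
example_gen_reg_rtx (void)
{
  /* A scalar mode yields a fresh pseudo numbered from
     FIRST_PSEUDO_REGISTER upwards.  */
  rtx r = gen_reg_rtx (SImode);
  gcc_assert (REG_P (r) && REGNO (r) >= FIRST_PSEUDO_REGISTER);

  /* A complex mode is split into a CONCAT of two pseudos, as described
     in the comment above.  */
  rtx c = gen_reg_rtx (SCmode);
  gcc_assert (GET_CODE (c) == CONCAT
              && GET_MODE (XEXP (c, 0)) == SFmode);
}
#endif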
814 /* Update NEW with the same attributes as REG, but offsetted by OFFSET.
815 Do the big endian correction if needed. */
817 static void
818 update_reg_offset (rtx new, rtx reg, int offset)
820 tree decl;
821 HOST_WIDE_INT var_size;
823 /* PR middle-end/14084
824 The problem appears when a variable is stored in a larger register
825 and later it is used in the original mode or some mode in between
826 or some part of variable is accessed.
828 On little endian machines there is no problem because
829 the REG_OFFSET of the start of the variable is the same when
830 accessed in any mode (it is 0).
832 However, this is not true on big endian machines.
833 The offset of the start of the variable is different when accessed
834 in different modes.
835 When we are taking a part of the REG we have to change the OFFSET
836 from offset WRT size of mode of REG to offset WRT size of variable.
838 If we would not do the big endian correction the resulting REG_OFFSET
839 would be larger than the size of the DECL.
 841 Examples of correction, for BYTES_BIG_ENDIAN WORDS_BIG_ENDIAN machine:
 843 REG.mode  MODE  DECL size  old offset  new offset  description
 844 DI        SI    4          4           0           int32 in SImode
 845 DI        SI    1          4           0           char in SImode
 846 DI        QI    1          7           0           char in QImode
 847 DI        QI    4          5           1           1st element in QImode
 848                                                    of char[4]
 849 DI        HI    4          6           2           1st element in HImode
 850                                                    of int16[2]
852 If the size of DECL is equal or greater than the size of REG
853 we can't do this correction because the register holds the
854 whole variable or a part of the variable and thus the REG_OFFSET
855 is already correct. */
857 decl = REG_EXPR (reg);
858 if ((BYTES_BIG_ENDIAN || WORDS_BIG_ENDIAN)
859 && decl != NULL
860 && offset > 0
861 && GET_MODE_SIZE (GET_MODE (reg)) > GET_MODE_SIZE (GET_MODE (new))
862 && ((var_size = int_size_in_bytes (TREE_TYPE (decl))) > 0
863 && var_size < GET_MODE_SIZE (GET_MODE (reg))))
865 int offset_le;
867 /* Convert machine endian to little endian WRT size of mode of REG. */
868 if (WORDS_BIG_ENDIAN)
869 offset_le = ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
870 / UNITS_PER_WORD) * UNITS_PER_WORD;
871 else
872 offset_le = (offset / UNITS_PER_WORD) * UNITS_PER_WORD;
874 if (BYTES_BIG_ENDIAN)
875 offset_le += ((GET_MODE_SIZE (GET_MODE (reg)) - 1 - offset)
876 % UNITS_PER_WORD);
877 else
878 offset_le += offset % UNITS_PER_WORD;
880 if (offset_le >= var_size)
 882 /* MODE is wider than the variable, so the new reg will cover
 883 the whole variable and the resulting OFFSET should be 0. */
884 offset = 0;
886 else
888 /* Convert little endian to machine endian WRT size of variable. */
889 if (WORDS_BIG_ENDIAN)
890 offset = ((var_size - 1 - offset_le)
891 / UNITS_PER_WORD) * UNITS_PER_WORD;
892 else
893 offset = (offset_le / UNITS_PER_WORD) * UNITS_PER_WORD;
895 if (BYTES_BIG_ENDIAN)
896 offset += ((var_size - 1 - offset_le)
897 % UNITS_PER_WORD);
898 else
899 offset += offset_le % UNITS_PER_WORD;
903 REG_ATTRS (new) = get_reg_attrs (REG_EXPR (reg),
904 REG_OFFSET (reg) + offset);
907 /* Generate a register with same attributes as REG, but offsetted by
908 OFFSET. */
911 gen_rtx_REG_offset (rtx reg, enum machine_mode mode, unsigned int regno,
912 int offset)
914 rtx new = gen_rtx_REG (mode, regno);
916 update_reg_offset (new, reg, offset);
917 return new;
920 /* Generate a new pseudo-register with the same attributes as REG, but
921 offsetted by OFFSET. */
924 gen_reg_rtx_offset (rtx reg, enum machine_mode mode, int offset)
926 rtx new = gen_reg_rtx (mode);
928 update_reg_offset (new, reg, offset);
929 return new;
 932 /* Set the register attributes of REG from the memory attributes of MEM. */
934 void
935 set_reg_attrs_from_mem (rtx reg, rtx mem)
937 if (MEM_OFFSET (mem) && GET_CODE (MEM_OFFSET (mem)) == CONST_INT)
938 REG_ATTRS (reg)
939 = get_reg_attrs (MEM_EXPR (mem), INTVAL (MEM_OFFSET (mem)));
942 /* Set the register attributes for registers contained in PARM_RTX.
943 Use needed values from memory attributes of MEM. */
945 void
946 set_reg_attrs_for_parm (rtx parm_rtx, rtx mem)
948 if (REG_P (parm_rtx))
949 set_reg_attrs_from_mem (parm_rtx, mem);
950 else if (GET_CODE (parm_rtx) == PARALLEL)
952 /* Check for a NULL entry in the first slot, used to indicate that the
953 parameter goes both on the stack and in registers. */
954 int i = XEXP (XVECEXP (parm_rtx, 0, 0), 0) ? 0 : 1;
955 for (; i < XVECLEN (parm_rtx, 0); i++)
957 rtx x = XVECEXP (parm_rtx, 0, i);
958 if (REG_P (XEXP (x, 0)))
959 REG_ATTRS (XEXP (x, 0))
960 = get_reg_attrs (MEM_EXPR (mem),
961 INTVAL (XEXP (x, 1)));
966 /* Assign the RTX X to declaration T. */
967 void
968 set_decl_rtl (tree t, rtx x)
970 DECL_WRTL_CHECK (t)->decl_with_rtl.rtl = x;
972 if (!x)
973 return;
974 /* For register, we maintain the reverse information too. */
975 if (REG_P (x))
976 REG_ATTRS (x) = get_reg_attrs (t, 0);
977 else if (GET_CODE (x) == SUBREG)
978 REG_ATTRS (SUBREG_REG (x))
979 = get_reg_attrs (t, -SUBREG_BYTE (x));
980 if (GET_CODE (x) == CONCAT)
982 if (REG_P (XEXP (x, 0)))
983 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
984 if (REG_P (XEXP (x, 1)))
985 REG_ATTRS (XEXP (x, 1))
986 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
988 if (GET_CODE (x) == PARALLEL)
990 int i;
991 for (i = 0; i < XVECLEN (x, 0); i++)
993 rtx y = XVECEXP (x, 0, i);
994 if (REG_P (XEXP (y, 0)))
995 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1000 /* Assign the RTX X to parameter declaration T. */
1001 void
1002 set_decl_incoming_rtl (tree t, rtx x)
1004 DECL_INCOMING_RTL (t) = x;
1006 if (!x)
1007 return;
1008 /* For register, we maintain the reverse information too. */
1009 if (REG_P (x))
1010 REG_ATTRS (x) = get_reg_attrs (t, 0);
1011 else if (GET_CODE (x) == SUBREG)
1012 REG_ATTRS (SUBREG_REG (x))
1013 = get_reg_attrs (t, -SUBREG_BYTE (x));
1014 if (GET_CODE (x) == CONCAT)
1016 if (REG_P (XEXP (x, 0)))
1017 REG_ATTRS (XEXP (x, 0)) = get_reg_attrs (t, 0);
1018 if (REG_P (XEXP (x, 1)))
1019 REG_ATTRS (XEXP (x, 1))
1020 = get_reg_attrs (t, GET_MODE_UNIT_SIZE (GET_MODE (XEXP (x, 0))));
1022 if (GET_CODE (x) == PARALLEL)
1024 int i, start;
1026 /* Check for a NULL entry, used to indicate that the parameter goes
1027 both on the stack and in registers. */
1028 if (XEXP (XVECEXP (x, 0, 0), 0))
1029 start = 0;
1030 else
1031 start = 1;
1033 for (i = start; i < XVECLEN (x, 0); i++)
1035 rtx y = XVECEXP (x, 0, i);
1036 if (REG_P (XEXP (y, 0)))
1037 REG_ATTRS (XEXP (y, 0)) = get_reg_attrs (t, INTVAL (XEXP (y, 1)));
1042 /* Identify REG (which may be a CONCAT) as a user register. */
1044 void
1045 mark_user_reg (rtx reg)
1047 if (GET_CODE (reg) == CONCAT)
1049 REG_USERVAR_P (XEXP (reg, 0)) = 1;
1050 REG_USERVAR_P (XEXP (reg, 1)) = 1;
1052 else
1054 gcc_assert (REG_P (reg));
1055 REG_USERVAR_P (reg) = 1;
1059 /* Identify REG as a probable pointer register and show its alignment
1060 as ALIGN, if nonzero. */
1062 void
1063 mark_reg_pointer (rtx reg, int align)
1065 if (! REG_POINTER (reg))
1067 REG_POINTER (reg) = 1;
1069 if (align)
1070 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1072 else if (align && align < REGNO_POINTER_ALIGN (REGNO (reg)))
1073 /* We can no longer be sure just how aligned this pointer is. */
1074 REGNO_POINTER_ALIGN (REGNO (reg)) = align;
1077 /* Return 1 plus largest pseudo reg number used in the current function. */
1080 max_reg_num (void)
1082 return reg_rtx_no;
1085 /* Return 1 + the largest label number used so far in the current function. */
1088 max_label_num (void)
1090 return label_num;
1093 /* Return first label number used in this function (if any were used). */
1096 get_first_label_num (void)
1098 return first_label_num;
1101 /* If the rtx for label was created during the expansion of a nested
1102 function, then first_label_num won't include this label number.
1103 Fix this now so that array indices work later. */
1105 void
1106 maybe_set_first_label_num (rtx x)
1108 if (CODE_LABEL_NUMBER (x) < first_label_num)
1109 first_label_num = CODE_LABEL_NUMBER (x);
1112 /* Return a value representing some low-order bits of X, where the number
1113 of low-order bits is given by MODE. Note that no conversion is done
1114 between floating-point and fixed-point values, rather, the bit
1115 representation is returned.
1117 This function handles the cases in common between gen_lowpart, below,
1118 and two variants in cse.c and combine.c. These are the cases that can
1119 be safely handled at all points in the compilation.
1121 If this is not a case we can handle, return 0. */
1124 gen_lowpart_common (enum machine_mode mode, rtx x)
1126 int msize = GET_MODE_SIZE (mode);
1127 int xsize;
1128 int offset = 0;
1129 enum machine_mode innermode;
1131 /* Unfortunately, this routine doesn't take a parameter for the mode of X,
1132 so we have to make one up. Yuk. */
1133 innermode = GET_MODE (x);
1134 if (GET_CODE (x) == CONST_INT
1135 && msize * BITS_PER_UNIT <= HOST_BITS_PER_WIDE_INT)
1136 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT, MODE_INT, 0);
1137 else if (innermode == VOIDmode)
1138 innermode = mode_for_size (HOST_BITS_PER_WIDE_INT * 2, MODE_INT, 0);
1140 xsize = GET_MODE_SIZE (innermode);
1142 gcc_assert (innermode != VOIDmode && innermode != BLKmode);
1144 if (innermode == mode)
1145 return x;
1147 /* MODE must occupy no more words than the mode of X. */
1148 if ((msize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD
1149 > ((xsize + (UNITS_PER_WORD - 1)) / UNITS_PER_WORD))
1150 return 0;
1152 /* Don't allow generating paradoxical FLOAT_MODE subregs. */
1153 if (SCALAR_FLOAT_MODE_P (mode) && msize > xsize)
1154 return 0;
1156 offset = subreg_lowpart_offset (mode, innermode);
1158 if ((GET_CODE (x) == ZERO_EXTEND || GET_CODE (x) == SIGN_EXTEND)
1159 && (GET_MODE_CLASS (mode) == MODE_INT
1160 || GET_MODE_CLASS (mode) == MODE_PARTIAL_INT))
1162 /* If we are getting the low-order part of something that has been
1163 sign- or zero-extended, we can either just use the object being
1164 extended or make a narrower extension. If we want an even smaller
1165 piece than the size of the object being extended, call ourselves
1166 recursively.
1168 This case is used mostly by combine and cse. */
1170 if (GET_MODE (XEXP (x, 0)) == mode)
1171 return XEXP (x, 0);
1172 else if (msize < GET_MODE_SIZE (GET_MODE (XEXP (x, 0))))
1173 return gen_lowpart_common (mode, XEXP (x, 0));
1174 else if (msize < xsize)
1175 return gen_rtx_fmt_e (GET_CODE (x), mode, XEXP (x, 0));
1177 else if (GET_CODE (x) == SUBREG || REG_P (x)
1178 || GET_CODE (x) == CONCAT || GET_CODE (x) == CONST_VECTOR
1179 || GET_CODE (x) == CONST_DOUBLE || GET_CODE (x) == CONST_INT)
1180 return simplify_gen_subreg (mode, x, innermode, offset);
1182 /* Otherwise, we can't do this. */
1183 return 0;
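
/* Editor's note: an illustrative sketch, not part of the original file.
   The wrapper function is hypothetical.  */
#if 0
static void
example_gen_lowpart_common (void)
{
  /* The SImode low part of a DImode pseudo is a SUBREG whose byte offset
     comes from subreg_lowpart_offset (0 on a little-endian target, 4 on
     a typical big-endian one).  */
  rtx r = gen_reg_rtx (DImode);
  rtx lo = gen_lowpart_common (SImode, r);
  gcc_assert (GET_CODE (lo) == SUBREG
              && SUBREG_BYTE (lo) == subreg_lowpart_offset (SImode, DImode));

  /* Asking for a mode that needs more words than X occupies fails.  */
  gcc_assert (gen_lowpart_common (TImode, r) == 0);
}
#endif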
1187 gen_highpart (enum machine_mode mode, rtx x)
1189 unsigned int msize = GET_MODE_SIZE (mode);
1190 rtx result;
1192 /* This case loses if X is a subreg. To catch bugs early,
1193 complain if an invalid MODE is used even in other cases. */
1194 gcc_assert (msize <= UNITS_PER_WORD
1195 || msize == (unsigned int) GET_MODE_UNIT_SIZE (GET_MODE (x)));
1197 result = simplify_gen_subreg (mode, x, GET_MODE (x),
1198 subreg_highpart_offset (mode, GET_MODE (x)));
1199 gcc_assert (result);
1201 /* simplify_gen_subreg is not guaranteed to return a valid operand for
1202 the target if we have a MEM. gen_highpart must return a valid operand,
1203 emitting code if necessary to do so. */
1204 if (MEM_P (result))
1206 result = validize_mem (result);
1207 gcc_assert (result);
1210 return result;
1213 /* Like gen_highpart, but accept mode of EXP operand in case EXP can
1214 be VOIDmode constant. */
1216 gen_highpart_mode (enum machine_mode outermode, enum machine_mode innermode, rtx exp)
1218 if (GET_MODE (exp) != VOIDmode)
1220 gcc_assert (GET_MODE (exp) == innermode);
1221 return gen_highpart (outermode, exp);
1223 return simplify_gen_subreg (outermode, exp, innermode,
1224 subreg_highpart_offset (outermode, innermode));
1227 /* Return offset in bytes to get OUTERMODE low part
1228 of the value in mode INNERMODE stored in memory in target format. */
1230 unsigned int
1231 subreg_lowpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1233 unsigned int offset = 0;
1234 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1236 if (difference > 0)
1238 if (WORDS_BIG_ENDIAN)
1239 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1240 if (BYTES_BIG_ENDIAN)
1241 offset += difference % UNITS_PER_WORD;
1244 return offset;
1247 /* Return offset in bytes to get OUTERMODE high part
1248 of the value in mode INNERMODE stored in memory in target format. */
1249 unsigned int
1250 subreg_highpart_offset (enum machine_mode outermode, enum machine_mode innermode)
1252 unsigned int offset = 0;
1253 int difference = (GET_MODE_SIZE (innermode) - GET_MODE_SIZE (outermode));
1255 gcc_assert (GET_MODE_SIZE (innermode) >= GET_MODE_SIZE (outermode));
1257 if (difference > 0)
1259 if (! WORDS_BIG_ENDIAN)
1260 offset += (difference / UNITS_PER_WORD) * UNITS_PER_WORD;
1261 if (! BYTES_BIG_ENDIAN)
1262 offset += difference % UNITS_PER_WORD;
1265 return offset;
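
/* Editor's note: an illustrative sketch, not part of the original file.
   The wrapper function is hypothetical.  */
#if 0
static void
example_subreg_part_offsets (void)
{
  /* For SImode inside DImode the low and high parts are 4 bytes apart;
     which byte offset names the low part depends on endianness.  On a
     little-endian target subreg_lowpart_offset (SImode, DImode) is 0 and
     subreg_highpart_offset (SImode, DImode) is 4; on a big-endian target
     the two values are swapped.  Their sum is always the size difference.  */
  gcc_assert (subreg_lowpart_offset (SImode, DImode)
              + subreg_highpart_offset (SImode, DImode)
              == GET_MODE_SIZE (DImode) - GET_MODE_SIZE (SImode));
}
#endif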
1268 /* Return 1 iff X, assumed to be a SUBREG,
1269 refers to the least significant part of its containing reg.
1270 If X is not a SUBREG, always return 1 (it is its own low part!). */
1273 subreg_lowpart_p (rtx x)
1275 if (GET_CODE (x) != SUBREG)
1276 return 1;
1277 else if (GET_MODE (SUBREG_REG (x)) == VOIDmode)
1278 return 0;
1280 return (subreg_lowpart_offset (GET_MODE (x), GET_MODE (SUBREG_REG (x)))
1281 == SUBREG_BYTE (x));
1284 /* Return subword OFFSET of operand OP.
1285 The word number, OFFSET, is interpreted as the word number starting
1286 at the low-order address. OFFSET 0 is the low-order word if not
1287 WORDS_BIG_ENDIAN, otherwise it is the high-order word.
1289 If we cannot extract the required word, we return zero. Otherwise,
1290 an rtx corresponding to the requested word will be returned.
1292 VALIDATE_ADDRESS is nonzero if the address should be validated. Before
1293 reload has completed, a valid address will always be returned. After
1294 reload, if a valid address cannot be returned, we return zero.
1296 If VALIDATE_ADDRESS is zero, we simply form the required address; validating
1297 it is the responsibility of the caller.
1299 MODE is the mode of OP in case it is a CONST_INT.
1301 ??? This is still rather broken for some cases. The problem for the
1302 moment is that all callers of this thing provide no 'goal mode' to
1303 tell us to work with. This exists because all callers were written
1304 in a word based SUBREG world.
1305 Now use of this function can be deprecated by simplify_subreg in most
1306 cases.
1310 operand_subword (rtx op, unsigned int offset, int validate_address, enum machine_mode mode)
1312 if (mode == VOIDmode)
1313 mode = GET_MODE (op);
1315 gcc_assert (mode != VOIDmode);
1317 /* If OP is narrower than a word, fail. */
1318 if (mode != BLKmode
1319 && (GET_MODE_SIZE (mode) < UNITS_PER_WORD))
1320 return 0;
1322 /* If we want a word outside OP, return zero. */
1323 if (mode != BLKmode
1324 && (offset + 1) * UNITS_PER_WORD > GET_MODE_SIZE (mode))
1325 return const0_rtx;
1327 /* Form a new MEM at the requested address. */
1328 if (MEM_P (op))
1330 rtx new = adjust_address_nv (op, word_mode, offset * UNITS_PER_WORD);
1332 if (! validate_address)
1333 return new;
1335 else if (reload_completed)
1337 if (! strict_memory_address_p (word_mode, XEXP (new, 0)))
1338 return 0;
1340 else
1341 return replace_equiv_address (new, XEXP (new, 0));
1344 /* Rest can be handled by simplify_subreg. */
1345 return simplify_gen_subreg (word_mode, op, mode, (offset * UNITS_PER_WORD));
1348 /* Similar to `operand_subword', but never return 0. If we can't
1349 extract the required subword, put OP into a register and try again.
1350 The second attempt must succeed. We always validate the address in
1351 this case.
1353 MODE is the mode of OP, in case it is CONST_INT. */
1356 operand_subword_force (rtx op, unsigned int offset, enum machine_mode mode)
1358 rtx result = operand_subword (op, offset, 1, mode);
1360 if (result)
1361 return result;
1363 if (mode != BLKmode && mode != VOIDmode)
1365 /* If this is a register which can not be accessed by words, copy it
1366 to a pseudo register. */
1367 if (REG_P (op))
1368 op = copy_to_reg (op);
1369 else
1370 op = force_reg (mode, op);
1373 result = operand_subword (op, offset, 1, mode);
1374 gcc_assert (result);
1376 return result;
1379 /* Within a MEM_EXPR, we care about either (1) a component ref of a decl,
1380 or (2) a component ref of something variable. Represent the latter with
1381 a NULL expression. */
1383 static tree
1384 component_ref_for_mem_expr (tree ref)
1386 tree inner = TREE_OPERAND (ref, 0);
1388 if (TREE_CODE (inner) == COMPONENT_REF)
1389 inner = component_ref_for_mem_expr (inner);
1390 else
1392 /* Now remove any conversions: they don't change what the underlying
1393 object is. Likewise for SAVE_EXPR. */
1394 while (TREE_CODE (inner) == NOP_EXPR || TREE_CODE (inner) == CONVERT_EXPR
1395 || TREE_CODE (inner) == NON_LVALUE_EXPR
1396 || TREE_CODE (inner) == VIEW_CONVERT_EXPR
1397 || TREE_CODE (inner) == SAVE_EXPR)
1398 inner = TREE_OPERAND (inner, 0);
1400 if (! DECL_P (inner))
1401 inner = NULL_TREE;
1404 if (inner == TREE_OPERAND (ref, 0))
1405 return ref;
1406 else
1407 return build3 (COMPONENT_REF, TREE_TYPE (ref), inner,
1408 TREE_OPERAND (ref, 1), NULL_TREE);
1411 /* Returns 1 if the two MEM_EXPRs can be considered equal,
1412 and 0 otherwise. */
1415 mem_expr_equal_p (tree expr1, tree expr2)
1417 if (expr1 == expr2)
1418 return 1;
1420 if (! expr1 || ! expr2)
1421 return 0;
1423 if (TREE_CODE (expr1) != TREE_CODE (expr2))
1424 return 0;
1426 if (TREE_CODE (expr1) == COMPONENT_REF)
1427 return
1428 mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1429 TREE_OPERAND (expr2, 0))
1430 && mem_expr_equal_p (TREE_OPERAND (expr1, 1), /* field decl */
1431 TREE_OPERAND (expr2, 1));
1433 if (INDIRECT_REF_P (expr1))
1434 return mem_expr_equal_p (TREE_OPERAND (expr1, 0),
1435 TREE_OPERAND (expr2, 0));
1437 /* ARRAY_REFs, ARRAY_RANGE_REFs and BIT_FIELD_REFs should already
1438 have been resolved here. */
1439 gcc_assert (DECL_P (expr1));
1441 /* Decls with different pointers can't be equal. */
1442 return 0;
1445 /* Given REF, a MEM, and T, either the type of X or the expression
1446 corresponding to REF, set the memory attributes. OBJECTP is nonzero
1447 if we are making a new object of this type. BITPOS is nonzero if
1448 there is an offset outstanding on T that will be applied later. */
1450 void
1451 set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
1452 HOST_WIDE_INT bitpos)
1454 HOST_WIDE_INT alias = MEM_ALIAS_SET (ref);
1455 tree expr = MEM_EXPR (ref);
1456 rtx offset = MEM_OFFSET (ref);
1457 rtx size = MEM_SIZE (ref);
1458 unsigned int align = MEM_ALIGN (ref);
1459 HOST_WIDE_INT apply_bitpos = 0;
1460 tree type;
1462 /* It can happen that type_for_mode was given a mode for which there
1463 is no language-level type; in that case it returns NULL, which
1464 we can see here. */
1465 if (t == NULL_TREE)
1466 return;
1468 type = TYPE_P (t) ? t : TREE_TYPE (t);
1469 if (type == error_mark_node)
1470 return;
1472 /* If we have already set DECL_RTL = ref, get_alias_set will get the
1473 wrong answer, as it assumes that DECL_RTL already has the right alias
1474 info. Callers should not set DECL_RTL until after the call to
1475 set_mem_attributes. */
1476 gcc_assert (!DECL_P (t) || ref != DECL_RTL_IF_SET (t));
1478 /* Get the alias set from the expression or type (perhaps using a
1479 front-end routine) and use it. */
1480 alias = get_alias_set (t);
1482 MEM_VOLATILE_P (ref) |= TYPE_VOLATILE (type);
1483 MEM_IN_STRUCT_P (ref)
1484 = AGGREGATE_TYPE_P (type) || TREE_CODE (type) == COMPLEX_TYPE;
1485 MEM_POINTER (ref) = POINTER_TYPE_P (type);
1487 /* If we are making an object of this type, or if this is a DECL, we know
1488 that it is a scalar if the type is not an aggregate. */
1489 if ((objectp || DECL_P (t))
1490 && ! AGGREGATE_TYPE_P (type)
1491 && TREE_CODE (type) != COMPLEX_TYPE)
1492 MEM_SCALAR_P (ref) = 1;
1494 /* We can set the alignment from the type if we are making an object,
1495 this is an INDIRECT_REF, or if TYPE_ALIGN_OK. */
1496 if (objectp || TREE_CODE (t) == INDIRECT_REF
1497 || TREE_CODE (t) == ALIGN_INDIRECT_REF
1498 || TYPE_ALIGN_OK (type))
1499 align = MAX (align, TYPE_ALIGN (type));
1500 else
1501 if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
1503 if (integer_zerop (TREE_OPERAND (t, 1)))
1504 /* We don't know anything about the alignment. */
1505 align = BITS_PER_UNIT;
1506 else
1507 align = tree_low_cst (TREE_OPERAND (t, 1), 1);
1510 /* If the size is known, we can set that. */
1511 if (TYPE_SIZE_UNIT (type) && host_integerp (TYPE_SIZE_UNIT (type), 1))
1512 size = GEN_INT (tree_low_cst (TYPE_SIZE_UNIT (type), 1));
1514 /* If T is not a type, we may be able to deduce some more information about
1515 the expression. */
1516 if (! TYPE_P (t))
1518 tree base;
1520 if (TREE_THIS_VOLATILE (t))
1521 MEM_VOLATILE_P (ref) = 1;
1523 /* Now remove any conversions: they don't change what the underlying
1524 object is. Likewise for SAVE_EXPR. */
1525 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
1526 || TREE_CODE (t) == NON_LVALUE_EXPR
1527 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1528 || TREE_CODE (t) == SAVE_EXPR)
1529 t = TREE_OPERAND (t, 0);
1531 /* We may look through structure-like accesses for the purposes of
1532 examining TREE_THIS_NOTRAP, but not array-like accesses. */
1533 base = t;
1534 while (TREE_CODE (base) == COMPONENT_REF
1535 || TREE_CODE (base) == REALPART_EXPR
1536 || TREE_CODE (base) == IMAGPART_EXPR
1537 || TREE_CODE (base) == BIT_FIELD_REF)
1538 base = TREE_OPERAND (base, 0);
1540 if (DECL_P (base))
1542 if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
1543 MEM_NOTRAP_P (ref) = !DECL_WEAK (base);
1544 else
1545 MEM_NOTRAP_P (ref) = 1;
1547 else
1548 MEM_NOTRAP_P (ref) = TREE_THIS_NOTRAP (base);
1550 base = get_base_address (base);
1551 if (base && DECL_P (base)
1552 && TREE_READONLY (base)
1553 && (TREE_STATIC (base) || DECL_EXTERNAL (base)))
1555 tree base_type = TREE_TYPE (base);
1556 gcc_assert (!(base_type && TYPE_NEEDS_CONSTRUCTING (base_type))
1557 || DECL_ARTIFICIAL (base));
1558 MEM_READONLY_P (ref) = 1;
1561 /* If this expression uses its parent's alias set, mark it such
1562 that we won't change it. */
1563 if (component_uses_parent_alias_set (t))
1564 MEM_KEEP_ALIAS_SET_P (ref) = 1;
1566 /* If this is a decl, set the attributes of the MEM from it. */
1567 if (DECL_P (t))
1569 expr = t;
1570 offset = const0_rtx;
1571 apply_bitpos = bitpos;
1572 size = (DECL_SIZE_UNIT (t)
1573 && host_integerp (DECL_SIZE_UNIT (t), 1)
1574 ? GEN_INT (tree_low_cst (DECL_SIZE_UNIT (t), 1)) : 0);
1575 align = DECL_ALIGN (t);
1578 /* If this is a constant, we know the alignment. */
1579 else if (CONSTANT_CLASS_P (t))
1581 align = TYPE_ALIGN (type);
1582 #ifdef CONSTANT_ALIGNMENT
1583 align = CONSTANT_ALIGNMENT (t, align);
1584 #endif
1587 /* If this is a field reference and not a bit-field, record it. */
1588 /* ??? There is some information that can be gleaned from bit-fields,
1589 such as the word offset in the structure that might be modified.
1590 But skip it for now. */
1591 else if (TREE_CODE (t) == COMPONENT_REF
1592 && ! DECL_BIT_FIELD (TREE_OPERAND (t, 1)))
1594 expr = component_ref_for_mem_expr (t);
1595 offset = const0_rtx;
1596 apply_bitpos = bitpos;
1597 /* ??? Any reason the field size would be different than
1598 the size we got from the type? */
1601 /* If this is an array reference, look for an outer field reference. */
1602 else if (TREE_CODE (t) == ARRAY_REF)
1604 tree off_tree = size_zero_node;
1605 /* We can't modify t, because we use it at the end of the
1606 function. */
1607 tree t2 = t;
1611 tree index = TREE_OPERAND (t2, 1);
1612 tree low_bound = array_ref_low_bound (t2);
1613 tree unit_size = array_ref_element_size (t2);
1615 /* We assume all arrays have sizes that are a multiple of a byte.
1616 First subtract the lower bound, if any, in the type of the
1617 index, then convert to sizetype and multiply by the size of
1618 the array element. */
1619 if (! integer_zerop (low_bound))
1620 index = fold_build2 (MINUS_EXPR, TREE_TYPE (index),
1621 index, low_bound);
1623 off_tree = size_binop (PLUS_EXPR,
1624 size_binop (MULT_EXPR,
1625 fold_convert (sizetype,
1626 index),
1627 unit_size),
1628 off_tree);
1629 t2 = TREE_OPERAND (t2, 0);
1631 while (TREE_CODE (t2) == ARRAY_REF);
1633 if (DECL_P (t2))
1635 expr = t2;
1636 offset = NULL;
1637 if (host_integerp (off_tree, 1))
1639 HOST_WIDE_INT ioff = tree_low_cst (off_tree, 1);
1640 HOST_WIDE_INT aoff = (ioff & -ioff) * BITS_PER_UNIT;
1641 align = DECL_ALIGN (t2);
1642 if (aoff && (unsigned HOST_WIDE_INT) aoff < align)
1643 align = aoff;
1644 offset = GEN_INT (ioff);
1645 apply_bitpos = bitpos;
1648 else if (TREE_CODE (t2) == COMPONENT_REF)
1650 expr = component_ref_for_mem_expr (t2);
1651 if (host_integerp (off_tree, 1))
1653 offset = GEN_INT (tree_low_cst (off_tree, 1));
1654 apply_bitpos = bitpos;
1656 /* ??? Any reason the field size would be different than
1657 the size we got from the type? */
1659 else if (flag_argument_noalias > 1
1660 && (INDIRECT_REF_P (t2))
1661 && TREE_CODE (TREE_OPERAND (t2, 0)) == PARM_DECL)
1663 expr = t2;
1664 offset = NULL;
1668 /* If this is a Fortran indirect argument reference, record the
1669 parameter decl. */
1670 else if (flag_argument_noalias > 1
1671 && (INDIRECT_REF_P (t))
1672 && TREE_CODE (TREE_OPERAND (t, 0)) == PARM_DECL)
1674 expr = t;
1675 offset = NULL;
1679 /* If we modified OFFSET based on T, then subtract the outstanding
1680 bit position offset. Similarly, increase the size of the accessed
1681 object to contain the negative offset. */
1682 if (apply_bitpos)
1684 offset = plus_constant (offset, -(apply_bitpos / BITS_PER_UNIT));
1685 if (size)
1686 size = plus_constant (size, apply_bitpos / BITS_PER_UNIT);
1689 if (TREE_CODE (t) == ALIGN_INDIRECT_REF)
1691 /* Force EXPR and OFFSET to NULL, since we don't know exactly what
1692 we're overlapping. */
1693 offset = NULL;
1694 expr = NULL;
1697 /* Now set the attributes we computed above. */
1698 MEM_ATTRS (ref)
1699 = get_mem_attrs (alias, expr, offset, size, align, GET_MODE (ref));
1701 /* If this is already known to be a scalar or aggregate, we are done. */
1702 if (MEM_IN_STRUCT_P (ref) || MEM_SCALAR_P (ref))
1703 return;
1705 /* If it is a reference into an aggregate, this is part of an aggregate.
1706 Otherwise we don't know. */
1707 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
1708 || TREE_CODE (t) == ARRAY_RANGE_REF
1709 || TREE_CODE (t) == BIT_FIELD_REF)
1710 MEM_IN_STRUCT_P (ref) = 1;
1713 void
1714 set_mem_attributes (rtx ref, tree t, int objectp)
1716 set_mem_attributes_minus_bitpos (ref, t, objectp, 0);
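
/* Editor's note: an illustrative sketch, not part of the original file.
   DECL stands for some local VAR_DECL whose DECL_RTL has not been set
   yet; the wrapper function is hypothetical.  */
#if 0
static void
example_set_mem_attributes (tree decl)
{
  /* Typical use: build a MEM for the declaration and let
     set_mem_attributes derive the alias set, MEM_EXPR, size and
     alignment from the tree.  */
  rtx addr = gen_reg_rtx (Pmode);
  rtx mem = gen_rtx_MEM (DECL_MODE (decl), addr);
  set_mem_attributes (mem, decl, 1);
  gcc_assert (MEM_EXPR (mem) == decl);
}
#endif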
1719 /* Set the memory attributes of MEM from the register attributes of REG. */
1721 void
1722 set_mem_attrs_from_reg (rtx mem, rtx reg)
1724 MEM_ATTRS (mem)
1725 = get_mem_attrs (MEM_ALIAS_SET (mem), REG_EXPR (reg),
1726 GEN_INT (REG_OFFSET (reg)),
1727 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1730 /* Set the alias set of MEM to SET. */
1732 void
1733 set_mem_alias_set (rtx mem, HOST_WIDE_INT set)
1735 #ifdef ENABLE_CHECKING
1736 /* If the new and old alias sets don't conflict, something is wrong. */
1737 gcc_assert (alias_sets_conflict_p (set, MEM_ALIAS_SET (mem)));
1738 #endif
1740 MEM_ATTRS (mem) = get_mem_attrs (set, MEM_EXPR (mem), MEM_OFFSET (mem),
1741 MEM_SIZE (mem), MEM_ALIGN (mem),
1742 GET_MODE (mem));
1745 /* Set the alignment of MEM to ALIGN bits. */
1747 void
1748 set_mem_align (rtx mem, unsigned int align)
1750 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1751 MEM_OFFSET (mem), MEM_SIZE (mem), align,
1752 GET_MODE (mem));
1755 /* Set the expr for MEM to EXPR. */
1757 void
1758 set_mem_expr (rtx mem, tree expr)
1760 MEM_ATTRS (mem)
1761 = get_mem_attrs (MEM_ALIAS_SET (mem), expr, MEM_OFFSET (mem),
1762 MEM_SIZE (mem), MEM_ALIGN (mem), GET_MODE (mem));
1765 /* Set the offset of MEM to OFFSET. */
1767 void
1768 set_mem_offset (rtx mem, rtx offset)
1770 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1771 offset, MEM_SIZE (mem), MEM_ALIGN (mem),
1772 GET_MODE (mem));
1775 /* Set the size of MEM to SIZE. */
1777 void
1778 set_mem_size (rtx mem, rtx size)
1780 MEM_ATTRS (mem) = get_mem_attrs (MEM_ALIAS_SET (mem), MEM_EXPR (mem),
1781 MEM_OFFSET (mem), size, MEM_ALIGN (mem),
1782 GET_MODE (mem));
1785 /* Return a memory reference like MEMREF, but with its mode changed to MODE
1786 and its address changed to ADDR. (VOIDmode means don't change the mode.
1787 NULL for ADDR means don't change the address.) VALIDATE is nonzero if the
1788 returned memory location is required to be valid. The memory
1789 attributes are not changed. */
1791 static rtx
1792 change_address_1 (rtx memref, enum machine_mode mode, rtx addr, int validate)
1794 rtx new;
1796 gcc_assert (MEM_P (memref));
1797 if (mode == VOIDmode)
1798 mode = GET_MODE (memref);
1799 if (addr == 0)
1800 addr = XEXP (memref, 0);
1801 if (mode == GET_MODE (memref) && addr == XEXP (memref, 0)
1802 && (!validate || memory_address_p (mode, addr)))
1803 return memref;
1805 if (validate)
1807 if (reload_in_progress || reload_completed)
1808 gcc_assert (memory_address_p (mode, addr));
1809 else
1810 addr = memory_address (mode, addr);
1813 if (rtx_equal_p (addr, XEXP (memref, 0)) && mode == GET_MODE (memref))
1814 return memref;
1816 new = gen_rtx_MEM (mode, addr);
1817 MEM_COPY_ATTRIBUTES (new, memref);
1818 return new;
1821 /* Like change_address_1 with VALIDATE nonzero, but we are not saying in what
1822 way we are changing MEMREF, so we only preserve the alias set. */
1825 change_address (rtx memref, enum machine_mode mode, rtx addr)
1827 rtx new = change_address_1 (memref, mode, addr, 1), size;
1828 enum machine_mode mmode = GET_MODE (new);
1829 unsigned int align;
1831 size = mmode == BLKmode ? 0 : GEN_INT (GET_MODE_SIZE (mmode));
1832 align = mmode == BLKmode ? BITS_PER_UNIT : GET_MODE_ALIGNMENT (mmode);
1834 /* If there are no changes, just return the original memory reference. */
1835 if (new == memref)
1837 if (MEM_ATTRS (memref) == 0
1838 || (MEM_EXPR (memref) == NULL
1839 && MEM_OFFSET (memref) == NULL
1840 && MEM_SIZE (memref) == size
1841 && MEM_ALIGN (memref) == align))
1842 return new;
1844 new = gen_rtx_MEM (mmode, XEXP (memref, 0));
1845 MEM_COPY_ATTRIBUTES (new, memref);
1848 MEM_ATTRS (new)
1849 = get_mem_attrs (MEM_ALIAS_SET (memref), 0, 0, size, align, mmode);
1851 return new;
1854 /* Return a memory reference like MEMREF, but with its mode changed
1855 to MODE and its address offset by OFFSET bytes. If VALIDATE is
1856 nonzero, the memory address is forced to be valid.
1857 If ADJUST is zero, OFFSET is only used to update MEM_ATTRS
1858 and the caller is responsible for adjusting the MEMREF base register. */

1861 adjust_address_1 (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset,
1862 int validate, int adjust)
1864 rtx addr = XEXP (memref, 0);
1865 rtx new;
1866 rtx memoffset = MEM_OFFSET (memref);
1867 rtx size = 0;
1868 unsigned int memalign = MEM_ALIGN (memref);
1870 /* If there are no changes, just return the original memory reference. */
1871 if (mode == GET_MODE (memref) && !offset
1872 && (!validate || memory_address_p (mode, addr)))
1873 return memref;
1875 /* ??? Prefer to create garbage instead of creating shared rtl.
1876 This may happen even if offset is nonzero -- consider
1877 (plus (plus reg reg) const_int) -- so do this always. */
1878 addr = copy_rtx (addr);
1880 if (adjust)
1882 /* If MEMREF is a LO_SUM and the offset is within the alignment of the
1883 object, we can merge it into the LO_SUM. */
1884 if (GET_MODE (memref) != BLKmode && GET_CODE (addr) == LO_SUM
1885 && offset >= 0
1886 && (unsigned HOST_WIDE_INT) offset
1887 < GET_MODE_ALIGNMENT (GET_MODE (memref)) / BITS_PER_UNIT)
1888 addr = gen_rtx_LO_SUM (Pmode, XEXP (addr, 0),
1889 plus_constant (XEXP (addr, 1), offset));
1890 else
1891 addr = plus_constant (addr, offset);
1894 new = change_address_1 (memref, mode, addr, validate);
1896 /* Compute the new values of the memory attributes due to this adjustment.
1897 We add the offsets and update the alignment. */
1898 if (memoffset)
1899 memoffset = GEN_INT (offset + INTVAL (memoffset));
1901 /* Compute the new alignment by taking the MIN of the alignment and the
1902 lowest-order set bit in OFFSET, but don't change the alignment if OFFSET
1903 is zero. */
1904 if (offset != 0)
1905 memalign
1906 = MIN (memalign,
1907 (unsigned HOST_WIDE_INT) (offset & -offset) * BITS_PER_UNIT);
1909 /* We can compute the size in a number of ways. */
1910 if (GET_MODE (new) != BLKmode)
1911 size = GEN_INT (GET_MODE_SIZE (GET_MODE (new)));
1912 else if (MEM_SIZE (memref))
1913 size = plus_constant (MEM_SIZE (memref), -offset);
1915 MEM_ATTRS (new) = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref),
1916 memoffset, size, memalign, GET_MODE (new));
1918 /* At some point, we should validate that this offset is within the object,
1919 if all the appropriate values are known. */
1920 return new;
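/* Most callers reach this through the adjust_address macro in expr.h.
   Called directly, pulling the second SImode word out of a DImode
   reference on a 32-bit target might look like this (the name `di_mem'
   is illustrative):

     rtx high = adjust_address_1 (di_mem, SImode, 4, 1, 1);

   The offset, size and alignment attributes of the result are updated as
   computed above.  */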
1923 /* Return a memory reference like MEMREF, but with its mode changed
1924 to MODE and its address changed to ADDR, which is assumed to be
1925 MEMREF offset by OFFSET bytes. If VALIDATE is
1926 nonzero, the memory address is forced to be valid. */
1929 adjust_automodify_address_1 (rtx memref, enum machine_mode mode, rtx addr,
1930 HOST_WIDE_INT offset, int validate)
1932 memref = change_address_1 (memref, VOIDmode, addr, validate);
1933 return adjust_address_1 (memref, mode, offset, validate, 0);
1936 /* Return a memory reference like MEMREF, but whose address is changed by
1937 adding OFFSET, an RTX, to it. POW2 is the highest power of two factor
1938 known to be in OFFSET (possibly 1). */
1941 offset_address (rtx memref, rtx offset, unsigned HOST_WIDE_INT pow2)
1943 rtx new, addr = XEXP (memref, 0);
1945 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1947 /* At this point we don't know _why_ the address is invalid. It
1948 could have secondary memory references, multiplies or anything.
1950 However, if we did go and rearrange things, we can wind up not
1951 being able to recognize the magic around pic_offset_table_rtx.
1952 This stuff is fragile, and is yet another example of why it is
1953 bad to expose PIC machinery too early. */
1954 if (! memory_address_p (GET_MODE (memref), new)
1955 && GET_CODE (addr) == PLUS
1956 && XEXP (addr, 0) == pic_offset_table_rtx)
1958 addr = force_reg (GET_MODE (addr), addr);
1959 new = simplify_gen_binary (PLUS, Pmode, addr, offset);
1962 update_temp_slot_address (XEXP (memref, 0), new);
1963 new = change_address_1 (memref, VOIDmode, new, 1);
1965 /* If there are no changes, just return the original memory reference. */
1966 if (new == memref)
1967 return new;
1969 /* Update the alignment to reflect the offset. Reset the offset, which
1970 we don't know. */
1971 MEM_ATTRS (new)
1972 = get_mem_attrs (MEM_ALIAS_SET (memref), MEM_EXPR (memref), 0, 0,
1973 MIN (MEM_ALIGN (memref), pow2 * BITS_PER_UNIT),
1974 GET_MODE (new));
1975 return new;
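/* For example, indexing into an object whose base MEM is `base_mem' with
   a register `idx' holding a byte offset known to be a multiple of 4
   (names illustrative):

     rtx elt = offset_address (base_mem, idx, 4);

   POW2 only feeds the alignment computation; the offset attribute itself
   is dropped because its run-time value is unknown.  */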
1978 /* Return a memory reference like MEMREF, but with its address changed to
1979 ADDR. The caller is asserting that the actual piece of memory pointed
1980 to is the same, just the form of the address is being changed, such as
1981 by putting something into a register. */
1984 replace_equiv_address (rtx memref, rtx addr)
1986 /* change_address_1 copies the memory attribute structure without change
1987 and that's exactly what we want here. */
1988 update_temp_slot_address (XEXP (memref, 0), addr);
1989 return change_address_1 (memref, VOIDmode, addr, 1);
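/* The classic use is to legitimize an address without losing the MEM's
   attributes, e.g. (the name `mem' is illustrative):

     mem = replace_equiv_address (mem, force_reg (Pmode, XEXP (mem, 0)));

   which copies the address into a pseudo while asserting that the same
   piece of memory is still being addressed.  */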
1992 /* Likewise, but the reference is not required to be valid. */
1995 replace_equiv_address_nv (rtx memref, rtx addr)
1997 return change_address_1 (memref, VOIDmode, addr, 0);
2000 /* Return a memory reference like MEMREF, but with its mode widened to
2001 MODE and offset by OFFSET. This would be used by targets that e.g.
2002 cannot issue QImode memory operations and have to use SImode memory
2003 operations plus masking logic. */
2006 widen_memory_access (rtx memref, enum machine_mode mode, HOST_WIDE_INT offset)
2008 rtx new = adjust_address_1 (memref, mode, offset, 1, 1);
2009 tree expr = MEM_EXPR (new);
2010 rtx memoffset = MEM_OFFSET (new);
2011 unsigned int size = GET_MODE_SIZE (mode);
2013 /* If there are no changes, just return the original memory reference. */
2014 if (new == memref)
2015 return new;
2017 /* If we don't know what offset we were at within the expression, then
2018 we can't know if we've overstepped the bounds. */
2019 if (! memoffset)
2020 expr = NULL_TREE;
2022 while (expr)
2024 if (TREE_CODE (expr) == COMPONENT_REF)
2026 tree field = TREE_OPERAND (expr, 1);
2027 tree offset = component_ref_field_offset (expr);
2029 if (! DECL_SIZE_UNIT (field))
2031 expr = NULL_TREE;
2032 break;
2035 /* Is the field at least as large as the access? If so, ok,
2036 otherwise strip back to the containing structure. */
2037 if (TREE_CODE (DECL_SIZE_UNIT (field)) == INTEGER_CST
2038 && compare_tree_int (DECL_SIZE_UNIT (field), size) >= 0
2039 && INTVAL (memoffset) >= 0)
2040 break;
2042 if (! host_integerp (offset, 1))
2044 expr = NULL_TREE;
2045 break;
2048 expr = TREE_OPERAND (expr, 0);
2049 memoffset
2050 = (GEN_INT (INTVAL (memoffset)
2051 + tree_low_cst (offset, 1)
2052 + (tree_low_cst (DECL_FIELD_BIT_OFFSET (field), 1)
2053 / BITS_PER_UNIT)));
2055 /* Similarly for the decl. */
2056 else if (DECL_P (expr)
2057 && DECL_SIZE_UNIT (expr)
2058 && TREE_CODE (DECL_SIZE_UNIT (expr)) == INTEGER_CST
2059 && compare_tree_int (DECL_SIZE_UNIT (expr), size) >= 0
2060 && (! memoffset || INTVAL (memoffset) >= 0))
2061 break;
2062 else
2064 /* The widened memory access overflows the expression, which means
2065 that it could alias another expression. Zap it. */
2066 expr = NULL_TREE;
2067 break;
2071 if (! expr)
2072 memoffset = NULL_RTX;
2074 /* The widened memory may alias other stuff, so zap the alias set. */
2075 /* ??? Maybe use get_alias_set on any remaining expression. */
2077 MEM_ATTRS (new) = get_mem_attrs (0, expr, memoffset, GEN_INT (size),
2078 MEM_ALIGN (new), mode);
2080 return new;
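/* A target that can only issue SImode loads might widen a QImode
   reference like this before masking out the byte it wants (the name
   `byte_mem' is illustrative):

     rtx word_mem = widen_memory_access (byte_mem, SImode, 0);

   When the wider access cannot be proven to stay within the original
   decl or field, the expr, offset and alias set are dropped as above.  */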
2083 /* Return a newly created CODE_LABEL rtx with a unique label number. */
2086 gen_label_rtx (void)
2088 return gen_rtx_CODE_LABEL (VOIDmode, 0, NULL_RTX, NULL_RTX,
2089 NULL, label_num++, NULL);
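/* The usual pattern is to create the label up front, reference it from a
   branch, and emit it later at the target position (illustrative only):

     rtx label = gen_label_rtx ();
     ... emit a conditional branch whose JUMP_LABEL is LABEL ...
     emit_label (label);

   The number comes from label_num, which is never reset, so labels are
   unique across the whole compilation.  */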
2092 /* For procedure integration. */
2094 /* Install new pointers to the first and last insns in the chain.
2095 Also, set cur_insn_uid to one higher than the last in use.
2096 Used for an inline-procedure after copying the insn chain. */
2098 void
2099 set_new_first_and_last_insn (rtx first, rtx last)
2101 rtx insn;
2103 first_insn = first;
2104 last_insn = last;
2105 cur_insn_uid = 0;
2107 for (insn = first; insn; insn = NEXT_INSN (insn))
2108 cur_insn_uid = MAX (cur_insn_uid, INSN_UID (insn));
2110 cur_insn_uid++;
2113 /* Go through all the RTL insn bodies and copy any invalid shared
2114 structure. This routine should only be called once. */
2116 static void
2117 unshare_all_rtl_1 (rtx insn)
2119 /* Unshare just about everything else. */
2120 unshare_all_rtl_in_chain (insn);
2122 /* Make sure the addresses of stack slots found outside the insn chain
2123 (such as, in DECL_RTL of a variable) are not shared
2124 with the insn chain.
2126 This special care is necessary when the stack slot MEM does not
2127 actually appear in the insn chain. If it does appear, its address
2128 is unshared from all else at that point. */
2129 stack_slot_list = copy_rtx_if_shared (stack_slot_list);
2132 /* Go through all the RTL insn bodies and copy any invalid shared
2133 structure, again. This is a fairly expensive thing to do so it
2134 should be done sparingly. */
2136 void
2137 unshare_all_rtl_again (rtx insn)
2139 rtx p;
2140 tree decl;
2142 for (p = insn; p; p = NEXT_INSN (p))
2143 if (INSN_P (p))
2145 reset_used_flags (PATTERN (p));
2146 reset_used_flags (REG_NOTES (p));
2147 reset_used_flags (LOG_LINKS (p));
2150 /* Make sure that virtual stack slots are not shared. */
2151 reset_used_decls (DECL_INITIAL (cfun->decl));
2153 /* Make sure that virtual parameters are not shared. */
2154 for (decl = DECL_ARGUMENTS (cfun->decl); decl; decl = TREE_CHAIN (decl))
2155 reset_used_flags (DECL_RTL (decl));
2157 reset_used_flags (stack_slot_list);
2159 unshare_all_rtl_1 (insn);
2162 unsigned int
2163 unshare_all_rtl (void)
2165 unshare_all_rtl_1 (get_insns ());
2166 return 0;
2169 struct tree_opt_pass pass_unshare_all_rtl =
2171 "unshare", /* name */
2172 NULL, /* gate */
2173 unshare_all_rtl, /* execute */
2174 NULL, /* sub */
2175 NULL, /* next */
2176 0, /* static_pass_number */
2177 0, /* tv_id */
2178 0, /* properties_required */
2179 0, /* properties_provided */
2180 0, /* properties_destroyed */
2181 0, /* todo_flags_start */
2182 TODO_dump_func, /* todo_flags_finish */
2183 0 /* letter */
2187 /* Check that ORIG is not marked when it should not be, and mark ORIG as in use.
2188 Recursively does the same for subexpressions. */
2190 static void
2191 verify_rtx_sharing (rtx orig, rtx insn)
2193 rtx x = orig;
2194 int i;
2195 enum rtx_code code;
2196 const char *format_ptr;
2198 if (x == 0)
2199 return;
2201 code = GET_CODE (x);
2203 /* These types may be freely shared. */
2205 switch (code)
2207 case REG:
2208 case CONST_INT:
2209 case CONST_DOUBLE:
2210 case CONST_VECTOR:
2211 case SYMBOL_REF:
2212 case LABEL_REF:
2213 case CODE_LABEL:
2214 case PC:
2215 case CC0:
2216 case SCRATCH:
2217 return;
2218 /* SCRATCH must be shared because each one represents a distinct value. */
2219 case CLOBBER:
2220 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2221 return;
2222 break;
2224 case CONST:
2225 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2226 a LABEL_REF, it isn't sharable. */
2227 if (GET_CODE (XEXP (x, 0)) == PLUS
2228 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2229 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2230 return;
2231 break;
2233 case MEM:
2234 /* A MEM is allowed to be shared if its address is constant. */
2235 if (CONSTANT_ADDRESS_P (XEXP (x, 0))
2236 || reload_completed || reload_in_progress)
2237 return;
2239 break;
2241 default:
2242 break;
2245 /* This rtx may not be shared. If it has already been seen,
2246 report an error. */
2247 #ifdef ENABLE_CHECKING
2248 if (RTX_FLAG (x, used))
2250 error ("invalid rtl sharing found in the insn");
2251 debug_rtx (insn);
2252 error ("shared rtx");
2253 debug_rtx (x);
2254 internal_error ("internal consistency failure");
2256 #endif
2257 gcc_assert (!RTX_FLAG (x, used));
2259 RTX_FLAG (x, used) = 1;
2261 /* Now scan the subexpressions recursively. */
2263 format_ptr = GET_RTX_FORMAT (code);
2265 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2267 switch (*format_ptr++)
2269 case 'e':
2270 verify_rtx_sharing (XEXP (x, i), insn);
2271 break;
2273 case 'E':
2274 if (XVEC (x, i) != NULL)
2276 int j;
2277 int len = XVECLEN (x, i);
2279 for (j = 0; j < len; j++)
2281 /* We allow sharing of ASM_OPERANDS inside a single
2282 instruction. */
2283 if (j && GET_CODE (XVECEXP (x, i, j)) == SET
2284 && (GET_CODE (SET_SRC (XVECEXP (x, i, j)))
2285 == ASM_OPERANDS))
2286 verify_rtx_sharing (SET_DEST (XVECEXP (x, i, j)), insn);
2287 else
2288 verify_rtx_sharing (XVECEXP (x, i, j), insn);
2291 break;
2294 return;
2297 /* Go through all the RTL insn bodies and check that there is no unexpected
2298 sharing in between the subexpressions. */
2300 void
2301 verify_rtl_sharing (void)
2303 rtx p;
2305 for (p = get_insns (); p; p = NEXT_INSN (p))
2306 if (INSN_P (p))
2308 reset_used_flags (PATTERN (p));
2309 reset_used_flags (REG_NOTES (p));
2310 reset_used_flags (LOG_LINKS (p));
2311 if (GET_CODE (PATTERN (p)) == SEQUENCE)
2313 int i;
2314 rtx q, sequence = PATTERN (p);
2316 for (i = 0; i < XVECLEN (sequence, 0); i++)
2318 q = XVECEXP (sequence, 0, i);
2319 gcc_assert (INSN_P (q));
2320 reset_used_flags (PATTERN (q));
2321 reset_used_flags (REG_NOTES (q));
2322 reset_used_flags (LOG_LINKS (q));
2327 for (p = get_insns (); p; p = NEXT_INSN (p))
2328 if (INSN_P (p))
2330 verify_rtx_sharing (PATTERN (p), p);
2331 verify_rtx_sharing (REG_NOTES (p), p);
2332 verify_rtx_sharing (LOG_LINKS (p), p);
2336 /* Go through all the RTL insn bodies and copy any invalid shared structure.
2337 Assumes the mark bits are cleared at entry. */
2339 void
2340 unshare_all_rtl_in_chain (rtx insn)
2342 for (; insn; insn = NEXT_INSN (insn))
2343 if (INSN_P (insn))
2345 PATTERN (insn) = copy_rtx_if_shared (PATTERN (insn));
2346 REG_NOTES (insn) = copy_rtx_if_shared (REG_NOTES (insn));
2347 LOG_LINKS (insn) = copy_rtx_if_shared (LOG_LINKS (insn));
2351 /* Go through all virtual stack slots of a function and mark them as
2352 not shared. */
2353 static void
2354 reset_used_decls (tree blk)
2356 tree t;
2358 /* Mark decls. */
2359 for (t = BLOCK_VARS (blk); t; t = TREE_CHAIN (t))
2360 if (DECL_RTL_SET_P (t))
2361 reset_used_flags (DECL_RTL (t));
2363 /* Now process sub-blocks. */
2364 for (t = BLOCK_SUBBLOCKS (blk); t; t = TREE_CHAIN (t))
2365 reset_used_decls (t);
2368 /* Mark ORIG as in use, and return a copy of it if it was already in use.
2369 Recursively does the same for subexpressions. Uses
2370 copy_rtx_if_shared_1 to reduce stack space. */
2373 copy_rtx_if_shared (rtx orig)
2375 copy_rtx_if_shared_1 (&orig);
2376 return orig;
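/* The usual idiom is to clear the used bits over the rtl of interest and
   then unshare, e.g. for a single pattern P (illustrative):

     reset_used_flags (P);
     P = copy_rtx_if_shared (P);

   unshare_all_rtl_again above performs that reset pass over the whole
   insn chain before calling into the unsharing walk.  */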
2379 /* Mark *ORIG1 as in use, and set it to a copy of it if it was already in
2380 use. Recursively does the same for subexpressions. */
2382 static void
2383 copy_rtx_if_shared_1 (rtx *orig1)
2385 rtx x;
2386 int i;
2387 enum rtx_code code;
2388 rtx *last_ptr;
2389 const char *format_ptr;
2390 int copied = 0;
2391 int length;
2393 /* Repeat is used to turn tail-recursion into iteration. */
2394 repeat:
2395 x = *orig1;
2397 if (x == 0)
2398 return;
2400 code = GET_CODE (x);
2402 /* These types may be freely shared. */
2404 switch (code)
2406 case REG:
2407 case CONST_INT:
2408 case CONST_DOUBLE:
2409 case CONST_VECTOR:
2410 case SYMBOL_REF:
2411 case LABEL_REF:
2412 case CODE_LABEL:
2413 case PC:
2414 case CC0:
2415 case SCRATCH:
2416 /* SCRATCH must be shared because each one represents a distinct value. */
2417 return;
2418 case CLOBBER:
2419 if (REG_P (XEXP (x, 0)) && REGNO (XEXP (x, 0)) < FIRST_PSEUDO_REGISTER)
2420 return;
2421 break;
2423 case CONST:
2424 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
2425 a LABEL_REF, it isn't sharable. */
2426 if (GET_CODE (XEXP (x, 0)) == PLUS
2427 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
2428 && GET_CODE (XEXP (XEXP (x, 0), 1)) == CONST_INT)
2429 return;
2430 break;
2432 case INSN:
2433 case JUMP_INSN:
2434 case CALL_INSN:
2435 case NOTE:
2436 case BARRIER:
2437 /* The chain of insns is not being copied. */
2438 return;
2440 default:
2441 break;
2444 /* This rtx may not be shared. If it has already been seen,
2445 replace it with a copy of itself. */
2447 if (RTX_FLAG (x, used))
2449 x = shallow_copy_rtx (x);
2450 copied = 1;
2452 RTX_FLAG (x, used) = 1;
2454 /* Now scan the subexpressions recursively.
2455 We can store any replaced subexpressions directly into X
2456 since we know X is not shared! Any vectors in X
2457 must be copied if X was copied. */
2459 format_ptr = GET_RTX_FORMAT (code);
2460 length = GET_RTX_LENGTH (code);
2461 last_ptr = NULL;
2463 for (i = 0; i < length; i++)
2465 switch (*format_ptr++)
2467 case 'e':
2468 if (last_ptr)
2469 copy_rtx_if_shared_1 (last_ptr);
2470 last_ptr = &XEXP (x, i);
2471 break;
2473 case 'E':
2474 if (XVEC (x, i) != NULL)
2476 int j;
2477 int len = XVECLEN (x, i);
2479 /* Copy the vector iff I copied the rtx and the length
2480 is nonzero. */
2481 if (copied && len > 0)
2482 XVEC (x, i) = gen_rtvec_v (len, XVEC (x, i)->elem);
2484 /* Call recursively on all inside the vector. */
2485 for (j = 0; j < len; j++)
2487 if (last_ptr)
2488 copy_rtx_if_shared_1 (last_ptr);
2489 last_ptr = &XVECEXP (x, i, j);
2492 break;
2495 *orig1 = x;
2496 if (last_ptr)
2498 orig1 = last_ptr;
2499 goto repeat;
2501 return;
2504 /* Clear all the USED bits in X to allow copy_rtx_if_shared to be used
2505 to look for shared sub-parts. */
2507 void
2508 reset_used_flags (rtx x)
2510 int i, j;
2511 enum rtx_code code;
2512 const char *format_ptr;
2513 int length;
2515 /* Repeat is used to turn tail-recursion into iteration. */
2516 repeat:
2517 if (x == 0)
2518 return;
2520 code = GET_CODE (x);
2522 /* These types may be freely shared so we needn't do any resetting
2523 for them. */
2525 switch (code)
2527 case REG:
2528 case CONST_INT:
2529 case CONST_DOUBLE:
2530 case CONST_VECTOR:
2531 case SYMBOL_REF:
2532 case CODE_LABEL:
2533 case PC:
2534 case CC0:
2535 return;
2537 case INSN:
2538 case JUMP_INSN:
2539 case CALL_INSN:
2540 case NOTE:
2541 case LABEL_REF:
2542 case BARRIER:
2543 /* The chain of insns is not being copied. */
2544 return;
2546 default:
2547 break;
2550 RTX_FLAG (x, used) = 0;
2552 format_ptr = GET_RTX_FORMAT (code);
2553 length = GET_RTX_LENGTH (code);
2555 for (i = 0; i < length; i++)
2557 switch (*format_ptr++)
2559 case 'e':
2560 if (i == length-1)
2562 x = XEXP (x, i);
2563 goto repeat;
2565 reset_used_flags (XEXP (x, i));
2566 break;
2568 case 'E':
2569 for (j = 0; j < XVECLEN (x, i); j++)
2570 reset_used_flags (XVECEXP (x, i, j));
2571 break;
2576 /* Set all the USED bits in X to allow copy_rtx_if_shared to be used
2577 to look for shared sub-parts. */
2579 void
2580 set_used_flags (rtx x)
2582 int i, j;
2583 enum rtx_code code;
2584 const char *format_ptr;
2586 if (x == 0)
2587 return;
2589 code = GET_CODE (x);
2591 /* These types may be freely shared so we needn't do any resetting
2592 for them. */
2594 switch (code)
2596 case REG:
2597 case CONST_INT:
2598 case CONST_DOUBLE:
2599 case CONST_VECTOR:
2600 case SYMBOL_REF:
2601 case CODE_LABEL:
2602 case PC:
2603 case CC0:
2604 return;
2606 case INSN:
2607 case JUMP_INSN:
2608 case CALL_INSN:
2609 case NOTE:
2610 case LABEL_REF:
2611 case BARRIER:
2612 /* The chain of insns is not being copied. */
2613 return;
2615 default:
2616 break;
2619 RTX_FLAG (x, used) = 1;
2621 format_ptr = GET_RTX_FORMAT (code);
2622 for (i = 0; i < GET_RTX_LENGTH (code); i++)
2624 switch (*format_ptr++)
2626 case 'e':
2627 set_used_flags (XEXP (x, i));
2628 break;
2630 case 'E':
2631 for (j = 0; j < XVECLEN (x, i); j++)
2632 set_used_flags (XVECEXP (x, i, j));
2633 break;
2638 /* Copy X if necessary so that it won't be altered by changes in OTHER.
2639 Return X or the rtx for the pseudo reg the value of X was copied into.
2640 OTHER must be valid as a SET_DEST. */
2643 make_safe_from (rtx x, rtx other)
2645 while (1)
2646 switch (GET_CODE (other))
2648 case SUBREG:
2649 other = SUBREG_REG (other);
2650 break;
2651 case STRICT_LOW_PART:
2652 case SIGN_EXTEND:
2653 case ZERO_EXTEND:
2654 other = XEXP (other, 0);
2655 break;
2656 default:
2657 goto done;
2659 done:
2660 if ((MEM_P (other)
2661 && ! CONSTANT_P (x)
2662 && !REG_P (x)
2663 && GET_CODE (x) != SUBREG)
2664 || (REG_P (other)
2665 && (REGNO (other) < FIRST_PSEUDO_REGISTER
2666 || reg_mentioned_p (other, x))))
2668 rtx temp = gen_reg_rtx (GET_MODE (x));
2669 emit_move_insn (temp, x);
2670 return temp;
2672 return x;
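/* Typical use in an expander: when DEST is about to be stored while the
   current value of X is still needed, copy X out of harm's way first
   (names illustrative):

     x = make_safe_from (x, dest);

   If X could be affected by a store to DEST, it is first copied into a
   fresh pseudo; otherwise it is returned unchanged.  */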
2675 /* Emission of insns (adding them to the doubly-linked list). */
2677 /* Return the first insn of the current sequence or current function. */
2680 get_insns (void)
2682 return first_insn;
2685 /* Specify a new insn as the first in the chain. */
2687 void
2688 set_first_insn (rtx insn)
2690 gcc_assert (!PREV_INSN (insn));
2691 first_insn = insn;
2694 /* Return the last insn emitted in current sequence or current function. */
2697 get_last_insn (void)
2699 return last_insn;
2702 /* Specify a new insn as the last in the chain. */
2704 void
2705 set_last_insn (rtx insn)
2707 gcc_assert (!NEXT_INSN (insn));
2708 last_insn = insn;
2711 /* Return the last insn emitted, even if it is in a sequence now pushed. */
2714 get_last_insn_anywhere (void)
2716 struct sequence_stack *stack;
2717 if (last_insn)
2718 return last_insn;
2719 for (stack = seq_stack; stack; stack = stack->next)
2720 if (stack->last != 0)
2721 return stack->last;
2722 return 0;
2725 /* Return the first nonnote insn emitted in current sequence or current
2726 function. This routine looks inside SEQUENCEs. */
2729 get_first_nonnote_insn (void)
2731 rtx insn = first_insn;
2733 if (insn)
2735 if (NOTE_P (insn))
2736 for (insn = next_insn (insn);
2737 insn && NOTE_P (insn);
2738 insn = next_insn (insn))
2739 continue;
2740 else
2742 if (NONJUMP_INSN_P (insn)
2743 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2744 insn = XVECEXP (PATTERN (insn), 0, 0);
2748 return insn;
2751 /* Return the last nonnote insn emitted in current sequence or current
2752 function. This routine looks inside SEQUENCEs. */
2755 get_last_nonnote_insn (void)
2757 rtx insn = last_insn;
2759 if (insn)
2761 if (NOTE_P (insn))
2762 for (insn = previous_insn (insn);
2763 insn && NOTE_P (insn);
2764 insn = previous_insn (insn))
2765 continue;
2766 else
2768 if (NONJUMP_INSN_P (insn)
2769 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2770 insn = XVECEXP (PATTERN (insn), 0,
2771 XVECLEN (PATTERN (insn), 0) - 1);
2775 return insn;
2778 /* Return a number larger than any instruction's uid in this function. */
2781 get_max_uid (void)
2783 return cur_insn_uid;
2786 /* Return the next insn. If it is a SEQUENCE, return the first insn
2787 of the sequence. */
2790 next_insn (rtx insn)
2792 if (insn)
2794 insn = NEXT_INSN (insn);
2795 if (insn && NONJUMP_INSN_P (insn)
2796 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2797 insn = XVECEXP (PATTERN (insn), 0, 0);
2800 return insn;
2803 /* Return the previous insn. If it is a SEQUENCE, return the last insn
2804 of the sequence. */
2807 previous_insn (rtx insn)
2809 if (insn)
2811 insn = PREV_INSN (insn);
2812 if (insn && NONJUMP_INSN_P (insn)
2813 && GET_CODE (PATTERN (insn)) == SEQUENCE)
2814 insn = XVECEXP (PATTERN (insn), 0, XVECLEN (PATTERN (insn), 0) - 1);
2817 return insn;
2820 /* Return the next insn after INSN that is not a NOTE. This routine does not
2821 look inside SEQUENCEs. */
2824 next_nonnote_insn (rtx insn)
2826 while (insn)
2828 insn = NEXT_INSN (insn);
2829 if (insn == 0 || !NOTE_P (insn))
2830 break;
2833 return insn;
2836 /* Return the previous insn before INSN that is not a NOTE. This routine does
2837 not look inside SEQUENCEs. */
2840 prev_nonnote_insn (rtx insn)
2842 while (insn)
2844 insn = PREV_INSN (insn);
2845 if (insn == 0 || !NOTE_P (insn))
2846 break;
2849 return insn;
2852 /* Return the next INSN, CALL_INSN or JUMP_INSN after INSN;
2853 or 0, if there is none. This routine does not look inside
2854 SEQUENCEs. */
2857 next_real_insn (rtx insn)
2859 while (insn)
2861 insn = NEXT_INSN (insn);
2862 if (insn == 0 || INSN_P (insn))
2863 break;
2866 return insn;
2869 /* Return the last INSN, CALL_INSN or JUMP_INSN before INSN;
2870 or 0, if there is none. This routine does not look inside
2871 SEQUENCEs. */
2874 prev_real_insn (rtx insn)
2876 while (insn)
2878 insn = PREV_INSN (insn);
2879 if (insn == 0 || INSN_P (insn))
2880 break;
2883 return insn;
2886 /* Return the last CALL_INSN in the current list, or 0 if there is none.
2887 This routine does not look inside SEQUENCEs. */
2890 last_call_insn (void)
2892 rtx insn;
2894 for (insn = get_last_insn ();
2895 insn && !CALL_P (insn);
2896 insn = PREV_INSN (insn))
2899 return insn;
2902 /* Find the next insn after INSN that really does something. This routine
2903 does not look inside SEQUENCEs. Until reload has completed, this is the
2904 same as next_real_insn. */
2907 active_insn_p (rtx insn)
2909 return (CALL_P (insn) || JUMP_P (insn)
2910 || (NONJUMP_INSN_P (insn)
2911 && (! reload_completed
2912 || (GET_CODE (PATTERN (insn)) != USE
2913 && GET_CODE (PATTERN (insn)) != CLOBBER))));
2917 next_active_insn (rtx insn)
2919 while (insn)
2921 insn = NEXT_INSN (insn);
2922 if (insn == 0 || active_insn_p (insn))
2923 break;
2926 return insn;
2929 /* Find the last insn before INSN that really does something. This routine
2930 does not look inside SEQUENCEs. Until reload has completed, this is the
2931 same as prev_real_insn. */
2934 prev_active_insn (rtx insn)
2936 while (insn)
2938 insn = PREV_INSN (insn);
2939 if (insn == 0 || active_insn_p (insn))
2940 break;
2943 return insn;
2946 /* Return the next CODE_LABEL after the insn INSN, or 0 if there is none. */
2949 next_label (rtx insn)
2951 while (insn)
2953 insn = NEXT_INSN (insn);
2954 if (insn == 0 || LABEL_P (insn))
2955 break;
2958 return insn;
2961 /* Return the last CODE_LABEL before the insn INSN, or 0 if there is none. */
2964 prev_label (rtx insn)
2966 while (insn)
2968 insn = PREV_INSN (insn);
2969 if (insn == 0 || LABEL_P (insn))
2970 break;
2973 return insn;
2976 /* Return the last label to mark the same position as LABEL. Return null
2977 if LABEL itself is null. */
2980 skip_consecutive_labels (rtx label)
2982 rtx insn;
2984 for (insn = label; insn != 0 && !INSN_P (insn); insn = NEXT_INSN (insn))
2985 if (LABEL_P (insn))
2986 label = insn;
2988 return label;
2991 #ifdef HAVE_cc0
2992 /* INSN uses CC0 and is being moved into a delay slot. Set up REG_CC_SETTER
2993 and REG_CC_USER notes so we can find it. */
2995 void
2996 link_cc0_insns (rtx insn)
2998 rtx user = next_nonnote_insn (insn);
3000 if (NONJUMP_INSN_P (user) && GET_CODE (PATTERN (user)) == SEQUENCE)
3001 user = XVECEXP (PATTERN (user), 0, 0);
3003 REG_NOTES (user) = gen_rtx_INSN_LIST (REG_CC_SETTER, insn,
3004 REG_NOTES (user));
3005 REG_NOTES (insn) = gen_rtx_INSN_LIST (REG_CC_USER, user, REG_NOTES (insn));
3008 /* Return the next insn that uses CC0 after INSN, which is assumed to
3009 set it. This is the inverse of prev_cc0_setter (i.e., prev_cc0_setter
3010 applied to the result of this function should yield INSN).
3012 Normally, this is simply the next insn. However, if a REG_CC_USER note
3013 is present, it contains the insn that uses CC0.
3015 Return 0 if we can't find the insn. */
3018 next_cc0_user (rtx insn)
3020 rtx note = find_reg_note (insn, REG_CC_USER, NULL_RTX);
3022 if (note)
3023 return XEXP (note, 0);
3025 insn = next_nonnote_insn (insn);
3026 if (insn && NONJUMP_INSN_P (insn) && GET_CODE (PATTERN (insn)) == SEQUENCE)
3027 insn = XVECEXP (PATTERN (insn), 0, 0);
3029 if (insn && INSN_P (insn) && reg_mentioned_p (cc0_rtx, PATTERN (insn)))
3030 return insn;
3032 return 0;
3035 /* Find the insn that set CC0 for INSN. Unless INSN has a REG_CC_SETTER
3036 note, it is the previous insn. */
3039 prev_cc0_setter (rtx insn)
3041 rtx note = find_reg_note (insn, REG_CC_SETTER, NULL_RTX);
3043 if (note)
3044 return XEXP (note, 0);
3046 insn = prev_nonnote_insn (insn);
3047 gcc_assert (sets_cc0_p (PATTERN (insn)));
3049 return insn;
3051 #endif
3053 #ifdef AUTO_INC_DEC
3054 /* Find a RTX_AUTOINC class rtx which matches DATA. */
3056 static int
3057 find_auto_inc (rtx *xp, void *data)
3059 rtx x = *xp;
3060 rtx reg = data;
3062 if (GET_RTX_CLASS (GET_CODE (x)) != RTX_AUTOINC)
3063 return 0;
3065 switch (GET_CODE (x))
3067 case PRE_DEC:
3068 case PRE_INC:
3069 case POST_DEC:
3070 case POST_INC:
3071 case PRE_MODIFY:
3072 case POST_MODIFY:
3073 if (rtx_equal_p (reg, XEXP (x, 0)))
3074 return 1;
3075 break;
3077 default:
3078 gcc_unreachable ();
3080 return -1;
3082 #endif
3084 /* Increment the label uses for all labels present in rtx. */
3086 static void
3087 mark_label_nuses (rtx x)
3089 enum rtx_code code;
3090 int i, j;
3091 const char *fmt;
3093 code = GET_CODE (x);
3094 if (code == LABEL_REF && LABEL_P (XEXP (x, 0)))
3095 LABEL_NUSES (XEXP (x, 0))++;
3097 fmt = GET_RTX_FORMAT (code);
3098 for (i = GET_RTX_LENGTH (code) - 1; i >= 0; i--)
3100 if (fmt[i] == 'e')
3101 mark_label_nuses (XEXP (x, i));
3102 else if (fmt[i] == 'E')
3103 for (j = XVECLEN (x, i) - 1; j >= 0; j--)
3104 mark_label_nuses (XVECEXP (x, i, j));
3109 /* Try splitting insns that can be split for better scheduling.
3110 PAT is the pattern which might split.
3111 TRIAL is the insn providing PAT.
3112 LAST is nonzero if we should return the last insn of the sequence produced.
3114 If this routine succeeds in splitting, it returns the first or last
3115 replacement insn depending on the value of LAST. Otherwise, it
3116 returns TRIAL. If the insn to be returned can be split, it will be. */
3119 try_split (rtx pat, rtx trial, int last)
3121 rtx before = PREV_INSN (trial);
3122 rtx after = NEXT_INSN (trial);
3123 int has_barrier = 0;
3124 rtx tem;
3125 rtx note, seq;
3126 int probability;
3127 rtx insn_last, insn;
3128 int njumps = 0;
3130 if (any_condjump_p (trial)
3131 && (note = find_reg_note (trial, REG_BR_PROB, 0)))
3132 split_branch_probability = INTVAL (XEXP (note, 0));
3133 probability = split_branch_probability;
3135 seq = split_insns (pat, trial);
3137 split_branch_probability = -1;
3139 /* If we are splitting a JUMP_INSN, it might be followed by a BARRIER.
3140 We may need to handle this specially. */
3141 if (after && BARRIER_P (after))
3143 has_barrier = 1;
3144 after = NEXT_INSN (after);
3147 if (!seq)
3148 return trial;
3150 /* Avoid infinite loop if any insn of the result matches
3151 the original pattern. */
3152 insn_last = seq;
3153 while (1)
3155 if (INSN_P (insn_last)
3156 && rtx_equal_p (PATTERN (insn_last), pat))
3157 return trial;
3158 if (!NEXT_INSN (insn_last))
3159 break;
3160 insn_last = NEXT_INSN (insn_last);
3163 /* Mark labels. */
3164 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3166 if (JUMP_P (insn))
3168 mark_jump_label (PATTERN (insn), insn, 0);
3169 njumps++;
3170 if (probability != -1
3171 && any_condjump_p (insn)
3172 && !find_reg_note (insn, REG_BR_PROB, 0))
3174 /* We can preserve the REG_BR_PROB notes only if exactly
3175 one jump is created, otherwise the machine description
3176 is responsible for this step using
3177 the split_branch_probability variable. */
3178 gcc_assert (njumps == 1);
3179 REG_NOTES (insn)
3180 = gen_rtx_EXPR_LIST (REG_BR_PROB,
3181 GEN_INT (probability),
3182 REG_NOTES (insn));
3187 /* If we are splitting a CALL_INSN, look for the CALL_INSN
3188 in SEQ and copy our CALL_INSN_FUNCTION_USAGE to it. */
3189 if (CALL_P (trial))
3191 for (insn = insn_last; insn ; insn = PREV_INSN (insn))
3192 if (CALL_P (insn))
3194 rtx *p = &CALL_INSN_FUNCTION_USAGE (insn);
3195 while (*p)
3196 p = &XEXP (*p, 1);
3197 *p = CALL_INSN_FUNCTION_USAGE (trial);
3198 SIBLING_CALL_P (insn) = SIBLING_CALL_P (trial);
3202 /* Copy notes, particularly those related to the CFG. */
3203 for (note = REG_NOTES (trial); note; note = XEXP (note, 1))
3205 switch (REG_NOTE_KIND (note))
3207 case REG_EH_REGION:
3208 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3210 if (CALL_P (insn)
3211 || (flag_non_call_exceptions && INSN_P (insn)
3212 && may_trap_p (PATTERN (insn))))
3213 REG_NOTES (insn)
3214 = gen_rtx_EXPR_LIST (REG_EH_REGION,
3215 XEXP (note, 0),
3216 REG_NOTES (insn));
3218 break;
3220 case REG_NORETURN:
3221 case REG_SETJMP:
3222 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3224 if (CALL_P (insn))
3225 REG_NOTES (insn)
3226 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3227 XEXP (note, 0),
3228 REG_NOTES (insn));
3230 break;
3232 case REG_NON_LOCAL_GOTO:
3233 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3235 if (JUMP_P (insn))
3236 REG_NOTES (insn)
3237 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (note),
3238 XEXP (note, 0),
3239 REG_NOTES (insn));
3241 break;
3243 #ifdef AUTO_INC_DEC
3244 case REG_INC:
3245 for (insn = insn_last; insn != NULL_RTX; insn = PREV_INSN (insn))
3247 rtx reg = XEXP (note, 0);
3248 if (!FIND_REG_INC_NOTE (insn, reg)
3249 && for_each_rtx (&PATTERN (insn), find_auto_inc, reg) > 0)
3250 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_INC, reg,
3251 REG_NOTES (insn));
3253 break;
3254 #endif
3256 default:
3257 break;
3261 /* If there are LABELS inside the split insns increment the
3262 usage count so we don't delete the label. */
3263 if (NONJUMP_INSN_P (trial))
3265 insn = insn_last;
3266 while (insn != NULL_RTX)
3268 if (NONJUMP_INSN_P (insn))
3269 mark_label_nuses (PATTERN (insn));
3271 insn = PREV_INSN (insn);
3275 tem = emit_insn_after_setloc (seq, trial, INSN_LOCATOR (trial));
3277 delete_insn (trial);
3278 if (has_barrier)
3279 emit_barrier_after (tem);
3281 /* Recursively call try_split for each new insn created; by the
3282 time control returns here that insn will be fully split, so
3283 set LAST and continue from the insn after the one returned.
3284 We can't use next_active_insn here since AFTER may be a note.
3285 Ignore deleted insns, which can occur if not optimizing. */
3286 for (tem = NEXT_INSN (before); tem != after; tem = NEXT_INSN (tem))
3287 if (! INSN_DELETED_P (tem) && INSN_P (tem))
3288 tem = try_split (PATTERN (tem), tem, 1);
3290 /* Return either the first or the last insn, depending on which was
3291 requested. */
3292 return last
3293 ? (after ? PREV_INSN (after) : last_insn)
3294 : NEXT_INSN (before);
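/* A pass that wants insns in their maximally split form might use this
   roughly as follows for each candidate (the name `insn' is
   illustrative):

     insn = try_split (PATTERN (insn), insn, 1);

   On success the original insn has been deleted and replaced by the
   split sequence; on failure the insn comes back unchanged, so the call
   is safe to make unconditionally.  */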
3297 /* Make and return an INSN rtx, initializing all its slots.
3298 Store PATTERN in the pattern slots. */
3301 make_insn_raw (rtx pattern)
3303 rtx insn;
3305 insn = rtx_alloc (INSN);
3307 INSN_UID (insn) = cur_insn_uid++;
3308 PATTERN (insn) = pattern;
3309 INSN_CODE (insn) = -1;
3310 LOG_LINKS (insn) = NULL;
3311 REG_NOTES (insn) = NULL;
3312 INSN_LOCATOR (insn) = curr_insn_locator ();
3313 BLOCK_FOR_INSN (insn) = NULL;
3315 #ifdef ENABLE_RTL_CHECKING
3316 if (insn
3317 && INSN_P (insn)
3318 && (returnjump_p (insn)
3319 || (GET_CODE (insn) == SET
3320 && SET_DEST (insn) == pc_rtx)))
3322 warning (0, "ICE: emit_insn used where emit_jump_insn needed:\n");
3323 debug_rtx (insn);
3325 #endif
3327 return insn;
3330 /* Like `make_insn_raw' but make a JUMP_INSN instead of an insn. */
3333 make_jump_insn_raw (rtx pattern)
3335 rtx insn;
3337 insn = rtx_alloc (JUMP_INSN);
3338 INSN_UID (insn) = cur_insn_uid++;
3340 PATTERN (insn) = pattern;
3341 INSN_CODE (insn) = -1;
3342 LOG_LINKS (insn) = NULL;
3343 REG_NOTES (insn) = NULL;
3344 JUMP_LABEL (insn) = NULL;
3345 INSN_LOCATOR (insn) = curr_insn_locator ();
3346 BLOCK_FOR_INSN (insn) = NULL;
3348 return insn;
3351 /* Like `make_insn_raw' but make a CALL_INSN instead of an insn. */
3353 static rtx
3354 make_call_insn_raw (rtx pattern)
3356 rtx insn;
3358 insn = rtx_alloc (CALL_INSN);
3359 INSN_UID (insn) = cur_insn_uid++;
3361 PATTERN (insn) = pattern;
3362 INSN_CODE (insn) = -1;
3363 LOG_LINKS (insn) = NULL;
3364 REG_NOTES (insn) = NULL;
3365 CALL_INSN_FUNCTION_USAGE (insn) = NULL;
3366 INSN_LOCATOR (insn) = curr_insn_locator ();
3367 BLOCK_FOR_INSN (insn) = NULL;
3369 return insn;
3372 /* Add INSN to the end of the doubly-linked list.
3373 INSN may be an INSN, JUMP_INSN, CALL_INSN, CODE_LABEL, BARRIER or NOTE. */
3375 void
3376 add_insn (rtx insn)
3378 PREV_INSN (insn) = last_insn;
3379 NEXT_INSN (insn) = 0;
3381 if (NULL != last_insn)
3382 NEXT_INSN (last_insn) = insn;
3384 if (NULL == first_insn)
3385 first_insn = insn;
3387 last_insn = insn;
3390 /* Add INSN into the doubly-linked list after insn AFTER. This and
3391 the next should be the only functions called to insert an insn once
3392 delay slots have been filled since only they know how to update a
3393 SEQUENCE. */
3395 void
3396 add_insn_after (rtx insn, rtx after)
3398 rtx next = NEXT_INSN (after);
3399 basic_block bb;
3401 gcc_assert (!optimize || !INSN_DELETED_P (after));
3403 NEXT_INSN (insn) = next;
3404 PREV_INSN (insn) = after;
3406 if (next)
3408 PREV_INSN (next) = insn;
3409 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3410 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = insn;
3412 else if (last_insn == after)
3413 last_insn = insn;
3414 else
3416 struct sequence_stack *stack = seq_stack;
3417 /* Scan all pending sequences too. */
3418 for (; stack; stack = stack->next)
3419 if (after == stack->last)
3421 stack->last = insn;
3422 break;
3425 gcc_assert (stack);
3428 if (!BARRIER_P (after)
3429 && !BARRIER_P (insn)
3430 && (bb = BLOCK_FOR_INSN (after)))
3432 set_block_for_insn (insn, bb);
3433 if (INSN_P (insn))
3434 bb->flags |= BB_DIRTY;
3435 /* Should not happen as the first insn in the BB is always
3436 either a NOTE or a LABEL. */
3437 if (BB_END (bb) == after
3438 /* Avoid clobbering of structure when creating new BB. */
3439 && !BARRIER_P (insn)
3440 && !NOTE_INSN_BASIC_BLOCK_P (insn))
3441 BB_END (bb) = insn;
3444 NEXT_INSN (after) = insn;
3445 if (NONJUMP_INSN_P (after) && GET_CODE (PATTERN (after)) == SEQUENCE)
3447 rtx sequence = PATTERN (after);
3448 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3452 /* Add INSN into the doubly-linked list before insn BEFORE. This and
3453 the previous should be the only functions called to insert an insn once
3454 delay slots have been filled since only they know how to update a
3455 SEQUENCE. */
3457 void
3458 add_insn_before (rtx insn, rtx before)
3460 rtx prev = PREV_INSN (before);
3461 basic_block bb;
3463 gcc_assert (!optimize || !INSN_DELETED_P (before));
3465 PREV_INSN (insn) = prev;
3466 NEXT_INSN (insn) = before;
3468 if (prev)
3470 NEXT_INSN (prev) = insn;
3471 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3473 rtx sequence = PATTERN (prev);
3474 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = insn;
3477 else if (first_insn == before)
3478 first_insn = insn;
3479 else
3481 struct sequence_stack *stack = seq_stack;
3482 /* Scan all pending sequences too. */
3483 for (; stack; stack = stack->next)
3484 if (before == stack->first)
3486 stack->first = insn;
3487 break;
3490 gcc_assert (stack);
3493 if (!BARRIER_P (before)
3494 && !BARRIER_P (insn)
3495 && (bb = BLOCK_FOR_INSN (before)))
3497 set_block_for_insn (insn, bb);
3498 if (INSN_P (insn))
3499 bb->flags |= BB_DIRTY;
3500 /* Should not happen as the first insn in the BB is always either a NOTE or
3501 a LABEL. */
3502 gcc_assert (BB_HEAD (bb) != insn
3503 /* Avoid clobbering of structure when creating new BB. */
3504 || BARRIER_P (insn)
3505 || NOTE_INSN_BASIC_BLOCK_P (insn));
3508 PREV_INSN (before) = insn;
3509 if (NONJUMP_INSN_P (before) && GET_CODE (PATTERN (before)) == SEQUENCE)
3510 PREV_INSN (XVECEXP (PATTERN (before), 0, 0)) = insn;
3513 /* Remove an insn from its doubly-linked list. This function knows how
3514 to handle sequences. */
3515 void
3516 remove_insn (rtx insn)
3518 rtx next = NEXT_INSN (insn);
3519 rtx prev = PREV_INSN (insn);
3520 basic_block bb;
3522 if (prev)
3524 NEXT_INSN (prev) = next;
3525 if (NONJUMP_INSN_P (prev) && GET_CODE (PATTERN (prev)) == SEQUENCE)
3527 rtx sequence = PATTERN (prev);
3528 NEXT_INSN (XVECEXP (sequence, 0, XVECLEN (sequence, 0) - 1)) = next;
3531 else if (first_insn == insn)
3532 first_insn = next;
3533 else
3535 struct sequence_stack *stack = seq_stack;
3536 /* Scan all pending sequences too. */
3537 for (; stack; stack = stack->next)
3538 if (insn == stack->first)
3540 stack->first = next;
3541 break;
3544 gcc_assert (stack);
3547 if (next)
3549 PREV_INSN (next) = prev;
3550 if (NONJUMP_INSN_P (next) && GET_CODE (PATTERN (next)) == SEQUENCE)
3551 PREV_INSN (XVECEXP (PATTERN (next), 0, 0)) = prev;
3553 else if (last_insn == insn)
3554 last_insn = prev;
3555 else
3557 struct sequence_stack *stack = seq_stack;
3558 /* Scan all pending sequences too. */
3559 for (; stack; stack = stack->next)
3560 if (insn == stack->last)
3562 stack->last = prev;
3563 break;
3566 gcc_assert (stack);
3568 if (!BARRIER_P (insn)
3569 && (bb = BLOCK_FOR_INSN (insn)))
3571 if (INSN_P (insn))
3572 bb->flags |= BB_DIRTY;
3573 if (BB_HEAD (bb) == insn)
3575 /* Never ever delete the basic block note without deleting the whole
3576 basic block. */
3577 gcc_assert (!NOTE_P (insn));
3578 BB_HEAD (bb) = next;
3580 if (BB_END (bb) == insn)
3581 BB_END (bb) = prev;
3585 /* Append CALL_FUSAGE to the CALL_INSN_FUNCTION_USAGE for CALL_INSN. */
3587 void
3588 add_function_usage_to (rtx call_insn, rtx call_fusage)
3590 gcc_assert (call_insn && CALL_P (call_insn));
3592 /* Put the register usage information on the CALL. If there is already
3593 some usage information, put ours at the end. */
3594 if (CALL_INSN_FUNCTION_USAGE (call_insn))
3596 rtx link;
3598 for (link = CALL_INSN_FUNCTION_USAGE (call_insn); XEXP (link, 1) != 0;
3599 link = XEXP (link, 1))
3602 XEXP (link, 1) = call_fusage;
3604 else
3605 CALL_INSN_FUNCTION_USAGE (call_insn) = call_fusage;
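/* In the call expanders this attaches the register-usage list built up
   with use_reg and friends, e.g. (the name `value_reg' is illustrative):

     rtx call_fusage = NULL_RTX;
     use_reg (&call_fusage, value_reg);
     add_function_usage_to (last_call_insn (), call_fusage);  */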
3608 /* Delete all insns made since FROM.
3609 FROM becomes the new last instruction. */
3611 void
3612 delete_insns_since (rtx from)
3614 if (from == 0)
3615 first_insn = 0;
3616 else
3617 NEXT_INSN (from) = 0;
3618 last_insn = from;
3621 /* This function is deprecated, please use sequences instead.
3623 Move a consecutive bunch of insns to a different place in the chain.
3624 The insns to be moved are those between FROM and TO.
3625 They are moved to a new position after the insn AFTER.
3626 AFTER must not be FROM or TO or any insn in between.
3628 This function does not know about SEQUENCEs and hence should not be
3629 called after delay-slot filling has been done. */
3631 void
3632 reorder_insns_nobb (rtx from, rtx to, rtx after)
3634 /* Splice this bunch out of where it is now. */
3635 if (PREV_INSN (from))
3636 NEXT_INSN (PREV_INSN (from)) = NEXT_INSN (to);
3637 if (NEXT_INSN (to))
3638 PREV_INSN (NEXT_INSN (to)) = PREV_INSN (from);
3639 if (last_insn == to)
3640 last_insn = PREV_INSN (from);
3641 if (first_insn == from)
3642 first_insn = NEXT_INSN (to);
3644 /* Make the new neighbors point to it and it to them. */
3645 if (NEXT_INSN (after))
3646 PREV_INSN (NEXT_INSN (after)) = to;
3648 NEXT_INSN (to) = NEXT_INSN (after);
3649 PREV_INSN (from) = after;
3650 NEXT_INSN (after) = from;
3651 if (after == last_insn)
3652 last_insn = to;
3655 /* Same as function above, but take care to update BB boundaries. */
3656 void
3657 reorder_insns (rtx from, rtx to, rtx after)
3659 rtx prev = PREV_INSN (from);
3660 basic_block bb, bb2;
3662 reorder_insns_nobb (from, to, after);
3664 if (!BARRIER_P (after)
3665 && (bb = BLOCK_FOR_INSN (after)))
3667 rtx x;
3668 bb->flags |= BB_DIRTY;
3670 if (!BARRIER_P (from)
3671 && (bb2 = BLOCK_FOR_INSN (from)))
3673 if (BB_END (bb2) == to)
3674 BB_END (bb2) = prev;
3675 bb2->flags |= BB_DIRTY;
3678 if (BB_END (bb) == after)
3679 BB_END (bb) = to;
3681 for (x = from; x != NEXT_INSN (to); x = NEXT_INSN (x))
3682 if (!BARRIER_P (x))
3683 set_block_for_insn (x, bb);
3688 /* Emit insn(s) of given code and pattern
3689 at a specified place within the doubly-linked list.
3691 All of the emit_foo global entry points accept an object
3692 X which is either an insn list or a PATTERN of a single
3693 instruction.
3695 There are thus a few canonical ways to generate code and
3696 emit it at a specific place in the instruction stream. For
3697 example, consider the instruction named SPOT and the fact that
3698 we would like to emit some instructions before SPOT. We might
3699 do it like this:
3701 start_sequence ();
3702 ... emit the new instructions ...
3703 insns_head = get_insns ();
3704 end_sequence ();
3706 emit_insn_before (insns_head, SPOT);
3708 It used to be common to generate SEQUENCE rtl instead, but that
3709 is a relic of the past which no longer occurs. The reason is that
3710 SEQUENCE rtl results in heavily fragmented RTL memory since the SEQUENCE
3711 generated would almost certainly die right after it was created. */
3713 /* Make X be output before the instruction BEFORE. */
3716 emit_insn_before_noloc (rtx x, rtx before)
3718 rtx last = before;
3719 rtx insn;
3721 gcc_assert (before);
3723 if (x == NULL_RTX)
3724 return last;
3726 switch (GET_CODE (x))
3728 case INSN:
3729 case JUMP_INSN:
3730 case CALL_INSN:
3731 case CODE_LABEL:
3732 case BARRIER:
3733 case NOTE:
3734 insn = x;
3735 while (insn)
3737 rtx next = NEXT_INSN (insn);
3738 add_insn_before (insn, before);
3739 last = insn;
3740 insn = next;
3742 break;
3744 #ifdef ENABLE_RTL_CHECKING
3745 case SEQUENCE:
3746 gcc_unreachable ();
3747 break;
3748 #endif
3750 default:
3751 last = make_insn_raw (x);
3752 add_insn_before (last, before);
3753 break;
3756 return last;
3759 /* Make an instruction with body X and code JUMP_INSN
3760 and output it before the instruction BEFORE. */
3763 emit_jump_insn_before_noloc (rtx x, rtx before)
3765 rtx insn, last = NULL_RTX;
3767 gcc_assert (before);
3769 switch (GET_CODE (x))
3771 case INSN:
3772 case JUMP_INSN:
3773 case CALL_INSN:
3774 case CODE_LABEL:
3775 case BARRIER:
3776 case NOTE:
3777 insn = x;
3778 while (insn)
3780 rtx next = NEXT_INSN (insn);
3781 add_insn_before (insn, before);
3782 last = insn;
3783 insn = next;
3785 break;
3787 #ifdef ENABLE_RTL_CHECKING
3788 case SEQUENCE:
3789 gcc_unreachable ();
3790 break;
3791 #endif
3793 default:
3794 last = make_jump_insn_raw (x);
3795 add_insn_before (last, before);
3796 break;
3799 return last;
3802 /* Make an instruction with body X and code CALL_INSN
3803 and output it before the instruction BEFORE. */
3806 emit_call_insn_before_noloc (rtx x, rtx before)
3808 rtx last = NULL_RTX, insn;
3810 gcc_assert (before);
3812 switch (GET_CODE (x))
3814 case INSN:
3815 case JUMP_INSN:
3816 case CALL_INSN:
3817 case CODE_LABEL:
3818 case BARRIER:
3819 case NOTE:
3820 insn = x;
3821 while (insn)
3823 rtx next = NEXT_INSN (insn);
3824 add_insn_before (insn, before);
3825 last = insn;
3826 insn = next;
3828 break;
3830 #ifdef ENABLE_RTL_CHECKING
3831 case SEQUENCE:
3832 gcc_unreachable ();
3833 break;
3834 #endif
3836 default:
3837 last = make_call_insn_raw (x);
3838 add_insn_before (last, before);
3839 break;
3842 return last;
3845 /* Make an insn of code BARRIER
3846 and output it before the insn BEFORE. */
3849 emit_barrier_before (rtx before)
3851 rtx insn = rtx_alloc (BARRIER);
3853 INSN_UID (insn) = cur_insn_uid++;
3855 add_insn_before (insn, before);
3856 return insn;
3859 /* Emit the label LABEL before the insn BEFORE. */
3862 emit_label_before (rtx label, rtx before)
3864 /* This can be called twice for the same label as a result of the
3865 confusion that follows a syntax error! So make it harmless. */
3866 if (INSN_UID (label) == 0)
3868 INSN_UID (label) = cur_insn_uid++;
3869 add_insn_before (label, before);
3872 return label;
3875 /* Emit a note of subtype SUBTYPE before the insn BEFORE. */
3878 emit_note_before (enum insn_note subtype, rtx before)
3880 rtx note = rtx_alloc (NOTE);
3881 INSN_UID (note) = cur_insn_uid++;
3882 NOTE_KIND (note) = subtype;
3883 BLOCK_FOR_INSN (note) = NULL;
3884 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
3886 add_insn_before (note, before);
3887 return note;
3890 /* Helper for emit_insn_after, handles lists of instructions
3891 efficiently. */
3893 static rtx emit_insn_after_1 (rtx, rtx);
3895 static rtx
3896 emit_insn_after_1 (rtx first, rtx after)
3898 rtx last;
3899 rtx after_after;
3900 basic_block bb;
3902 if (!BARRIER_P (after)
3903 && (bb = BLOCK_FOR_INSN (after)))
3905 bb->flags |= BB_DIRTY;
3906 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3907 if (!BARRIER_P (last))
3908 set_block_for_insn (last, bb);
3909 if (!BARRIER_P (last))
3910 set_block_for_insn (last, bb);
3911 if (BB_END (bb) == after)
3912 BB_END (bb) = last;
3914 else
3915 for (last = first; NEXT_INSN (last); last = NEXT_INSN (last))
3916 continue;
3918 after_after = NEXT_INSN (after);
3920 NEXT_INSN (after) = first;
3921 PREV_INSN (first) = after;
3922 NEXT_INSN (last) = after_after;
3923 if (after_after)
3924 PREV_INSN (after_after) = last;
3926 if (after == last_insn)
3927 last_insn = last;
3928 return last;
3931 /* Make X be output after the insn AFTER. */
3934 emit_insn_after_noloc (rtx x, rtx after)
3936 rtx last = after;
3938 gcc_assert (after);
3940 if (x == NULL_RTX)
3941 return last;
3943 switch (GET_CODE (x))
3945 case INSN:
3946 case JUMP_INSN:
3947 case CALL_INSN:
3948 case CODE_LABEL:
3949 case BARRIER:
3950 case NOTE:
3951 last = emit_insn_after_1 (x, after);
3952 break;
3954 #ifdef ENABLE_RTL_CHECKING
3955 case SEQUENCE:
3956 gcc_unreachable ();
3957 break;
3958 #endif
3960 default:
3961 last = make_insn_raw (x);
3962 add_insn_after (last, after);
3963 break;
3966 return last;
3970 /* Make an insn of code JUMP_INSN with body X
3971 and output it after the insn AFTER. */
3974 emit_jump_insn_after_noloc (rtx x, rtx after)
3976 rtx last;
3978 gcc_assert (after);
3980 switch (GET_CODE (x))
3982 case INSN:
3983 case JUMP_INSN:
3984 case CALL_INSN:
3985 case CODE_LABEL:
3986 case BARRIER:
3987 case NOTE:
3988 last = emit_insn_after_1 (x, after);
3989 break;
3991 #ifdef ENABLE_RTL_CHECKING
3992 case SEQUENCE:
3993 gcc_unreachable ();
3994 break;
3995 #endif
3997 default:
3998 last = make_jump_insn_raw (x);
3999 add_insn_after (last, after);
4000 break;
4003 return last;
4006 /* Make an instruction with body X and code CALL_INSN
4007 and output it after the instruction AFTER. */
4010 emit_call_insn_after_noloc (rtx x, rtx after)
4012 rtx last;
4014 gcc_assert (after);
4016 switch (GET_CODE (x))
4018 case INSN:
4019 case JUMP_INSN:
4020 case CALL_INSN:
4021 case CODE_LABEL:
4022 case BARRIER:
4023 case NOTE:
4024 last = emit_insn_after_1 (x, after);
4025 break;
4027 #ifdef ENABLE_RTL_CHECKING
4028 case SEQUENCE:
4029 gcc_unreachable ();
4030 break;
4031 #endif
4033 default:
4034 last = make_call_insn_raw (x);
4035 add_insn_after (last, after);
4036 break;
4039 return last;
4042 /* Make an insn of code BARRIER
4043 and output it after the insn AFTER. */
4046 emit_barrier_after (rtx after)
4048 rtx insn = rtx_alloc (BARRIER);
4050 INSN_UID (insn) = cur_insn_uid++;
4052 add_insn_after (insn, after);
4053 return insn;
4056 /* Emit the label LABEL after the insn AFTER. */
4059 emit_label_after (rtx label, rtx after)
4061 /* This can be called twice for the same label
4062 as a result of the confusion that follows a syntax error!
4063 So make it harmless. */
4064 if (INSN_UID (label) == 0)
4066 INSN_UID (label) = cur_insn_uid++;
4067 add_insn_after (label, after);
4070 return label;
4073 /* Emit a note of subtype SUBTYPE after the insn AFTER. */
4076 emit_note_after (enum insn_note subtype, rtx after)
4078 rtx note = rtx_alloc (NOTE);
4079 INSN_UID (note) = cur_insn_uid++;
4080 NOTE_KIND (note) = subtype;
4081 BLOCK_FOR_INSN (note) = NULL;
4082 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4083 add_insn_after (note, after);
4084 return note;
4087 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4089 emit_insn_after_setloc (rtx pattern, rtx after, int loc)
4091 rtx last = emit_insn_after_noloc (pattern, after);
4093 if (pattern == NULL_RTX || !loc)
4094 return last;
4096 after = NEXT_INSN (after);
4097 while (1)
4099 if (active_insn_p (after) && !INSN_LOCATOR (after))
4100 INSN_LOCATOR (after) = loc;
4101 if (after == last)
4102 break;
4103 after = NEXT_INSN (after);
4105 return last;
4108 /* Like emit_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4110 emit_insn_after (rtx pattern, rtx after)
4112 if (INSN_P (after))
4113 return emit_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4114 else
4115 return emit_insn_after_noloc (pattern, after);
4118 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4120 emit_jump_insn_after_setloc (rtx pattern, rtx after, int loc)
4122 rtx last = emit_jump_insn_after_noloc (pattern, after);
4124 if (pattern == NULL_RTX || !loc)
4125 return last;
4127 after = NEXT_INSN (after);
4128 while (1)
4130 if (active_insn_p (after) && !INSN_LOCATOR (after))
4131 INSN_LOCATOR (after) = loc;
4132 if (after == last)
4133 break;
4134 after = NEXT_INSN (after);
4136 return last;
4139 /* Like emit_jump_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4141 emit_jump_insn_after (rtx pattern, rtx after)
4143 if (INSN_P (after))
4144 return emit_jump_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4145 else
4146 return emit_jump_insn_after_noloc (pattern, after);
4149 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to SCOPE. */
4151 emit_call_insn_after_setloc (rtx pattern, rtx after, int loc)
4153 rtx last = emit_call_insn_after_noloc (pattern, after);
4155 if (pattern == NULL_RTX || !loc)
4156 return last;
4158 after = NEXT_INSN (after);
4159 while (1)
4161 if (active_insn_p (after) && !INSN_LOCATOR (after))
4162 INSN_LOCATOR (after) = loc;
4163 if (after == last)
4164 break;
4165 after = NEXT_INSN (after);
4167 return last;
4170 /* Like emit_call_insn_after_noloc, but set INSN_LOCATOR according to AFTER. */
4172 emit_call_insn_after (rtx pattern, rtx after)
4174 if (INSN_P (after))
4175 return emit_call_insn_after_setloc (pattern, after, INSN_LOCATOR (after));
4176 else
4177 return emit_call_insn_after_noloc (pattern, after);
4180 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4182 emit_insn_before_setloc (rtx pattern, rtx before, int loc)
4184 rtx first = PREV_INSN (before);
4185 rtx last = emit_insn_before_noloc (pattern, before);
4187 if (pattern == NULL_RTX || !loc)
4188 return last;
4190 if (!first)
4191 first = get_insns ();
4192 else
4193 first = NEXT_INSN (first);
4194 while (1)
4196 if (active_insn_p (first) && !INSN_LOCATOR (first))
4197 INSN_LOCATOR (first) = loc;
4198 if (first == last)
4199 break;
4200 first = NEXT_INSN (first);
4202 return last;
4205 /* Like emit_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4207 emit_insn_before (rtx pattern, rtx before)
4209 if (INSN_P (before))
4210 return emit_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4211 else
4212 return emit_insn_before_noloc (pattern, before);
4215 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4217 emit_jump_insn_before_setloc (rtx pattern, rtx before, int loc)
4219 rtx first = PREV_INSN (before);
4220 rtx last = emit_jump_insn_before_noloc (pattern, before);
4222 if (pattern == NULL_RTX)
4223 return last;
4225 first = NEXT_INSN (first);
4226 while (1)
4228 if (active_insn_p (first) && !INSN_LOCATOR (first))
4229 INSN_LOCATOR (first) = loc;
4230 if (first == last)
4231 break;
4232 first = NEXT_INSN (first);
4234 return last;
4237 /* Like emit_jump_insn_before_noloc, but set INSN_LOCATOR according to BEFORE. */
4239 emit_jump_insn_before (rtx pattern, rtx before)
4241 if (INSN_P (before))
4242 return emit_jump_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4243 else
4244 return emit_jump_insn_before_noloc (pattern, before);
4247 /* Like emit_call_insn_before_noloc, but set INSN_LOCATOR according to SCOPE. */
4249 emit_call_insn_before_setloc (rtx pattern, rtx before, int loc)
4251 rtx first = PREV_INSN (before);
4252 rtx last = emit_call_insn_before_noloc (pattern, before);
4254 if (pattern == NULL_RTX)
4255 return last;
4257 first = NEXT_INSN (first);
4258 while (1)
4260 if (active_insn_p (first) && !INSN_LOCATOR (first))
4261 INSN_LOCATOR (first) = loc;
4262 if (first == last)
4263 break;
4264 first = NEXT_INSN (first);
4266 return last;
4269 /* Like emit_call_insn_before_noloc,
4270 but set INSN_LOCATOR according to BEFORE. */
4272 emit_call_insn_before (rtx pattern, rtx before)
4274 if (INSN_P (before))
4275 return emit_call_insn_before_setloc (pattern, before, INSN_LOCATOR (before));
4276 else
4277 return emit_call_insn_before_noloc (pattern, before);
4280 /* Take X and emit it at the end of the doubly-linked
4281 INSN list.
4283 Returns the last insn emitted. */
4286 emit_insn (rtx x)
4288 rtx last = last_insn;
4289 rtx insn;
4291 if (x == NULL_RTX)
4292 return last;
4294 switch (GET_CODE (x))
4296 case INSN:
4297 case JUMP_INSN:
4298 case CALL_INSN:
4299 case CODE_LABEL:
4300 case BARRIER:
4301 case NOTE:
4302 insn = x;
4303 while (insn)
4305 rtx next = NEXT_INSN (insn);
4306 add_insn (insn);
4307 last = insn;
4308 insn = next;
4310 break;
4312 #ifdef ENABLE_RTL_CHECKING
4313 case SEQUENCE:
4314 gcc_unreachable ();
4315 break;
4316 #endif
4318 default:
4319 last = make_insn_raw (x);
4320 add_insn (last);
4321 break;
4324 return last;
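/* Illustrative sketch (editor's addition): emit_insn accepts either a bare
   pattern, which it wraps via make_insn_raw, or an already-built insn chain,
   whose members it simply links onto the end with add_insn.  gen_reg_rtx and
   the three-operand gen_rtx_SET form are assumed.  */
#if 0
static void
example_emit_insn (rtx detached_chain)
{
  /* A bare SET pattern: emit_insn builds a fresh INSN around it.  */
  emit_insn (gen_rtx_SET (VOIDmode, gen_reg_rtx (word_mode), const1_rtx));

  /* An existing chain (e.g. obtained from get_insns after end_sequence):
     every insn in it is appended in order.  */
  emit_insn (detached_chain);
}
#endif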
4327 /* Make an insn of code JUMP_INSN with pattern X
4328 and add it to the end of the doubly-linked list. */
4331 emit_jump_insn (rtx x)
4333 rtx last = NULL_RTX, insn;
4335 switch (GET_CODE (x))
4337 case INSN:
4338 case JUMP_INSN:
4339 case CALL_INSN:
4340 case CODE_LABEL:
4341 case BARRIER:
4342 case NOTE:
4343 insn = x;
4344 while (insn)
4346 rtx next = NEXT_INSN (insn);
4347 add_insn (insn);
4348 last = insn;
4349 insn = next;
4351 break;
4353 #ifdef ENABLE_RTL_CHECKING
4354 case SEQUENCE:
4355 gcc_unreachable ();
4356 break;
4357 #endif
4359 default:
4360 last = make_jump_insn_raw (x);
4361 add_insn (last);
4362 break;
4365 return last;
4368 /* Make an insn of code CALL_INSN with pattern X
4369 and add it to the end of the doubly-linked list. */
4372 emit_call_insn (rtx x)
4374 rtx insn;
4376 switch (GET_CODE (x))
4378 case INSN:
4379 case JUMP_INSN:
4380 case CALL_INSN:
4381 case CODE_LABEL:
4382 case BARRIER:
4383 case NOTE:
4384 insn = emit_insn (x);
4385 break;
4387 #ifdef ENABLE_RTL_CHECKING
4388 case SEQUENCE:
4389 gcc_unreachable ();
4390 break;
4391 #endif
4393 default:
4394 insn = make_call_insn_raw (x);
4395 add_insn (insn);
4396 break;
4399 return insn;
4402 /* Add the label LABEL to the end of the doubly-linked list. */
4405 emit_label (rtx label)
4407 /* This can be called twice for the same label
4408 as a result of the confusion that follows a syntax error!
4409 So make it harmless. */
4410 if (INSN_UID (label) == 0)
4412 INSN_UID (label) = cur_insn_uid++;
4413 add_insn (label);
4415 return label;
4418 /* Make an insn of code BARRIER
4419 and add it to the end of the doubly-linked list. */
4422 emit_barrier (void)
4424 rtx barrier = rtx_alloc (BARRIER);
4425 INSN_UID (barrier) = cur_insn_uid++;
4426 add_insn (barrier);
4427 return barrier;
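/* Illustrative sketch (editor's addition): the usual pairing of an
   unconditional jump with a BARRIER and a target CODE_LABEL, mirroring what
   emit () below does automatically for unconditional jumps.  gen_label_rtx
   and the target-generated gen_jump are assumed to be available.  */
#if 0
static void
example_jump_to_label (void)
{
  rtx label = gen_label_rtx ();

  emit_jump_insn (gen_jump (label));	/* Unconditional jump...  */
  emit_barrier ();			/* ...so control cannot fall through.  */
  emit_label (label);			/* The jump target.  */
}
#endif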
4430 /* Emit a copy of note ORIG. */
4433 emit_note_copy (rtx orig)
4435 rtx note;
4437 note = rtx_alloc (NOTE);
4439 INSN_UID (note) = cur_insn_uid++;
4440 NOTE_DATA (note) = NOTE_DATA (orig);
4441 NOTE_KIND (note) = NOTE_KIND (orig);
4442 BLOCK_FOR_INSN (note) = NULL;
4443 add_insn (note);
4445 return note;
4448 /* Make an insn of code NOTE with kind KIND
4449 and add it to the end of the doubly-linked list. */
4452 emit_note (enum insn_note kind)
4454 rtx note;
4456 note = rtx_alloc (NOTE);
4457 INSN_UID (note) = cur_insn_uid++;
4458 NOTE_KIND (note) = kind;
4459 memset (&NOTE_DATA (note), 0, sizeof (NOTE_DATA (note)));
4460 BLOCK_FOR_INSN (note) = NULL;
4461 add_insn (note);
4462 return note;
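/* Illustrative sketch (editor's addition): emitting a note and duplicating
   it with emit_note_copy above.  NOTE_INSN_DELETED is assumed to be a valid
   member of enum insn_note in this snapshot.  */
#if 0
static void
example_emit_note (void)
{
  rtx note = emit_note (NOTE_INSN_DELETED);

  /* A second note sharing the kind (and data) of the first.  */
  emit_note_copy (note);
}
#endif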
4465 /* Cause next statement to emit a line note even if the line number
4466 has not changed. */
4468 void
4469 force_next_line_note (void)
4471 #ifdef USE_MAPPED_LOCATION
4472 last_location = -1;
4473 #else
4474 last_location.line = -1;
4475 #endif
4478 /* Place a note of KIND on insn INSN with DATUM as the datum. If a
4479 note of this type already exists, remove it first. */
4482 set_unique_reg_note (rtx insn, enum reg_note kind, rtx datum)
4484 rtx note = find_reg_note (insn, kind, NULL_RTX);
4486 switch (kind)
4488 case REG_EQUAL:
4489 case REG_EQUIV:
4490 /* Don't add REG_EQUAL/REG_EQUIV notes if the insn
4491 has multiple sets (some callers assume single_set
4492 means the insn only has one set, when in fact it
4493 means the insn only has one *useful* set). */
4494 if (GET_CODE (PATTERN (insn)) == PARALLEL && multiple_sets (insn))
4496 gcc_assert (!note);
4497 return NULL_RTX;
4500 /* Don't add REG_EQUAL/REG_EQUIV notes for ASM_OPERANDS.
4501 They serve no useful purpose and break eliminate_regs. */
4502 if (GET_CODE (datum) == ASM_OPERANDS)
4503 return NULL_RTX;
4504 break;
4506 default:
4507 break;
4510 if (note)
4512 XEXP (note, 0) = datum;
4513 return note;
4516 REG_NOTES (insn) = gen_rtx_EXPR_LIST (kind, datum, REG_NOTES (insn));
4517 return REG_NOTES (insn);
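/* Illustrative sketch (editor's addition): recording the known constant
   value of a single-set insn with a REG_EQUAL note.  A second call with the
   same kind overwrites the existing note's datum rather than adding a
   duplicate, as the code above shows.  */
#if 0
static void
example_reg_equal_note (rtx insn)
{
  /* First call creates the note...  */
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (42));
  /* ...a later call with the same kind just replaces its datum.  */
  set_unique_reg_note (insn, REG_EQUAL, GEN_INT (43));
}
#endif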
4520 /* Return an indication of which type of insn should have X as a body.
4521 The value is CODE_LABEL, INSN, CALL_INSN or JUMP_INSN. */
4523 static enum rtx_code
4524 classify_insn (rtx x)
4526 if (LABEL_P (x))
4527 return CODE_LABEL;
4528 if (GET_CODE (x) == CALL)
4529 return CALL_INSN;
4530 if (GET_CODE (x) == RETURN)
4531 return JUMP_INSN;
4532 if (GET_CODE (x) == SET)
4534 if (SET_DEST (x) == pc_rtx)
4535 return JUMP_INSN;
4536 else if (GET_CODE (SET_SRC (x)) == CALL)
4537 return CALL_INSN;
4538 else
4539 return INSN;
4541 if (GET_CODE (x) == PARALLEL)
4543 int j;
4544 for (j = XVECLEN (x, 0) - 1; j >= 0; j--)
4545 if (GET_CODE (XVECEXP (x, 0, j)) == CALL)
4546 return CALL_INSN;
4547 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4548 && SET_DEST (XVECEXP (x, 0, j)) == pc_rtx)
4549 return JUMP_INSN;
4550 else if (GET_CODE (XVECEXP (x, 0, j)) == SET
4551 && GET_CODE (SET_SRC (XVECEXP (x, 0, j))) == CALL)
4552 return CALL_INSN;
4554 return INSN;
4557 /* Emit the rtl pattern X as an appropriate kind of insn.
4558 If X is a label, it is simply added into the insn chain. */
4561 emit (rtx x)
4563 enum rtx_code code = classify_insn (x);
4565 switch (code)
4567 case CODE_LABEL:
4568 return emit_label (x);
4569 case INSN:
4570 return emit_insn (x);
4571 case JUMP_INSN:
4573 rtx insn = emit_jump_insn (x);
4574 if (any_uncondjump_p (insn) || GET_CODE (x) == RETURN)
4575 return emit_barrier ();
4576 return insn;
4578 case CALL_INSN:
4579 return emit_call_insn (x);
4580 default:
4581 gcc_unreachable ();
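/* Illustrative sketch (editor's addition): emit () classifies a raw pattern
   and dispatches to the matching emitter, so a SET of pc becomes a JUMP_INSN
   while a plain SET becomes an ordinary INSN.  The three-operand gen_rtx_SET
   form is assumed; SOME_LABEL_REF stands for a LABEL_REF built elsewhere.  */
#if 0
static void
example_emit_dispatch (rtx some_label_ref, rtx reg)
{
  emit (gen_rtx_SET (VOIDmode, reg, const0_rtx));	   /* -> emit_insn.  */
  emit (gen_rtx_SET (VOIDmode, pc_rtx, some_label_ref));  /* -> emit_jump_insn.  */
}
#endif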
4585 /* Space for free sequence stack entries. */
4586 static GTY ((deletable)) struct sequence_stack *free_sequence_stack;
4588 /* Begin emitting insns to a sequence. If this sequence will contain
4589 something that might cause the compiler to pop arguments to function
4590 calls (because those pops have previously been deferred; see
4591 INHIBIT_DEFER_POP for more details), use do_pending_stack_adjust
4592 before calling this function. That will ensure that the deferred
4593 pops are not accidentally emitted in the middle of this sequence. */
4595 void
4596 start_sequence (void)
4598 struct sequence_stack *tem;
4600 if (free_sequence_stack != NULL)
4602 tem = free_sequence_stack;
4603 free_sequence_stack = tem->next;
4605 else
4606 tem = ggc_alloc (sizeof (struct sequence_stack));
4608 tem->next = seq_stack;
4609 tem->first = first_insn;
4610 tem->last = last_insn;
4612 seq_stack = tem;
4614 first_insn = 0;
4615 last_insn = 0;
4618 /* Set up the insn chain starting with FIRST as the current sequence,
4619 saving the previously current one. See the documentation for
4620 start_sequence for more information about how to use this function. */
4622 void
4623 push_to_sequence (rtx first)
4625 rtx last;
4627 start_sequence ();
4629 for (last = first; last && NEXT_INSN (last); last = NEXT_INSN (last));
4631 first_insn = first;
4632 last_insn = last;
4635 /* Set up the outer-level insn chain
4636 as the current sequence, saving the previously current one. */
4638 void
4639 push_topmost_sequence (void)
4641 struct sequence_stack *stack, *top = NULL;
4643 start_sequence ();
4645 for (stack = seq_stack; stack; stack = stack->next)
4646 top = stack;
4648 first_insn = top->first;
4649 last_insn = top->last;
4652 /* After emitting to the outer-level insn chain, update the outer-level
4653 insn chain, and restore the previous saved state. */
4655 void
4656 pop_topmost_sequence (void)
4658 struct sequence_stack *stack, *top = NULL;
4660 for (stack = seq_stack; stack; stack = stack->next)
4661 top = stack;
4663 top->first = first_insn;
4664 top->last = last_insn;
4666 end_sequence ();
4669 /* After emitting to a sequence, restore previous saved state.
4671 To get the contents of the sequence just made, you must call
4672 `get_insns' *before* calling here.
4674 If the compiler might have deferred popping arguments while
4675 generating this sequence, and this sequence will not be immediately
4676 inserted into the instruction stream, use do_pending_stack_adjust
4677 before calling get_insns. That will ensure that the deferred
4678 pops are inserted into this sequence, and not into some random
4679 location in the instruction stream. See INHIBIT_DEFER_POP for more
4680 information about deferred popping of arguments. */
4682 void
4683 end_sequence (void)
4685 struct sequence_stack *tem = seq_stack;
4687 first_insn = tem->first;
4688 last_insn = tem->last;
4689 seq_stack = tem->next;
4691 memset (tem, 0, sizeof (*tem));
4692 tem->next = free_sequence_stack;
4693 free_sequence_stack = tem;
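/* Illustrative sketch (editor's addition): the canonical way to build a
   detached insn chain, as the comments above describe -- get_insns must be
   called before end_sequence.  emit_move_insn is assumed to be the usual
   expr.c helper.  */
#if 0
static rtx
example_build_sequence (rtx dest, rtx src)
{
  rtx seq;

  start_sequence ();
  emit_move_insn (dest, src);	/* Insns go into the new, empty sequence.  */
  seq = get_insns ();		/* Grab the chain before popping...  */
  end_sequence ();		/* ...then restore the previous chain.  */
  return seq;			/* Caller can emit_insn (seq) where needed.  */
}
#endif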
4696 /* Return 1 if currently emitting into a sequence. */
4699 in_sequence_p (void)
4701 return seq_stack != 0;
4704 /* Put the various virtual registers into REGNO_REG_RTX. */
4706 static void
4707 init_virtual_regs (struct emit_status *es)
4709 rtx *ptr = es->x_regno_reg_rtx;
4710 ptr[VIRTUAL_INCOMING_ARGS_REGNUM] = virtual_incoming_args_rtx;
4711 ptr[VIRTUAL_STACK_VARS_REGNUM] = virtual_stack_vars_rtx;
4712 ptr[VIRTUAL_STACK_DYNAMIC_REGNUM] = virtual_stack_dynamic_rtx;
4713 ptr[VIRTUAL_OUTGOING_ARGS_REGNUM] = virtual_outgoing_args_rtx;
4714 ptr[VIRTUAL_CFA_REGNUM] = virtual_cfa_rtx;
4718 /* Used by copy_insn_1 to avoid copying SCRATCHes more than once. */
4719 static rtx copy_insn_scratch_in[MAX_RECOG_OPERANDS];
4720 static rtx copy_insn_scratch_out[MAX_RECOG_OPERANDS];
4721 static int copy_insn_n_scratches;
4723 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4724 copied an ASM_OPERANDS.
4725 In that case, it is the original input-operand vector. */
4726 static rtvec orig_asm_operands_vector;
4728 /* When an insn is being copied by copy_insn_1, this is nonzero if we have
4729 copied an ASM_OPERANDS.
4730 In that case, it is the copied input-operand vector. */
4731 static rtvec copy_asm_operands_vector;
4733 /* Likewise for the constraints vector. */
4734 static rtvec orig_asm_constraints_vector;
4735 static rtvec copy_asm_constraints_vector;
4737 /* Recursively create a new copy of an rtx for copy_insn.
4738 This function differs from copy_rtx in that it handles SCRATCHes and
4739 ASM_OPERANDs properly.
4740 Normally, this function is not used directly; use copy_insn as front end.
4741 However, you could first copy an insn pattern with copy_insn and then use
4742 this function afterwards to properly copy any REG_NOTEs containing
4743 SCRATCHes. */
4746 copy_insn_1 (rtx orig)
4748 rtx copy;
4749 int i, j;
4750 RTX_CODE code;
4751 const char *format_ptr;
4753 code = GET_CODE (orig);
4755 switch (code)
4757 case REG:
4758 case CONST_INT:
4759 case CONST_DOUBLE:
4760 case CONST_VECTOR:
4761 case SYMBOL_REF:
4762 case CODE_LABEL:
4763 case PC:
4764 case CC0:
4765 return orig;
4766 case CLOBBER:
4767 if (REG_P (XEXP (orig, 0)) && REGNO (XEXP (orig, 0)) < FIRST_PSEUDO_REGISTER)
4768 return orig;
4769 break;
4771 case SCRATCH:
4772 for (i = 0; i < copy_insn_n_scratches; i++)
4773 if (copy_insn_scratch_in[i] == orig)
4774 return copy_insn_scratch_out[i];
4775 break;
4777 case CONST:
4778 /* CONST can be shared if it contains a SYMBOL_REF. If it contains
4779 a LABEL_REF, it isn't sharable. */
4780 if (GET_CODE (XEXP (orig, 0)) == PLUS
4781 && GET_CODE (XEXP (XEXP (orig, 0), 0)) == SYMBOL_REF
4782 && GET_CODE (XEXP (XEXP (orig, 0), 1)) == CONST_INT)
4783 return orig;
4784 break;
4786 /* A MEM with a constant address is not sharable. The problem is that
4787 the constant address may need to be reloaded. If the mem is shared,
4788 then reloading one copy of this mem will cause all copies to appear
4789 to have been reloaded. */
4791 default:
4792 break;
4795 /* Copy the various flags, fields, and other information. We assume
4796 that all fields need copying, and then clear the fields that should
4797 not be copied. That is the sensible default behavior, and forces
4798 us to explicitly document why we are *not* copying a flag. */
4799 copy = shallow_copy_rtx (orig);
4801 /* We do not copy the USED flag, which is used as a mark bit during
4802 walks over the RTL. */
4803 RTX_FLAG (copy, used) = 0;
4805 /* We do not copy JUMP, CALL, or FRAME_RELATED for INSNs. */
4806 if (INSN_P (orig))
4808 RTX_FLAG (copy, jump) = 0;
4809 RTX_FLAG (copy, call) = 0;
4810 RTX_FLAG (copy, frame_related) = 0;
4813 format_ptr = GET_RTX_FORMAT (GET_CODE (copy));
4815 for (i = 0; i < GET_RTX_LENGTH (GET_CODE (copy)); i++)
4816 switch (*format_ptr++)
4818 case 'e':
4819 if (XEXP (orig, i) != NULL)
4820 XEXP (copy, i) = copy_insn_1 (XEXP (orig, i));
4821 break;
4823 case 'E':
4824 case 'V':
4825 if (XVEC (orig, i) == orig_asm_constraints_vector)
4826 XVEC (copy, i) = copy_asm_constraints_vector;
4827 else if (XVEC (orig, i) == orig_asm_operands_vector)
4828 XVEC (copy, i) = copy_asm_operands_vector;
4829 else if (XVEC (orig, i) != NULL)
4831 XVEC (copy, i) = rtvec_alloc (XVECLEN (orig, i));
4832 for (j = 0; j < XVECLEN (copy, i); j++)
4833 XVECEXP (copy, i, j) = copy_insn_1 (XVECEXP (orig, i, j));
4835 break;
4837 case 't':
4838 case 'w':
4839 case 'i':
4840 case 's':
4841 case 'S':
4842 case 'u':
4843 case '0':
4844 /* These are left unchanged. */
4845 break;
4847 default:
4848 gcc_unreachable ();
4851 if (code == SCRATCH)
4853 i = copy_insn_n_scratches++;
4854 gcc_assert (i < MAX_RECOG_OPERANDS);
4855 copy_insn_scratch_in[i] = orig;
4856 copy_insn_scratch_out[i] = copy;
4858 else if (code == ASM_OPERANDS)
4860 orig_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (orig);
4861 copy_asm_operands_vector = ASM_OPERANDS_INPUT_VEC (copy);
4862 orig_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (orig);
4863 copy_asm_constraints_vector = ASM_OPERANDS_INPUT_CONSTRAINT_VEC (copy);
4866 return copy;
4869 /* Create a new copy of an rtx.
4870 This function differs from copy_rtx in that it handles SCRATCHes and
4871 ASM_OPERANDs properly.
4872 INSN doesn't really have to be a full INSN; it could be just the
4873 pattern. */
4875 copy_insn (rtx insn)
4877 copy_insn_n_scratches = 0;
4878 orig_asm_operands_vector = 0;
4879 orig_asm_constraints_vector = 0;
4880 copy_asm_operands_vector = 0;
4881 copy_asm_constraints_vector = 0;
4882 return copy_insn_1 (insn);
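/* Illustrative sketch (editor's addition): duplicating an insn's pattern
   with copy_insn, then copying a REG_EQUAL note afterwards with copy_insn_1
   so any SCRATCHes in the note map to the same copies used in the pattern,
   exactly as the comment before copy_insn_1 suggests.  */
#if 0
static void
example_copy_with_notes (rtx insn, rtx *pat_out, rtx *note_out)
{
  rtx note;

  *pat_out = copy_insn (PATTERN (insn));

  note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
  *note_out = note ? copy_insn_1 (XEXP (note, 0)) : NULL_RTX;
}
#endif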
4885 /* Initialize data structures and variables in this file
4886 before generating rtl for each function. */
4888 void
4889 init_emit (void)
4891 struct function *f = cfun;
4893 f->emit = ggc_alloc (sizeof (struct emit_status));
4894 first_insn = NULL;
4895 last_insn = NULL;
4896 cur_insn_uid = 1;
4897 reg_rtx_no = LAST_VIRTUAL_REGISTER + 1;
4898 last_location = UNKNOWN_LOCATION;
4899 first_label_num = label_num;
4900 seq_stack = NULL;
4902 /* Init the tables that describe all the pseudo regs. */
4904 f->emit->regno_pointer_align_length = LAST_VIRTUAL_REGISTER + 101;
4906 f->emit->regno_pointer_align
4907 = ggc_alloc_cleared (f->emit->regno_pointer_align_length
4908 * sizeof (unsigned char));
4910 regno_reg_rtx
4911 = ggc_alloc (f->emit->regno_pointer_align_length * sizeof (rtx));
4913 /* Put copies of all the hard registers into regno_reg_rtx. */
4914 memcpy (regno_reg_rtx,
4915 static_regno_reg_rtx,
4916 FIRST_PSEUDO_REGISTER * sizeof (rtx));
4918 /* Put copies of all the virtual register rtx into regno_reg_rtx. */
4919 init_virtual_regs (f->emit);
4921 /* Indicate that the virtual registers and stack locations are
4922 all pointers. */
4923 REG_POINTER (stack_pointer_rtx) = 1;
4924 REG_POINTER (frame_pointer_rtx) = 1;
4925 REG_POINTER (hard_frame_pointer_rtx) = 1;
4926 REG_POINTER (arg_pointer_rtx) = 1;
4928 REG_POINTER (virtual_incoming_args_rtx) = 1;
4929 REG_POINTER (virtual_stack_vars_rtx) = 1;
4930 REG_POINTER (virtual_stack_dynamic_rtx) = 1;
4931 REG_POINTER (virtual_outgoing_args_rtx) = 1;
4932 REG_POINTER (virtual_cfa_rtx) = 1;
4934 #ifdef STACK_BOUNDARY
4935 REGNO_POINTER_ALIGN (STACK_POINTER_REGNUM) = STACK_BOUNDARY;
4936 REGNO_POINTER_ALIGN (FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4937 REGNO_POINTER_ALIGN (HARD_FRAME_POINTER_REGNUM) = STACK_BOUNDARY;
4938 REGNO_POINTER_ALIGN (ARG_POINTER_REGNUM) = STACK_BOUNDARY;
4940 REGNO_POINTER_ALIGN (VIRTUAL_INCOMING_ARGS_REGNUM) = STACK_BOUNDARY;
4941 REGNO_POINTER_ALIGN (VIRTUAL_STACK_VARS_REGNUM) = STACK_BOUNDARY;
4942 REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM) = STACK_BOUNDARY;
4943 REGNO_POINTER_ALIGN (VIRTUAL_OUTGOING_ARGS_REGNUM) = STACK_BOUNDARY;
4944 REGNO_POINTER_ALIGN (VIRTUAL_CFA_REGNUM) = BITS_PER_WORD;
4945 #endif
4947 #ifdef INIT_EXPANDERS
4948 INIT_EXPANDERS;
4949 #endif
4952 /* Generate a vector constant for mode MODE and constant value CONSTANT. */
4954 static rtx
4955 gen_const_vector (enum machine_mode mode, int constant)
4957 rtx tem;
4958 rtvec v;
4959 int units, i;
4960 enum machine_mode inner;
4962 units = GET_MODE_NUNITS (mode);
4963 inner = GET_MODE_INNER (mode);
4965 gcc_assert (!DECIMAL_FLOAT_MODE_P (inner));
4967 v = rtvec_alloc (units);
4969 /* We need to call this function after we set the scalar const_tiny_rtx
4970 entries. */
4971 gcc_assert (const_tiny_rtx[constant][(int) inner]);
4973 for (i = 0; i < units; ++i)
4974 RTVEC_ELT (v, i) = const_tiny_rtx[constant][(int) inner];
4976 tem = gen_rtx_raw_CONST_VECTOR (mode, v);
4977 return tem;
4980 /* Generate a vector like gen_rtx_raw_CONST_VECTOR, but use the zero vector when
4981 all elements are zero, and the one vector when all elements are one. */
4983 gen_rtx_CONST_VECTOR (enum machine_mode mode, rtvec v)
4985 enum machine_mode inner = GET_MODE_INNER (mode);
4986 int nunits = GET_MODE_NUNITS (mode);
4987 rtx x;
4988 int i;
4990 /* Check to see if all of the elements have the same value. */
4991 x = RTVEC_ELT (v, nunits - 1);
4992 for (i = nunits - 2; i >= 0; i--)
4993 if (RTVEC_ELT (v, i) != x)
4994 break;
4996 /* If the values are all the same, check to see if we can use one of the
4997 standard constant vectors. */
4998 if (i == -1)
5000 if (x == CONST0_RTX (inner))
5001 return CONST0_RTX (mode);
5002 else if (x == CONST1_RTX (inner))
5003 return CONST1_RTX (mode);
5006 return gen_rtx_raw_CONST_VECTOR (mode, v);
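/* Illustrative sketch (editor's addition): building a CONST_VECTOR whose
   elements are all zero, in which case gen_rtx_CONST_VECTOR returns the
   shared CONST0_RTX for the mode instead of allocating a new vector.
   V4SImode is used here purely as an assumed example of a vector mode.  */
#if 0
static rtx
example_zero_vector (void)
{
  enum machine_mode mode = V4SImode;
  rtvec v = rtvec_alloc (GET_MODE_NUNITS (mode));
  int i;

  for (i = 0; i < GET_MODE_NUNITS (mode); i++)
    RTVEC_ELT (v, i) = CONST0_RTX (GET_MODE_INNER (mode));

  return gen_rtx_CONST_VECTOR (mode, v);	/* == CONST0_RTX (mode).  */
}
#endif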
5009 /* Create some permanent unique rtl objects shared between all functions.
5010 LINE_NUMBERS is nonzero if line numbers are to be generated. */
5012 void
5013 init_emit_once (int line_numbers)
5015 int i;
5016 enum machine_mode mode;
5017 enum machine_mode double_mode;
5019 /* We need reg_raw_mode, so initialize the modes now. */
5020 init_reg_modes_once ();
5022 /* Initialize the CONST_INT, CONST_DOUBLE, and memory attribute hash
5023 tables. */
5024 const_int_htab = htab_create_ggc (37, const_int_htab_hash,
5025 const_int_htab_eq, NULL);
5027 const_double_htab = htab_create_ggc (37, const_double_htab_hash,
5028 const_double_htab_eq, NULL);
5030 mem_attrs_htab = htab_create_ggc (37, mem_attrs_htab_hash,
5031 mem_attrs_htab_eq, NULL);
5032 reg_attrs_htab = htab_create_ggc (37, reg_attrs_htab_hash,
5033 reg_attrs_htab_eq, NULL);
5035 no_line_numbers = ! line_numbers;
5037 /* Compute the word and byte modes. */
5039 byte_mode = VOIDmode;
5040 word_mode = VOIDmode;
5041 double_mode = VOIDmode;
5043 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5044 mode != VOIDmode;
5045 mode = GET_MODE_WIDER_MODE (mode))
5047 if (GET_MODE_BITSIZE (mode) == BITS_PER_UNIT
5048 && byte_mode == VOIDmode)
5049 byte_mode = mode;
5051 if (GET_MODE_BITSIZE (mode) == BITS_PER_WORD
5052 && word_mode == VOIDmode)
5053 word_mode = mode;
5056 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5057 mode != VOIDmode;
5058 mode = GET_MODE_WIDER_MODE (mode))
5060 if (GET_MODE_BITSIZE (mode) == DOUBLE_TYPE_SIZE
5061 && double_mode == VOIDmode)
5062 double_mode = mode;
5065 ptr_mode = mode_for_size (POINTER_SIZE, GET_MODE_CLASS (Pmode), 0);
5067 /* Assign register numbers to the globally defined register rtx.
5068 This must be done at runtime because the register number field
5069 is in a union and some compilers can't initialize unions. */
5071 pc_rtx = gen_rtx_PC (VOIDmode);
5072 cc0_rtx = gen_rtx_CC0 (VOIDmode);
5073 stack_pointer_rtx = gen_raw_REG (Pmode, STACK_POINTER_REGNUM);
5074 frame_pointer_rtx = gen_raw_REG (Pmode, FRAME_POINTER_REGNUM);
5075 if (hard_frame_pointer_rtx == 0)
5076 hard_frame_pointer_rtx = gen_raw_REG (Pmode,
5077 HARD_FRAME_POINTER_REGNUM);
5078 if (arg_pointer_rtx == 0)
5079 arg_pointer_rtx = gen_raw_REG (Pmode, ARG_POINTER_REGNUM);
5080 virtual_incoming_args_rtx =
5081 gen_raw_REG (Pmode, VIRTUAL_INCOMING_ARGS_REGNUM);
5082 virtual_stack_vars_rtx =
5083 gen_raw_REG (Pmode, VIRTUAL_STACK_VARS_REGNUM);
5084 virtual_stack_dynamic_rtx =
5085 gen_raw_REG (Pmode, VIRTUAL_STACK_DYNAMIC_REGNUM);
5086 virtual_outgoing_args_rtx =
5087 gen_raw_REG (Pmode, VIRTUAL_OUTGOING_ARGS_REGNUM);
5088 virtual_cfa_rtx = gen_raw_REG (Pmode, VIRTUAL_CFA_REGNUM);
5090 /* Initialize RTL for commonly used hard registers. These are
5091 copied into regno_reg_rtx as we begin to compile each function. */
5092 for (i = 0; i < FIRST_PSEUDO_REGISTER; i++)
5093 static_regno_reg_rtx[i] = gen_raw_REG (reg_raw_mode[i], i);
5095 #ifdef INIT_EXPANDERS
5096 /* This is to initialize {init|mark|free}_machine_status before the first
5097 call to push_function_context_to. This is needed by the Chill front
5098 end which calls push_function_context_to before the first call to
5099 init_function_start. */
5100 INIT_EXPANDERS;
5101 #endif
5103 /* Create the unique rtx's for certain rtx codes and operand values. */
5105 /* Don't use gen_rtx_CONST_INT here since gen_rtx_CONST_INT in this case
5106 tries to use these variables. */
5107 for (i = - MAX_SAVED_CONST_INT; i <= MAX_SAVED_CONST_INT; i++)
5108 const_int_rtx[i + MAX_SAVED_CONST_INT] =
5109 gen_rtx_raw_CONST_INT (VOIDmode, (HOST_WIDE_INT) i);
5111 if (STORE_FLAG_VALUE >= - MAX_SAVED_CONST_INT
5112 && STORE_FLAG_VALUE <= MAX_SAVED_CONST_INT)
5113 const_true_rtx = const_int_rtx[STORE_FLAG_VALUE + MAX_SAVED_CONST_INT];
5114 else
5115 const_true_rtx = gen_rtx_CONST_INT (VOIDmode, STORE_FLAG_VALUE);
5117 REAL_VALUE_FROM_INT (dconst0, 0, 0, double_mode);
5118 REAL_VALUE_FROM_INT (dconst1, 1, 0, double_mode);
5119 REAL_VALUE_FROM_INT (dconst2, 2, 0, double_mode);
5120 REAL_VALUE_FROM_INT (dconst3, 3, 0, double_mode);
5121 REAL_VALUE_FROM_INT (dconst10, 10, 0, double_mode);
5122 REAL_VALUE_FROM_INT (dconstm1, -1, -1, double_mode);
5123 REAL_VALUE_FROM_INT (dconstm2, -2, -1, double_mode);
5125 dconsthalf = dconst1;
5126 SET_REAL_EXP (&dconsthalf, REAL_EXP (&dconsthalf) - 1);
5128 real_arithmetic (&dconstthird, RDIV_EXPR, &dconst1, &dconst3);
5130 /* Initialize mathematical constants for constant folding builtins.
5131 These constants need to be given to at least 160 bits of precision. */
5132 real_from_string (&dconstsqrt2,
5133 "1.4142135623730950488016887242096980785696718753769480731766797379907");
5134 real_from_string (&dconste,
5135 "2.7182818284590452353602874713526624977572470936999595749669676277241");
5137 for (i = 0; i < (int) ARRAY_SIZE (const_tiny_rtx); i++)
5139 REAL_VALUE_TYPE *r =
5140 (i == 0 ? &dconst0 : i == 1 ? &dconst1 : &dconst2);
5142 for (mode = GET_CLASS_NARROWEST_MODE (MODE_FLOAT);
5143 mode != VOIDmode;
5144 mode = GET_MODE_WIDER_MODE (mode))
5145 const_tiny_rtx[i][(int) mode] =
5146 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5148 for (mode = GET_CLASS_NARROWEST_MODE (MODE_DECIMAL_FLOAT);
5149 mode != VOIDmode;
5150 mode = GET_MODE_WIDER_MODE (mode))
5151 const_tiny_rtx[i][(int) mode] =
5152 CONST_DOUBLE_FROM_REAL_VALUE (*r, mode);
5154 const_tiny_rtx[i][(int) VOIDmode] = GEN_INT (i);
5156 for (mode = GET_CLASS_NARROWEST_MODE (MODE_INT);
5157 mode != VOIDmode;
5158 mode = GET_MODE_WIDER_MODE (mode))
5159 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5161 for (mode = GET_CLASS_NARROWEST_MODE (MODE_PARTIAL_INT);
5162 mode != VOIDmode;
5163 mode = GET_MODE_WIDER_MODE (mode))
5164 const_tiny_rtx[i][(int) mode] = GEN_INT (i);
5167 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_INT);
5168 mode != VOIDmode;
5169 mode = GET_MODE_WIDER_MODE (mode))
5171 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5172 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5175 for (mode = GET_CLASS_NARROWEST_MODE (MODE_VECTOR_FLOAT);
5176 mode != VOIDmode;
5177 mode = GET_MODE_WIDER_MODE (mode))
5179 const_tiny_rtx[0][(int) mode] = gen_const_vector (mode, 0);
5180 const_tiny_rtx[1][(int) mode] = gen_const_vector (mode, 1);
5183 for (i = (int) CCmode; i < (int) MAX_MACHINE_MODE; ++i)
5184 if (GET_MODE_CLASS ((enum machine_mode) i) == MODE_CC)
5185 const_tiny_rtx[0][i] = const0_rtx;
5187 const_tiny_rtx[0][(int) BImode] = const0_rtx;
5188 if (STORE_FLAG_VALUE == 1)
5189 const_tiny_rtx[1][(int) BImode] = const1_rtx;
5191 #ifdef RETURN_ADDRESS_POINTER_REGNUM
5192 return_address_pointer_rtx
5193 = gen_raw_REG (Pmode, RETURN_ADDRESS_POINTER_REGNUM);
5194 #endif
5196 #ifdef STATIC_CHAIN_REGNUM
5197 static_chain_rtx = gen_rtx_REG (Pmode, STATIC_CHAIN_REGNUM);
5199 #ifdef STATIC_CHAIN_INCOMING_REGNUM
5200 if (STATIC_CHAIN_INCOMING_REGNUM != STATIC_CHAIN_REGNUM)
5201 static_chain_incoming_rtx
5202 = gen_rtx_REG (Pmode, STATIC_CHAIN_INCOMING_REGNUM);
5203 else
5204 #endif
5205 static_chain_incoming_rtx = static_chain_rtx;
5206 #endif
5208 #ifdef STATIC_CHAIN
5209 static_chain_rtx = STATIC_CHAIN;
5211 #ifdef STATIC_CHAIN_INCOMING
5212 static_chain_incoming_rtx = STATIC_CHAIN_INCOMING;
5213 #else
5214 static_chain_incoming_rtx = static_chain_rtx;
5215 #endif
5216 #endif
5218 if ((unsigned) PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM)
5219 pic_offset_table_rtx = gen_raw_REG (Pmode, PIC_OFFSET_TABLE_REGNUM);
5222 /* Produce an exact duplicate of insn INSN after AFTER.
5223 Take care to update any libcall regions if present. */
5226 emit_copy_of_insn_after (rtx insn, rtx after)
5228 rtx new;
5229 rtx note1, note2, link;
5231 switch (GET_CODE (insn))
5233 case INSN:
5234 new = emit_insn_after (copy_insn (PATTERN (insn)), after);
5235 break;
5237 case JUMP_INSN:
5238 new = emit_jump_insn_after (copy_insn (PATTERN (insn)), after);
5239 break;
5241 case CALL_INSN:
5242 new = emit_call_insn_after (copy_insn (PATTERN (insn)), after);
5243 if (CALL_INSN_FUNCTION_USAGE (insn))
5244 CALL_INSN_FUNCTION_USAGE (new)
5245 = copy_insn (CALL_INSN_FUNCTION_USAGE (insn));
5246 SIBLING_CALL_P (new) = SIBLING_CALL_P (insn);
5247 CONST_OR_PURE_CALL_P (new) = CONST_OR_PURE_CALL_P (insn);
5248 break;
5250 default:
5251 gcc_unreachable ();
5254 /* Update LABEL_NUSES. */
5255 mark_jump_label (PATTERN (new), new, 0);
5257 INSN_LOCATOR (new) = INSN_LOCATOR (insn);
5259 /* If the old insn is frame related, then so is the new one. This is
5260 primarily needed for IA-64 unwind info which marks epilogue insns,
5261 which may be duplicated by the basic block reordering code. */
5262 RTX_FRAME_RELATED_P (new) = RTX_FRAME_RELATED_P (insn);
5264 /* Copy all REG_NOTES except REG_LABEL since mark_jump_label will
5265 make them. */
5266 for (link = REG_NOTES (insn); link; link = XEXP (link, 1))
5267 if (REG_NOTE_KIND (link) != REG_LABEL)
5269 if (GET_CODE (link) == EXPR_LIST)
5270 REG_NOTES (new)
5271 = gen_rtx_EXPR_LIST (REG_NOTE_KIND (link),
5272 copy_insn_1 (XEXP (link, 0)), REG_NOTES (new));
5273 else
5274 REG_NOTES (new)
5275 = gen_rtx_INSN_LIST (REG_NOTE_KIND (link),
5276 XEXP (link, 0), REG_NOTES (new));
5279 /* Fix the libcall sequences. */
5280 if ((note1 = find_reg_note (new, REG_RETVAL, NULL_RTX)) != NULL)
5282 rtx p = new;
5283 while ((note2 = find_reg_note (p, REG_LIBCALL, NULL_RTX)) == NULL)
5284 p = PREV_INSN (p);
5285 XEXP (note1, 0) = p;
5286 XEXP (note2, 0) = new;
5288 INSN_CODE (new) = INSN_CODE (insn);
5289 return new;
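/* Illustrative sketch (editor's addition): duplicating an existing insn
   immediately after itself, e.g. when a pass needs two copies of an
   epilogue insn.  The locator, the frame-related flag and most REG_NOTES
   are carried over, as described above.  */
#if 0
static rtx
example_duplicate_after (rtx insn)
{
  return emit_copy_of_insn_after (insn, insn);
}
#endif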
5292 static GTY((deletable)) rtx hard_reg_clobbers [NUM_MACHINE_MODES][FIRST_PSEUDO_REGISTER];
5294 gen_hard_reg_clobber (enum machine_mode mode, unsigned int regno)
5296 if (hard_reg_clobbers[mode][regno])
5297 return hard_reg_clobbers[mode][regno];
5298 else
5299 return (hard_reg_clobbers[mode][regno] =
5300 gen_rtx_CLOBBER (VOIDmode, gen_rtx_REG (mode, regno)));
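/* Illustrative sketch (editor's addition): repeated requests for the same
   (mode, regno) pair return the one cached CLOBBER rtx, so callers may
   compare the results with pointer equality.  Register number 0 is used
   here purely as a placeholder.  */
#if 0
static void
example_cached_clobber (void)
{
  rtx a = gen_hard_reg_clobber (word_mode, 0);
  rtx b = gen_hard_reg_clobber (word_mode, 0);

  gcc_assert (a == b);	/* Same GC-rooted object both times.  */
}
#endif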
5303 #include "gt-emit-rtl.h"